From 3c80f2f9143951e6e0a78cd9e21425f338122f29 Mon Sep 17 00:00:00 2001
From: Weves
Date: Sat, 18 Jan 2025 16:29:29 -0800
Subject: [PATCH] Enable location support for Vertex AI

---
 backend/onyx/llm/chat_llm.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/backend/onyx/llm/chat_llm.py b/backend/onyx/llm/chat_llm.py
index 260042e01b8..51afd02d652 100644
--- a/backend/onyx/llm/chat_llm.py
+++ b/backend/onyx/llm/chat_llm.py
@@ -275,17 +275,22 @@ def __init__(
         # addtional kwargs (and some kwargs MUST be passed in rather than set as
         # env variables)
         if custom_config:
-            # Specifically pass in "vertex_credentials" as a model_kwarg to the
-            # completion call for vertex AI. More details here:
+            # Specifically pass in "vertex_credentials" / "vertex_location" as a
+            # model_kwarg to the completion call for vertex AI. More details here:
             # https://docs.litellm.ai/docs/providers/vertex
             vertex_credentials_key = "vertex_credentials"
-            vertex_credentials = custom_config.get(vertex_credentials_key)
-            if vertex_credentials and model_provider == "vertex_ai":
-                model_kwargs[vertex_credentials_key] = vertex_credentials
-            else:
-                # standard case
-                for k, v in custom_config.items():
-                    os.environ[k] = v
+            vertex_location_key = "vertex_location"
+            for k, v in custom_config.items():
+                if model_provider == "vertex_ai":
+                    if k == vertex_credentials_key:
+                        model_kwargs[k] = v
+                        continue
+                    elif k == vertex_location_key:
+                        model_kwargs[k] = v
+                        continue
+
+                # for all values, set them as env variables
+                os.environ[k] = v
 
         if extra_headers:
             model_kwargs.update({"extra_headers": extra_headers})
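
Note (reviewer sketch, not part of the patch): the change forwards the "vertex_credentials"
and "vertex_location" entries from custom_config as kwargs on the LiteLLM completion call
instead of setting them as environment variables, which is how LiteLLM's Vertex AI provider
expects them (see the docs URL in the diff). A minimal standalone sketch of that downstream
call is below; the model name, credentials path, and region are placeholder assumptions,
not values taken from this patch.

    # Illustrative only: how LiteLLM consumes the kwargs this patch now forwards.
    import json

    import litellm

    # In Onyx these values come from the provider's custom_config; they are
    # hard-coded here purely for illustration (hypothetical path).
    with open("/path/to/service-account.json") as f:
        vertex_credentials = json.dumps(json.load(f))  # JSON string of the SA key

    response = litellm.completion(
        model="vertex_ai/gemini-1.5-pro",      # assumed model name
        messages=[{"role": "user", "content": "Hello"}],
        vertex_credentials=vertex_credentials,  # passed as a kwarg, not an env var
        vertex_location="us-central1",          # the region setting this patch enables
    )
    print(response.choices[0].message.content)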