(feat) pass vertex_ai/ as custom_llm_provider

ishaan-jaff 2023-12-13 19:02:24 +03:00
parent 6846f656e6
commit 86e626edab
2 changed files with 2 additions and 2 deletions

@@ -95,7 +95,7 @@ def completion(
     mode = ""
     request_str = ""
-    if model in litellm.vertex_chat_models:
+    if model in litellm.vertex_chat_models or ("chat" in model): # to catch chat-bison@003 or chat-bison@004 when google will release it
         chat_model = ChatModel.from_pretrained(model)
         mode = "chat"
         request_str += f"chat_model = ChatModel.from_pretrained({model})\n"
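A minimal sketch of what the widened check buys (illustrative only; known_chat_models below is an assumed subset, not the real litellm.vertex_chat_models list): a name like "chat-bison@003" that is not yet registered still falls through to chat mode via the "chat" substring match.

known_chat_models = ["chat-bison", "chat-bison@001", "chat-bison-32k"]  # assumed subset for illustration

for model in ["chat-bison@001", "chat-bison@003", "text-bison@001"]:
    # Mirrors the widened condition: exact list membership OR a "chat" substring match.
    is_chat = model in known_chat_models or ("chat" in model)
    print(f"{model}: {'chat' if is_chat else 'text'} mode")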

@@ -1136,7 +1136,7 @@ def completion(
             )
             return response
         response = model_response
-    elif model in litellm.vertex_chat_models or model in litellm.vertex_code_chat_models or model in litellm.vertex_text_models or model in litellm.vertex_code_text_models:
+    elif model in litellm.vertex_chat_models or model in litellm.vertex_code_chat_models or model in litellm.vertex_text_models or model in litellm.vertex_code_text_models or custom_llm_provider == "vertex_ai":
         vertex_ai_project = (litellm.vertex_project
                              or get_secret("VERTEXAI_PROJECT"))
         vertex_ai_location = (litellm.vertex_location
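A minimal usage sketch of what the second change enables, assuming Google Cloud auth is configured and the project/region values below are placeholders: the "vertex_ai/" model prefix resolves custom_llm_provider to "vertex_ai", so the request reaches this branch even when the bare model name is missing from the hardcoded Vertex model lists.

import litellm

# Assumed placeholder values; substitute your own GCP project and region,
# or set the VERTEXAI_PROJECT / VERTEXAI_LOCATION secrets instead.
litellm.vertex_project = "my-gcp-project"
litellm.vertex_location = "us-central1"

response = litellm.completion(
    model="vertex_ai/chat-bison",  # "vertex_ai/" prefix -> custom_llm_provider == "vertex_ai"
    messages=[{"role": "user", "content": "Hello from Vertex AI"}],
)
print(response)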