fix(vertex_ai_partner.py): default vertex ai llama3.1 api to use all openai params

Poor vertex docs - not clear what can/can't work

Fixes https://github.com/BerriAI/litellm/issues/5090
This commit is contained in:
Krrish Dholakia 2024-08-07 11:08:06 -07:00
parent 7e0cd39de7
commit 2f402b1cdd

View file

@@ -94,18 +94,14 @@ class VertexAILlama3Config:
             }
         }

     def get_supported_openai_params(self):
-        return [
-            "max_tokens",
-            "stream",
-        ]
+        return litellm.OpenAIConfig().get_supported_openai_params(model="gpt-3.5-turbo")

     def map_openai_params(self, non_default_params: dict, optional_params: dict):
-        for param, value in non_default_params.items():
-            if param == "max_tokens":
-                optional_params["max_tokens"] = value
-            if param == "stream":
-                optional_params["stream"] = value
-        return optional_params
+        return litellm.OpenAIConfig().map_openai_params(
+            non_default_params=non_default_params,
+            optional_params=optional_params,
+            model="gpt-3.5-turbo",
+        )


 class VertexAIPartnerModels(BaseLLM):