diff --git a/litellm/llms/vertex_ai_partner.py b/litellm/llms/vertex_ai_partner.py
index 378ee7290..24586a3fe 100644
--- a/litellm/llms/vertex_ai_partner.py
+++ b/litellm/llms/vertex_ai_partner.py
@@ -96,11 +96,13 @@ class VertexAILlama3Config:
     def get_supported_openai_params(self):
         return litellm.OpenAIConfig().get_supported_openai_params(model="gpt-3.5-turbo")
 
-    def map_openai_params(self, non_default_params: dict, optional_params: dict):
+    def map_openai_params(
+        self, non_default_params: dict, optional_params: dict, model: str
+    ):
         return litellm.OpenAIConfig().map_openai_params(
             non_default_params=non_default_params,
             optional_params=optional_params,
-            model="gpt-3.5-turbo",
+            model=model,
         )
diff --git a/litellm/utils.py b/litellm/utils.py
index 98c8b0184..a20e96172 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3190,6 +3190,7 @@ def get_optional_params(
         optional_params = litellm.VertexAILlama3Config().map_openai_params(
             non_default_params=non_default_params,
             optional_params=optional_params,
+            model=model,
         )
     elif custom_llm_provider == "vertex_ai" and model in litellm.vertex_mistral_models:
         supported_params = get_supported_openai_params(
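
For context, a minimal sketch of how the changed signature would be exercised after this diff: `VertexAILlama3Config.map_openai_params` now receives the caller's `model` and forwards it to `litellm.OpenAIConfig().map_openai_params` instead of hard-coding `"gpt-3.5-turbo"`. The parameter values and the Vertex AI model name below are illustrative assumptions, not taken from the diff.

```python
import litellm

# Sketch only: exercise the updated map_openai_params signature.
config = litellm.VertexAILlama3Config()

optional_params = config.map_openai_params(
    non_default_params={"temperature": 0.2, "max_tokens": 256},  # assumed example params
    optional_params={},
    model="meta/llama3-405b-instruct-maas",  # hypothetical model name, for illustration only
)

# The mapped params now reflect the actual model rather than a hard-coded "gpt-3.5-turbo".
print(optional_params)
```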