diff --git a/litellm/llms/vertex_ai_partner.py b/litellm/llms/vertex_ai_partner.py
index 08780be76..378ee7290 100644
--- a/litellm/llms/vertex_ai_partner.py
+++ b/litellm/llms/vertex_ai_partner.py
@@ -94,18 +94,14 @@ class VertexAILlama3Config:
         }
 
     def get_supported_openai_params(self):
-        return [
-            "max_tokens",
-            "stream",
-        ]
+        return litellm.OpenAIConfig().get_supported_openai_params(model="gpt-3.5-turbo")
 
     def map_openai_params(self, non_default_params: dict, optional_params: dict):
-        for param, value in non_default_params.items():
-            if param == "max_tokens":
-                optional_params["max_tokens"] = value
-            if param == "stream":
-                optional_params["stream"] = value
-        return optional_params
+        return litellm.OpenAIConfig().map_openai_params(
+            non_default_params=non_default_params,
+            optional_params=optional_params,
+            model="gpt-3.5-turbo",
+        )
 
 
 class VertexAIPartnerModels(BaseLLM):
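
Note (not part of the patch): a minimal sketch of how the updated `VertexAILlama3Config` is expected to behave after this change, assuming the module path and method signatures shown in the diff above. The no-argument constructor and the exact parameter set returned by `litellm.OpenAIConfig()` depend on the installed litellm version.

```python
import litellm
from litellm.llms.vertex_ai_partner import VertexAILlama3Config

# Assumes VertexAILlama3Config can be constructed without arguments.
config = VertexAILlama3Config()

# Previously only "max_tokens" and "stream" were accepted; delegating to
# litellm.OpenAIConfig() passes through the full OpenAI-compatible set.
print(config.get_supported_openai_params())

optional_params = config.map_openai_params(
    non_default_params={"temperature": 0.2, "max_tokens": 256},
    optional_params={},
)
print(optional_params)  # e.g. {"temperature": 0.2, "max_tokens": 256}
```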