From 2f402b1cdd67f99eeaa357d52b6f32f12a060fe7 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Wed, 7 Aug 2024 11:08:06 -0700
Subject: [PATCH] fix(vertex_ai_partner.py): default vertex ai llama3.1 api to use all openai params

Poor vertex docs - not clear what can/can't work

Fixes https://github.com/BerriAI/litellm/issues/5090
---
 litellm/llms/vertex_ai_partner.py | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/litellm/llms/vertex_ai_partner.py b/litellm/llms/vertex_ai_partner.py
index 08780be76..378ee7290 100644
--- a/litellm/llms/vertex_ai_partner.py
+++ b/litellm/llms/vertex_ai_partner.py
@@ -94,18 +94,14 @@ class VertexAILlama3Config:
         }
 
     def get_supported_openai_params(self):
-        return [
-            "max_tokens",
-            "stream",
-        ]
+        return litellm.OpenAIConfig().get_supported_openai_params(model="gpt-3.5-turbo")
 
     def map_openai_params(self, non_default_params: dict, optional_params: dict):
-        for param, value in non_default_params.items():
-            if param == "max_tokens":
-                optional_params["max_tokens"] = value
-            if param == "stream":
-                optional_params["stream"] = value
-        return optional_params
+        return litellm.OpenAIConfig().map_openai_params(
+            non_default_params=non_default_params,
+            optional_params=optional_params,
+            model="gpt-3.5-turbo",
+        )
 
 
 class VertexAIPartnerModels(BaseLLM):