fix(gemini/): add gemini/ route optional param mapping support (#9677)

Fixes https://github.com/BerriAI/litellm/issues/9654
Krish Dholakia 2025-04-02 08:56:32 -07:00 committed by GitHub
parent de3979e507
commit d1abb9b68b
2 changed files with 1 addition and 2 deletions


@@ -26,7 +26,6 @@ model_list:
       api_key: os.environ/OPENROUTER_API_KEY
       api_base: http://0.0.0.0:8090
 litellm_settings:
   num_retries: 0
   callbacks: ["prometheus"]


@@ -2624,7 +2624,7 @@ def get_optional_params_embeddings(  # noqa: PLR0915
             non_default_params=non_default_params, optional_params={}, kwargs=kwargs
         )
         return optional_params
-    elif custom_llm_provider == "vertex_ai":
+    elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "gemini":
         supported_params = get_supported_openai_params(
             model=model,
             custom_llm_provider="vertex_ai",
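For context, a minimal sketch of what this change enables when calling embeddings through the gemini/ route. The model name and the dimensions parameter are illustrative assumptions, not taken from this commit; the point is only that optional params on a gemini/ embedding call now go through the same vertex_ai mapping branch instead of falling through.

import os
import litellm

# Hypothetical example: with the elif above also matching "gemini",
# OpenAI-style optional params on a gemini/ embedding request are mapped
# via the vertex_ai translation logic.
os.environ["GEMINI_API_KEY"] = "your-api-key"  # placeholder

response = litellm.embedding(
    model="gemini/text-embedding-004",  # gemini/ route (API-key based, no GCP project required)
    input=["hello world"],
    dimensions=256,  # optional param, assumed supported for this model
)
print(len(response.data[0]["embedding"]))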