diff --git a/litellm/proxy/_new_secret_config.yaml b/litellm/proxy/_new_secret_config.yaml
index 1f8d442b7d..8e0c45d389 100644
--- a/litellm/proxy/_new_secret_config.yaml
+++ b/litellm/proxy/_new_secret_config.yaml
@@ -26,7 +26,6 @@ model_list:
       api_key: os.environ/OPENROUTER_API_KEY
       api_base: http://0.0.0.0:8090
 
-
 litellm_settings:
   num_retries: 0
   callbacks: ["prometheus"]
diff --git a/litellm/utils.py b/litellm/utils.py
index 6629bd1375..865c104eb7 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2624,7 +2624,7 @@ def get_optional_params_embeddings(  # noqa: PLR0915
             non_default_params=non_default_params, optional_params={}, kwargs=kwargs
         )
         return optional_params
-    elif custom_llm_provider == "vertex_ai":
+    elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "gemini":
         supported_params = get_supported_openai_params(
             model=model,
             custom_llm_provider="vertex_ai",
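For context, a minimal sketch of the code path the `utils.py` hunk touches: with `gemini` added to the `vertex_ai` branch of `get_optional_params_embeddings`, OpenAI-style optional params on Gemini embedding calls are now resolved through the vertex_ai supported-params list rather than falling through to the default handling. The model name and the `dimensions` argument below are illustrative assumptions, not taken from this diff.

```python
# Hedged sketch exercising the changed branch in get_optional_params_embeddings.
# Assumptions (not from the diff): GEMINI_API_KEY is set in the environment, and
# "gemini/text-embedding-004" plus the `dimensions` param are valid for your account.
import litellm

response = litellm.embedding(
    model="gemini/text-embedding-004",  # hypothetical Gemini embedding model name
    input=["hello world"],
    dimensions=256,  # OpenAI-style optional param, now mapped via the vertex_ai branch
)
print(response.data[0]["embedding"][:5])  # first few values of the returned vector
```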