fix(utils.py): fix openrouter supported params

Fixes https://github.com/BerriAI/litellm/issues/4508
Krrish Dholakia 2024-07-05 12:06:20 -07:00
parent 261cc2a0d4
commit 017af34866

utils.py

@@ -2431,7 +2431,10 @@ def get_optional_params(
                 non_default_params=passed_params, optional_params=optional_params
             )
         )
-    elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "vertex_ai_beta":
+    elif (
+        custom_llm_provider == "vertex_ai"
+        or custom_llm_provider == "vertex_ai_beta"
+    ):
         optional_params = litellm.VertexAIConfig().map_special_auth_params(
             non_default_params=passed_params, optional_params=optional_params
         )
@@ -3856,6 +3859,8 @@ def get_supported_openai_params(
             "top_logprobs",
             "response_format",
             "stop",
+            "tools",
+            "tool_choice",
         ]
     elif custom_llm_provider == "mistral" or custom_llm_provider == "codestral":
         # mistal and codestral api have the exact same params
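With "tools" and "tool_choice" added to the OpenRouter branch of get_supported_openai_params, tool-calling parameters should no longer be reported as unsupported for that provider. A minimal sanity check for the change follows; it is a sketch that assumes get_supported_openai_params is exposed at the litellm package level and accepts model/custom_llm_provider keyword arguments, and the model id used is just a placeholder.

import litellm

# Ask which OpenAI params litellm considers supported for OpenRouter.
# Assumes the helper is exported at package level with these kwargs.
supported = litellm.get_supported_openai_params(
    model="openrouter/anthropic/claude-3.5-sonnet",  # placeholder model id
    custom_llm_provider="openrouter",
)

# After this commit the OpenRouter list should include both tool params.
assert "tools" in supported
assert "tool_choice" in supported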