fix(utils.py): fix openrouter supported params
Fixes https://github.com/BerriAI/litellm/issues/4508
commit 017af34866
parent 261cc2a0d4
1 changed file with 7 additions and 2 deletions
```diff
diff --git a/litellm/utils.py b/litellm/utils.py
@@ -2431,7 +2431,10 @@ def get_optional_params(
                 non_default_params=passed_params, optional_params=optional_params
             )
         )
-    elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "vertex_ai_beta":
+    elif (
+        custom_llm_provider == "vertex_ai"
+        or custom_llm_provider == "vertex_ai_beta"
+    ):
         optional_params = litellm.VertexAIConfig().map_special_auth_params(
             non_default_params=passed_params, optional_params=optional_params
         )
@@ -3856,6 +3859,8 @@ def get_supported_openai_params(
             "top_logprobs",
             "response_format",
             "stop",
+            "tools",
+            "tool_choice",
         ]
     elif custom_llm_provider == "mistral" or custom_llm_provider == "codestral":
         # mistal and codestral api have the exact same params
```
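The substantive change is the second hunk: `"tools"` and `"tool_choice"` are added to the list of OpenAI-style parameters that `get_supported_openai_params` reports for openrouter (the first hunk only re-wraps the `vertex_ai` condition for line length). A minimal sketch of how to confirm the fix, assuming a litellm build that includes this commit; the model id below is only an illustrative example:

```python
import litellm

# get_supported_openai_params is litellm's public helper for querying
# which OpenAI-style parameters a given provider accepts.
params = litellm.get_supported_openai_params(
    model="openrouter/anthropic/claude-3.5-sonnet",  # example model id
    custom_llm_provider="openrouter",
)

# Before this commit the openrouter list stopped at "stop", so both of
# these checks would have failed.
assert "tools" in params
assert "tool_choice" in params
```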
Loading…
Add table
Add a link
Reference in a new issue
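With the parameters marked as supported, tool-calling arguments passed to an openrouter model should no longer be treated as unsupported by litellm's parameter mapping. A hedged usage sketch, not taken from this commit: it assumes an `OPENROUTER_API_KEY` in the environment, and the model id and tool schema are illustrative only.

```python
import litellm

# Illustrative function tool in the OpenAI tools schema.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a city",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }
]

response = litellm.completion(
    model="openrouter/openai/gpt-4o",  # example model id
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=tools,
    tool_choice="auto",  # accepted now that it is in the supported list
)
print(response.choices[0].message.tool_calls)
```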