fix(utils.py): fix openrouter supported params
Fixes https://github.com/BerriAI/litellm/issues/4508
commit 017af34866 (parent 261cc2a0d4)
1 changed file with 7 additions and 2 deletions
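The last hunk in the diff below is the actual fix for the linked issue: "response_format" and "stop" are added to the list of OpenAI params litellm reports as supported for openrouter. A minimal way to check the effect, assuming the public get_supported_openai_params helper keeps its (model, custom_llm_provider) keyword signature; the model route used here is only illustrative:

from litellm import get_supported_openai_params

# Ask litellm which OpenAI params it maps for the openrouter provider.
params = get_supported_openai_params(
    model="openrouter/openai/gpt-3.5-turbo",  # illustrative model route
    custom_llm_provider="openrouter",
)

# Before this commit, these two entries were missing from the openrouter list.
assert "response_format" in params
assert "stop" in params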
litellm/utils.py

@@ -2412,7 +2412,7 @@ def get_optional_params(
         ):  # allow dynamically setting vertex ai init logic
             continue
         passed_params[k] = v
 
-    optional_params = {}
+    optional_params: Dict = {}
 
     common_auth_dict = litellm.common_cloud_provider_auth_params
@@ -2431,7 +2431,10 @@ def get_optional_params(
             non_default_params=passed_params, optional_params=optional_params
         )
-    elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "vertex_ai_beta":
+    elif (
+        custom_llm_provider == "vertex_ai"
+        or custom_llm_provider == "vertex_ai_beta"
+    ):
         optional_params = litellm.VertexAIConfig().map_special_auth_params(
             non_default_params=passed_params, optional_params=optional_params
         )
 
@@ -3856,6 +3859,8 @@ def get_supported_openai_params(
             "top_logprobs",
+            "response_format",
+            "stop",
             "tools",
             "tool_choice",
         ]
     elif custom_llm_provider == "mistral" or custom_llm_provider == "codestral":
         # mistral and codestral api have the exact same params
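With those two entries present, the call pattern from the linked issue passes litellm's param validation for openrouter instead of being flagged as unsupported. A hedged end-to-end sketch (the model route and prompt are illustrative, and an OPENROUTER_API_KEY is assumed to be set in the environment):

import litellm

# JSON-mode request through openrouter; "response_format" and "stop"
# are the two params this commit adds to the supported list.
response = litellm.completion(
    model="openrouter/openai/gpt-3.5-turbo",  # illustrative model route
    messages=[{"role": "user", "content": "Reply with a JSON object."}],
    response_format={"type": "json_object"},
    stop=["\n\n"],
)
print(response.choices[0].message.content)

Without the fix, litellm treated these params as unsupported for openrouter, so the same call would fail validation (or have the params silently stripped when drop_params is enabled) rather than forwarding them to the provider.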