fix(utils.py): fix openrouter params

Fixes https://github.com/BerriAI/litellm/issues/4488
Krrish Dholakia 2024-07-01 08:23:46 -07:00
parent 49a72ffb51
commit 5e521bd36e


@@ -3842,23 +3842,18 @@ def get_supported_openai_params(
         return litellm.AzureOpenAIConfig().get_supported_openai_params()
     elif custom_llm_provider == "openrouter":
         return [
-            "functions",
-            "function_call",
             "temperature",
             "top_p",
-            "n",
-            "stream",
-            "stop",
-            "max_tokens",
-            "presence_penalty",
             "frequency_penalty",
-            "logit_bias",
-            "user",
-            "response_format",
+            "presence_penalty",
+            "repetition_penalty",
             "seed",
-            "tools",
-            "tool_choice",
-            "max_retries",
+            "max_tokens",
+            "logit_bias",
+            "logprobs",
+            "top_logprobs",
+            "response_format",
+            "stop",
         ]
     elif custom_llm_provider == "mistral" or custom_llm_provider == "codestral":
         # mistral and codestral api have the exact same params
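As a quick sanity check of the updated mapping (a minimal sketch; the model name below is illustrative and not part of this commit), the supported params for an OpenRouter model can be queried via get_supported_openai_params and should now include repetition_penalty, logprobs, and top_logprobs, and no longer include functions, function_call, or max_retries:

import litellm

# Illustrative model string; any "openrouter/..." model routes to the
# provider branch shown in the diff above.
params = litellm.get_supported_openai_params(
    model="openrouter/openai/gpt-3.5-turbo",
    custom_llm_provider="openrouter",
)
print(params)
# Expected per the new list: temperature, top_p, frequency_penalty,
# presence_penalty, repetition_penalty, seed, max_tokens, logit_bias,
# logprobs, top_logprobs, response_format, stop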