diff --git a/litellm/utils.py b/litellm/utils.py
index 7302bea753..66df7c5039 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3842,23 +3842,18 @@ def get_supported_openai_params(
         return litellm.AzureOpenAIConfig().get_supported_openai_params()
     elif custom_llm_provider == "openrouter":
         return [
-            "functions",
-            "function_call",
             "temperature",
             "top_p",
-            "n",
-            "stream",
-            "stop",
-            "max_tokens",
-            "presence_penalty",
             "frequency_penalty",
-            "logit_bias",
-            "user",
-            "response_format",
+            "presence_penalty",
+            "repetition_penalty",
             "seed",
-            "tools",
-            "tool_choice",
-            "max_retries",
+            "max_tokens",
+            "logit_bias",
+            "logprobs",
+            "top_logprobs",
+            "response_format",
+            "stop",
         ]
     elif custom_llm_provider == "mistral" or custom_llm_provider == "codestral":
         # mistal and codestral api have the exact same params