commit 6053a53b64
Author: Peter Wilson
Date: 2025-04-24 00:53:58 -07:00 (committed by GitHub)


@@ -2979,84 +2979,51 @@ def get_optional_params( # noqa: PLR0915
         "thinking": None,
     }
-    # filter out those parameters that were passed with non-default values
+    # Parameters that can be supplied by the user that we don't want to include in non-default-params.
+    excluded_non_default_params = {
+        "additional_drop_params",
+        "allowed_openai_params",
+        "api_version",
+        "custom_llm_provider",
+        "drop_params",
+        "messages",
+        "model",
+    }
+    # From the parameters passed into this function, filter for parameters with non-default values.
     non_default_params = {
         k: v
         for k, v in passed_params.items()
         if (
-            k != "model"
-            and k != "custom_llm_provider"
-            and k != "api_version"
-            and k != "drop_params"
-            and k != "allowed_openai_params"
-            and k != "additional_drop_params"
-            and k != "messages"
+            k not in excluded_non_default_params
             and k in default_params
             and v != default_params[k]
-            and _should_drop_param(k=k, additional_drop_params=additional_drop_params)
-            is False
+            and not _should_drop_param(k=k, additional_drop_params=additional_drop_params)
         )
     }
     ## raise exception if function calling passed in for a provider that doesn't support it
-    if (
-        "functions" in non_default_params
-        or "function_call" in non_default_params
-        or "tools" in non_default_params
-    ):
-        if (
-            custom_llm_provider == "ollama"
-            and custom_llm_provider != "text-completion-openai"
-            and custom_llm_provider != "azure"
-            and custom_llm_provider != "vertex_ai"
-            and custom_llm_provider != "anyscale"
-            and custom_llm_provider != "together_ai"
-            and custom_llm_provider != "groq"
-            and custom_llm_provider != "nvidia_nim"
-            and custom_llm_provider != "cerebras"
-            and custom_llm_provider != "xai"
-            and custom_llm_provider != "ai21_chat"
-            and custom_llm_provider != "volcengine"
-            and custom_llm_provider != "deepseek"
-            and custom_llm_provider != "codestral"
-            and custom_llm_provider != "mistral"
-            and custom_llm_provider != "anthropic"
-            and custom_llm_provider != "cohere_chat"
-            and custom_llm_provider != "cohere"
-            and custom_llm_provider != "bedrock"
-            and custom_llm_provider != "ollama_chat"
-            and custom_llm_provider != "openrouter"
-            and custom_llm_provider not in litellm.openai_compatible_providers
-        ):
-            if custom_llm_provider == "ollama":
-                # ollama actually supports json output
-                optional_params["format"] = "json"
-                litellm.add_function_to_prompt = (
-                    True  # so that main.py adds the function call to the prompt
-                )
-                if "tools" in non_default_params:
-                    optional_params["functions_unsupported_model"] = (
-                        non_default_params.pop("tools")
-                    )
-                    non_default_params.pop(
-                        "tool_choice", None
-                    )  # causes ollama requests to hang
-                elif "functions" in non_default_params:
-                    optional_params["functions_unsupported_model"] = (
-                        non_default_params.pop("functions")
-                    )
-            elif (
-                litellm.add_function_to_prompt
-            ):  # if user opts to add it to prompt instead
-                optional_params["functions_unsupported_model"] = non_default_params.pop(
-                    "tools", non_default_params.pop("functions", None)
-                )
-            else:
-                raise UnsupportedParamsError(
-                    status_code=500,
-                    message=f"Function calling is not supported by {custom_llm_provider}.",
-                )
+    if any(key in non_default_params for key in ("functions", "function_call", "tools")):
+        functions_unsupported_model_key = "functions_unsupported_model"
+
+        # Handle Ollama as a special case (ollama actually supports JSON output)
+        if custom_llm_provider == "ollama":
+            optional_params["format"] = "json"
+            litellm.add_function_to_prompt = True  # so that main.py adds the function call to the prompt
+            non_default_params.pop("tool_choice", None)  # causes ollama requests to hang
+
+        # Handle all other providers that are not OpenAI-compatible
+        if litellm.add_function_to_prompt and (custom_llm_provider not in litellm.openai_compatible_providers):
+            # Attempt to add the supplied function call to the prompt, preferring tools > functions > function_call
+            function_call_value = non_default_params.pop("tools",
+                non_default_params.pop("functions",
+                    non_default_params.pop("function_call", None)))
+            optional_params[functions_unsupported_model_key] = function_call_value
+        else:
+            raise UnsupportedParamsError(
+                status_code=500,
+                message=f"Function calling is not supported by {custom_llm_provider}.",
+            )
     provider_config: Optional[BaseConfig] = None
     if custom_llm_provider is not None and custom_llm_provider in [
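
The new filtering step can be exercised in isolation. Below is a minimal sketch of the same pattern, with an abridged exclusion set and a local stub standing in for litellm's _should_drop_param helper; names and values are illustrative, not litellm's actual code.

from typing import Any, Dict, List, Optional

def _should_drop_param_stub(k: str, additional_drop_params: Optional[List[str]]) -> bool:
    # Stand-in for litellm's helper: drop a param only if the caller listed it explicitly.
    return additional_drop_params is not None and k in additional_drop_params

def filter_non_default_params(
    passed_params: Dict[str, Any],
    default_params: Dict[str, Any],
    additional_drop_params: Optional[List[str]] = None,
) -> Dict[str, Any]:
    # Keys the user may pass that should never be treated as model parameters.
    excluded_non_default_params = {"model", "messages", "custom_llm_provider", "drop_params"}
    return {
        k: v
        for k, v in passed_params.items()
        if (
            k not in excluded_non_default_params
            and k in default_params
            and v != default_params[k]
            and not _should_drop_param_stub(k=k, additional_drop_params=additional_drop_params)
        )
    }

# Example: only "temperature" survives; "model" is excluded, "top_p" equals its default.
print(filter_non_default_params(
    passed_params={"model": "gpt-4o", "temperature": 0.2, "top_p": 1.0},
    default_params={"temperature": 1.0, "top_p": 1.0},
))  # -> {'temperature': 0.2}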
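The nested dict.pop used for the tools > functions > function_call preference has one subtlety worth noting: the default arguments are evaluated eagerly, so all three keys are removed from non_default_params even when "tools" is present. A standalone illustration with toy data (not litellm code):

# The inner pops run before the outer one, so every key is removed from the dict.
non_default_params = {
    "tools": [{"type": "function", "function": {"name": "get_weather"}}],
    "functions": [{"name": "get_weather"}],
    "tool_choice": "auto",
}

function_call_value = non_default_params.pop(
    "tools",
    non_default_params.pop(
        "functions", non_default_params.pop("function_call", None)
    ),
)

print(function_call_value)  # the "tools" value wins when present
print(non_default_params)   # {'tool_choice': 'auto'} - "functions" was popped as well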