fix(utils.py): support openrouter function calling

Krrish Dholakia 2024-05-17 08:02:24 -07:00
parent 186f11612a
commit 26aefc7cd6

@@ -5127,7 +5127,7 @@ def get_optional_params(
         or "tools" in non_default_params
     ):
         if (
-            custom_llm_provider != "openai"
+            custom_llm_provider == "ollama"
             and custom_llm_provider != "text-completion-openai"
             and custom_llm_provider != "azure"
             and custom_llm_provider != "vertex_ai"
@@ -5141,6 +5141,8 @@ def get_optional_params(
             and custom_llm_provider != "cohere"
             and custom_llm_provider != "bedrock"
             and custom_llm_provider != "ollama_chat"
+            and custom_llm_provider != "openrouter"
+            and custom_llm_provider not in litellm.openai_compatible_providers
         ):
             if custom_llm_provider == "ollama":
                 # ollama actually supports json output
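
With "openrouter" (and the openai-compatible providers) now excluded from the guard above, `tools` passed to an OpenRouter model should be forwarded to the provider rather than hitting the fallback path for providers without native function calling. A minimal usage sketch, not part of this commit; the model name and tool schema are illustrative and it assumes OPENROUTER_API_KEY is set in the environment:

    # Sketch: calling an OpenRouter model with tools via litellm after this fix.
    import litellm

    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",  # hypothetical tool for illustration
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {"type": "string", "description": "City name"},
                    },
                    "required": ["location"],
                },
            },
        }
    ]

    response = litellm.completion(
        model="openrouter/openai/gpt-3.5-turbo",  # any OpenRouter model id
        messages=[{"role": "user", "content": "What's the weather in Boston?"}],
        tools=tools,
        tool_choice="auto",
    )
    print(response.choices[0].message.tool_calls)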