(feat) support mistral function calling

This commit is contained in:
ishaan-jaff 2024-02-28 17:13:04 -08:00
parent 938bb58585
commit e5269fdb7c

View file

@@ -4057,6 +4057,7 @@ def get_optional_params(
         and custom_llm_provider != "vertex_ai"
         and custom_llm_provider != "anyscale"
         and custom_llm_provider != "together_ai"
+        and custom_llm_provider != "mistral"
     ):
         if custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat":
             # ollama actually supports json output