feat: support Mistral function calling

This commit is contained in:
ishaan-jaff 2024-02-28 17:13:04 -08:00
parent 11d0e8d3f6
commit ea4e7f79cb

View file

@ -4057,6 +4057,7 @@ def get_optional_params(
and custom_llm_provider != "vertex_ai"
and custom_llm_provider != "anyscale"
and custom_llm_provider != "together_ai"
and custom_llm_provider != "mistral"
):
if custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat":
# ollama actually supports json output