diff --git a/litellm/utils.py b/litellm/utils.py
index b025cd60a9..94d1040b99 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4057,6 +4057,7 @@ def get_optional_params(
             and custom_llm_provider != "vertex_ai"
             and custom_llm_provider != "anyscale"
             and custom_llm_provider != "together_ai"
+            and custom_llm_provider != "mistral"
        ):
            if custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat":
                # ollama actually supports json output
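
Hedged note (not part of the patch): the surrounding branch appears to be litellm's fallback for providers without native function/tool-calling support (the ollama context line converts tool calls into JSON-mode output), so excluding "mistral" presumably lets tool parameters for Mistral models flow through get_optional_params to the provider-specific mapping instead of that fallback. A minimal usage sketch under that assumption; the model name and tool schema are illustrative:

    import litellm

    # Illustrative tool definition in OpenAI format; litellm.completion accepts `tools`.
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_weather",
                "description": "Get the current weather for a city",
                "parameters": {
                    "type": "object",
                    "properties": {"city": {"type": "string"}},
                    "required": ["city"],
                },
            },
        }
    ]

    # With "mistral" excluded from the check above, this call should reach
    # Mistral's own parameter handling rather than the generic fallback path.
    response = litellm.completion(
        model="mistral/mistral-large-latest",  # illustrative model name
        messages=[{"role": "user", "content": "What's the weather in Paris?"}],
        tools=tools,
    )
    print(response.choices[0].message)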