From 1426594d3fc00dca8e1c06e34b63db30c11303c9 Mon Sep 17 00:00:00 2001
From: Marmik Pandya
Date: Fri, 29 Dec 2023 22:26:22 +0530
Subject: [PATCH] add support for mistral json mode via anyscale

---
 litellm/utils.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index b1b8a3173..57ed9417a 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3576,15 +3576,15 @@ def get_optional_params(
             "max_tokens",
             "stop",
             "frequency_penalty",
-            "presence_penalty",
+            "presence_penalty"
         ]
-        if model == "mistralai/Mistral-7B-Instruct-v0.1":
-            supported_params += ["functions", "function_call", "tools", "tool_choice"]
+        if model in ["mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1"]:
+            supported_params += ["functions", "function_call", "tools", "tool_choice", "response_format"]
         _check_valid_arg(supported_params=supported_params)
         optional_params = non_default_params
         if temperature is not None:
             if (
-                temperature == 0 and model == "mistralai/Mistral-7B-Instruct-v0.1"
+                temperature == 0 and model in ["mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
             ):  # this model does no support temperature == 0
                 temperature = 0.0001  # close to 0
             optional_params["temperature"] = temperature