fix(utils.py): support response_format for mistral ai api

This commit is contained in:
Krrish Dholakia 2024-03-11 10:23:41 -07:00
parent 1f211b10e1
commit 312a9d8c26

View file

@ -5143,6 +5143,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
     "max_tokens",
     "tools",
     "tool_choice",
+    "response_format",
 ]
 elif custom_llm_provider == "replicate":
     return [