fix support azure/mistral models

Ishaan Jaff 2024-04-05 09:32:39 -07:00
parent ab60d7c8fb
commit 5ce80d82d3
2 changed files with 12 additions and 7 deletions

@@ -5575,13 +5575,15 @@ def get_llm_provider(
 # AZURE AI-Studio Logic - Azure AI Studio supports AZURE/Cohere
 # If User passes azure/command-r-plus -> we should send it to cohere_chat/command-r-plus
-if (
-    model.split("/", 1)[0] == "azure"
-    and model.split("/", 1)[1] in litellm.cohere_chat_models
-):
-    custom_llm_provider = "openai"
-    model = model.split("/", 1)[1]
-    return model, custom_llm_provider, dynamic_api_key, api_base
+if model.split("/", 1)[0] == "azure":
+    model_name = model.split("/", 1)[1]
+    if (
+        model_name in litellm.cohere_chat_models
+        or model_name in litellm.mistral_chat_models
+    ):
+        custom_llm_provider = "openai"
+        model = model_name
+        return model, custom_llm_provider, dynamic_api_key, api_base
 if custom_llm_provider:
     return model, custom_llm_provider, dynamic_api_key, api_base
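
For illustration, here is a standalone sketch of the routing rule this hunk adds, rewritten outside of get_llm_provider. The model sets are small placeholders standing in for litellm.cohere_chat_models and litellm.mistral_chat_models, and the "azure" fall-through default is a simplification; the real function continues with further provider checks instead of returning immediately.

# Placeholder sets standing in for litellm.cohere_chat_models / litellm.mistral_chat_models.
COHERE_CHAT_MODELS = {"command-r", "command-r-plus"}
MISTRAL_CHAT_MODELS = {"mistral-small-latest", "mistral-large-latest"}

def route_azure_ai_studio(model, custom_llm_provider=None, dynamic_api_key=None, api_base=None):
    # Mirrors the patched branch: "azure/<cohere-or-mistral chat model>" is re-routed to the
    # OpenAI-compatible provider and the "azure/" prefix is stripped from the model name.
    if model.split("/", 1)[0] == "azure":
        model_name = model.split("/", 1)[1]
        if model_name in COHERE_CHAT_MODELS or model_name in MISTRAL_CHAT_MODELS:
            custom_llm_provider = "openai"
            model = model_name
            return model, custom_llm_provider, dynamic_api_key, api_base
    # Simplified fall-through; the real get_llm_provider keeps resolving other providers here.
    return model, custom_llm_provider or "azure", dynamic_api_key, api_base

print(route_azure_ai_studio("azure/mistral-large-latest"))  # ('mistral-large-latest', 'openai', None, None)
print(route_azure_ai_studio("azure/command-r-plus"))        # ('command-r-plus', 'openai', None, None)
print(route_azure_ai_studio("azure/my-gpt-4-deployment"))   # ('azure/my-gpt-4-deployment', 'azure', None, None)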