forked from phoenix/litellm-mirror

Merge pull request #2868 from BerriAI/litellm_add_command_r_on_proxy

Add Azure Command-r-plus on litellm proxy

Commit faa0d38087
4 changed files with 147 additions and 23 deletions
@@ -5578,6 +5578,19 @@ def get_formatted_prompt(
     return prompt
 
 
+def _is_non_openai_azure_model(model: str) -> bool:
+    try:
+        model_name = model.split("/", 1)[1]
+        if (
+            model_name in litellm.cohere_chat_models
+            or f"mistral/{model_name}" in litellm.mistral_chat_models
+        ):
+            return True
+    except:
+        return False
+    return False
+
+
 def get_llm_provider(
     model: str,
     custom_llm_provider: Optional[str] = None,
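
As an illustrative sketch (not part of this commit), the new helper can be exercised directly. The import path assumes the helper lives in litellm/utils.py next to get_llm_provider, and the results assume "command-r-plus" appears in litellm.cohere_chat_models and "mistral/mistral-large-latest" in litellm.mistral_chat_models:

# Usage sketch; import path and model-list contents are assumptions.
from litellm.utils import _is_non_openai_azure_model

print(_is_non_openai_azure_model("azure/command-r-plus"))        # True: Cohere chat model
print(_is_non_openai_azure_model("azure/mistral-large-latest"))  # True: Mistral chat model
print(_is_non_openai_azure_model("azure/gpt-4"))                 # False: plain Azure OpenAI deployment
print(_is_non_openai_azure_model("command-r-plus"))              # False: no "/", so split("/", 1)[1]
                                                                 # raises IndexError, caught by the bare except
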
@@ -5591,13 +5604,8 @@ def get_llm_provider(
         # AZURE AI-Studio Logic - Azure AI Studio supports AZURE/Cohere
         # If User passes azure/command-r-plus -> we should send it to cohere_chat/command-r-plus
         if model.split("/", 1)[0] == "azure":
-            model_name = model.split("/", 1)[1]
-            if (
-                model_name in litellm.cohere_chat_models
-                or f"mistral/{model_name}" in litellm.mistral_chat_models
-            ):
+            if _is_non_openai_azure_model(model):
                 custom_llm_provider = "openai"
-                model = model_name
                 return model, custom_llm_provider, dynamic_api_key, api_base
 
         if custom_llm_provider:
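
Folding the membership test into _is_non_openai_azure_model removes the duplicated check from get_llm_provider; the change also drops the old "model = model_name" line, so the "azure/" prefix now stays on the returned model string. A minimal routing sketch under the same assumptions as above (and assuming no api_base or dynamic API key is configured):

# Routing sketch; the call and expected values follow the diff above.
from litellm.utils import get_llm_provider

model, provider, dynamic_api_key, api_base = get_llm_provider(
    model="azure/command-r-plus"
)
print(model)     # "azure/command-r-plus" (prefix preserved)
print(provider)  # "openai" (routed via the OpenAI-compatible path, not Azure OpenAI)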