forked from phoenix/litellm-mirror
fix(router.py): adding mistral to list of openai-compatible endpoints
This commit is contained in:
parent
69f545cf36
commit
d2e9798de9
1 changed file with 1 addition and 0 deletions
|
@ -900,6 +900,7 @@ class Router:
|
||||||
or custom_llm_provider == "mistral"
|
or custom_llm_provider == "mistral"
|
||||||
or custom_llm_provider == "openai"
|
or custom_llm_provider == "openai"
|
||||||
or custom_llm_provider == "azure"
|
or custom_llm_provider == "azure"
|
||||||
|
or custom_llm_provider == "mistral"
|
||||||
or "ft:gpt-3.5-turbo" in model_name
|
or "ft:gpt-3.5-turbo" in model_name
|
||||||
or model_name in litellm.open_ai_embedding_models
|
or model_name in litellm.open_ai_embedding_models
|
||||||
):
|
):
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue