fix(router.py): fix set_client init to check if custom_llm_provider is azure not if in model name
fixes issue where 'azure_ai/' models were being initialized as an AzureOpenAI client
parent c0540e764d
commit 06b297a6e8
2 changed files with 27 additions and 2 deletions
@@ -3090,7 +3090,7 @@ class Router:
             if not, add it - https://github.com/BerriAI/litellm/issues/2279
             """
             if (
-                is_azure_ai_studio_model == True
+                is_azure_ai_studio_model is True
                 and api_base is not None
                 and isinstance(api_base, str)
                 and not api_base.endswith("/v1/")
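The first hunk only tightens the flag comparison: an equality check against True also passes for anything that merely compares equal to True (for example the integer 1), while an identity check passes only for the boolean singleton. A minimal standalone sketch of that difference (plain Python, not litellm code):

def is_flag_set_old(flag):
    # pre-fix style: equality comparison, also true for 1, 1.0, etc.
    return flag == True

def is_flag_set_new(flag):
    # post-fix style: identity check, only the boolean singleton True passes
    return flag is True

for value in (True, 1, 1.0):
    print(repr(value), is_flag_set_old(value), is_flag_set_new(value))
# True passes both checks; 1 and 1.0 pass only the equality check.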
@@ -3174,7 +3174,7 @@ class Router:
             organization = litellm.get_secret(organization_env_name)
             litellm_params["organization"] = organization
 
-        if "azure" in model_name:
+        if custom_llm_provider == "azure" or custom_llm_provider == "azure_text":
             if api_base is None or not isinstance(api_base, str):
                 filtered_litellm_params = {
                     k: v
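To illustrate the routing fix itself: model names for Azure AI Studio deployments carry the 'azure_ai/' prefix, so the old substring test for "azure" also matched them and sent them down the AzureOpenAI client path. Comparing the resolved custom_llm_provider instead keeps 'azure/' deployments on the Azure client while leaving 'azure_ai/' models alone. A minimal standalone sketch under that assumption (the helper and model names below are illustrative, not litellm's API):

def provider_from_model_name(model_name: str) -> str:
    # Illustrative stand-in for provider resolution: take the "provider/" prefix,
    # defaulting to "openai" when no prefix is present.
    return model_name.split("/", 1)[0] if "/" in model_name else "openai"

def uses_azure_client_old(model_name: str) -> bool:
    # Pre-fix check: substring match on the model name.
    return "azure" in model_name

def uses_azure_client_new(custom_llm_provider: str) -> bool:
    # Post-fix check: explicit provider comparison, as in the hunk above.
    return custom_llm_provider == "azure" or custom_llm_provider == "azure_text"

for name in ("azure/gpt-4o", "azure_ai/example-model"):
    provider = provider_from_model_name(name)
    print(name, uses_azure_client_old(name), uses_azure_client_new(provider))

# azure/gpt-4o           -> old: True, new: True  (still gets the Azure client)
# azure_ai/example-model -> old: True, new: False (no longer misrouted)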