forked from phoenix/litellm-mirror
feat(utils.py): check env var for api base for openai-compatible endpoints
Closes https://github.com/BerriAI/litellm/issues/5066
This commit is contained in:
parent
63e853e161
commit
511f4d33d1
3 changed files with 64 additions and 22 deletions
|
@ -1,10 +1,4 @@
|
|||
model_list:
|
||||
- model_name: "gpt-4"
|
||||
- model_name: "*"
|
||||
litellm_params:
|
||||
model: "gpt-4"
|
||||
- model_name: "gpt-4"
|
||||
litellm_params:
|
||||
model: "gpt-4o"
|
||||
- model_name: "gpt-4o-mini"
|
||||
litellm_params:
|
||||
model: "gpt-4o-mini"
|
||||
model: "*"
|
|
@ -56,3 +56,15 @@ def test_get_llm_provider_mistral_custom_api_base():
|
|||
api_base
|
||||
== "https://mistral-large-fr-ishaan.francecentral.inference.ai.azure.com/v1"
|
||||
)
|
||||
|
||||
|
||||
def test_get_llm_provider_deepseek_custom_api_base():
    """DEEPSEEK_API_BASE env var must override the hard-coded deepseek api_base.

    Regression test for https://github.com/BerriAI/litellm/issues/5066:
    get_llm_provider should read the provider-specific *_API_BASE env var
    for openai-compatible endpoints before falling back to the default URL.
    """
    os.environ["DEEPSEEK_API_BASE"] = "MY-FAKE-BASE"
    try:
        model, custom_llm_provider, dynamic_api_key, api_base = litellm.get_llm_provider(
            model="deepseek/deep-chat",
        )
        assert custom_llm_provider == "deepseek"
        assert model == "deep-chat"
        # env var should win over the default https://api.deepseek.com/v1
        assert api_base == "MY-FAKE-BASE"
    finally:
        # restore the environment even if an assertion fails, so the fake
        # base cannot leak into other tests in the same session
        os.environ.pop("DEEPSEEK_API_BASE", None)
|
||||
|
|
|
@ -4478,44 +4478,72 @@ def get_llm_provider(
|
|||
model = model.split("/", 1)[1]
|
||||
if custom_llm_provider == "perplexity":
|
||||
# perplexity is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.perplexity.ai
|
||||
api_base = api_base or "https://api.perplexity.ai"
|
||||
api_base = api_base or get_secret("PERPLEXITY_API_BASE") or "https://api.perplexity.ai" # type: ignore
|
||||
dynamic_api_key = api_key or get_secret("PERPLEXITYAI_API_KEY")
|
||||
elif custom_llm_provider == "anyscale":
|
||||
# anyscale is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.endpoints.anyscale.com/v1
|
||||
api_base = api_base or "https://api.endpoints.anyscale.com/v1"
|
||||
api_base = api_base or get_secret("ANYSCALE_API_BASE") or "https://api.endpoints.anyscale.com/v1" # type: ignore
|
||||
dynamic_api_key = api_key or get_secret("ANYSCALE_API_KEY")
|
||||
elif custom_llm_provider == "deepinfra":
|
||||
# deepinfra is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepinfra.com/v1/openai
|
||||
api_base = api_base or "https://api.deepinfra.com/v1/openai"
|
||||
api_base = api_base or get_secret("DEEPINFRA_API_BASE") or "https://api.deepinfra.com/v1/openai" # type: ignore
|
||||
dynamic_api_key = api_key or get_secret("DEEPINFRA_API_KEY")
|
||||
elif custom_llm_provider == "empower":
|
||||
api_base = api_base or "https://app.empower.dev/api/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("EMPOWER_API_BASE"))
|
||||
or "https://app.empower.dev/api/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("EMPOWER_API_KEY")
|
||||
elif custom_llm_provider == "groq":
|
||||
# groq is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.groq.com/openai/v1
|
||||
api_base = api_base or "https://api.groq.com/openai/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("GROQ_API_BASE"))
|
||||
or "https://api.groq.com/openai/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("GROQ_API_KEY")
|
||||
elif custom_llm_provider == "nvidia_nim":
|
||||
# nvidia_nim is openai compatible, we just need to set this to custom_openai and have the api_base be https://integrate.api.nvidia.com/v1
|
||||
api_base = api_base or "https://integrate.api.nvidia.com/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("NVIDIA_NIM_API_BASE"))
|
||||
or "https://integrate.api.nvidia.com/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("NVIDIA_NIM_API_KEY")
|
||||
elif custom_llm_provider == "volcengine":
|
||||
# volcengine is openai compatible, we just need to set this to custom_openai and have the api_base be https://ark.cn-beijing.volces.com/api/v3
|
||||
api_base = api_base or "https://ark.cn-beijing.volces.com/api/v3"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("VOLCENGINE_API_BASE"))
|
||||
or "https://ark.cn-beijing.volces.com/api/v3"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("VOLCENGINE_API_KEY")
|
||||
elif custom_llm_provider == "codestral":
|
||||
# codestral is openai compatible, we just need to set this to custom_openai and have the api_base be https://codestral.mistral.ai/v1
|
||||
api_base = api_base or "https://codestral.mistral.ai/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("CODESTRAL_API_BASE"))
|
||||
or "https://codestral.mistral.ai/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("CODESTRAL_API_KEY")
|
||||
elif custom_llm_provider == "deepseek":
|
||||
# deepseek is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.deepseek.com/v1
|
||||
api_base = api_base or "https://api.deepseek.com/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("DEEPSEEK_API_BASE"))
|
||||
or "https://api.deepseek.com/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("DEEPSEEK_API_KEY")
|
||||
elif custom_llm_provider == "fireworks_ai":
|
||||
# fireworks is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.fireworks.ai/inference/v1
|
||||
if not model.startswith("accounts/"):
|
||||
model = f"accounts/fireworks/models/{model}"
|
||||
api_base = api_base or "https://api.fireworks.ai/inference/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("FIREWORKS_API_BASE"))
|
||||
or "https://api.fireworks.ai/inference/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or (
|
||||
get_secret("FIREWORKS_API_KEY")
|
||||
or get_secret("FIREWORKS_AI_API_KEY")
|
||||
|
@ -4523,7 +4551,7 @@ def get_llm_provider(
|
|||
or get_secret("FIREWORKS_AI_TOKEN")
|
||||
)
|
||||
elif custom_llm_provider == "azure_ai":
|
||||
api_base = api_base or get_secret("AZURE_AI_API_BASE") # type: ignore
|
||||
api_base = api_base or str(get_secret("AZURE_AI_API_BASE"))
|
||||
dynamic_api_key = api_key or get_secret("AZURE_AI_API_KEY")
|
||||
elif custom_llm_provider == "github":
|
||||
api_base = api_base or get_secret("GITHUB_API_BASE") or "https://models.inference.ai.azure.com" # type: ignore
|
||||
|
@ -4549,10 +4577,18 @@ def get_llm_provider(
|
|||
)
|
||||
elif custom_llm_provider == "voyage":
|
||||
# voyage is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.voyageai.com/v1
|
||||
api_base = "https://api.voyageai.com/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("VOYAGE_API_BASE"))
|
||||
or "https://api.voyageai.com/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or get_secret("VOYAGE_API_KEY")
|
||||
elif custom_llm_provider == "together_ai":
|
||||
api_base = "https://api.together.xyz/v1"
|
||||
api_base = (
|
||||
api_base
|
||||
or str(get_secret("TOGETHER_AI_API_BASE"))
|
||||
or "https://api.together.xyz/v1"
|
||||
)
|
||||
dynamic_api_key = api_key or (
|
||||
get_secret("TOGETHER_API_KEY")
|
||||
or get_secret("TOGETHER_AI_API_KEY")
|
||||
|
@ -4562,7 +4598,7 @@ def get_llm_provider(
|
|||
elif custom_llm_provider == "friendliai":
|
||||
api_base = (
|
||||
api_base
|
||||
or get_secret("FRIENDLI_API_BASE")
|
||||
or str(get_secret("FRIENDLI_API_BASE"))
|
||||
or "https://inference.friendli.ai/v1"
|
||||
)
|
||||
dynamic_api_key = (
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue