fix(utils.py): add missing providers + models to validate_environment
Closes https://github.com/BerriAI/litellm/issues/3190
parent c8ceab872f
commit 16522a5351
2 changed files with 72 additions and 3 deletions
@@ -542,7 +542,11 @@ models_by_provider: dict = {
     "together_ai": together_ai_models,
     "baseten": baseten_models,
     "openrouter": openrouter_models,
-    "vertex_ai": vertex_chat_models + vertex_text_models,
+    "vertex_ai": vertex_chat_models
+    + vertex_text_models
+    + vertex_anthropic_models
+    + vertex_vision_models
+    + vertex_language_models,
     "ai21": ai21_models,
     "bedrock": bedrock_models,
     "petals": petals_models,
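This first hunk widens models_by_provider["vertex_ai"] from just the chat and text lists to every Vertex list (Anthropic, vision, language), so any provider lookup against that dict now covers those models as well. A minimal sketch of such a lookup, assuming litellm is installed and using an illustrative model name:

# Sketch: reverse lookup of a model's provider via models_by_provider.
# Assumes litellm is installed; the model name below is illustrative.
from typing import Optional

import litellm


def provider_for(model: str) -> Optional[str]:
    """Return the first provider whose model list contains `model`, else None."""
    for provider, models in litellm.models_by_provider.items():
        if model in models:
            return provider
    return None


# With this hunk applied, Vertex Anthropic / vision / language models resolve to
# "vertex_ai" instead of falling through to None.
print(provider_for("claude-3-sonnet@20240229"))  # illustrative Vertex Anthropic model name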
@@ -6768,7 +6768,7 @@ def validate_environment(model: Optional[str] = None) -> dict:
                 keys_in_environment = True
             else:
                 missing_keys.append("NLP_CLOUD_API_KEY")
-        elif custom_llm_provider == "bedrock":
+        elif custom_llm_provider == "bedrock" or custom_llm_provider == "sagemaker":
            if (
                 "AWS_ACCESS_KEY_ID" in os.environ
                 and "AWS_SECRET_ACCESS_KEY" in os.environ
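Routing "sagemaker" through the existing Bedrock branch avoids duplicating the AWS-credential check, since both providers authenticate the same way. A hedged usage sketch (the endpoint name is a placeholder, and the reported keys follow the check above):

# Sketch: with no AWS credentials exported, a SageMaker model should now report
# the AWS keys as missing instead of falling into the generic fallback.
import litellm

result = litellm.validate_environment(model="sagemaker/my-endpoint")  # placeholder endpoint name
if not result["keys_in_environment"]:
    print("missing:", result["missing_keys"])  # expected to include AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY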
@@ -6782,11 +6782,72 @@ def validate_environment(model: Optional[str] = None) -> dict:
                 keys_in_environment = True
             else:
                 missing_keys.append("OLLAMA_API_BASE")
+        elif custom_llm_provider == "anyscale":
+            if "ANYSCALE_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("ANYSCALE_API_KEY")
+        elif custom_llm_provider == "deepinfra":
+            if "DEEPINFRA_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("DEEPINFRA_API_KEY")
+        elif custom_llm_provider == "gemini":
+            if "GEMINI_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("GEMINI_API_KEY")
+        elif custom_llm_provider == "groq":
+            if "GROQ_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("GROQ_API_KEY")
+        elif custom_llm_provider == "mistral":
+            if "MISTRAL_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("MISTRAL_API_KEY")
+        elif custom_llm_provider == "palm":
+            if "PALM_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("PALM_API_KEY")
+        elif custom_llm_provider == "perplexity":
+            if "PERPLEXITYAI_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("PERPLEXITYAI_API_KEY")
+        elif custom_llm_provider == "voyage":
+            if "VOYAGE_API_KEY" in os.environ:
+                keys_in_environment = True
+            else:
+                missing_keys.append("VOYAGE_API_KEY")
+        elif custom_llm_provider == "fireworks_ai":
+            if (
+                "FIREWORKS_AI_API_KEY" in os.environ
+                or "FIREWORKS_API_KEY" in os.environ
+                or "FIREWORKSAI_API_KEY" in os.environ
+                or "FIREWORKS_AI_TOKEN" in os.environ
+            ):
+                keys_in_environment = True
+            else:
+                missing_keys.append("FIREWORKS_AI_API_KEY")
+        elif custom_llm_provider == "cloudflare":
+            if "CLOUDFLARE_API_KEY" in os.environ and (
+                "CLOUDFLARE_ACCOUNT_ID" in os.environ
+                or "CLOUDFLARE_API_BASE" in os.environ
+            ):
+                keys_in_environment = True
+            else:
+                missing_keys.append("CLOUDFLARE_API_KEY")
+                missing_keys.append("CLOUDFLARE_API_BASE")
     else:
         ## openai - chatcompletion + text completion
         if (
             model in litellm.open_ai_chat_completion_models
             or model in litellm.open_ai_text_completion_models
+            or model in litellm.open_ai_embedding_models
+            or model in litellm.openai_image_generation_models
         ):
             if "OPENAI_API_KEY" in os.environ:
                 keys_in_environment = True
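The bulk of the change adds one branch per previously missing provider (anyscale, deepinfra, gemini, groq, mistral, palm, perplexity, voyage, fireworks_ai, cloudflare) and extends the OpenAI fallback to embedding and image-generation models. A quick sanity check for one of the new branches, assuming the returned dict has the keys_in_environment / missing_keys shape shown in the diff and using an illustrative Groq model name:

# Sketch: exercise the new "groq" branch with and without the key set.
import os

import litellm

os.environ.pop("GROQ_API_KEY", None)
print(litellm.validate_environment(model="groq/llama3-8b-8192"))
# expected: {"keys_in_environment": False, "missing_keys": ["GROQ_API_KEY"]}

os.environ["GROQ_API_KEY"] = "gsk-placeholder"  # dummy value for illustration
print(litellm.validate_environment(model="groq/llama3-8b-8192"))
# expected: {"keys_in_environment": True, "missing_keys": []}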
@@ -6817,7 +6878,11 @@ def validate_environment(model: Optional[str] = None) -> dict:
             else:
                 missing_keys.append("OPENROUTER_API_KEY")
         ## vertex - text + chat models
-        elif model in litellm.vertex_chat_models or model in litellm.vertex_text_models:
+        elif (
+            model in litellm.vertex_chat_models
+            or model in litellm.vertex_text_models
+            or model in litellm.models_by_provider["vertex_ai"]
+        ):
             if "VERTEXAI_PROJECT" in os.environ and "VERTEXAI_LOCATION" in os.environ:
                 keys_in_environment = True
             else:
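Folding models_by_provider["vertex_ai"] into the Vertex membership test is what ties the two hunks together: the Anthropic, vision, and language models added to that dict above now reach the VERTEXAI_PROJECT / VERTEXAI_LOCATION check instead of the generic fallback. A minimal sketch of the membership test, with an illustrative model name:

# Sketch: a Vertex Anthropic model absent from vertex_chat_models / vertex_text_models
# is still recognized via the aggregated provider list. The model name is illustrative.
import litellm

model = "claude-3-sonnet@20240229"
print(model in litellm.models_by_provider["vertex_ai"])  # expected: True after this change
# The environment check for such a model then requires VERTEXAI_PROJECT and VERTEXAI_LOCATION.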