forked from phoenix/litellm-mirror
Add an enum, LlmProviders, covering all LLM providers
This commit is contained in:
parent
d4b9a1307d
commit
a0e4510f53
1 changed file with 61 additions and 56 deletions
|
@ -24,6 +24,7 @@ from litellm.proxy._types import (
|
||||||
)
|
)
|
||||||
import httpx
|
import httpx
|
||||||
import dotenv
|
import dotenv
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
litellm_mode = os.getenv("LITELLM_MODE", "DEV") # "PRODUCTION", "DEV"
|
litellm_mode = os.getenv("LITELLM_MODE", "DEV") # "PRODUCTION", "DEV"
|
||||||
if litellm_mode == "DEV":
|
if litellm_mode == "DEV":
|
||||||
|
@ -678,62 +679,66 @@ model_list = (
|
||||||
+ gemini_models
|
+ gemini_models
|
||||||
)
|
)
|
||||||
|
|
||||||
provider_list: List = [
|
|
||||||
"openai",
|
class LlmProviders(str, Enum):
|
||||||
"custom_openai",
|
OPENAI = "openai"
|
||||||
"text-completion-openai",
|
CUSTOM_OPENAI = "custom_openai"
|
||||||
"cohere",
|
TEXT_COMPLETION_OPENAI = "text-completion-openai"
|
||||||
"cohere_chat",
|
COHERE = "cohere"
|
||||||
"clarifai",
|
COHERE_CHAT = "cohere_chat"
|
||||||
"anthropic",
|
CLARIFAI = "clarifai"
|
||||||
"replicate",
|
ANTHROPIC = "anthropic"
|
||||||
"huggingface",
|
REPLICATE = "replicate"
|
||||||
"together_ai",
|
HUGGINGFACE = "huggingface"
|
||||||
"openrouter",
|
TOGETHER_AI = "together_ai"
|
||||||
"vertex_ai",
|
OPENROUTER = "openrouter"
|
||||||
"vertex_ai_beta",
|
VERTEX_AI = "vertex_ai"
|
||||||
"palm",
|
VERTEX_AI_BETA = "vertex_ai_beta"
|
||||||
"gemini",
|
PALM = "palm"
|
||||||
"ai21",
|
GEMINI = "gemini"
|
||||||
"baseten",
|
AI21 = "ai21"
|
||||||
"azure",
|
BASETEN = "baseten"
|
||||||
"azure_text",
|
AZURE = "azure"
|
||||||
"azure_ai",
|
AZURE_TEXT = "azure_text"
|
||||||
"sagemaker",
|
AZURE_AI = "azure_ai"
|
||||||
"sagemaker_chat",
|
SAGEMAKER = "sagemaker"
|
||||||
"bedrock",
|
SAGEMAKER_CHAT = "sagemaker_chat"
|
||||||
"vllm",
|
BEDROCK = "bedrock"
|
||||||
"nlp_cloud",
|
VLLM = "vllm"
|
||||||
"petals",
|
NLP_CLOUD = "nlp_cloud"
|
||||||
"oobabooga",
|
PETALS = "petals"
|
||||||
"ollama",
|
OOBABOOGA = "oobabooga"
|
||||||
"ollama_chat",
|
OLLAMA = "ollama"
|
||||||
"deepinfra",
|
OLLAMA_CHAT = "ollama_chat"
|
||||||
"perplexity",
|
DEEPINFRA = "deepinfra"
|
||||||
"anyscale",
|
PERPLEXITY = "perplexity"
|
||||||
"mistral",
|
ANYSCALE = "anyscale"
|
||||||
"groq",
|
MISTRAL = "mistral"
|
||||||
"nvidia_nim",
|
GROQ = "groq"
|
||||||
"cerebras",
|
NVIDIA_NIM = "nvidia_nim"
|
||||||
"ai21_chat",
|
CEREBRAS = "cerebras"
|
||||||
"volcengine",
|
AI21_CHAT = "ai21_chat"
|
||||||
"codestral",
|
VOLCENGINE = "volcengine"
|
||||||
"text-completion-codestral",
|
CODESTRAL = "codestral"
|
||||||
"deepseek",
|
TEXT_COMPLETION_CODESTRAL = "text-completion-codestral"
|
||||||
"maritalk",
|
DEEPSEEK = "deepseek"
|
||||||
"voyage",
|
MARITALK = "maritalk"
|
||||||
"cloudflare",
|
VOYAGE = "voyage"
|
||||||
"xinference",
|
CLOUDFLARE = "cloudflare"
|
||||||
"fireworks_ai",
|
XINFERENCE = "xinference"
|
||||||
"friendliai",
|
FIREWORKS_AI = "fireworks_ai"
|
||||||
"watsonx",
|
FRIENDLIAI = "friendliai"
|
||||||
"triton",
|
WATSONX = "watsonx"
|
||||||
"predibase",
|
TRITON = "triton"
|
||||||
"databricks",
|
PREDIBASE = "predibase"
|
||||||
"empower",
|
DATABRICKS = "databricks"
|
||||||
"github",
|
EMPOWER = "empower"
|
||||||
"custom", # custom apis
|
GITHUB = "github"
|
||||||
]
|
CUSTOM = "custom"
|
||||||
|
|
||||||
|
|
||||||
|
provider_list: List[Union[LlmProviders, str]] = list(LlmProviders)
|
||||||
|
|
||||||
|
|
||||||
models_by_provider: dict = {
|
models_by_provider: dict = {
|
||||||
"openai": open_ai_chat_completion_models + open_ai_text_completion_models,
|
"openai": open_ai_chat_completion_models + open_ai_text_completion_models,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue