fix: use provider-specific routing

Ishaan Jaff 2024-08-07 14:37:20 -07:00
parent 218ba0f470
commit f1ffa82062
4 changed files with 35 additions and 15 deletions

@@ -86,12 +86,16 @@ model_list:
       model: openai/*
       api_key: os.environ/OPENAI_API_KEY
   # Pass through all llm requests to litellm.completion/litellm.embedding
   # if user passes model="anthropic/claude-3-opus-20240229" proxy will make requests to anthropic claude-3-opus-20240229 using ANTHROPIC_API_KEY
-  - model_name: "*"
+  # provider specific wildcard routing
+  - model_name: "anthropic/*"
     litellm_params:
-      model: "*"
+      model: "anthropic/*"
+      api_key: os.environ/ANTHROPIC_API_KEY
+  - model_name: "groq/*"
+    litellm_params:
+      model: "groq/*"
+      api_key: os.environ/GROQ_API_KEY
   - model_name: mistral-embed
     litellm_params:
       model: mistral/mistral-embed
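
The change above replaces the single catch-all "*" entry with per-provider wildcard entries, so a request whose model starts with anthropic/ or groq/ is matched to a deployment that carries that provider's API key. A minimal usage sketch against the proxy's OpenAI-compatible endpoint follows; the base URL http://0.0.0.0:4000, the proxy key sk-1234, and the Groq model name are illustrative assumptions, not part of this commit.

# Minimal sketch: call a LiteLLM proxy configured with the wildcard entries above.
# Assumptions: proxy running locally on port 4000, a proxy key of "sk-1234",
# and the example model names being valid for the configured provider keys.
from openai import OpenAI

client = OpenAI(
    base_url="http://0.0.0.0:4000",  # LiteLLM proxy address (assumed)
    api_key="sk-1234",               # proxy key (assumed)
)

# Matches the "anthropic/*" wildcard, so the proxy forwards the request
# to Anthropic using ANTHROPIC_API_KEY.
response = client.chat.completions.create(
    model="anthropic/claude-3-opus-20240229",
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)

# Matches the "groq/*" wildcard, so the proxy forwards the request
# to Groq using GROQ_API_KEY.
response = client.chat.completions.create(
    model="groq/llama3-8b-8192",  # example Groq model (assumed)
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)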