litellm-mirror/litellm/tests/example_config_yaml/load_balancer.yaml

litellm_settings:
  drop_params: True

# Model-specific settings
model_list: # deployments that share the same model_name are load balanced by the LiteLLM router
  - model_name: gpt-3.5-turbo
    litellm_params:
      model: gpt-3.5-turbo
      api_key: sk-uj6F
      tpm: 20000 # [OPTIONAL] REPLACE with your openai tpm
      rpm: 3     # [OPTIONAL] REPLACE with your openai rpm
  - model_name: gpt-3.5-turbo
    litellm_params:
      model: gpt-3.5-turbo
      api_key: sk-Imn
      tpm: 20000 # [OPTIONAL] REPLACE with your openai tpm
      rpm: 3     # [OPTIONAL] REPLACE with your openai rpm
  - model_name: gpt-3.5-turbo
    litellm_params:
      model: openrouter/gpt-3.5-turbo
  - model_name: mistral-7b-instruct
    litellm_params:
      model: mistralai/mistral-7b-instruct
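
# Note (assumed usage): the Redis settings below are typically picked up by the
# router so that tpm/rpm usage can be tracked and shared across proxy instances.
# REDIS_PASSWORD and REDIS_PORT are left blank in this example - fill them in
# for your own Redis deployment.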
environment_variables:
  REDIS_HOST: localhost
  REDIS_PASSWORD:
  REDIS_PORT:
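
# Sketch of how this config might be used (keys above are placeholders):
#   litellm --config litellm/tests/example_config_yaml/load_balancer.yaml
# Requests sent to the proxy with model="gpt-3.5-turbo" are then load balanced
# across the three gpt-3.5-turbo deployments defined in model_list above.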