(docs) - proxy_config.yaml

ishaan-jaff 2023-12-11 09:06:27 -08:00
parent ee3c9d19a2
commit 43b0da3ebc


@@ -11,8 +11,10 @@ model_list:
      output_cost_per_token: 0.00003
      max_tokens: 4096
      base_model: gpt-3.5-turbo
  - model_name: openai-gpt-3.5
  - model_name: BEDROCK_GROUP
    litellm_params:
      model: bedrock/cohere.command-text-v14
  - model_name: Azure OpenAI GPT-4 Canada-East (External)
    litellm_params:
      model: gpt-3.5-turbo
      api_key: os.environ/OPENAI_API_KEY
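
Each model_name above is the value clients pass as the model parameter when calling the proxy's OpenAI-compatible endpoint. A minimal sketch of such a request, assuming the proxy is running locally on port 8000 (the default at the time of this commit) and that the Bedrock/OpenAI credentials are set in the proxy's environment; the port, placeholder api_key, and prompt are illustrative assumptions, not part of this commit:

    # Sketch: select the Bedrock deployment by its model_name from model_list.
    import openai

    client = openai.OpenAI(
        api_key="anything",              # no master_key is set below, so any string works here
        base_url="http://0.0.0.0:8000",  # assumed local LiteLLM proxy address
    )

    response = client.chat.completions.create(
        model="BEDROCK_GROUP",           # matches model_name in proxy_config.yaml
        messages=[{"role": "user", "content": "Hello from the proxy"}],
    )
    print(response.choices[0].message.content)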
@@ -41,11 +43,12 @@ model_list:
      mode: completion
litellm_settings:
  # cache: True
  # setting callback class
  # callbacks: custom_callbacks.proxy_handler_instance # sets litellm.callbacks = [proxy_handler_instance]
  model_group_alias_map: {"gpt-4": "openai-gpt-3.5"} # all requests with gpt-4 model_name get sent to openai-gpt-3.5
general_settings:
environment_variables:
  # otel: True # OpenTelemetry Logger
  # master_key: sk-1234 # [OPTIONAL] Only use this if you want to require all calls to contain this key (Authorization: Bearer sk-1234)
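
With model_group_alias_map set as above, a request that names gpt-4 is served by the openai-gpt-3.5 group, and if master_key were uncommented every call would have to carry that key as a bearer token. A hedged sketch of such a request; the proxy address and the use of the requests library are assumptions for illustration:

    # Sketch: the alias map rewrites model="gpt-4" to the "openai-gpt-3.5" group;
    # the Authorization header is only enforced if master_key is uncommented.
    import requests

    resp = requests.post(
        "http://0.0.0.0:8000/chat/completions",       # assumed local proxy address
        headers={"Authorization": "Bearer sk-1234"},  # matches master_key above
        json={
            "model": "gpt-4",  # routed to openai-gpt-3.5 via model_group_alias_map
            "messages": [{"role": "user", "content": "hi"}],
        },
    )
    print(resp.json()["model"])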
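
The commented-out callbacks line in litellm_settings points at custom_callbacks.proxy_handler_instance. A minimal sketch of what that custom_callbacks.py module could look like, assuming litellm's CustomLogger base class; the handler class name and the print statements are placeholders, not part of this commit:

    # custom_callbacks.py -- sketch of the object referenced by
    # "callbacks: custom_callbacks.proxy_handler_instance" above.
    from litellm.integrations.custom_logger import CustomLogger


    class MyCustomHandler(CustomLogger):
        def log_success_event(self, kwargs, response_obj, start_time, end_time):
            # Runs after each successful completion routed through the proxy.
            print(f"success: model={kwargs.get('model')} latency={end_time - start_time}")

        def log_failure_event(self, kwargs, response_obj, start_time, end_time):
            # Runs when a routed call raises an error.
            print(f"failure: model={kwargs.get('model')}")


    # The proxy imports this module and sets litellm.callbacks = [proxy_handler_instance]
    proxy_handler_instance = MyCustomHandler()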