forked from phoenix/litellm-mirror
(docs) - proxy_config.yaml
parent ee3c9d19a2
commit 43b0da3ebc

1 changed file (proxy_config.yaml) with 7 additions and 4 deletions
@@ -11,8 +11,10 @@ model_list:
       output_cost_per_token: 0.00003
       max_tokens: 4096
       base_model: gpt-3.5-turbo
-  - model_name: openai-gpt-3.5
+  - model_name: BEDROCK_GROUP
+    litellm_params:
+      model: bedrock/cohere.command-text-v14
+  - model_name: Azure OpenAI GPT-4 Canada-East (External)
     litellm_params:
       model: gpt-3.5-turbo
       api_key: os.environ/OPENAI_API_KEY
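For reference, here is a reassembled sketch of the model_list entries this hunk touches, as they read in the updated file. It is a best-effort reading of the rendered split diff: the two-space YAML indentation is assumed (the extraction drops leading whitespace) and the inline comments are editorial, not part of the file.

  - model_name: BEDROCK_GROUP                               # model group backed by Bedrock
    litellm_params:
      model: bedrock/cohere.command-text-v14
  - model_name: Azure OpenAI GPT-4 Canada-East (External)   # display name clients send as "model"
    litellm_params:
      model: gpt-3.5-turbo
      api_key: os.environ/OPENAI_API_KEY                    # "os.environ/" tells LiteLLM to read the key from that environment variable

Requests to the proxy select a deployment by its model_name; the litellm_params underneath decide which provider, model, and key are actually used.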
@@ -41,11 +43,12 @@ model_list:
       mode: completion

 litellm_settings:
+  # cache: True
   # setting callback class
   # callbacks: custom_callbacks.proxy_handler_instance # sets litellm.callbacks = [proxy_handler_instance]
-  model_group_alias_map: {"gpt-4": "openai-gpt-3.5"} # all requests with gpt-4 model_name, get sent to openai-gpt-3.5


 general_settings:

+environment_variables:
   # otel: True # OpenTelemetry Logger
   # master_key: sk-1234 # [OPTIONAL] Only use this if you to require all calls to contain this key (Authorization: Bearer sk-1234)
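The model_group_alias_map entry touched in this hunk is the one line whose inline comment documents routing behaviour, so a minimal sketch of how it sits under litellm_settings may help (content taken from that line; placement and indentation are assumed):

litellm_settings:
  # any request that names "gpt-4" is served by the "openai-gpt-3.5" model group defined in model_list
  model_group_alias_map: {"gpt-4": "openai-gpt-3.5"}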