(fix) fix default dockerfile startup

ishaan-jaff 2024-03-11 11:26:21 -07:00
parent 942b5e4145
commit 53c67d302a


@@ -4,13 +4,13 @@ model_list:
       model: azure/chatgpt-v-2
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
       api_version: "2023-05-15"
-      api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
+      api_key: sk-defaultKey # use `os.environ/AZURE_API_KEY` for production. The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
   - model_name: gpt-4
     litellm_params:
       model: azure/chatgpt-v-2
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
       api_version: "2023-05-15"
-      api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
+      api_key: sk-defaultKey # use `os.environ/AZURE_API_KEY` for production. The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
   - model_name: sagemaker-completion-model
     litellm_params:
       model: sagemaker/berri-benchmarking-Llama-2-70b-chat-hf-4
@@ -18,7 +18,7 @@ model_list:
   - model_name: text-embedding-ada-002
     litellm_params:
       model: azure/azure-embedding-model
-      api_key: os.environ/AZURE_API_KEY
+      api_key: sk-defaultKey # use `os.environ/AZURE_API_KEY` for production. The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
       api_version: "2023-05-15"
       model_info:
@@ -26,13 +26,10 @@ model_list:
       base_model: text-embedding-ada-002
   - model_name: dall-e-2
     litellm_params:
-      model: azure/
+      model: azure/dall-e-2
       api_version: 2023-06-01-preview
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
-      api_key: os.environ/AZURE_API_KEY
+      api_key: sk-defaultKey # use `os.environ/AZURE_API_KEY` for production. The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
-  - model_name: openai-dall-e-3
-    litellm_params:
-      model: dall-e-3

 litellm_settings:
   drop_params: True
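
For production, the in-line comments point back at the `os.environ/` prefix, which makes litellm resolve the key from an environment variable instead of a hard-coded placeholder. A minimal sketch of that pattern, reusing the Azure deployment and api_base from this commit (the `gpt-3.5-turbo` alias is illustrative, not part of the diff):

model_list:
  - model_name: gpt-3.5-turbo            # public alias clients call; illustrative name
    litellm_params:
      model: azure/chatgpt-v-2
      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
      api_version: "2023-05-15"
      api_key: os.environ/AZURE_API_KEY  # read from the AZURE_API_KEY env var at startup

The AZURE_API_KEY variable would need to be exported in the container or shell before the proxy starts, e.g. `export AZURE_API_KEY=...`.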