diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index c9b9e92b2..61d121ffc 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -1,43 +1,4 @@
 model_list:
-
-  - model_name: gpt-3.5-turbo
-    litellm_params:
-      model: azure/chatgpt-v-2
-      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
-      api_version: "2023-05-15"
-      api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
-  - model_name: gpt-3.5-turbo-large
-    litellm_params:
-      model: "gpt-3.5-turbo-1106"
-      api_key: os.environ/OPENAI_API_KEY
-  - model_name: gpt-4
-    litellm_params:
-      model: azure/chatgpt-v-2
-      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
-      api_version: "2023-05-15"
-      api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
-  - model_name: sagemaker-completion-model
-    litellm_params:
-      model: sagemaker/berri-benchmarking-Llama-2-70b-chat-hf-4
-      input_cost_per_second: 0.000420
-  - model_name: text-embedding-ada-002
-    litellm_params:
-      model: azure/azure-embedding-model
-      api_key: os.environ/AZURE_API_KEY
-      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
-      api_version: "2023-05-15"
-    model_info:
-      mode: embedding
-      base_model: text-embedding-ada-002
-  - model_name: dall-e-2
-    litellm_params:
-      model: azure/
-      api_version: 2023-06-01-preview
-      api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
-      api_key: os.environ/AZURE_API_KEY
-  - model_name: openai-dall-e-3
-    litellm_params:
-      model: dall-e-3
   - model_name: fake-openai-endpoint
     litellm_params:
       model: openai/fake
diff --git a/pyproject.toml b/pyproject.toml
index 64247c618..f61da2a85 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.34.37"
+version = "1.34.38"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT"
@@ -80,7 +80,7 @@ requires = ["poetry-core", "wheel"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.commitizen]
-version = "1.34.37"
+version = "1.34.38"
 version_files = [
     "pyproject.toml:^version"
 ]