diff --git a/deploy/kubernetes/kub.yaml b/deploy/kubernetes/kub.yaml index 34d4b9d6d..686ffecf0 100644 --- a/deploy/kubernetes/kub.yaml +++ b/deploy/kubernetes/kub.yaml @@ -14,20 +14,26 @@ spec: spec: containers: - name: litellm-container - image: ghcr.io/berriai/litellm:main-latest - imagePullPolicy: Always + image: ghcr.io/berriai/litellm-database:main-v1.34.4.dev2 env: - name: AZURE_API_KEY - value: "d6f****" + value: "<REDACTED_AZURE_API_KEY>" - name: AZURE_API_BASE - value: "https://openai" + value: "https://openai-gpt-4-test-v-1.openai.azure.com/" - name: LITELLM_MASTER_KEY value: "sk-1234" - name: DATABASE_URL - value: "postgresql://ishaan:*********" + value: "postgresql://ishaan:<REDACTED_PASSWORD>@ep-bitter-fog-a573fgz9.us-east-2.aws.neon.tech/local?sslmode=require" + - name: SLACK_WEBHOOK_URL + value: "https://hooks.slack.com/services/<REDACTED_SLACK_WEBHOOK_PATH>" + - name: ishu + value: "ishaanGMGhj" args: - "--config" - "/app/proxy_config.yaml" # Update the path to mount the config file + - "--run_gunicorn" + - "--num_workers" + - "4" volumeMounts: # Define volume mount for proxy_config.yaml - name: config-volume mountPath: /app diff --git a/proxy_server_config.yaml b/proxy_server_config.yaml index f69c89cc5..30033b28b 100644 --- a/proxy_server_config.yaml +++ b/proxy_server_config.yaml @@ -7,8 +7,8 @@ model_list: api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. 
See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault - model_name: gpt-3.5-turbo-large litellm_params: - "model": "gpt-3.5-turbo-1106" - "api_key": os.environ/OPENAI_API_KEY + model: "gpt-3.5-turbo-1106" + api_key: os.environ/OPENAI_API_KEY - model_name: gpt-4 litellm_params: model: azure/chatgpt-v-2 diff --git a/pyproject.toml b/pyproject.toml index 767cf36c2..b6eb22792 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "litellm" -version = "1.34.4" +version = "1.34.5" description = "Library to easily interface with LLM API providers" authors = ["BerriAI"] license = "MIT" @@ -80,7 +80,7 @@ requires = ["poetry-core", "wheel"] build-backend = "poetry.core.masonry.api" [tool.commitizen] -version = "1.34.4" +version = "1.34.5" version_files = [ "pyproject.toml:^version" ]