build(openai_proxy): docker build fixes

Krrish Dholakia 2023-10-25 13:33:24 -07:00
parent 81349f3025
commit aa57c63091
6 changed files with 34 additions and 8 deletions

.gitignore

@@ -12,3 +12,4 @@ secrets.toml
litellm/proxy/litellm_secrets.toml
litellm/proxy/api_log.json
.idea/
router_config.yaml


@@ -1,6 +1,11 @@
FROM python:3.10
ENV LITELLM_CONFIG_PATH="/litellm.secrets.toml"
# Define a build argument for the config file path
ARG CONFIG_FILE
# Copy the custom config file (if provided) into the Docker image
COPY $CONFIG_FILE /app/config.yaml
COPY . /app
WORKDIR /app
RUN pip install -r requirements.txt
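
The new ARG/COPY pair bakes a caller-supplied router config into the image at /app/config.yaml, the same hard-coded path that load_router_config switches to further down; the file named by CONFIG_FILE has to sit inside the Docker build context for COPY to find it. A rough sketch of driving that build from Python follows; the image tag, config filename, and use of subprocess are illustrative assumptions, not part of this commit:

import subprocess

# Build the proxy image with a custom router config baked in at /app/config.yaml.
# "openai-proxy" (image tag) and "router_config.yaml" (config path) are assumed names.
config_file = "router_config.yaml"
subprocess.run(
    ["docker", "build", "--build-arg", f"CONFIG_FILE={config_file}", "-t", "openai-proxy", "."],
    check=True,  # raise if the docker build fails
)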


@@ -6,7 +6,10 @@ from fastapi.middleware.cors import CORSMiddleware
import json
import os
from typing import Optional
try:
    from utils import set_callbacks, load_router_config
except ImportError:
    from openai_proxy.utils import set_callbacks, load_router_config
import dotenv
dotenv.load_dotenv() # load env variables
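
The try/except keeps the import working in both launch modes: the bare "utils" path resolves when the entrypoint is run directly from its own directory, while "openai_proxy.utils" resolves when the proxy is imported as a package. A rough equivalent written with importlib, purely illustrative and not from this commit:

import importlib

def _load_proxy_utils():
    # Try the script-style module path first, then the package-style one,
    # mirroring the try/except fallback above.
    for candidate in ("utils", "openai_proxy.utils"):
        try:
            return importlib.import_module(candidate)
        except ImportError:
            continue
    raise ImportError("could not import the proxy utils module")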


@@ -1,4 +1,20 @@
import os, litellm
import pkg_resources
def get_package_version(package_name):
    try:
        package = pkg_resources.get_distribution(package_name)
        return package.version
    except pkg_resources.DistributionNotFound:
        return None
# Usage example
package_name = "litellm"
version = get_package_version(package_name)
if version:
    print(f"The version of {package_name} is {version}")
else:
    print(f"{package_name} is not installed")
import yaml
import dotenv
from typing import Optional
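
The version check above relies on pkg_resources, which works but is deprecated in newer setuptools releases; on the python:3.10 base image the same lookup is available from the standard library. An alternative sketch, not what this commit ships:

from importlib.metadata import version, PackageNotFoundError

def get_installed_version(package_name: str):
    # Return the installed version string, or None if the package is missing.
    try:
        return version(package_name)
    except PackageNotFoundError:
        return None
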
@@ -29,7 +45,7 @@ def set_callbacks():
def load_router_config(router: Optional[litellm.Router]):
    config = {}
-    config_file = 'config.yaml'
+    config_file = '/app/config.yaml'
    if os.path.exists(config_file):
        with open(config_file, 'r') as file:
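
Switching config_file to the absolute /app/config.yaml lines the loader up with the COPY destination in the Dockerfile, so a config baked in at build time is found regardless of the container's working directory. A minimal standalone sketch of that read path, assuming yaml.safe_load and the model_list layout from the template at the end of this diff; the real function body is truncated here:

import os
import yaml

def read_router_config(path: str = "/app/config.yaml") -> dict:
    # Return the parsed YAML config, or an empty dict when no file was baked in.
    if not os.path.exists(path):
        return {}
    with open(path, "r") as f:
        return yaml.safe_load(f) or {}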


@@ -1,3 +1,4 @@
litellm
openai
fastapi
uvicorn


@@ -1,7 +1,7 @@
model_list:
  - model_name: gpt-3.5-turbo
    litellm_params:
-      model: azure/chatgpt-v-2
+      model: azure/chatgpt-v-2 # azure/<your-deployment-name>
      api_key: your_azure_api_key
      api_version: your_azure_api_version
      api_base: your_azure_api_base
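
The template maps the public model_name (gpt-3.5-turbo) onto a concrete Azure deployment plus its credentials; each litellm_params block is what the router passes through to litellm for that deployment. A hedged sketch of how such a model_list is typically handed to litellm.Router; the config path and the commented-out completion call are illustrative, not part of this diff:

import yaml
import litellm

with open("router_config.yaml", "r") as f:  # assumed local copy of the template above
    config = yaml.safe_load(f)

# The router fans requests for "gpt-3.5-turbo" out to the deployment(s) listed above.
router = litellm.Router(model_list=config["model_list"])
# Example call (requires real credentials in the config):
# response = router.completion(
#     model="gpt-3.5-turbo",
#     messages=[{"role": "user", "content": "hello"}],
# )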