mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 10:14:26 +00:00
build(openai_proxy): docker build fixes
This commit is contained in:
parent
81349f3025
commit
aa57c63091
6 changed files with 34 additions and 8 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -11,4 +11,5 @@ secrets.toml
|
|||
.gitignore
|
||||
litellm/proxy/litellm_secrets.toml
|
||||
litellm/proxy/api_log.json
|
||||
.idea/
|
||||
.idea/
|
||||
router_config.yaml
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
FROM python:3.10
|
||||
|
||||
ENV LITELLM_CONFIG_PATH="/litellm.secrets.toml"
|
||||
# Define a build argument for the config file path
|
||||
ARG CONFIG_FILE
|
||||
|
||||
# Copy the custom config file (if provided) into the Docker image
|
||||
COPY $CONFIG_FILE /app/config.yaml
|
||||
|
||||
COPY . /app
|
||||
WORKDIR /app
|
||||
RUN pip install -r requirements.txt
|
||||
|
|
|
@ -6,7 +6,10 @@ from fastapi.middleware.cors import CORSMiddleware
|
|||
import json
|
||||
import os
|
||||
from typing import Optional
|
||||
from utils import set_callbacks, load_router_config
|
||||
try:
|
||||
from utils import set_callbacks, load_router_config
|
||||
except ImportError:
|
||||
from openai_proxy.utils import set_callbacks, load_router_config
|
||||
import dotenv
|
||||
dotenv.load_dotenv() # load env variables
|
||||
|
||||
|
|
|
@ -1,4 +1,20 @@
|
|||
import os, litellm
|
||||
import pkg_resources
|
||||
|
||||
def get_package_version(package_name):
    """Return the installed version string of *package_name*, or None if it
    is not installed.

    Uses the stdlib ``importlib.metadata`` instead of the deprecated
    ``pkg_resources`` API (setuptools has deprecated pkg_resources in favor
    of importlib.metadata). Caller-visible behavior is unchanged: a version
    string on success, ``None`` when the distribution is absent.
    """
    # Local import keeps the helper self-contained and avoids importing
    # setuptools machinery at module load time.
    from importlib import metadata

    try:
        return metadata.version(package_name)
    except metadata.PackageNotFoundError:
        # Same contract as the old DistributionNotFound path: signal
        # "not installed" with None rather than raising.
        return None
|
||||
|
||||
# Example: report whether litellm is installed and, if so, at which version.
package_name = "litellm"
version = get_package_version(package_name)
message = (
    f"The version of {package_name} is {version}"
    if version
    else f"{package_name} is not installed"
)
print(message)
|
||||
import yaml
|
||||
import dotenv
|
||||
from typing import Optional
|
||||
|
@ -29,7 +45,7 @@ def set_callbacks():
|
|||
|
||||
def load_router_config(router: Optional[litellm.Router]):
|
||||
config = {}
|
||||
config_file = 'config.yaml'
|
||||
config_file = '/app/config.yaml'
|
||||
|
||||
if os.path.exists(config_file):
|
||||
with open(config_file, 'r') as file:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
litellm
|
||||
openai
|
||||
fastapi
|
||||
uvicorn
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
model_list:
|
||||
- model_name: gpt-3.5-turbo
|
||||
litellm_params:
|
||||
model: azure/chatgpt-v-2
|
||||
model: azure/chatgpt-v-2 # azure/<your-deployment-name>
|
||||
api_key: your_azure_api_key
|
||||
api_version: your_azure_api_version
|
||||
api_base: your_azure_api_base
|
||||
|
@ -23,6 +23,6 @@ model_list:
|
|||
rpm: 9000 # REPLACE with your openai rpm
|
||||
|
||||
environment_variables:
|
||||
REDIS_HOST: your_redis_host
|
||||
REDIS_PASSWORD: your_redis_password
|
||||
REDIS_PORT: your_redis_port
|
||||
REDIS_HOST: your_redis_host
|
||||
REDIS_PASSWORD: your_redis_password
|
||||
REDIS_PORT: your_redis_port
|
Loading…
Add table
Add a link
Reference in a new issue