forked from phoenix/litellm-mirror
build(openai_proxy): docker build fixes
This commit is contained in:
parent
81349f3025
commit
aa57c63091
6 changed files with 34 additions and 8 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -12,3 +12,4 @@ secrets.toml
|
||||||
litellm/proxy/litellm_secrets.toml
|
litellm/proxy/litellm_secrets.toml
|
||||||
litellm/proxy/api_log.json
|
litellm/proxy/api_log.json
|
||||||
.idea/
|
.idea/
|
||||||
|
router_config.yaml
|
||||||
|
|
|
@ -1,6 +1,11 @@
|
||||||
FROM python:3.10
|
FROM python:3.10
|
||||||
|
|
||||||
ENV LITELLM_CONFIG_PATH="/litellm.secrets.toml"
|
# Define a build argument for the config file path
|
||||||
|
ARG CONFIG_FILE
|
||||||
|
|
||||||
|
# Copy the custom config file (if provided) into the Docker image
|
||||||
|
COPY $CONFIG_FILE /app/config.yaml
|
||||||
|
|
||||||
COPY . /app
|
COPY . /app
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
RUN pip install -r requirements.txt
|
RUN pip install -r requirements.txt
|
||||||
|
|
|
@ -6,7 +6,10 @@ from fastapi.middleware.cors import CORSMiddleware
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
try:
|
||||||
from utils import set_callbacks, load_router_config
|
from utils import set_callbacks, load_router_config
|
||||||
|
except ImportError:
|
||||||
|
from openai_proxy.utils import set_callbacks, load_router_config
|
||||||
import dotenv
|
import dotenv
|
||||||
dotenv.load_dotenv() # load env variables
|
dotenv.load_dotenv() # load env variables
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,20 @@
|
||||||
import os, litellm
|
import os, litellm
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
|
def get_package_version(package_name):
    """Return the installed version string of *package_name*, or None if it is not installed.

    The original implementation used ``pkg_resources.get_distribution``, which is
    part of setuptools and has been deprecated; ``importlib.metadata`` is the
    stdlib replacement and reads the same installed-distribution metadata.
    """
    # Local import so this diff-fragment function is self-contained.
    from importlib import metadata

    try:
        return metadata.version(package_name)
    except metadata.PackageNotFoundError:
        # Same contract as before: absent package -> None, never an exception.
        return None
|
||||||
|
|
||||||
|
# Usage example: report whether litellm is installed and at which version.
package_name = "litellm"
version = get_package_version(package_name)
message = (
    f"The version of {package_name} is {version}"
    if version
    else f"{package_name} is not installed"
)
print(message)
|
||||||
import yaml
|
import yaml
|
||||||
import dotenv
|
import dotenv
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
@ -29,7 +45,7 @@ def set_callbacks():
|
||||||
|
|
||||||
def load_router_config(router: Optional[litellm.Router]):
|
def load_router_config(router: Optional[litellm.Router]):
|
||||||
config = {}
|
config = {}
|
||||||
config_file = 'config.yaml'
|
config_file = '/app/config.yaml'
|
||||||
|
|
||||||
if os.path.exists(config_file):
|
if os.path.exists(config_file):
|
||||||
with open(config_file, 'r') as file:
|
with open(config_file, 'r') as file:
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
litellm
|
||||||
openai
|
openai
|
||||||
fastapi
|
fastapi
|
||||||
uvicorn
|
uvicorn
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
model_list:
|
model_list:
|
||||||
- model_name: gpt-3.5-turbo
|
- model_name: gpt-3.5-turbo
|
||||||
litellm_params:
|
litellm_params:
|
||||||
model: azure/chatgpt-v-2
|
model: azure/chatgpt-v-2 # azure/<your-deployment-name>
|
||||||
api_key: your_azure_api_key
|
api_key: your_azure_api_key
|
||||||
api_version: your_azure_api_version
|
api_version: your_azure_api_version
|
||||||
api_base: your_azure_api_base
|
api_base: your_azure_api_base
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue