(fix) proxy server use set_callbacks & load dotenv

ishaan-jaff 2023-10-23 14:52:20 -07:00
parent e9ac3ca130
commit 5733b10f04
3 changed files with 13 additions and 4 deletions


@@ -5,6 +5,9 @@ from fastapi.responses import StreamingResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
import json
import os
from utils import set_callbacks
import dotenv
dotenv.load_dotenv() # load env variables
app = FastAPI(docs_url="/", title="LiteLLM API")
router = APIRouter()
@@ -17,9 +20,7 @@ app.add_middleware(
    allow_methods=["*"],
    allow_headers=["*"],
)
if ("LANGUFSE_PUBLIC_KEY" in os.environ and "LANGUFSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
litellm.success_callback = ["langfuse"]
set_callbacks() # sets litellm callbacks for logging if they exist in the environment
#### API ENDPOINTS ####
@router.post("/v1/models")
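
With this change the proxy loads environment variables at import time and hands callback wiring to utils.set_callbacks(). As a rough sketch of what that buys (the .env contents below are placeholders, not part of this commit), python-dotenv's load_dotenv() copies key/value pairs from a .env file into os.environ before set_callbacks() looks for them:

import os
import dotenv

# A .env file next to the proxy might contain, for example:
#   LANGFUSE_PUBLIC_KEY=pk-lf-...
#   LANGFUSE_SECRET_KEY=sk-lf-...
dotenv.load_dotenv()  # reads the .env file (if present) into os.environ

print("LANGFUSE_PUBLIC_KEY" in os.environ)  # True once the keys are loaded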


@@ -2,4 +2,5 @@ openai
fastapi
uvicorn
boto3
litellm
python-dotenv

openai-proxy/utils.py Normal file

@@ -0,0 +1,7 @@
import os, litellm
import dotenv
dotenv.load_dotenv() # load env variables
def set_callbacks():
if ("LANGUFSE_PUBLIC_KEY" in os.environ and "LANGUFSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
litellm.success_callback = ["langfuse"]
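
A quick way to sanity-check the new helper (hypothetical smoke test; the key values are placeholders and the script assumes it runs from the openai-proxy directory so utils is importable):

import os
import litellm
from utils import set_callbacks

# Placeholder credentials purely for demonstration.
os.environ["LANGFUSE_PUBLIC_KEY"] = "pk-lf-test"
os.environ["LANGFUSE_SECRET_KEY"] = "sk-lf-test"

set_callbacks()
print(litellm.success_callback)  # expected: ["langfuse"]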