From 5733b10f0423cda680a078b7c32213179992ea79 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 23 Oct 2023 14:52:20 -0700
Subject: [PATCH] (fix) proxy server use set_callbacks & load dotenv

---
 openai-proxy/main.py          | 7 ++++---
 openai-proxy/requirements.txt | 3 ++-
 openai-proxy/utils.py         | 7 +++++++
 3 files changed, 13 insertions(+), 4 deletions(-)
 create mode 100644 openai-proxy/utils.py

diff --git a/openai-proxy/main.py b/openai-proxy/main.py
index a74a2b60d..9bba0d96b 100644
--- a/openai-proxy/main.py
+++ b/openai-proxy/main.py
@@ -5,6 +5,9 @@ from fastapi.responses import StreamingResponse, FileResponse
 from fastapi.middleware.cors import CORSMiddleware
 import json
 import os
+from utils import set_callbacks
+import dotenv
+dotenv.load_dotenv() # load env variables
 
 app = FastAPI(docs_url="/", title="LiteLLM API")
 router = APIRouter()
@@ -17,9 +20,7 @@ app.add_middleware(
     allow_methods=["*"],
     allow_headers=["*"],
 )
-
-if ("LANGUFSE_PUBLIC_KEY" in os.environ and "LANGUFSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
-    litellm.success_callback = ["langfuse"]
+set_callbacks() # sets litellm callbacks for logging if they exist in the environment
 
 #### API ENDPOINTS ####
 @router.post("/v1/models")
diff --git a/openai-proxy/requirements.txt b/openai-proxy/requirements.txt
index b7d6e99bf..8dda011eb 100644
--- a/openai-proxy/requirements.txt
+++ b/openai-proxy/requirements.txt
@@ -2,4 +2,5 @@ openai
 fastapi
 uvicorn
 boto3
-litellm
\ No newline at end of file
+litellm
+python-dotenv
\ No newline at end of file
diff --git a/openai-proxy/utils.py b/openai-proxy/utils.py
new file mode 100644
index 000000000..3a469a416
--- /dev/null
+++ b/openai-proxy/utils.py
@@ -0,0 +1,7 @@
+import os, litellm
+import dotenv
+dotenv.load_dotenv() # load env variables
+
+def set_callbacks():
+    if ("LANGFUSE_PUBLIC_KEY" in os.environ and "LANGFUSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
+        litellm.success_callback = ["langfuse"]