From 91f691cd8b55f938a7372a176d986680c30c8064 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Mon, 23 Oct 2023 17:59:36 -0700
Subject: [PATCH] fix(openai-proxy): add set_verbose flag for proxy to see logs

---
 openai-proxy/.env.template | 3 +++
 openai-proxy/utils.py      | 8 ++++++--
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/openai-proxy/.env.template b/openai-proxy/.env.template
index 2926981266..a05bf0670f 100644
--- a/openai-proxy/.env.template
+++ b/openai-proxy/.env.template
@@ -20,12 +20,15 @@ COHERE_API_KEY = ""
 
 ## LOGGING ##
 
+SET_VERBOSE = "False" # set to 'True' to see detailed input/output logs
+
 ### LANGFUSE
 LANGFUSE_PUBLIC_KEY = ""
 LANGFUSE_SECRET_KEY = ""
 # Optional, defaults to https://cloud.langfuse.com
 LANGFUSE_HOST = "" # optional
 
+
 ## CACHING ##
 
 ### REDIS
diff --git a/openai-proxy/utils.py b/openai-proxy/utils.py
index 841ff8b873..70a6f0e6c3 100644
--- a/openai-proxy/utils.py
+++ b/openai-proxy/utils.py
@@ -4,16 +4,20 @@ dotenv.load_dotenv() # load env variables
 
 def set_callbacks():
     ## LOGGING
+    if len(os.getenv("SET_VERBOSE")) > 0:
+        if os.getenv("SET_VERBOSE") == "True":
+            litellm.set_verbose = True
+        else:
+            litellm.set_verbose = False
+
     ### LANGFUSE
     if (len(os.getenv("LANGFUSE_PUBLIC_KEY", "")) > 0 and len(os.getenv("LANGFUSE_SECRET_KEY", ""))) > 0 or len(os.getenv("LANGFUSE_HOST", "")) > 0:
-        print(f"sets langfuse integration")
         litellm.success_callback = ["langfuse"]
 
     ## CACHING
     ### REDIS
     print(f"redis host: {len(os.getenv('REDIS_HOST', ''))}; redis port: {len(os.getenv('REDIS_PORT', ''))}; redis password: {len(os.getenv('REDIS_PASSWORD'))}")
     if len(os.getenv("REDIS_HOST", "")) > 0 and len(os.getenv("REDIS_PORT", "")) > 0 and len(os.getenv("REDIS_PASSWORD", "")) > 0:
-        print(f"sets caching integration")
        from litellm.caching import Cache
         litellm.cache = Cache(type="redis", host=os.getenv("REDIS_HOST"), port=os.getenv("REDIS_PORT"), password=os.getenv("REDIS_PASSWORD"))
 
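
Note on the new SET_VERBOSE block: os.getenv("SET_VERBOSE") returns None when the variable is unset, so len(os.getenv("SET_VERBOSE")) would raise a TypeError instead of quietly skipping the block. A minimal defensive sketch of the same check, assuming the same SET_VERBOSE convention and the litellm.set_verbose flag used in the patch (the helper name configure_verbosity is hypothetical, not part of this commit):

import os
import litellm

def configure_verbosity():
    # Hypothetical helper, not part of the patch above.
    # Default to "" so an unset SET_VERBOSE does not raise on len(None).
    set_verbose = os.getenv("SET_VERBOSE", "")
    if len(set_verbose) > 0:
        # Only the literal string "True" enables detailed input/output logs.
        litellm.set_verbose = (set_verbose == "True")

Such a helper would be called at proxy startup (for example alongside set_callbacks(), after dotenv.load_dotenv() has populated the environment).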