diff --git a/openai-proxy/.env.template b/openai-proxy/.env.template
index 292698126..a05bf0670 100644
--- a/openai-proxy/.env.template
+++ b/openai-proxy/.env.template
@@ -20,12 +20,15 @@ COHERE_API_KEY = ""
 
 ## LOGGING ##
+SET_VERBOSE = "False" # set to 'True' to see detailed input/output logs
+
 ### LANGFUSE
 LANGFUSE_PUBLIC_KEY = ""
 LANGFUSE_SECRET_KEY = ""
 # Optional, defaults to https://cloud.langfuse.com
 LANGFUSE_HOST = "" # optional
 
 
+
 ## CACHING ##
 
 ### REDIS
diff --git a/openai-proxy/utils.py b/openai-proxy/utils.py
index 841ff8b87..70a6f0e6c 100644
--- a/openai-proxy/utils.py
+++ b/openai-proxy/utils.py
@@ -4,16 +4,20 @@ dotenv.load_dotenv() # load env variables
 
 def set_callbacks():
     ## LOGGING
+    verbose_env = os.getenv("SET_VERBOSE", "")  # default "" — os.getenv returns None when unset, and len(None) raises TypeError
+    if verbose_env == "True":
+        litellm.set_verbose = True
+    elif verbose_env:
+        litellm.set_verbose = False
+
     ### LANGFUSE
     if (len(os.getenv("LANGFUSE_PUBLIC_KEY", "")) > 0 and len(os.getenv("LANGFUSE_SECRET_KEY", ""))) > 0 or len(os.getenv("LANGFUSE_HOST", "")) > 0:
-        print(f"sets langfuse integration")
         litellm.success_callback = ["langfuse"]
 
     ## CACHING
     ### REDIS
     print(f"redis host: {len(os.getenv('REDIS_HOST', ''))}; redis port: {len(os.getenv('REDIS_PORT', ''))}; redis password: {len(os.getenv('REDIS_PASSWORD'))}")
     if len(os.getenv("REDIS_HOST", "")) > 0 and len(os.getenv("REDIS_PORT", "")) > 0 and len(os.getenv("REDIS_PASSWORD", "")) > 0:
-        print(f"sets caching integration")
         from litellm.caching import Cache
         litellm.cache = Cache(type="redis", host=os.getenv("REDIS_HOST"), port=os.getenv("REDIS_PORT"), password=os.getenv("REDIS_PASSWORD"))
 