(feat) openai-proxy: add print statements letting users know which callback is switched on

This commit is contained in:
ishaan-jaff 2023-10-25 10:49:14 -07:00
parent e118ce50e1
commit 907bff33f0

View file

@ -9,18 +9,22 @@ def set_callbacks():
### VERBOSE LOGGING
# SET_VERBOSE must be exactly the string "True" to enable; any other
# non-empty value explicitly disables verbose mode.
if os.getenv("SET_VERBOSE", ""):
    if os.getenv("SET_VERBOSE") == "True":
        litellm.set_verbose = True
        print("\033[92mLiteLLM: Switched on verbose logging\033[0m")
    else:
        litellm.set_verbose = False
### LANGFUSE
# Enabled when BOTH Langfuse keys are set, or when a custom host is given.
# BUGFIX: original had `(len(pub) > 0 and len(sec)) > 0`, comparing a
# bool-and-int chain against 0 — the closing paren was misplaced.
if (os.getenv("LANGFUSE_PUBLIC_KEY", "") and os.getenv("LANGFUSE_SECRET_KEY", "")) or os.getenv("LANGFUSE_HOST", ""):
    litellm.success_callback = ["langfuse"]
    print("\033[92mLiteLLM: Switched on Langfuse feature\033[0m")
## CACHING
### REDIS
# Redis caching requires host, port, AND password to all be present.
if os.getenv("REDIS_HOST", "") and os.getenv("REDIS_PORT", "") and os.getenv("REDIS_PASSWORD", ""):
    from litellm.caching import Cache
    litellm.cache = Cache(type="redis", host=os.getenv("REDIS_HOST"), port=os.getenv("REDIS_PORT"), password=os.getenv("REDIS_PASSWORD"))
    print("\033[92mLiteLLM: Switched on Redis caching\033[0m")
def load_router_config(router: Optional[litellm.Router]):