fix(openai-proxy/utils.py): add caching

This commit is contained in:
Krrish Dholakia 2023-10-23 17:00:56 -07:00
parent ea0c65d146
commit c34e9d73ff
5 changed files with 111 additions and 7 deletions

View file

@@ -3,5 +3,20 @@ import dotenv
dotenv.load_dotenv() # load env variables
def set_callbacks():
if ("LANGFUSE_PUBLIC_KEY" in os.environ and "LANGFUSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
## LOGGING
### LANGFUSE
if (len(os.getenv("LANGFUSE_PUBLIC_KEY", "")) > 0 and len(os.getenv("LANGFUSE_SECRET_KEY", ""))) > 0 or len(os.getenv("LANGFUSE_HOST", "")) > 0:
print(f"sets langfuse integration")
litellm.success_callback = ["langfuse"]
## CACHING
### REDIS
print(f"redis host: {len(os.getenv('REDIS_HOST', ''))}; redis port: {len(os.getenv('REDIS_PORT', ''))}; redis password: {len(os.getenv('REDIS_PASSWORD'))}")
if len(os.getenv("REDIS_HOST", "")) > 0 and len(os.getenv("REDIS_PORT", "")) > 0 and len(os.getenv("REDIS_PASSWORD", "")) > 0:
print(f"sets caching integration")
from litellm.caching import Cache
litellm.cache = Cache(type="redis", host=os.getenv("REDIS_HOST"), port=os.getenv("REDIS_PORT"), password=os.getenv("REDIS_PASSWORD"))