Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00
fix(openai-proxy/utils.py): adding caching
This commit is contained in:
parent ea0c65d146
commit c34e9d73ff
5 changed files with 111 additions and 7 deletions
openai-proxy/utils.py

@@ -3,5 +3,20 @@ import dotenv
 dotenv.load_dotenv() # load env variables

 def set_callbacks():
-    if ("LANGFUSE_PUBLIC_KEY" in os.environ and "LANGFUSE_SECRET_KEY" in os.environ) or "LANGFUSE_HOST" in os.environ:
+    ## LOGGING
+    ### LANGFUSE
+    if (len(os.getenv("LANGFUSE_PUBLIC_KEY", "")) > 0 and len(os.getenv("LANGFUSE_SECRET_KEY", "")) > 0) or len(os.getenv("LANGFUSE_HOST", "")) > 0:
+        print(f"sets langfuse integration")
         litellm.success_callback = ["langfuse"]
+
+    ## CACHING
+    ### REDIS
+    print(f"redis host: {len(os.getenv('REDIS_HOST', ''))}; redis port: {len(os.getenv('REDIS_PORT', ''))}; redis password: {len(os.getenv('REDIS_PASSWORD', ''))}")
+    if len(os.getenv("REDIS_HOST", "")) > 0 and len(os.getenv("REDIS_PORT", "")) > 0 and len(os.getenv("REDIS_PASSWORD", "")) > 0:
+        print(f"sets caching integration")
+        from litellm.caching import Cache
+        litellm.cache = Cache(type="redis", host=os.getenv("REDIS_HOST"), port=os.getenv("REDIS_PORT"), password=os.getenv("REDIS_PASSWORD"))
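For context, below is a minimal sketch of how the Redis cache configured by set_callbacks() could be exercised. The environment values and the standalone cache setup are assumptions for illustration only, not part of this commit; it assumes a reachable Redis instance and a valid provider API key. With litellm.cache set to a Redis-backed Cache, a second identical litellm.completion call made with caching=True can be served from the cache instead of the upstream provider.

# Hypothetical usage sketch (not part of the commit).
# Assumes REDIS_HOST / REDIS_PORT / REDIS_PASSWORD point at a running Redis
# instance; the values below are placeholders.
import os
import litellm
from litellm.caching import Cache

os.environ["REDIS_HOST"] = "localhost"
os.environ["REDIS_PORT"] = "6379"
os.environ["REDIS_PASSWORD"] = "my-password"

# Same wiring that set_callbacks() performs when all three variables are set.
litellm.cache = Cache(
    type="redis",
    host=os.getenv("REDIS_HOST"),
    port=os.getenv("REDIS_PORT"),
    password=os.getenv("REDIS_PASSWORD"),
)

messages = [{"role": "user", "content": "hello"}]
# First call hits the provider; the second identical call with caching=True
# should be answered from Redis.
response1 = litellm.completion(model="gpt-3.5-turbo", messages=messages, caching=True)
response2 = litellm.completion(model="gpt-3.5-turbo", messages=messages, caching=True)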