From b13a93d9bc61f0cd43d2c09fa694ef83d1c73bee Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 24 Jun 2024 17:24:59 -0700
Subject: [PATCH] cleanup InMemoryCache

---
 litellm/caching.py | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/litellm/caching.py b/litellm/caching.py
index ceb8e70b16..c46dd3af8b 100644
--- a/litellm/caching.py
+++ b/litellm/caching.py
@@ -65,16 +65,13 @@ class BaseCache:
 
 
 class InMemoryCache(BaseCache):
-    def __init__(self, default_ttl: Optional[float] = 120.0):
+    def __init__(self, max_size_in_memory: Optional[int] = 200):
         """
-        default_ttl [float]: If default_ttl is 6 seconds, every 6 seconds the cache will be set to {}
-        this is done to prevent overuse of System RAM
+        max_size_in_memory [int]: Maximum number of items in cache. done to prevent memory leaks. Use 200 items as a default
         """
-        # if users don't provider one, use the default litellm cache
-        max_size_in_memory = 1000
-        self.cache_dict: LRUCache = LRUCache(maxsize=max_size_in_memory)
-        self.ttl_dict: LRUCache = LRUCache(maxsize=max_size_in_memory)
-        self.default_ttl = default_ttl or 120.0
+        self.max_size_in_memory = max_size_in_memory or 200
+        self.cache_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)
+        self.ttl_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)
 
     def set_cache(self, key, value, **kwargs):
         print_verbose("InMemoryCache: set_cache")