From 511dd18e4beb6289e65a3d8876d95f758fd24e27 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Thu, 27 Jun 2024 20:58:29 -0700
Subject: [PATCH] remove debug print statement

---
 litellm/caching.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/litellm/caching.py b/litellm/caching.py
index 19c1431a2b..64488289a8 100644
--- a/litellm/caching.py
+++ b/litellm/caching.py
@@ -97,19 +97,13 @@ class InMemoryCache(BaseCache):
         """
         for key in list(self.ttl_dict.keys()):
             if time.time() > self.ttl_dict[key]:
-                print( # noqa
-                    "Cache Evicting item key=",
-                    key,
-                    "ttl=",
-                    self.ttl_dict[key],
-                    "size of cache=",
-                    len(self.cache_dict),
-                )
                 self.cache_dict.pop(key, None)
                 self.ttl_dict.pop(key, None)
 
     def set_cache(self, key, value, **kwargs):
-        print_verbose("InMemoryCache: set_cache")
+        print_verbose(
+            "InMemoryCache: set_cache. current size= {}".format(len(self.cache_dict))
+        )
         if len(self.cache_dict) >= self.max_size_in_memory:
             # only evict when cache is full
             self.evict_cache()
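
For context, the pattern the patch touches is a size-capped in-memory cache with TTL-based eviction: set_cache only calls evict_cache once the cache is full, and evict_cache drops every entry whose TTL has expired (now without printing). Below is a minimal standalone sketch of that pattern, not the actual litellm InMemoryCache; the constructor signature, default_ttl, and get_cache here are assumptions for illustration, while cache_dict, ttl_dict, and max_size_in_memory mirror the names in the diff.

    # Simplified sketch of the eviction pattern shown in the diff.
    # Not litellm's InMemoryCache: default_ttl and the constructor are assumed.
    import time


    class SimpleTTLCache:
        def __init__(self, max_size_in_memory=200, default_ttl=600):
            self.cache_dict = {}
            self.ttl_dict = {}
            self.max_size_in_memory = max_size_in_memory
            self.default_ttl = default_ttl

        def evict_cache(self):
            # Drop every entry whose TTL has expired; after this patch the
            # eviction loop in litellm no longer prints per-key debug output.
            for key in list(self.ttl_dict.keys()):
                if time.time() > self.ttl_dict[key]:
                    self.cache_dict.pop(key, None)
                    self.ttl_dict.pop(key, None)

        def set_cache(self, key, value, ttl=None):
            # Only evict when the cache is full, matching the guard in the diff.
            if len(self.cache_dict) >= self.max_size_in_memory:
                self.evict_cache()
            self.cache_dict[key] = value
            self.ttl_dict[key] = time.time() + (ttl or self.default_ttl)

        def get_cache(self, key):
            return self.cache_dict.get(key)


    cache = SimpleTTLCache(max_size_in_memory=2, default_ttl=1)
    cache.set_cache("a", 1)
    cache.set_cache("b", 2)
    time.sleep(1.1)
    cache.set_cache("c", 3)   # cache is full, so the expired "a" and "b" are evicted
    print(len(cache.cache_dict))  # -> 1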