From 8b12a2e5dc83071c20ad6c2e2ef692d578c93085 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Wed, 2 Apr 2025 14:52:55 -0700
Subject: [PATCH] fix pod lock manager

---
 litellm/proxy/proxy_config.yaml | 9 +++++++++
 litellm/proxy/utils.py          | 1 +
 2 files changed, 10 insertions(+)

diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index 17658df903..fe8d73d26a 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -4,3 +4,12 @@ model_list:
       model: openai/fake
       api_key: fake-key
       api_base: https://exampleopenaiendpoint-production.up.railway.app/
+
+general_settings:
+  use_redis_transaction_buffer: true
+
+litellm_settings:
+  cache: True
+  cache_params:
+    type: redis
+    supported_call_types: []
\ No newline at end of file
diff --git a/litellm/proxy/utils.py b/litellm/proxy/utils.py
index 0b87444628..eb733e7370 100644
--- a/litellm/proxy/utils.py
+++ b/litellm/proxy/utils.py
@@ -349,6 +349,7 @@ class ProxyLogging:
         if redis_cache is not None:
             self.internal_usage_cache.dual_cache.redis_cache = redis_cache
             self.db_spend_update_writer.redis_update_buffer.redis_cache = redis_cache
+            self.db_spend_update_writer.pod_lock_manager.redis_cache = redis_cache
 
     def _init_litellm_callbacks(self, llm_router: Optional[Router] = None):
         litellm.logging_callback_manager.add_litellm_callback(self.max_parallel_request_limiter)  # type: ignore
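
Background on the one-line fix in utils.py: when ProxyLogging receives the
shared Redis connection, it fans it out to its sub-components. Before this
patch the pod lock manager was presumably left out of that wiring, so its
redis_cache stayed None and cross-pod locking could never succeed. Below is a
minimal sketch of the SET-NX locking pattern involved, written against
redis-py's asyncio client rather than litellm's RedisCache wrapper; every name
except redis_cache is illustrative, not litellm's actual API.

import asyncio
import uuid
from typing import Optional

import redis.asyncio as redis


class PodLockManagerSketch:
    """Hypothetical stand-in for the pod lock manager the patch fixes."""

    def __init__(self, lock_key: str = "spend_update_lock", ttl_seconds: int = 60):
        self.lock_key = lock_key
        self.ttl_seconds = ttl_seconds
        self.pod_id = str(uuid.uuid4())  # unique identity per pod/process
        # Attached later, once the Redis connection exists. This mirrors the
        # assignment the patch adds in ProxyLogging.
        self.redis_cache: Optional[redis.Redis] = None

    async def acquire(self) -> bool:
        """Try to become the single writer pod via SET NX with a TTL."""
        if self.redis_cache is None:
            # The pre-patch failure mode: no client was ever attached,
            # so locking silently no-ops and no pod wins the lock.
            return False
        acquired = await self.redis_cache.set(
            self.lock_key, self.pod_id, nx=True, ex=self.ttl_seconds
        )
        return bool(acquired)

    async def release(self) -> None:
        """Delete the lock key, but only if this pod still holds it.

        Note: check-then-delete is racy; production code would typically
        do this atomically with a Lua script.
        """
        if self.redis_cache is None:
            return
        holder = await self.redis_cache.get(self.lock_key)
        if holder is not None and holder.decode() == self.pod_id:
            await self.redis_cache.delete(self.lock_key)


async def main() -> None:
    manager = PodLockManagerSketch()
    manager.redis_cache = redis.Redis()  # the wiring step the patch restores
    if await manager.acquire():
        try:
            pass  # single-pod work, e.g. flushing buffered spend updates to the DB
        finally:
            await manager.release()


if __name__ == "__main__":
    asyncio.run(main())

The config hunk pairs with this: use_redis_transaction_buffer routes spend
updates through a Redis buffer, and a lock of this kind appears to be how a
single pod is elected to flush that buffer to the database.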