fix(redis_cache.py): add 5s default timeout

Krrish Dholakia 2025-03-17 14:27:36 -07:00
parent 113a29705f
commit 08895a2c71
4 changed files with 28 additions and 5 deletions
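
In short: the hardcoded `socket_timeout=5` in `get_redis_async_client` is removed, and `RedisCache` gains a `socket_timeout` parameter (defaulting to 5.0 seconds) that is forwarded to the redis client through `redis_kwargs`. A minimal usage sketch, assuming `RedisCache` is importable from `litellm.caching.caching` (the import path is an assumption, not shown in this diff):

    # Minimal sketch: overriding the new 5.0s default socket timeout.
    # Import path is assumed; adjust to your litellm version.
    from litellm.caching.caching import RedisCache

    cache = RedisCache(
        host="localhost",    # placeholder connection details
        port=6379,
        socket_timeout=2.0,  # override the 5.0s default
    )
    print(cache.redis_kwargs["socket_timeout"])  # -> 2.0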


@@ -182,9 +182,7 @@ def init_redis_cluster(redis_kwargs) -> redis.RedisCluster:
             "REDIS_CLUSTER_NODES environment variable is not valid JSON. Please ensure it's properly formatted."
         )
-    verbose_logger.debug(
-        "init_redis_cluster: startup nodes are being initialized."
-    )
+    verbose_logger.debug("init_redis_cluster: startup nodes are being initialized.")
 
     from redis.cluster import ClusterNode
 
     args = _get_redis_cluster_kwargs()
@@ -307,7 +305,6 @@ def get_redis_async_client(
         return _init_async_redis_sentinel(redis_kwargs)
 
     return async_redis.Redis(
-        socket_timeout=5,
         **redis_kwargs,
     )
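
The hardcoded `socket_timeout=5` can be dropped here because the timeout now travels inside `redis_kwargs`; for redis-py, passing `socket_timeout` through a kwargs dict is equivalent to passing it as an explicit keyword. Illustrative sketch (not from the commit):

    # Sketch: socket_timeout inside a kwargs dict reaches the client just like
    # an explicit keyword argument would.
    import redis.asyncio as async_redis

    redis_kwargs = {"host": "localhost", "port": 6379, "socket_timeout": 5.0}
    client = async_redis.Redis(**redis_kwargs)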


@@ -54,6 +54,7 @@ class RedisCache(BaseCache):
         redis_flush_size: Optional[int] = 100,
         namespace: Optional[str] = None,
         startup_nodes: Optional[List] = None,  # for redis-cluster
+        socket_timeout: Optional[float] = 5.0,  # default 5 second timeout
         **kwargs,
     ):
@@ -70,6 +71,9 @@ class RedisCache(BaseCache):
             redis_kwargs["password"] = password
         if startup_nodes is not None:
             redis_kwargs["startup_nodes"] = startup_nodes
+        if socket_timeout is not None:
+            redis_kwargs["socket_timeout"] = socket_timeout
+
         ### HEALTH MONITORING OBJECT ###
         if kwargs.get("service_logger_obj", None) is not None and isinstance(
             kwargs["service_logger_obj"], ServiceLogging
@@ -556,6 +560,7 @@ class RedisCache(BaseCache):
                 ## LOGGING ##
                 end_time = time.time()
                 _duration = end_time - start_time
+
                 asyncio.create_task(
                     self.service_logger_obj.async_service_success_hook(
                         service=ServiceTypes.REDIS,
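
Because `__init__` only copies `socket_timeout` into `redis_kwargs` when it is not `None`, passing `socket_timeout=None` opts out of a client-side timeout entirely. A hedged sketch (same assumed import path as above):

    # Sketch: socket_timeout=None is not forwarded, so the client falls back to
    # redis-py's default (no socket timeout).
    from litellm.caching.caching import RedisCache

    cache = RedisCache(host="localhost", port=6379, socket_timeout=None)
    assert "socket_timeout" not in cache.redis_kwargs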


@@ -6,4 +6,12 @@ model_list:
       api_base: os.environ/AZURE_API_BASE
 
 litellm_settings:
   callbacks: ["prometheus"]
+
+router_settings:
+  routing_strategy: usage-based-routing-v2 # 👈 KEY CHANGE
+  redis_host: os.environ/REDIS_HOST
+  redis_password: os.environ/REDIS_PASSWORD
+  redis_port: os.environ/REDIS_PORT
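
The `os.environ/...` values in this proxy config are resolved from environment variables at load time. A rough sketch of that convention (not litellm's actual resolver, just the idea):

    # Sketch of the "os.environ/<NAME>" convention used above; not the real
    # litellm implementation.
    import os

    def resolve(value: str) -> str:
        if value.startswith("os.environ/"):
            return os.environ[value.split("/", 1)[1]]
        return value

    redis_host = resolve("os.environ/REDIS_HOST")  # -> contents of $REDIS_HOST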


@@ -1,9 +1,13 @@
+import asyncio
 import json
 import os
 import sys
+import time
 from unittest.mock import MagicMock, patch
 
+import httpx
 import pytest
+import respx
 from fastapi.testclient import TestClient
 
 sys.path.insert(
@@ -39,3 +43,12 @@ async def test_redis_cache_async_increment(namespace):
     mock_redis_instance.incrbyfloat.assert_called_once_with(
         name=expected_key, amount=1
     )
+
+
+@pytest.mark.asyncio
+async def test_redis_client_init_with_socket_timeout():
+    redis_cache = RedisCache(socket_timeout=1.0)
+    assert redis_cache.redis_kwargs["socket_timeout"] == 1.0
+    client = redis_cache.init_async_client()
+    assert client is not None
+    assert client.connection_pool.connection_kwargs["socket_timeout"] == 1.0
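
The new test checks that a custom `socket_timeout` is recorded in `redis_kwargs` and reaches the async client's connection pool. A natural companion check, not part of this commit (hedged sketch, assuming the same test environment supplies the Redis connection settings), is that the 5.0s default applies when no value is passed:

    # Sketch (not in this commit): the default from the new signature should
    # show up in redis_kwargs when socket_timeout is not supplied.
    @pytest.mark.asyncio
    async def test_redis_client_default_socket_timeout():
        redis_cache = RedisCache()
        assert redis_cache.redis_kwargs["socket_timeout"] == 5.0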