Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
test(test_prometheus_services.py): fix testing to handle caching ping in init
parent 5bc0437680
commit b3a8c2885b

3 changed files with 56 additions and 18 deletions
litellm/caching.py
@@ -149,6 +149,14 @@ class RedisCache(BaseCache):
         if password is not None:
             redis_kwargs["password"] = password

+        ### HEALTH MONITORING OBJECT ###
+        if kwargs.get("service_logger_obj", None) is not None and isinstance(
+            kwargs["service_logger_obj"], ServiceLogging
+        ):
+            self.service_logger_obj = kwargs.pop("service_logger_obj")
+        else:
+            self.service_logger_obj = ServiceLogging()
+
         redis_kwargs.update(kwargs)
         self.redis_client = get_redis_client(**redis_kwargs)
         self.redis_kwargs = redis_kwargs
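With this hunk, RedisCache lets callers inject a pre-built ServiceLogging instance via a service_logger_obj kwarg and only builds its own as a fallback. A minimal sketch of the injection pattern, assuming the constructor completes (the test below guards construction with try/except for an unreachable host); the ServiceLogging import path is assumed, since it is not shown in this diff, and the host is a placeholder:

    # Sketch: inject a custom health-monitoring object into RedisCache.
    from litellm.caching import RedisCache                # import confirmed by the test below
    from litellm._service_logger import ServiceLogging    # path assumed, not shown in this diff

    sl = ServiceLogging(mock_testing=True)   # mock-testing counters, as used in the test below
    cache = RedisCache(host="localhost", **{"service_logger_obj": sl})

    # __init__ pops the kwarg before redis_kwargs.update(kwargs), so it never
    # reaches the underlying redis client:
    assert cache.service_logger_obj is sl
    assert "service_logger_obj" not in cache.redis_kwargs

Popping the kwarg before redis_kwargs.update(kwargs) keeps the logger out of the arguments forwarded to the Redis client, which is what makes the injection safe.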
@@ -165,12 +173,10 @@ class RedisCache(BaseCache):
         except Exception as e:
             pass

-        ### HEALTH MONITORING OBJECT ###
-        self.service_logger_obj = ServiceLogging()
-
         ### ASYNC HEALTH PING ###
         try:
-            asyncio.get_running_loop().create_task(self.ping())
+            # asyncio.get_running_loop().create_task(self.ping())
+            result = asyncio.get_running_loop().create_task(self.ping())
         except Exception:
             pass

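The async health ping is scheduled opportunistically here: asyncio.get_running_loop() raises a RuntimeError when __init__ runs outside an event loop, and the bare except turns that into a no-op. A standalone sketch of that guard (the names below are illustrative, not litellm code):

    import asyncio

    async def _ping() -> bool:
        # Stand-in for RedisCache.ping(), which awaits redis_client.ping().
        return True

    def schedule_ping() -> None:
        try:
            # Succeeds only when called from inside a running event loop.
            asyncio.get_running_loop().create_task(_ping())
        except Exception:
            # Outside a loop, get_running_loop() raises RuntimeError;
            # the health ping is best-effort, so it is silently skipped.
            pass

    schedule_ping()             # no running loop here -> skipped

    async def main() -> None:
        schedule_ping()         # inside a loop -> task is scheduled
        await asyncio.sleep(0)  # let the task run

    asyncio.run(main())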
@@ -607,13 +613,31 @@ class RedisCache(BaseCache):
         """
         Tests if the sync redis client is correctly setup.
         """
-        print_verbose(f"Pinging Async Redis Cache")
+        print_verbose(f"Pinging Sync Redis Cache")
+        start_time = time.time()
         try:
             response = self.redis_client.ping()
             print_verbose(f"Redis Cache PING: {response}")
+            ## LOGGING ##
+            end_time = time.time()
+            _duration = end_time - start_time
+            self.service_logger_obj.service_success_hook(
+                service=ServiceTypes.REDIS,
+                duration=_duration,
+                call_type="sync_ping",
+            )
             return response
         except Exception as e:
             # NON blocking - notify users Redis is throwing an exception
+            ## LOGGING ##
+            end_time = time.time()
+            _duration = end_time - start_time
+            self.service_logger_obj.service_failure_hook(
+                service=ServiceTypes.REDIS,
+                duration=_duration,
+                error=e,
+                call_type="sync_ping",
+            )
             print_verbose(
                 f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}"
             )
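The sync ping now wraps the Redis call in wall-clock timing and reports the duration to the service logger on both the success and failure paths. A reduced, self-contained sketch of that pattern with a stand-in logger; the hook names and keyword arguments mirror the diff, while ServiceTypes.REDIS is replaced by a plain string so the sketch runs on its own:

    import time

    class DemoServiceLogger:
        # Hypothetical stand-in exposing the two sync hooks called above.
        def service_success_hook(self, service, duration, call_type):
            print(f"success service={service} call_type={call_type} duration={duration:.4f}s")

        def service_failure_hook(self, service, duration, error, call_type):
            print(f"failure service={service} call_type={call_type} error={error!r} duration={duration:.4f}s")

    def timed_ping(do_ping, logger):
        # Mirrors the shape of the sync ping: time the call, report either way.
        start_time = time.time()
        try:
            response = do_ping()
            logger.service_success_hook(
                service="redis", duration=time.time() - start_time, call_type="sync_ping"
            )
            return response
        except Exception as e:
            logger.service_failure_hook(
                service="redis", duration=time.time() - start_time, error=e, call_type="sync_ping"
            )
            # What the real method does after logging (re-raise, return, ...)
            # is outside this hunk.
            return None

    def _broken_ping():
        raise ConnectionError("redis unreachable")

    timed_ping(lambda: "PONG", DemoServiceLogger())   # success path
    timed_ping(_broken_ping, DemoServiceLogger())     # failure path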
@@ -622,14 +646,35 @@ class RedisCache(BaseCache):

     async def ping(self) -> bool:
         _redis_client = self.init_async_client()
+        start_time = time.time()
         async with _redis_client as redis_client:
             print_verbose(f"Pinging Async Redis Cache")
             try:
                 response = await redis_client.ping()
-                print_verbose(f"Redis Cache PING: {response}")
+                ## LOGGING ##
+                end_time = time.time()
+                _duration = end_time - start_time
+                asyncio.create_task(
+                    self.service_logger_obj.async_service_success_hook(
+                        service=ServiceTypes.REDIS,
+                        duration=_duration,
+                        call_type="async_ping",
+                    )
+                )
                 return response
             except Exception as e:
                 # NON blocking - notify users Redis is throwing an exception
+                ## LOGGING ##
+                end_time = time.time()
+                _duration = end_time - start_time
+                asyncio.create_task(
+                    self.service_logger_obj.async_service_failure_hook(
+                        service=ServiceTypes.REDIS,
+                        duration=_duration,
+                        error=e,
+                        call_type="async_ping",
+                    )
+                )
                 print_verbose(
                     f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}"
                 )
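On the async path the hooks are coroutines, so they are wrapped in asyncio.create_task() instead of being awaited: the ping result is returned immediately and the metrics work finishes in the background. A self-contained sketch of that fire-and-forget dispatch (the hook name and arguments mimic the diff; everything else is illustrative):

    import asyncio
    import time

    async def async_service_success_hook(service, duration, call_type):
        # Stand-in for ServiceLogging.async_service_success_hook.
        await asyncio.sleep(0.05)          # pretend this is slow metrics I/O
        print(f"recorded {service} {call_type} in {duration:.4f}s")

    async def ping() -> bool:
        start_time = time.time()
        response = True                    # stand-in for `await redis_client.ping()`
        # Fire-and-forget: the caller gets the ping result without waiting
        # for the metrics hook to complete.
        asyncio.create_task(
            async_service_success_hook(
                service="redis",
                duration=time.time() - start_time,
                call_type="async_ping",
            )
        )
        return response

    async def main() -> None:
        print(await ping())                # returns right away
        await asyncio.sleep(0.1)           # give the background task time to finish

    asyncio.run(main())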
litellm/integrations/prometheus_services.py
@@ -30,7 +30,6 @@ class PrometheusServicesLogger:
             raise Exception(
                 "Missing prometheus_client. Run `pip install prometheus-client`"
             )
-        print("INITIALIZES PROMETHEUS SERVICE LOGGER!")

         self.Histogram = Histogram
         self.Counter = Counter
litellm/tests/test_prometheus_services.py
@@ -67,21 +67,15 @@ async def test_completion_with_caching_bad_call():
     litellm.set_verbose = True
     sl = ServiceLogging(mock_testing=True)
     try:
-        litellm.cache = Cache(type="redis", host="hello-world")
+        from litellm.caching import RedisCache

         litellm.service_callback = ["prometheus_system"]

-        litellm.cache.cache.service_logger_obj = sl
-        messages = [{"role": "user", "content": "Hey, how's it going?"}]
-        response1 = await acompletion(
-            model="gpt-3.5-turbo", messages=messages, caching=True
-        )
-        response1 = await acompletion(
-            model="gpt-3.5-turbo", messages=messages, caching=True
-        )
+        RedisCache(host="hello-world", **{"service_logger_obj": sl})
     except Exception as e:
-        pass
+        print(f"Receives exception = {str(e)}")

+    await asyncio.sleep(5)
     assert sl.mock_testing_async_failure_hook > 0
     assert sl.mock_testing_async_success_hook == 0
     assert sl.mock_testing_sync_success_hook == 0
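The assertions at the end rely on ServiceLogging(mock_testing=True) keeping per-hook call counters rather than emitting real metrics. The real class is not part of this diff; a hypothetical minimal version with just the counters and hook signatures that this commit touches might look like:

    # Hypothetical sketch only: the real ServiceLogging lives elsewhere in
    # litellm and also forwards to litellm.service_callback targets.
    class ServiceLoggingSketch:
        def __init__(self, mock_testing: bool = False):
            self.mock_testing = mock_testing
            self.mock_testing_sync_success_hook = 0
            self.mock_testing_async_success_hook = 0
            self.mock_testing_async_failure_hook = 0

        def service_success_hook(self, service, duration, call_type):
            if self.mock_testing:
                self.mock_testing_sync_success_hook += 1

        async def async_service_success_hook(self, service, duration, call_type):
            if self.mock_testing:
                self.mock_testing_async_success_hook += 1

        async def async_service_failure_hook(self, service, duration, error, call_type):
            if self.mock_testing:
                self.mock_testing_async_failure_hook += 1

With an unreachable host like "hello-world", the background ping scheduled in __init__ should fail and fire async_service_failure_hook, which is what drives mock_testing_async_failure_hook above zero after the five-second sleep while both success counters stay at zero.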