Merge branch 'main' into litellm_vertex_ai_streaming_fix

This commit is contained in:
Krish Dholakia 2024-02-06 14:33:54 -08:00 committed by GitHub
commit f70bbc7b2e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 44 additions and 17 deletions

View file

@@ -4076,19 +4076,29 @@ async def health_readiness():
cache_type = None
if litellm.cache is not None:
from litellm.caching import RedisSemanticCache
cache_type = litellm.cache.type
if prisma_client is not None: # if db passed in, check if it's connected
if prisma_client.db.is_connected() == True:
response_object = {"db": "connected"}
if isinstance(litellm.cache.cache, RedisSemanticCache):
# ping the cache
try:
index_info = await litellm.cache.cache._index_info()
except Exception as e:
index_info = "index does not exist - error: " + str(e)
cache_type = {"type": cache_type, "index_info": index_info}
return {
"status": "healthy",
"db": "connected",
"cache": cache_type,
"litellm_version": version,
"success_callbacks": litellm.success_callback,
}
if prisma_client is not None: # if db passed in, check if it's connected
await prisma_client.health_check() # test the db connection
response_object = {"db": "connected"}
return {
"status": "healthy",
"db": "connected",
"cache": cache_type,
"litellm_version": version,
"success_callbacks": litellm.success_callback,
}
else:
return {
"status": "healthy",