Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
Merge branch 'main' into litellm_vertex_ai_streaming_fix
commit f70bbc7b2e
8 changed files with 44 additions and 17 deletions
In `async def health_readiness()`, this hunk replaces the old `prisma_client.db.is_connected()` check with an explicit `await prisma_client.health_check()` call, and reports the cache type, including a Redis semantic-cache index ping, in the readiness payload:

```diff
@@ -4076,19 +4076,29 @@ async def health_readiness():
     cache_type = None
     if litellm.cache is not None:
         from litellm.caching import RedisSemanticCache
 
         cache_type = litellm.cache.type
 
-    if prisma_client is not None:  # if db passed in, check if it's connected
-        if prisma_client.db.is_connected() == True:
-            response_object = {"db": "connected"}
+        if isinstance(litellm.cache.cache, RedisSemanticCache):
+            # ping the cache
+            try:
+                index_info = await litellm.cache.cache._index_info()
+            except Exception as e:
+                index_info = "index does not exist - error: " + str(e)
+            cache_type = {"type": cache_type, "index_info": index_info}
+
-            return {
-                "status": "healthy",
-                "db": "connected",
-                "cache": cache_type,
-                "litellm_version": version,
-                "success_callbacks": litellm.success_callback,
-            }
+    if prisma_client is not None:  # if db passed in, check if it's connected
+        await prisma_client.health_check()  # test the db connection
+        response_object = {"db": "connected"}
+
+        return {
+            "status": "healthy",
+            "db": "connected",
+            "cache": cache_type,
+            "litellm_version": version,
+            "success_callbacks": litellm.success_callback,
+        }
     else:
         return {
             "status": "healthy",
```
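For context, a minimal sketch of how a deployment might probe this endpoint once the proxy is up. The `/health/readiness` path matches the handler name above, but the base URL, port, and the use of `httpx` are assumptions to adjust for your setup:

```python
import asyncio

import httpx  # assumed HTTP client; any async-capable client works


async def check_readiness(base_url: str = "http://localhost:4000") -> dict:
    """Call the proxy's readiness endpoint and return its JSON payload.

    On success the payload matches the dicts built in the hunk above,
    e.g. {"status": "healthy", "db": "connected", "cache": ..., ...}.
    """
    async with httpx.AsyncClient() as client:
        resp = await client.get(f"{base_url}/health/readiness")
        resp.raise_for_status()  # surface an error when the proxy is unhealthy
        return resp.json()


if __name__ == "__main__":
    print(asyncio.run(check_readiness()))
```

Presumably the point of swapping `prisma_client.db.is_connected()` for `await prisma_client.health_check()` is to trade a client-side connection flag for an actual round trip to the database, so readiness only reports "connected" when a query can really be served.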
|
Loading…
Add table
Add a link
Reference in a new issue