diff --git a/litellm/_logging.py b/litellm/_logging.py
index e3b54a012..0bd82a6bd 100644
--- a/litellm/_logging.py
+++ b/litellm/_logging.py
@@ -28,3 +28,4 @@ verbose_logger = logging.getLogger("LiteLLM")
 # Add the handler to the logger
 verbose_router_logger.addHandler(handler)
 verbose_proxy_logger.addHandler(handler)
+verbose_logger.addHandler(handler)
diff --git a/litellm/caching.py b/litellm/caching.py
index 8ad19e102..38174c2ab 100644
--- a/litellm/caching.py
+++ b/litellm/caching.py
@@ -12,10 +12,12 @@ import time, logging
 import json, traceback, ast, hashlib
 from typing import Optional, Literal, List, Union, Any
 from openai._models import BaseModel as OpenAIObject
+from litellm._logging import verbose_logger
 
 
 def print_verbose(print_statement):
     try:
+        verbose_logger.debug(print_statement)
         if litellm.set_verbose:
             print(print_statement)  # noqa
     except:
@@ -175,7 +177,7 @@ class S3Cache(BaseCache):
                     CacheControl=cache_control,
                     ContentType="application/json",
                     ContentLanguage="en",
-                    ContentDisposition=f"inline; filename=\"{key}.json\""
+                    ContentDisposition=f'inline; filename="{key}.json"',
                 )
             else:
                 cache_control = "immutable, max-age=31536000, s-maxage=31536000"
@@ -187,7 +189,7 @@
                     CacheControl=cache_control,
                     ContentType="application/json",
                     ContentLanguage="en",
-                    ContentDisposition=f"inline; filename=\"{key}.json\""
+                    ContentDisposition=f'inline; filename="{key}.json"',
                 )
         except Exception as e:
             # NON blocking - notify users S3 is throwing an exception
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index fafc41457..8ce2ee1c2 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -2879,11 +2879,27 @@ async def health_readiness():
     Unprotected endpoint for checking if worker can receive requests
     """
     global prisma_client
+
+    cache_type = None
+    if litellm.cache is not None:
+        cache_type = litellm.cache.type
     if prisma_client is not None:  # if db passed in, check if it's connected
         if prisma_client.db.is_connected() == True:
-            return {"status": "healthy", "db": "connected"}
+            response_object = {"db": "connected"}
+
+            return {
+                "status": "healthy",
+                "db": "connected",
+                "cache": cache_type,
+                "success_callbacks": litellm.success_callback,
+            }
         else:
-            return {"status": "healthy", "db": "Not connected"}
+            return {
+                "status": "healthy",
+                "db": "Not connected",
+                "cache": cache_type,
+                "success_callbacks": litellm.success_callback,
+            }
 
     raise HTTPException(status_code=503, detail="Service Unhealthy")
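
A minimal sketch (not part of the patch) for checking the enriched readiness payload; it assumes a proxy is already running locally, and the base URL/port below is a placeholder:

# Hypothetical check of the new /health/readiness response shape; the URL is an assumption.
import requests

resp = requests.get("http://0.0.0.0:8000/health/readiness")
payload = resp.json()

# With a connected DB and, e.g., Redis caching configured, the updated handler returns something like:
# {"status": "healthy", "db": "connected", "cache": "redis", "success_callbacks": [...]}
print(payload)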