from typing import Any, Dict

from fastapi import APIRouter, Depends, HTTPException, Request

import litellm
from litellm._logging import verbose_proxy_logger
from litellm.caching.caching import RedisCache
from litellm.litellm_core_utils.safe_json_dumps import safe_dumps
from litellm.litellm_core_utils.sensitive_data_masker import SensitiveDataMasker
from litellm.proxy._types import ProxyErrorTypes, ProxyException
from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
from litellm.types.caching import CachePingResponse

masker = SensitiveDataMasker()

router = APIRouter(
    prefix="/cache",
    tags=["caching"],
)
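
# How this router gets exposed -- a minimal sketch, not the proxy's actual
# startup code (it assumes a bare FastAPI `app`; the real litellm proxy wires
# the router up during its own app setup):
#
#     from fastapi import FastAPI
#
#     app = FastAPI()
#     app.include_router(router)  # exposes /cache/ping, /cache/delete,
#                                 # /cache/redis/info, /cache/flushall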


@router.get(
    "/ping",
    response_model=CachePingResponse,
    dependencies=[Depends(user_api_key_auth)],
)
async def cache_ping():
    """
    Endpoint for checking whether the configured cache can be pinged.
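
    Example request (assumes the proxy is running on http://0.0.0.0:4000 with
    master key `sk-1234`, as in the other examples in this file; substitute
    your own host and key):

    ```shell
    curl -X GET "http://0.0.0.0:4000/cache/ping" \
        -H "Authorization: Bearer sk-1234"
    ```

    A healthy redis-backed cache responds with `status="healthy"`,
    `ping_response=True`, and the masked cache configuration as JSON strings.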
    """
    litellm_cache_params: Dict[str, Any] = {}
    specific_cache_params: Dict[str, Any] = {}
    try:
        if litellm.cache is None:
            raise HTTPException(
                status_code=503, detail="Cache not initialized. litellm.cache is None"
            )
        litellm_cache_params = masker.mask_dict(vars(litellm.cache))
        # remove field that might reference itself
        litellm_cache_params.pop("cache", None)
        specific_cache_params = (
            masker.mask_dict(vars(litellm.cache.cache)) if litellm.cache else {}
        )

        if litellm.cache.type == "redis":
            # ping the redis cache
            ping_response = await litellm.cache.ping()
            verbose_proxy_logger.debug(
                "/cache/ping: ping_response: " + str(ping_response)
            )
            # making a set cache call
            # add cache does not return anything
            await litellm.cache.async_add_cache(
                result="test_key",
                model="test-model",
                messages=[{"role": "user", "content": "test from litellm"}],
            )
            verbose_proxy_logger.debug("/cache/ping: done with set_cache()")

            return CachePingResponse(
                status="healthy",
                cache_type=str(litellm.cache.type),
                ping_response=True,
                set_cache_response="success",
                litellm_cache_params=safe_dumps(litellm_cache_params),
                redis_cache_params=safe_dumps(specific_cache_params),
            )
        else:
            return CachePingResponse(
                status="healthy",
                cache_type=str(litellm.cache.type),
                litellm_cache_params=safe_dumps(litellm_cache_params),
            )
    except Exception as e:
        import traceback

        traceback.print_exc()
        error_message = {
            "message": f"Service Unhealthy ({str(e)})",
            "litellm_cache_params": safe_dumps(litellm_cache_params),
            "redis_cache_params": safe_dumps(specific_cache_params),
            "traceback": traceback.format_exc(),
        }
        raise ProxyException(
            message=safe_dumps(error_message),
            type=ProxyErrorTypes.cache_ping_error,
            param="cache_ping",
            code=503,
        )
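
# Why safe_dumps instead of json.dumps above: the cache objects being
# serialized can contain self-references (hence the "might reference itself"
# pop in cache_ping). A minimal illustration with hypothetical data -- this
# assumes safe_dumps returns a truncated string for cyclic structures rather
# than raising:
#
#     d: dict = {}
#     d["self"] = d      # json.dumps(d) raises ValueError: Circular reference
#     safe_dumps(d)      # expected to return a string without blowing up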


@router.post(
    "/delete",
    tags=["caching"],
    dependencies=[Depends(user_api_key_auth)],
)
async def cache_delete(request: Request):
    """
    Endpoint for deleting keys from the cache. Every response from the litellm
    proxy includes an `x-litellm-cache-key` header with the cache key used for
    that request.

    Parameters:
    - **keys**: *Optional[List[str]]* - A list of keys to delete from the cache. Example {"keys": ["key1", "key2"]}

    ```shell
    curl -X POST "http://0.0.0.0:4000/cache/delete" \
        -H "Authorization: Bearer sk-1234" \
        -d '{"keys": ["key1", "key2"]}'
    ```
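
    A typical end-to-end flow (a sketch: the header capture uses standard curl
    flags, and the key value in step 2 is a placeholder):

    ```shell
    # 1. Make a request and note the x-litellm-cache-key response header
    curl -sD - -o /dev/null "http://0.0.0.0:4000/v1/chat/completions" \
        -H "Authorization: Bearer sk-1234" \
        -H "Content-Type: application/json" \
        -d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hi"}]}' \
        | grep -i x-litellm-cache-key

    # 2. Delete that key from the cache
    curl -X POST "http://0.0.0.0:4000/cache/delete" \
        -H "Authorization: Bearer sk-1234" \
        -d '{"keys": ["<value-from-step-1>"]}'
    ```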
    """
    try:
        if litellm.cache is None:
            raise HTTPException(
                status_code=503, detail="Cache not initialized. litellm.cache is None"
            )

        request_data = await request.json()
        keys = request_data.get("keys", None)

        if litellm.cache.type == "redis":
            await litellm.cache.delete_cache_keys(keys=keys)
            return {
                "status": "success",
            }
        else:
            raise HTTPException(
                status_code=500,
                detail=f"Cache type {litellm.cache.type} does not support deleting keys; only `redis` is supported",
            )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Cache Delete Failed({str(e)})",
        )


@router.get(
    "/redis/info",
    dependencies=[Depends(user_api_key_auth)],
)
async def cache_redis_info():
    """
    Endpoint for inspecting the underlying redis cache: returns the number of
    connected clients, the client list, and redis server info.
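
    Example request (assumes the proxy on http://0.0.0.0:4000 with master key
    `sk-1234`):

    ```shell
    curl -X GET "http://0.0.0.0:4000/cache/redis/info" \
        -H "Authorization: Bearer sk-1234"
    ```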
    """
    try:
        if litellm.cache is None:
            raise HTTPException(
                status_code=503, detail="Cache not initialized. litellm.cache is None"
            )
        if litellm.cache.type == "redis" and isinstance(
            litellm.cache.cache, RedisCache
        ):
            client_list = litellm.cache.cache.client_list()
            redis_info = litellm.cache.cache.info()
            num_clients = len(client_list)
            return {
                "num_clients": num_clients,
                "clients": client_list,
                "info": redis_info,
            }
        else:
            raise HTTPException(
                status_code=500,
                detail=f"Cache type {litellm.cache.type} does not support /redis/info; only `redis` is supported",
            )
    except Exception as e:
        raise HTTPException(
            status_code=503,
            detail=f"Service Unhealthy ({str(e)})",
        )


@router.post(
    "/flushall",
    tags=["caching"],
    dependencies=[Depends(user_api_key_auth)],
)
async def cache_flushall():
    """
    Flush all items from the cache (every cached key is deleted).

    Raises HTTPException if the cache is not initialized or if the cache type
    does not support flushing. Returns a dictionary with the status of the
    operation.

    Usage:
    ```
    curl -X POST http://0.0.0.0:4000/cache/flushall -H "Authorization: Bearer sk-1234"
    ```
    """
    try:
        if litellm.cache is None:
            raise HTTPException(
                status_code=503, detail="Cache not initialized. litellm.cache is None"
            )
        if litellm.cache.type == "redis" and isinstance(
            litellm.cache.cache, RedisCache
        ):
            litellm.cache.cache.flushall()
            return {
                "status": "success",
            }
        else:
            raise HTTPException(
                status_code=500,
                detail=f"Cache type {litellm.cache.type} does not support flushing",
            )
    except Exception as e:
        raise HTTPException(
            status_code=503,
            detail=f"Service Unhealthy ({str(e)})",
        )