(UI + Proxy) Cache Health Check Page - Cleanup/Improvements (#8665)
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 14s
* fixes for redis cache ping serialization
* fix cache ping check
* fix cache health check ui
* working error details on ui
* ui expand / collapse error
* move cache health check to diff file
* fix displaying error from cache health check
* ui allow copying errors
* ui cache health fixes
* show redis details
* clean up cache health page
* ui polish fixes
* fix error handling on cache health page
* fix redis_cache_params on cache ping response
* error handling
* cache health ping response
* fx error response from cache ping
* parsedLitellmParams
* fix cache health check
* fix cache health page
* cache safely handle json dumps issues
* test caching routes
* test_primitive_types
* fix caching routes
* litellm_mapped_tests
* fix pytest-mock
* fix _serialize
* fix linting on safe dumps
* test_default_max_depth
* pip install "pytest-mock==3.12.0"
* litellm_mapped_tests_coverage
* add readme on new litellm test dir
parent 39db3147e8
commit fff15543d9
16 changed files with 807 additions and 58 deletions
.circleci/config.yml

@@ -678,6 +678,49 @@ jobs:
       paths:
         - llm_translation_coverage.xml
         - llm_translation_coverage
+  litellm_mapped_tests:
+    docker:
+      - image: cimg/python:3.11
+        auth:
+          username: ${DOCKERHUB_USERNAME}
+          password: ${DOCKERHUB_PASSWORD}
+    working_directory: ~/project
+
+    steps:
+      - checkout
+      - run:
+          name: Install Dependencies
+          command: |
+            python -m pip install --upgrade pip
+            python -m pip install -r requirements.txt
+            pip install "pytest-mock==3.12.0"
+            pip install "pytest==7.3.1"
+            pip install "pytest-retry==1.6.3"
+            pip install "pytest-cov==5.0.0"
+            pip install "pytest-asyncio==0.21.1"
+            pip install "respx==0.21.1"
+      # Run pytest and generate JUnit XML report
+      - run:
+          name: Run tests
+          command: |
+            pwd
+            ls
+            python -m pytest -vv tests/litellm --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
+          no_output_timeout: 120m
+      - run:
+          name: Rename the coverage files
+          command: |
+            mv coverage.xml litellm_mapped_tests_coverage.xml
+            mv .coverage litellm_mapped_tests_coverage
+
+      # Store test results
+      - store_test_results:
+          path: test-results
+      - persist_to_workspace:
+          root: .
+          paths:
+            - litellm_mapped_tests_coverage.xml
+            - litellm_mapped_tests_coverage
   batches_testing:
     docker:
       - image: cimg/python:3.11

@@ -2316,6 +2359,12 @@ workflows:
             only:
               - main
               - /litellm_.*/
+      - litellm_mapped_tests:
+          filters:
+            branches:
+              only:
+                - main
+                - /litellm_.*/
       - batches_testing:
           filters:
             branches:

@@ -2349,6 +2398,7 @@ workflows:
       - upload-coverage:
          requires:
            - llm_translation_testing
+           - litellm_mapped_tests
            - batches_testing
            - litellm_utils_testing
            - pass_through_unit_testing

@@ -2406,6 +2456,7 @@ workflows:
           - load_testing
           - test_bad_database_url
           - llm_translation_testing
+          - litellm_mapped_tests
           - batches_testing
           - litellm_utils_testing
           - pass_through_unit_testing
.pre-commit-config.yaml

@@ -22,7 +22,7 @@ repos:
     rev: 7.0.0  # The version of flake8 to use
     hooks:
       - id: flake8
-        exclude: ^litellm/tests/|^litellm/proxy/tests/
+        exclude: ^litellm/tests/|^litellm/proxy/tests/|^litellm/tests/litellm/|^tests/litellm/
         additional_dependencies: [flake8-print]
         files: litellm/.*\.py
       # - id: flake8
litellm/litellm_core_utils/safe_json_dumps.py (new file, 49 lines)

import json
from typing import Any, Union


def safe_dumps(data: Any, max_depth: int = 10) -> str:
    """
    Recursively serialize data while detecting circular references.
    If a circular reference is detected then a marker string is returned.
    """

    def _serialize(obj: Any, seen: set, depth: int) -> Any:
        # Check for maximum depth.
        if depth > max_depth:
            return "MaxDepthExceeded"
        # Base-case: if it is a primitive, simply return it.
        if isinstance(obj, (str, int, float, bool, type(None))):
            return obj
        # Check for circular reference.
        if id(obj) in seen:
            return "CircularReference Detected"
        seen.add(id(obj))
        result: Union[dict, list, tuple, set, str]
        if isinstance(obj, dict):
            result = {}
            for k, v in obj.items():
                result[k] = _serialize(v, seen, depth + 1)
            seen.remove(id(obj))
            return result
        elif isinstance(obj, list):
            result = [_serialize(item, seen, depth + 1) for item in obj]
            seen.remove(id(obj))
            return result
        elif isinstance(obj, tuple):
            result = tuple(_serialize(item, seen, depth + 1) for item in obj)
            seen.remove(id(obj))
            return result
        elif isinstance(obj, set):
            result = sorted([_serialize(item, seen, depth + 1) for item in obj])
            seen.remove(id(obj))
            return result
        else:
            # Fall back to string conversion for non-serializable objects.
            try:
                return str(obj)
            except Exception:
                return "Unserializable Object"

    safe_data = _serialize(data, set(), 0)
    return json.dumps(safe_data, default=str)
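For orientation, a minimal usage sketch of the helper above (illustrative, not part of the diff): a dict that refers to itself would make plain json.dumps raise ValueError, while safe_dumps replaces the cycle with a marker string.

from litellm.litellm_core_utils.safe_json_dumps import safe_dumps

# Build a dict with a circular reference, then serialize it safely.
cfg = {"type": "redis", "ttl": 600}
cfg["self"] = cfg  # introduce the cycle

print(safe_dumps(cfg))
# {"type": "redis", "ttl": 600, "self": "CircularReference Detected"}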
(Diff for one changed file suppressed because one or more lines are too long.)
litellm/proxy/_types.py

@@ -2000,6 +2000,7 @@ class ProxyErrorTypes(str, enum.Enum):
     bad_request_error = "bad_request_error"
     not_found_error = "not_found_error"
     validation_error = "bad_request_error"
+    cache_ping_error = "cache_ping_error"


 DB_CONNECTION_ERROR_TYPES = (httpx.ConnectError, httpx.ReadError, httpx.ReadTimeout)
litellm/proxy/caching_routes.py

@@ -5,8 +5,11 @@ from fastapi import APIRouter, Depends, HTTPException, Request
 import litellm
 from litellm._logging import verbose_proxy_logger
 from litellm.caching.caching import RedisCache
+from litellm.litellm_core_utils.safe_json_dumps import safe_dumps
 from litellm.litellm_core_utils.sensitive_data_masker import SensitiveDataMasker
+from litellm.proxy._types import ProxyErrorTypes, ProxyException
 from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
+from litellm.types.caching import CachePingResponse

 masker = SensitiveDataMasker()

@@ -18,6 +21,7 @@ router = APIRouter(

 @router.get(
     "/ping",
+    response_model=CachePingResponse,
     dependencies=[Depends(user_api_key_auth)],
 )
 async def cache_ping():

@@ -27,27 +31,17 @@ async def cache_ping():
     litellm_cache_params: Dict[str, Any] = {}
     specific_cache_params: Dict[str, Any] = {}
     try:

         if litellm.cache is None:
             raise HTTPException(
                 status_code=503, detail="Cache not initialized. litellm.cache is None"
             )
-        litellm_cache_params = {}
-        specific_cache_params = {}
-        for k, v in vars(litellm.cache).items():
-            try:
-                if k == "cache":
-                    continue
-                litellm_cache_params[k] = v
-            except Exception:
-                litellm_cache_params[k] = "<unable to copy or convert>"
-        for k, v in vars(litellm.cache.cache).items():
-            try:
-                specific_cache_params[k] = v
-            except Exception:
-                specific_cache_params[k] = "<unable to copy or convert>"
-        litellm_cache_params = masker.mask_dict(litellm_cache_params)
-        specific_cache_params = masker.mask_dict(specific_cache_params)
+        litellm_cache_params = masker.mask_dict(vars(litellm.cache))
+        # remove field that might reference itself
+        litellm_cache_params.pop("cache", None)
+        specific_cache_params = (
+            masker.mask_dict(vars(litellm.cache.cache)) if litellm.cache else {}
+        )
         if litellm.cache.type == "redis":
             # ping the redis cache
             ping_response = await litellm.cache.ping()

@@ -63,24 +57,35 @@ async def cache_ping():
             )
             verbose_proxy_logger.debug("/cache/ping: done with set_cache()")

-            return {
-                "status": "healthy",
-                "cache_type": litellm.cache.type,
-                "ping_response": True,
-                "set_cache_response": "success",
-                "litellm_cache_params": litellm_cache_params,
-                "redis_cache_params": specific_cache_params,
-            }
+            return CachePingResponse(
+                status="healthy",
+                cache_type=str(litellm.cache.type),
+                ping_response=True,
+                set_cache_response="success",
+                litellm_cache_params=safe_dumps(litellm_cache_params),
+                redis_cache_params=safe_dumps(specific_cache_params),
+            )
         else:
-            return {
-                "status": "healthy",
-                "cache_type": litellm.cache.type,
-                "litellm_cache_params": litellm_cache_params,
-            }
+            return CachePingResponse(
+                status="healthy",
+                cache_type=str(litellm.cache.type),
+                litellm_cache_params=safe_dumps(litellm_cache_params),
+            )
     except Exception as e:
-        raise HTTPException(
-            status_code=503,
-            detail=f"Service Unhealthy ({str(e)}).Cache parameters: {litellm_cache_params}.specific_cache_params: {specific_cache_params}",
+        import traceback
+
+        traceback.print_exc()
+        error_message = {
+            "message": f"Service Unhealthy ({str(e)})",
+            "litellm_cache_params": safe_dumps(litellm_cache_params),
+            "redis_cache_params": safe_dumps(specific_cache_params),
+            "traceback": traceback.format_exc(),
+        }
+        raise ProxyException(
+            message=safe_dumps(error_message),
+            type=ProxyErrorTypes.cache_ping_error,
+            param="cache_ping",
+            code=503,
         )

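A rough sketch of how a caller can unpack the new error shape (illustrative only; the proxy URL and key below are placeholders): the ProxyException body carries an error.message field that is itself a safe_dumps-encoded JSON string, so it has to be decoded a second time to reach the message, cache params, and traceback.

import json

import requests  # assumed HTTP client; any client works

resp = requests.get(
    "http://localhost:4000/cache/ping",           # placeholder proxy URL
    headers={"Authorization": "Bearer sk-1234"},  # placeholder key
)
if resp.status_code == 503:
    error = resp.json()["error"]            # ProxyException envelope
    details = json.loads(error["message"])  # nested JSON produced by safe_dumps
    print(details["message"])               # e.g. "Service Unhealthy (...)"
    print(details["traceback"])             # server-side traceback string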
proxy config (YAML)

@@ -11,5 +11,7 @@ general_settings:


 litellm_settings:
-  callbacks: ["gcs_bucket"]
+  cache: true
+  cache_params:
+    type: redis
+    ttl: 600
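For context, the Redis cache this config turns on for the proxy can also be enabled from Python. This is a hedged sketch based on litellm's documented caching API; the host, port, and password values are placeholders.

import litellm
from litellm.caching.caching import Cache

# Assumption: Cache accepts these keyword args as in the litellm caching docs.
litellm.cache = Cache(
    type="redis",
    host="localhost",
    port=6379,
    password="hello",
    ttl=600,
)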
litellm/types/caching.py

@@ -1,6 +1,8 @@
 from enum import Enum
 from typing import Literal, Optional, TypedDict

+from pydantic import BaseModel
+

 class LiteLLMCacheType(str, Enum):
     LOCAL = "local"

@@ -51,3 +53,12 @@ DynamicCacheControl = TypedDict(
         "no-store": Optional[bool],
     },
 )
+
+
+class CachePingResponse(BaseModel):
+    status: str
+    cache_type: str
+    ping_response: Optional[bool] = None
+    set_cache_response: Optional[str] = None
+    litellm_cache_params: Optional[str] = None
+    redis_cache_params: Optional[str] = None
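To make the new response model concrete, a small illustrative construction (field values invented): litellm_cache_params and redis_cache_params are JSON strings produced by safe_dumps, not nested objects, which is why the dashboard re-parses them.

from litellm.types.caching import CachePingResponse

resp = CachePingResponse(
    status="healthy",
    cache_type="redis",
    ping_response=True,
    set_cache_response="success",
    litellm_cache_params='{"type": "redis", "ttl": 600}',   # JSON string, not a dict
    redis_cache_params='{"redis_version": "7.2.0"}',        # JSON string, not a dict
)
print(resp)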
@@ -14,6 +14,7 @@ IGNORE_FUNCTIONS = [
     "strip_field",
     "_transform_prompt",
     "mask_dict",
+    "_serialize",  # we now set a max depth for this
 ]
tests/litellm/litellm_core_utils/test_safe_json_dumps.py (new file, 80 lines)

import json
import os
import sys

import pytest

sys.path.insert(
    0, os.path.abspath("../../..")
)  # Adds the parent directory to the system path

from litellm.litellm_core_utils.safe_json_dumps import safe_dumps


def test_primitive_types():
    # Test basic primitive types
    assert safe_dumps("test") == '"test"'
    assert safe_dumps(123) == "123"
    assert safe_dumps(3.14) == "3.14"
    assert safe_dumps(True) == "true"
    assert safe_dumps(None) == "null"


def test_nested_structures():
    # Test nested dictionaries and lists
    data = {"name": "test", "numbers": [1, 2, 3], "nested": {"a": 1, "b": 2}}
    result = json.loads(safe_dumps(data))
    assert result["name"] == "test"
    assert result["numbers"] == [1, 2, 3]
    assert result["nested"] == {"a": 1, "b": 2}


def test_circular_reference():
    # Test circular reference detection
    d = {}
    d["self"] = d
    result = json.loads(safe_dumps(d))
    assert result["self"] == "CircularReference Detected"


def test_max_depth():
    # Test maximum depth handling
    deep_dict = {}
    current = deep_dict
    for i in range(15):
        current["deeper"] = {}
        current = current["deeper"]

    result = json.loads(safe_dumps(deep_dict, max_depth=5))
    assert "MaxDepthExceeded" in str(result)


def test_default_max_depth():
    # Test that default max depth still prevents infinite recursion
    deep_dict = {}
    current = deep_dict
    for i in range(1000):  # Create a very deep dictionary
        current["deeper"] = {}
        current = current["deeper"]

    result = json.loads(safe_dumps(deep_dict))  # No max_depth parameter provided
    assert "MaxDepthExceeded" in str(result)


def test_complex_types():
    # Test handling of sets and tuples
    data = {"set": {1, 2, 3}, "tuple": (4, 5, 6)}
    result = json.loads(safe_dumps(data))
    assert result["set"] == [1, 2, 3]  # Sets are converted to sorted lists
    assert result["tuple"] == [4, 5, 6]  # Tuples are converted to lists


def test_unserializable_object():
    # Test handling of unserializable objects
    class TestClass:
        def __str__(self):
            raise Exception("Cannot convert to string")

    obj = TestClass()
    result = json.loads(safe_dumps(obj))
    assert result == "Unserializable Object"
tests/litellm/proxy/test_caching_routes.py (new file, 152 lines)

import json
import os
import sys

import pytest
from fastapi.testclient import TestClient

sys.path.insert(
    0, os.path.abspath("../../..")
)  # Adds the parent directory to the system path


import litellm
from litellm.caching import RedisCache
from litellm.proxy.proxy_server import app

client = TestClient(app)


# Mock successful Redis connection
@pytest.fixture
def mock_redis_success(mocker):
    async def mock_ping():
        return True

    async def mock_add_cache(*args, **kwargs):
        return None

    mock_cache = mocker.MagicMock()
    mock_cache.type = "redis"
    mock_cache.ping = mock_ping
    mock_cache.async_add_cache = mock_add_cache
    mock_cache.cache = RedisCache(
        host="localhost",
        port=6379,
        password="hello",
    )

    mocker.patch.object(litellm, "cache", mock_cache)
    return mock_cache


# Mock failed Redis connection
@pytest.fixture
def mock_redis_failure(mocker):
    async def mock_ping():
        raise Exception("invalid username-password pair")

    mock_cache = mocker.MagicMock()
    mock_cache.type = "redis"
    mock_cache.ping = mock_ping

    mocker.patch.object(litellm, "cache", mock_cache)
    return mock_cache


def test_cache_ping_success(mock_redis_success):
    """Test successful cache ping with regular response"""
    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 200

    data = response.json()
    assert data["status"] == "healthy"
    assert data["cache_type"] == "redis"
    assert data["ping_response"] is True
    assert data["set_cache_response"] == "success"


def test_cache_ping_with_complex_objects(mock_redis_success, mocker):
    """Test cache ping with non-standard serializable objects"""

    # Mock complex objects in the cache parameters
    class ComplexObject:
        def __str__(self):
            return "complex_object"

    mock_redis_success.cache.complex_attr = ComplexObject()
    mock_redis_success.cache.datetime_attr = mocker.MagicMock()

    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 200

    # Verify response is JSON serializable
    data = response.json()
    print("data=", json.dumps(data, indent=4))
    assert data["status"] == "healthy"
    assert "litellm_cache_params" in data

    # Verify complex objects were converted to strings
    cache_params = json.loads(data["litellm_cache_params"])
    assert isinstance(cache_params, dict)


def test_cache_ping_with_circular_reference(mock_redis_success):
    """Test cache ping with circular reference in cache parameters"""
    # Create circular reference
    circular_dict = {}
    circular_dict["self"] = circular_dict
    mock_redis_success.cache.circular_ref = circular_dict

    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 200

    # Verify response is still JSON serializable
    data = response.json()
    assert data["status"] == "healthy"


def test_cache_ping_failure(mock_redis_failure):
    """Test cache ping failure with expected error fields"""
    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 503

    data = response.json()
    print("data=", json.dumps(data, indent=4, default=str))

    assert "error" in data
    error = data["error"]

    # Verify error contains all expected fields
    assert "message" in error
    error_details = json.loads(error["message"])
    assert "message" in error_details
    assert "litellm_cache_params" in error_details
    assert "redis_cache_params" in error_details
    assert "traceback" in error_details

    # Verify specific error message
    assert "invalid username-password pair" in error_details["message"]


def test_cache_ping_no_cache_initialized():
    """Test cache ping when no cache is initialized"""
    # Set cache to None
    original_cache = litellm.cache
    litellm.cache = None

    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 503

    data = response.json()
    print("response data=", json.dumps(data, indent=4))
    assert "error" in data
    error = data["error"]

    # Verify error contains all expected fields
    assert "message" in error
    error_details = json.loads(error["message"])
    assert "Cache not initialized. litellm.cache is None" in error_details["message"]

    # Restore original cache
    litellm.cache = original_cache
tests/litellm/readme.md (new file, 24 lines)

# Testing for `litellm/`

This directory 1:1 maps to the `litellm/` directory, and can only contain mocked tests.

The point of this is to:

1. Increase test coverage of `litellm/`
2. Make it easy for contributors to add tests for the `litellm/` package and easily run tests without needing LLM API keys.

## File name conventions

- `litellm/proxy/test_caching_routes.py` maps to `litellm/proxy/caching_routes.py`
- `test_<filename>.py` maps to `litellm/<filename>.py`
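The mapped tests run locally with `python -m pytest -vv tests/litellm`, the same invocation the new CircleCI job uses. Below is a minimal sketch of an additional mocked test in this directory, reusing the pytest-mock pattern from test_caching_routes.py above; the test name is hypothetical and the bearer key is a placeholder.

# Mocked test sketch: patch litellm.cache, no LLM API keys needed.
import litellm
from fastapi.testclient import TestClient
from litellm.proxy.proxy_server import app


def test_cache_ping_returns_503_without_cache(mocker):
    # Simulate a proxy with no cache configured.
    mocker.patch.object(litellm, "cache", None)
    client = TestClient(app)
    response = client.get("/cache/ping", headers={"Authorization": "Bearer sk-1234"})
    assert response.status_code == 503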
ui/litellm-dashboard: CacheDashboard component (TSX)

@@ -29,6 +29,8 @@ import {
 } from "antd";
 import {
   RefreshIcon,
+  CheckCircleIcon,
+  XCircleIcon,
 } from "@heroicons/react/outline";
 import {
   adminGlobalCacheActivity,

@@ -36,6 +38,9 @@ import {
   healthCheckCall,
 } from "./networking";

+// Import the new component
+import { CacheHealthTab } from "./cache_health";
+
 const formatDateWithoutTZ = (date: Date | undefined) => {
   if (!date) return undefined;
   return date.toISOString().split('T')[0];

@@ -82,7 +87,32 @@ interface uiData {

 }

+interface CacheHealthResponse {
+  status?: string;
+  cache_type?: string;
+  ping_response?: boolean;
+  set_cache_response?: string;
+  litellm_cache_params?: string;
+  error?: {
+    message: string;
+    type: string;
+    param: string;
+    code: string;
+  };
+}
+
+// Helper function to deep-parse a JSON string if possible
+const deepParse = (input: any) => {
+  let parsed = input;
+  if (typeof parsed === "string") {
+    try {
+      parsed = JSON.parse(parsed);
+    } catch {
+      return parsed;
+    }
+  }
+  return parsed;
+};
+
 const CacheDashboard: React.FC<CachePageProps> = ({
   accessToken,

@@ -105,7 +135,7 @@ const CacheDashboard: React.FC<CachePageProps> = ({
   });

   const [lastRefreshed, setLastRefreshed] = useState("");
-  const [healthCheckResponse, setHealthCheckResponse] = useState<string>("");
+  const [healthCheckResponse, setHealthCheckResponse] = useState<any>("");

   useEffect(() => {
     if (!accessToken || !dateValue) {

@@ -245,9 +275,25 @@ const runCachingHealthCheck = async () => {
     const response = await cachingHealthCheckCall(accessToken !== null ? accessToken : "");
     console.log("CACHING HEALTH CHECK RESPONSE", response);
     setHealthCheckResponse(response);
-  } catch (error) {
+  } catch (error: any) {
     console.error("Error running health check:", error);
-    setHealthCheckResponse("Error running health check");
+    let errorData;
+    if (error && error.message) {
+      try {
+        // Parse the error message which may contain a nested error layer.
+        let parsedData = JSON.parse(error.message);
+        // If the parsed object is wrapped (e.g. { error: { ... } }), unwrap it.
+        if (parsedData.error) {
+          parsedData = parsedData.error;
+        }
+        errorData = parsedData;
+      } catch (e) {
+        errorData = { message: error.message };
+      }
+    } else {
+      errorData = { message: "Unknown error occurred" };
+    }
+    setHealthCheckResponse({ error: errorData });
   }
 };

@@ -381,23 +427,11 @@ const runCachingHealthCheck = async () => {
         </Card>
       </TabPanel>
       <TabPanel>
-        <Card className="mt-4">
-          <Text>
-            Cache health will run a very small request through API /cache/ping
-            configured on litellm
-          </Text>
-
-          <Button onClick={runCachingHealthCheck} className="mt-4">Run cache health</Button>
-          {healthCheckResponse && (
-            <pre className="mt-4" style={{
-              whiteSpace: 'pre-wrap',
-              wordWrap: 'break-word',
-              maxWidth: '100%'
-            }}>
-              {JSON.stringify(healthCheckResponse, null, 2)}
-            </pre>
-          )}
-        </Card>
+        <CacheHealthTab
+          accessToken={accessToken}
+          healthCheckResponse={healthCheckResponse}
+          runCachingHealthCheck={runCachingHealthCheck}
+        />
       </TabPanel>
     </TabPanels>
   </TabGroup>
ui/litellm-dashboard/src/components/cache_health.tsx (new file, 311 lines)

import React from "react";
import { Card, Text, Button, TabGroup, TabList, Tab, TabPanel, TabPanels } from "@tremor/react";
import { CheckCircleIcon, XCircleIcon, ClipboardCopyIcon } from "@heroicons/react/outline";
import { ResponseTimeIndicator } from "./response_time_indicator";

// Helper function to deep-parse a JSON string if possible
const deepParse = (input: any) => {
  let parsed = input;
  if (typeof parsed === "string") {
    try {
      parsed = JSON.parse(parsed);
    } catch {
      return parsed;
    }
  }
  return parsed;
};

// TableClickableErrorField component with copy-to-clipboard functionality
const TableClickableErrorField: React.FC<{ label: string; value: string | null | undefined }> = ({
  label,
  value,
}) => {
  const [isExpanded, setIsExpanded] = React.useState(false);
  const [copied, setCopied] = React.useState(false);
  const safeValue = value?.toString() || "N/A";
  const truncated = safeValue.length > 50 ? safeValue.substring(0, 50) + "..." : safeValue;

  const handleCopy = () => {
    navigator.clipboard.writeText(safeValue);
    setCopied(true);
    setTimeout(() => setCopied(false), 2000);
  };

  return (
    <tr className="hover:bg-gray-50">
      <td className="px-4 py-2 align-top" colSpan={2}>
        <div className="flex items-center justify-between group">
          <div className="flex items-center flex-1">
            <button
              onClick={() => setIsExpanded(!isExpanded)}
              className="text-gray-400 hover:text-gray-600 mr-2"
            >
              {isExpanded ? "▼" : "▶"}
            </button>
            <div>
              <div className="text-sm text-gray-600">{label}</div>
              <pre className="mt-1 text-sm font-mono text-gray-800 whitespace-pre-wrap">
                {isExpanded ? safeValue : truncated}
              </pre>
            </div>
          </div>
          <button
            onClick={handleCopy}
            className="opacity-0 group-hover:opacity-100 text-gray-400 hover:text-gray-600"
          >
            <ClipboardCopyIcon className="h-4 w-4" />
          </button>
        </div>
      </td>
    </tr>
  );
};

// Add new interface for Redis details
interface RedisDetails {
  redis_host?: string;
  redis_port?: string;
  redis_version?: string;
  startup_nodes?: string;
}

// Add new interface for Error Details
interface ErrorDetails {
  message: string;
  traceback: string;
  litellm_params?: any;
  redis_cache_params?: any;
}

// Update HealthCheckDetails component to handle errors
const HealthCheckDetails: React.FC<{ response: any }> = ({ response }) => {
  // Initialize with safe default values
  let errorDetails: ErrorDetails | null = null;
  let parsedLitellmParams: any = {};
  let parsedRedisParams: any = {};

  try {
    if (response?.error) {
      try {
        const errorMessage = typeof response.error.message === 'string'
          ? JSON.parse(response.error.message)
          : response.error.message;

        errorDetails = {
          message: errorMessage?.message || 'Unknown error',
          traceback: errorMessage?.traceback || 'No traceback available',
          litellm_params: errorMessage?.litellm_cache_params || {},
          redis_cache_params: errorMessage?.redis_cache_params || {}
        };

        parsedLitellmParams = deepParse(errorDetails.litellm_params) || {};
        parsedRedisParams = deepParse(errorDetails.redis_cache_params) || {};
      } catch (e) {
        console.warn("Error parsing error details:", e);
        errorDetails = {
          message: String(response.error.message || 'Unknown error'),
          traceback: 'Error parsing details',
          litellm_params: {},
          redis_cache_params: {}
        };
      }
    } else {
      parsedLitellmParams = deepParse(response?.litellm_cache_params) || {};
      parsedRedisParams = deepParse(response?.redis_cache_params) || {};
    }
  } catch (e) {
    console.warn("Error in response parsing:", e);
    // Provide safe fallback values
    parsedLitellmParams = {};
    parsedRedisParams = {};
  }

  // Safely extract Redis details with fallbacks
  const redisDetails: RedisDetails = {
    redis_host: parsedRedisParams?.redis_client?.connection_pool?.connection_kwargs?.host ||
                parsedRedisParams?.redis_async_client?.connection_pool?.connection_kwargs?.host ||
                parsedRedisParams?.connection_kwargs?.host ||
                "N/A",

    redis_port: parsedRedisParams?.redis_client?.connection_pool?.connection_kwargs?.port ||
                parsedRedisParams?.redis_async_client?.connection_pool?.connection_kwargs?.port ||
                parsedRedisParams?.connection_kwargs?.port ||
                "N/A",

    redis_version: parsedRedisParams?.redis_version || "N/A",

    startup_nodes: (() => {
      try {
        if (parsedRedisParams?.redis_kwargs?.startup_nodes) {
          return JSON.stringify(parsedRedisParams.redis_kwargs.startup_nodes);
        }
        const host = parsedRedisParams?.redis_client?.connection_pool?.connection_kwargs?.host ||
                     parsedRedisParams?.redis_async_client?.connection_pool?.connection_kwargs?.host;
        const port = parsedRedisParams?.redis_client?.connection_pool?.connection_kwargs?.port ||
                     parsedRedisParams?.redis_async_client?.connection_pool?.connection_kwargs?.port;
        return host && port ? JSON.stringify([{ host, port }]) : "N/A";
      } catch (e) {
        return "N/A";
      }
    })()
  };

  return (
    <div className="bg-white rounded-lg shadow">
      <TabGroup>
        <TabList className="border-b border-gray-200 px-4">
          <Tab className="px-4 py-2 text-sm font-medium text-gray-600 hover:text-gray-800">Summary</Tab>
          <Tab className="px-4 py-2 text-sm font-medium text-gray-600 hover:text-gray-800">Raw Response</Tab>
        </TabList>

        <TabPanels>
          <TabPanel className="p-4">
            <div>
              <div className="flex items-center mb-6">
                {(response?.status === "healthy") ? (
                  <CheckCircleIcon className="h-5 w-5 text-green-500 mr-2" />
                ) : (
                  <XCircleIcon className="h-5 w-5 text-red-500 mr-2" />
                )}
                <Text className={`text-sm font-medium ${response?.status === "healthy" ? "text-green-500" : "text-red-500"}`}>
                  Cache Status: {response?.status || "unhealthy"}
                </Text>
              </div>

              <table className="w-full border-collapse">
                <tbody>
                  {/* Show error message if present */}
                  {errorDetails && (
                    <>
                      <tr><td colSpan={2} className="pt-4 pb-2 font-semibold text-red-600">Error Details</td></tr>
                      <TableClickableErrorField
                        label="Error Message"
                        value={errorDetails.message}
                      />
                      <TableClickableErrorField
                        label="Traceback"
                        value={errorDetails.traceback}
                      />
                    </>
                  )}

                  {/* Always show cache details, regardless of error state */}
                  <tr><td colSpan={2} className="pt-4 pb-2 font-semibold">Cache Details</td></tr>
                  <TableClickableErrorField
                    label="Cache Configuration"
                    value={String(parsedLitellmParams?.type)}
                  />
                  <TableClickableErrorField
                    label="Ping Response"
                    value={String(response.ping_response)}
                  />
                  <TableClickableErrorField
                    label="Set Cache Response"
                    value={response.set_cache_response || "N/A"}
                  />
                  <TableClickableErrorField
                    label="litellm_settings.cache_params"
                    value={JSON.stringify(parsedLitellmParams, null, 2)}
                  />

                  {/* Redis Details Section */}
                  {parsedLitellmParams?.type === "redis" && (
                    <>
                      <tr><td colSpan={2} className="pt-4 pb-2 font-semibold">Redis Details</td></tr>
                      <TableClickableErrorField
                        label="Redis Host"
                        value={redisDetails.redis_host || "N/A"}
                      />
                      <TableClickableErrorField
                        label="Redis Port"
                        value={redisDetails.redis_port || "N/A"}
                      />
                      <TableClickableErrorField
                        label="Redis Version"
                        value={redisDetails.redis_version || "N/A"}
                      />
                      <TableClickableErrorField
                        label="Startup Nodes"
                        value={redisDetails.startup_nodes || "N/A"}
                      />
                    </>
                  )}
                </tbody>
              </table>
            </div>
          </TabPanel>

          <TabPanel className="p-4">
            <div className="bg-gray-50 rounded-md p-4 font-mono text-sm">
              <pre className="whitespace-pre-wrap break-words overflow-auto max-h-[500px]">
                {(() => {
                  try {
                    const data = {
                      ...response,
                      litellm_cache_params: parsedLitellmParams,
                      redis_cache_params: parsedRedisParams
                    };
                    // First parse any string JSON values
                    const prettyData = JSON.parse(JSON.stringify(data, (key, value) => {
                      if (typeof value === 'string') {
                        try {
                          return JSON.parse(value);
                        } catch {
                          return value;
                        }
                      }
                      return value;
                    }));
                    // Then stringify with proper formatting
                    return JSON.stringify(prettyData, null, 2);
                  } catch (e) {
                    return "Error formatting JSON: " + (e as Error).message;
                  }
                })()}
              </pre>
            </div>
          </TabPanel>
        </TabPanels>
      </TabGroup>
    </div>
  );
};

export const CacheHealthTab: React.FC<{
  accessToken: string | null;
  healthCheckResponse: any;
  runCachingHealthCheck: () => void;
  responseTimeMs?: number | null;
}> = ({ accessToken, healthCheckResponse, runCachingHealthCheck, responseTimeMs }) => {
  const [localResponseTimeMs, setLocalResponseTimeMs] = React.useState<number | null>(null);
  const [isLoading, setIsLoading] = React.useState<boolean>(false);

  const handleHealthCheck = async () => {
    setIsLoading(true);
    const startTime = performance.now();
    await runCachingHealthCheck();
    const endTime = performance.now();
    setLocalResponseTimeMs(endTime - startTime);
    setIsLoading(false);
  };

  return (
    <div className="space-y-4">
      <div className="flex items-center justify-between">
        <Button
          onClick={handleHealthCheck}
          disabled={isLoading}
          className="bg-indigo-600 hover:bg-indigo-700 disabled:bg-indigo-400 text-white text-sm px-4 py-2 rounded-md"
        >
          {isLoading ? "Running Health Check..." : "Run Health Check"}
        </Button>
        <ResponseTimeIndicator responseTimeMs={localResponseTimeMs} />
      </div>

      {healthCheckResponse && (
        <HealthCheckDetails response={healthCheckResponse} />
      )}
    </div>
  );
};
ui/litellm-dashboard/src/components/networking.tsx

@@ -3391,7 +3391,7 @@ export const cachingHealthCheckCall = async (accessToken: String) => {
     if (!response.ok) {
       const errorData = await response.text();
       handleError(errorData);
-      throw new Error("Network response was not ok");
+      throw new Error(errorData);
     }

     const data = await response.json();
ui/litellm-dashboard/src/components/response_time_indicator.tsx (new file, 29 lines)

import React from 'react';

interface ResponseTimeIndicatorProps {
  responseTimeMs: number | null;
}

export const ResponseTimeIndicator: React.FC<ResponseTimeIndicatorProps> = ({ responseTimeMs }) => {
  if (responseTimeMs === null || responseTimeMs === undefined) return null;

  return (
    <div className="flex items-center space-x-1 text-xs text-gray-500 font-mono">
      <svg
        className="w-4 h-4"
        viewBox="0 0 24 24"
        fill="none"
        xmlns="http://www.w3.org/2000/svg"
      >
        <path
          d="M12 6V12L16 14M12 2C6.47715 2 2 6.47715 2 12C2 17.5228 6.47715 22 12 22C17.5228 22 22 17.5228 22 12C22 6.47715 17.5228 2 12 2Z"
          stroke="currentColor"
          strokeWidth="2"
          strokeLinecap="round"
          strokeLinejoin="round"
        />
      </svg>
      <span>{responseTimeMs.toFixed(0)}ms</span>
    </div>
  );
};