diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 22717b925..36548dbed 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -7549,7 +7549,7 @@ async def model_metrics(
             "LiteLLM_SpendLogs"
         WHERE
             "startTime" >= NOW() - INTERVAL '30 days'
-            AND "model" = $1
+            AND "model" = $1 AND "cache_hit" != 'True'
         GROUP BY
             api_base,
             model,
@@ -7598,7 +7598,7 @@ async def model_metrics(
     for day in _daily_entries:
         entry = {"date": str(day)}
         for model_key, latency in _daily_entries[day].items():
-            entry[model_key] = round(latency, 8)
+            entry[model_key] = latency
         response.append(entry)
 
     return {
diff --git a/ui/litellm-dashboard/src/components/model_dashboard.tsx b/ui/litellm-dashboard/src/components/model_dashboard.tsx
index 12e650ac7..c9fcdf228 100644
--- a/ui/litellm-dashboard/src/components/model_dashboard.tsx
+++ b/ui/litellm-dashboard/src/components/model_dashboard.tsx
@@ -720,17 +720,20 @@ const handleEditSubmit = async (formValues: Record<string, any>) => {
   return (
     <div ...>
       {date && <p ...>Date: {date}</p>}
-      {sortedPayload.map((category: any, idx: number) => (
-        <div key={idx} ...>
-          <div ...>
-            <div ... />
-            <p ...>{category.dataKey}</p>
-          </div>
+      {sortedPayload.map((category: any, idx: number) => {
+        const roundedValue = parseFloat(category.value.toFixed(5));
+        const displayValue =
+          roundedValue === 0 && category.value > 0 ? "<0.00001" : roundedValue.toFixed(5);
+        return (
+          <div key={idx} ...>
+            <div ...>
+              <div ... />
+              <p ...>{category.dataKey}</p>
+            </div>
+            <p ...>{displayValue}</p>
-          <p ...>
-            {category.value.toFixed(5)}
-          </p>
-        </div>
-      ))}
+          </div>
+        );
+      })}
     </div>
   );
 };
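For reference, the tooltip change above can be read as a small standalone formatter. The sketch below is illustrative only (the `formatLatency` name is not part of the diff); it mirrors the `roundedValue` / `displayValue` expressions so the behaviour for very small, non-zero latencies is easy to verify, now that the proxy returns the raw value instead of `round(latency, 8)`.

```ts
// Illustrative sketch of the tooltip's new rounding logic (not part of the diff).
function formatLatency(value: number): string {
  // Round to 5 decimal places first.
  const roundedValue = parseFloat(value.toFixed(5));
  // A non-zero latency that rounds to 0 is shown as "<0.00001" rather than a misleading "0.00000".
  return roundedValue === 0 && value > 0 ? "<0.00001" : roundedValue.toFixed(5);
}

console.log(formatLatency(0.000004));  // "<0.00001" (non-zero, but rounds to zero)
console.log(formatLatency(0.0123456)); // "0.01235"
console.log(formatLatency(0));         // "0.00000"
```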