diff --git a/ui/litellm-dashboard/src/components/model_dashboard.tsx b/ui/litellm-dashboard/src/components/model_dashboard.tsx
index 622baa966..62f1d142b 100644
--- a/ui/litellm-dashboard/src/components/model_dashboard.tsx
+++ b/ui/litellm-dashboard/src/components/model_dashboard.tsx
@@ -15,7 +15,7 @@ import {
 } from "@tremor/react";
 import { TabPanel, TabPanels, TabGroup, TabList, Tab, TextInput, Icon } from "@tremor/react";
 import { Select, SelectItem, MultiSelect, MultiSelectItem } from "@tremor/react";
-import { modelInfoCall, userGetRequesedtModelsCall, modelMetricsCall, modelCreateCall, Model, modelCostMap, modelDeleteCall } from "./networking";
+import { modelInfoCall, userGetRequesedtModelsCall, modelMetricsCall, modelCreateCall, Model, modelCostMap, modelDeleteCall, healthCheckCall } from "./networking";
 import { BarChart } from "@tremor/react";
 import {
   Button as Button2,
@@ -81,6 +81,9 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
   const providers: Providers[] = [Providers.OpenAI, Providers.Azure, Providers.Anthropic, Providers.Google_AI_Studio, Providers.Bedrock, Providers.OpenAI_Compatible]
   const [selectedProvider, setSelectedProvider] = useState("OpenAI");
 
+  const [healthCheckResponse, setHealthCheckResponse] = useState('');
+
+
   useEffect(() => {
     if (!accessToken || !token || !userRole || !userID) {
@@ -246,6 +249,18 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
     console.log(`providerModels: ${providerModels}`);
   }
 
+  const runHealthCheck = async () => {
+    try {
+      message.info('Running health check...');
+      setHealthCheckResponse('');
+      const response = await healthCheckCall(accessToken);
+      setHealthCheckResponse(response);
+    } catch (error) {
+      console.error('Error running health check:', error);
+      setHealthCheckResponse('Error running health check');
+    }
+  };
+
   const handleSubmit = async (formValues: Record<string, any>) => {
     try {
       /**
@@ -354,6 +369,7 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
           <Tab>All Models</Tab>
           <Tab>Add Model</Tab>
+          <Tab>/health Models</Tab>
         </TabList>
@@ -593,6 +609,17 @@ const ModelDashboard: React.FC<ModelDashboardProps> = ({
+          <TabPanel>
+            <Text>
+              `/health` will run a very small request through your models configured on litellm
+            </Text>
+            <Button onClick={runHealthCheck}>
+              Run `/health`
+            </Button>
+            {healthCheckResponse && (
+              <pre>{JSON.stringify(healthCheckResponse, null, 2)}</pre>
+            )}
+          </TabPanel>
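Reviewer note: the dashboard wires `runHealthCheck` to the new `/health Models` tab, but the helper can also be exercised on its own when reviewing this change. A minimal sketch, assuming a reachable proxy and a valid key (the `ACCESS_TOKEN` value below is a placeholder, not part of this diff); `healthCheckCall` itself is added in networking.tsx further down:

```tsx
import { healthCheckCall } from "./networking";

// Placeholder key for illustration only; substitute a real proxy key.
const ACCESS_TOKEN = "sk-...";

const tryHealthCheck = async () => {
  try {
    // healthCheckCall issues GET /health with a Bearer token and returns the parsed JSON body.
    const report = await healthCheckCall(ACCESS_TOKEN);
    console.log(JSON.stringify(report, null, 2));
  } catch (err) {
    // The helper rethrows after surfacing the server's error text via message.error.
    console.error("health check failed:", err);
  }
};

tryHealthCheck();
```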
diff --git a/ui/litellm-dashboard/src/components/networking.tsx b/ui/litellm-dashboard/src/components/networking.tsx
index b0e202ced..d6a929e31 100644
--- a/ui/litellm-dashboard/src/components/networking.tsx
+++ b/ui/litellm-dashboard/src/components/networking.tsx
@@ -1287,4 +1287,39 @@ export const setCallbacksCall = async (
   }
 };
 
+export const healthCheckCall = async (
+  accessToken: String,
+) => {
+  /**
+   * Run a health check against the proxy.
+   * Calls GET /health and returns the JSON body.
+   */
+  try {
+    let url = proxyBaseUrl ? `${proxyBaseUrl}/health` : `/health`;
+
+    // Hit /health with the user's access token.
+    const response = await fetch(url, {
+      method: "GET",
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+        "Content-Type": "application/json",
+      },
+    });
+
+    if (!response.ok) {
+      const errorData = await response.text();
+      message.error(errorData);
+      throw new Error("Network response was not ok");
+    }
+
+    const data = await response.json();
+    // The dashboard renders this JSON in the /health tab.
+    return data;
+  } catch (error) {
+    console.error("Failed to call /health:", error);
+    throw error;
+  }
+};
+
+
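For context on what ends up in `healthCheckResponse`: the proxy's `/health` route reports which configured deployments passed or failed the test request. A rough sketch of a typed wrapper, assuming the response shape described in LiteLLM's docs (`healthy_endpoints`, `unhealthy_endpoints`, and the two counts); treat those field names as an assumption, since they are not defined anywhere in this diff:

```tsx
import { healthCheckCall } from "./networking";

// Assumed response shape, based on LiteLLM's documented /health output;
// these field names are not part of this PR.
interface HealthCheckReport {
  healthy_endpoints: Array<Record<string, any>>;
  unhealthy_endpoints: Array<Record<string, any>>;
  healthy_count: number;
  unhealthy_count: number;
}

// Thin typed wrapper around the new helper; purely illustrative.
const typedHealthCheck = async (accessToken: string): Promise<HealthCheckReport> => {
  const data = await healthCheckCall(accessToken);
  return data as HealthCheckReport;
};
```

The PR itself keeps the state untyped and simply pretty-prints whatever the proxy returns, which is enough for a quick smoke test from the UI.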