diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 4870025cb..66061acc4 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -2070,6 +2070,36 @@
         "output_cost_per_token": 0.00000028,
         "litellm_provider": "perplexity",
         "mode": "chat"
+    },
+    "perplexity/sonar-small-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.00000007,
+        "output_cost_per_token": 0.00000028,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-small-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.00000028,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-chat": {
+        "max_tokens": 16384,
+        "input_cost_per_token": 0.0000006,
+        "output_cost_per_token": 0.0000018,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
+    },
+    "perplexity/sonar-medium-online": {
+        "max_tokens": 12000,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0.0000018,
+        "input_cost_per_request": 0.005,
+        "litellm_provider": "perplexity",
+        "mode": "chat"
     },
     "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
         "max_tokens": 16384,
diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py
index 6612a462e..ca9dea969 100644
--- a/litellm/proxy/_types.py
+++ b/litellm/proxy/_types.py
@@ -260,6 +260,10 @@ class NewTeamRequest(LiteLLMBase):
     models: list = []
 
 
+class GlobalEndUsersSpend(LiteLLMBase):
+    api_key: Optional[str] = None
+
+
 class TeamMemberAddRequest(LiteLLMBase):
     team_id: str
     member: Optional[Member] = None
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 20fbb20ff..17e616e2b 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -4145,7 +4145,7 @@ async def global_spend_keys(
     tags=["Budget & Spend Tracking"],
     dependencies=[Depends(user_api_key_auth)],
 )
-async def global_spend_end_users():
+async def global_spend_end_users(data: GlobalEndUsersSpend):
     """
     [BETA] This is a beta endpoint. It will change.
 
@@ -4155,9 +4155,26 @@ async def global_spend_end_users():
 
     if prisma_client is None:
         raise HTTPException(status_code=500, detail={"error": "No db connected"})
-    sql_query = f"""SELECT * FROM "Last30dTopEndUsersSpend";"""
-    response = await prisma_client.db.query_raw(query=sql_query)
+    if data.api_key is None:
+        sql_query = f"""SELECT * FROM "Last30dTopEndUsersSpend";"""
+
+        response = await prisma_client.db.query_raw(query=sql_query)
+    else:
+        """
+        Gets the top 100 end-users for a given api key
+        """
+        current_date = datetime.now()
+        past_date = current_date - timedelta(days=30)
+        response = await prisma_client.db.litellm_spendlogs.group_by(  # type: ignore
+            by=["end_user"],
+            where={
+                "AND": [{"startTime": {"gte": past_date}}, {"api_key": data.api_key}]
+            },
+            sum={"spend": True},
+            order={"_sum": {"spend": "desc"}},  # type: ignore
+            take=100,
+        )
 
     return response
 
 
diff --git a/ui/litellm-dashboard/src/components/view_users.tsx b/ui/litellm-dashboard/src/components/view_users.tsx
index 00329e206..5b2b5f6de 100644
--- a/ui/litellm-dashboard/src/components/view_users.tsx
+++ b/ui/litellm-dashboard/src/components/view_users.tsx
@@ -10,17 +10,21 @@ import {
   TableCell,
   TableBody,
   Tab,
+  Text,
   TabGroup,
   TabList,
   TabPanels,
   Metric,
   Grid,
   TabPanel,
+  Select,
+  SelectItem,
 } from "@tremor/react";
 import { userInfoCall, adminTopEndUsersCall } from "./networking";
 import { Badge, BadgeDelta, Button } from "@tremor/react";
 import RequestAccess from "./request_model_access";
 import CreateUser from "./create_user_button";
+import Paragraph from "antd/es/skeleton/Paragraph";
 
 interface ViewUserDashboardProps {
   accessToken: string | null;
@@ -166,6 +170,17 @@ const ViewUserDashboard: React.FC = ({
+
+
+
+ Key + +
+