Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix - backend sort by date + /global/activity/model
This commit is contained in:
parent 3bab323dcc
commit aa50020bda
1 changed file with 165 additions and 1 deletion
litellm/proxy/proxy_server.py

@@ -5887,7 +5887,6 @@ async def view_spend_tags(
     "/global/activity",
     tags=["Budget & Spend Tracking"],
     dependencies=[Depends(user_api_key_auth)],
-    include_in_schema=False,
     responses={
         200: {"model": List[LiteLLM_SpendLogs]},
     },
@@ -5969,6 +5968,9 @@ async def get_global_activity(
             sum_api_requests += row.get("api_requests", 0)
             sum_total_tokens += row.get("total_tokens", 0)
 
+        # sort daily_data by date
+        daily_data = sorted(daily_data, key=lambda x: x["date"])
+
         data_to_return = {
             "daily_data": daily_data,
             "sum_api_requests": sum_api_requests,
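Note: as a quick illustration of the fix above, a minimal self-contained sketch; the sample rows are invented and only mimic the shape of the per-day buckets that get_global_activity builds before this sort runs.

# Invented sample rows, shaped like the daily_data entries assembled above.
daily_data = [
    {"date": "2024-01-23", "api_requests": 10, "total_tokens": 12},
    {"date": "2024-01-22", "api_requests": 10, "total_tokens": 2000},
]

# Same sort as the added line: order the per-day buckets by date before returning them.
daily_data = sorted(daily_data, key=lambda x: x["date"])
print([row["date"] for row in daily_data])  # ['2024-01-22', '2024-01-23']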
@@ -5984,6 +5986,168 @@ async def get_global_activity(
         )
 
 
+@router.get(
+    "/global/activity/model",
+    tags=["Budget & Spend Tracking"],
+    dependencies=[Depends(user_api_key_auth)],
+    responses={
+        200: {"model": List[LiteLLM_SpendLogs]},
+    },
+)
+async def get_global_activity_model(
+    start_date: Optional[str] = fastapi.Query(
+        default=None,
+        description="Time from which to start viewing spend",
+    ),
+    end_date: Optional[str] = fastapi.Query(
+        default=None,
+        description="Time till which to view spend",
+    ),
+):
+    """
+    Get number of API Requests, total tokens through proxy - Grouped by MODEL
+
+    [
+        {
+            "model": "gpt-4",
+            "daily_data": [
+                const chartdata = [
+                {
+                    date: 'Jan 22',
+                    api_requests: 10,
+                    total_tokens: 2000
+                },
+                {
+                    date: 'Jan 23',
+                    api_requests: 10,
+                    total_tokens: 12
+                },
+                ],
+            "sum_api_requests": 20,
+            "sum_total_tokens": 2012
+
+        },
+        {
+            "model": "azure/gpt-4-turbo",
+            "daily_data": [
+                const chartdata = [
+                {
+                    date: 'Jan 22',
+                    api_requests: 10,
+                    total_tokens: 2000
+                },
+                {
+                    date: 'Jan 23',
+                    api_requests: 10,
+                    total_tokens: 12
+                },
+                ],
+            "sum_api_requests": 20,
+            "sum_total_tokens": 2012
+
+        },
+    ]
+    """
+    from collections import defaultdict
+
+    if start_date is None or end_date is None:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail={"error": "Please provide start_date and end_date"},
+        )
+
+    start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
+    end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
+
+    """
+    litellm/proxy/proxy_server.py:219: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs [annotation-unchecked]
+    litellm/proxy/proxy_server.py:6087: error: Need type annotation for "spend_by_model" (hint: "spend_by_model: Dict[<type>, <type>] = ...") [var-annotated]
+    litellm/proxy/proxy_server.py:6105: error: "object" has no attribute "append" [attr-defined]
+    litellm/proxy/proxy_server.py:6113: error: Argument "key" to "sorted" has incompatible type "Callable[[tuple[Any, dict[str, object]]], object]"; expected "Callable[[tuple[Any, dict[str, object]]], SupportsDunderLT[Any] | SupportsDunderGT[Any]]" [arg-type]
+    litellm/proxy/proxy_server.py:6113: error: Incompatible return value type (got "object", expected "SupportsDunderLT[Any] | SupportsDunderGT[Any]") [return-value]
+    litellm/proxy/proxy_server.py:6121: error: No overload variant of "sorted" matches argument types "object", "Callable[[Any], Any]" [call-overload]
+    litellm/proxy/proxy_server.py:6121: note: Possible overload variants:
+    litellm/proxy/proxy_server.py:6121: note: def [SupportsRichComparisonT] sorted(Iterable[SupportsRichComparisonT], /, *, key: None = ..., reverse: bool = ...) -> list[SupportsRichComparisonT]
+    litellm/proxy/proxy_server.py:6121: note: def [_T] sorted(Iterable[_T], /, *, key: Callable[[_T], SupportsDunderLT[Any] | SupportsDunderGT[Any]], reverse: bool = ...) -> list[_T]
+    litellm/proxy/proxy_server.py:6121: error: Value of type Never is not indexable [index]
+    Found 6 errors in 1 file (checked 1 source file)
+    """
+
+    global prisma_client, llm_router, premium_user
+    try:
+        if prisma_client is None:
+            raise Exception(
+                f"Database not connected. Connect a database to your proxy - https://docs.litellm.ai/docs/simple_proxy#managing-auth---virtual-keys"
+            )
+
+        sql_query = """
+        SELECT
+            model,
+            date_trunc('day', "startTime") AS date,
+            COUNT(*) AS api_requests,
+            SUM(total_tokens) AS total_tokens
+        FROM "LiteLLM_SpendLogs"
+        WHERE "startTime" BETWEEN $1::date AND $2::date + interval '1 day'
+        GROUP BY model, date_trunc('day', "startTime")
+
+        """
+        db_response = await prisma_client.db.query_raw(
+            sql_query, start_date_obj, end_date_obj
+        )
+        if db_response is None:
+            return []
+
+        model_ui_data: dict = (
+            {}
+        )  # {"gpt-4": {"daily_data": [], "sum_api_requests": 0, "sum_total_tokens": 0}}
+
+        for row in db_response:
+            _model = row["model"]
+            if _model not in model_ui_data:
+                model_ui_data[_model] = {
+                    "daily_data": [],
+                    "sum_api_requests": 0,
+                    "sum_total_tokens": 0,
+                }
+            _date_obj = datetime.fromisoformat(row["date"])
+            row["date"] = _date_obj.strftime("%b %d")
+
+            model_ui_data[_model]["daily_data"].append(row)
+            model_ui_data[_model]["sum_api_requests"] += row.get("api_requests", 0)
+            model_ui_data[_model]["sum_total_tokens"] += row.get("total_tokens", 0)
+
+        # sort mode ui data by sum_api_requests -> get top 10 models
+        model_ui_data = dict(
+            sorted(
+                model_ui_data.items(),
+                key=lambda x: x[1]["sum_api_requests"],
+                reverse=True,
+            )[:10]
+        )
+
+        response = []
+
+        for model, data in model_ui_data.items():
+            _sort_daily_data = sorted(data["daily_data"], key=lambda x: x["date"])
+
+            response.append(
+                {
+                    "model": model,
+                    "daily_data": _sort_daily_data,
+                    "sum_api_requests": data["sum_api_requests"],
+                    "sum_total_tokens": data["sum_total_tokens"],
+                }
+            )
+
+        return response
+
+    except Exception as e:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail={"error": str(e)},
+        )
+
+
 @router.get(
     "/global/spend/provider",
     tags=["Budget & Spend Tracking"],
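For context, a minimal sketch of how the new /global/activity/model endpoint could be exercised once this change is deployed. The proxy URL and key below are hypothetical placeholders (not values from this commit), and the response handling follows the list-of-models shape described in the endpoint docstring.

# Minimal sketch, assuming a locally running LiteLLM proxy and the `requests` package.
# PROXY_BASE_URL and MASTER_KEY are hypothetical placeholders.
import requests

PROXY_BASE_URL = "http://localhost:4000"
MASTER_KEY = "sk-1234"

resp = requests.get(
    f"{PROXY_BASE_URL}/global/activity/model",
    headers={"Authorization": f"Bearer {MASTER_KEY}"},
    params={"start_date": "2024-01-22", "end_date": "2024-01-29"},
)
resp.raise_for_status()

# Expected: a list of up to 10 models, each with per-day api_requests / total_tokens
# plus summed totals, as documented in the endpoint docstring.
for entry in resp.json():
    print(entry["model"], entry["sum_api_requests"], entry["sum_total_tokens"])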