mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
feat(proxy_server.py): give request-level breakdown if ttft metric is selected for just that day
This commit is contained in:
parent 7b565271e2
commit 4dd7b05406
1 changed file with 66 additions and 28 deletions
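The change is easiest to read as a granularity switch: when the selected window starts and ends on the same calendar day, the streaming-metrics endpoint returns one time-to-first-token (TTFT) row per request instead of a per-day average. A minimal standalone sketch of that check (not the proxy code itself; the names mirror the diff below, the window is hypothetical):

from datetime import datetime, timedelta

# Hypothetical window: a few hours inside one day.
startTime = datetime.now() - timedelta(hours=3)
endTime = datetime.now()

# Same check the commit adds before building the SQL query.
is_same_day = startTime.date() == endTime.date()

if is_same_day:
    # one row per request_id, raw TTFT per request
    granularity = "per-request"
else:
    # one row per day, AVG(TTFT) per day
    granularity = "per-day"

print(granularity)  # "per-request" for this window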
proxy_server.py

@@ -493,7 +493,7 @@ async def user_api_key_auth(

         if route in LiteLLMRoutes.public_routes.value:
             # check if public endpoint
-            return UserAPIKeyAuth()
+            return UserAPIKeyAuth(user_role="app_owner")

         if general_settings.get("enable_jwt_auth", False) == True:
             is_jwt = jwt_handler.is_jwt(token=api_key)
@@ -1385,7 +1385,9 @@ async def user_api_key_auth(
                     api_key=api_key, user_role="app_owner", **valid_token_dict
                 )
             else:
-                return UserAPIKeyAuth(api_key=api_key, **valid_token_dict)
+                return UserAPIKeyAuth(
+                    api_key=api_key, user_role="app_owner", **valid_token_dict
+                )
         else:
             raise Exception()
     except Exception as e:
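Both auth hunks above now attach an explicit role to the returned auth object instead of leaving it unset. A minimal sketch of the effect, assuming UserAPIKeyAuth is importable from litellm.proxy._types and accepts these keyword arguments as plain values (a sketch, not the proxy's own auth flow; the key below is made up):

from litellm.proxy._types import UserAPIKeyAuth

# Public routes: no key validation, but the caller is now tagged as "app_owner".
public_auth = UserAPIKeyAuth(user_role="app_owner")

# Valid tokens without a more specific role fall back to "app_owner" as well.
token_auth = UserAPIKeyAuth(api_key="sk-hypothetical-key", user_role="app_owner")

print(public_auth.user_role, token_auth.user_role)  # app_owner app_owner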
@@ -9579,28 +9581,54 @@ async def model_streaming_metrics(
     startTime = startTime or datetime.now() - timedelta(days=7)  # show over past week
     endTime = endTime or datetime.now()

-    sql_query = """
-        SELECT
-            api_base,
-            model_group,
-            model,
-            DATE_TRUNC('day', "startTime")::DATE AS day,
-            AVG(EXTRACT(epoch FROM ("completionStartTime" - "startTime"))) AS time_to_first_token
-        FROM
-            "LiteLLM_SpendLogs"
-        WHERE
-            "startTime" BETWEEN $2::timestamp AND $3::timestamp
-            AND "model_group" = $1 AND "cache_hit" != 'True'
-            AND "completionStartTime" IS NOT NULL
-            AND "completionStartTime" != "endTime"
-        GROUP BY
-            api_base,
-            model_group,
-            model,
-            day
-        ORDER BY
-            time_to_first_token DESC;
-    """
+    is_same_day = startTime.date() == endTime.date()
+    if is_same_day:
+        sql_query = """
+            SELECT
+                api_base,
+                model_group,
+                model,
+                "startTime",
+                request_id,
+                EXTRACT(epoch FROM ("completionStartTime" - "startTime")) AS time_to_first_token
+            FROM
+                "LiteLLM_SpendLogs"
+            WHERE
+                "model_group" = $1 AND "cache_hit" != 'True'
+                AND "completionStartTime" IS NOT NULL
+                AND "completionStartTime" != "endTime"
+                AND DATE("startTime") = DATE($2::timestamp)
+            GROUP BY
+                api_base,
+                model_group,
+                model,
+                request_id
+            ORDER BY
+                time_to_first_token DESC;
+        """
+    else:
+        sql_query = """
+            SELECT
+                api_base,
+                model_group,
+                model,
+                DATE_TRUNC('day', "startTime")::DATE AS day,
+                AVG(EXTRACT(epoch FROM ("completionStartTime" - "startTime"))) AS time_to_first_token
+            FROM
+                "LiteLLM_SpendLogs"
+            WHERE
+                "startTime" BETWEEN $2::timestamp AND $3::timestamp
+                AND "model_group" = $1 AND "cache_hit" != 'True'
+                AND "completionStartTime" IS NOT NULL
+                AND "completionStartTime" != "endTime"
+            GROUP BY
+                api_base,
+                model_group,
+                model,
+                day
+            ORDER BY
+                time_to_first_token DESC;
+        """

     _all_api_bases = set()
     db_response = await prisma_client.db.query_raw(
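The two queries above return differently shaped rows: the multi-day query keeps the old per-day aggregate, while the same-day query surfaces one row per request. Hedged example rows follow; the column names come from the SELECT lists above, every value (api_base, model names, request_id, timings) is made up:

# Multi-day window: one aggregated row per (api_base, model_group, model, day).
multi_day_row = {
    "api_base": "https://example-resource.openai.azure.com",  # hypothetical
    "model_group": "gpt-4",
    "model": "azure/gpt-4",
    "day": "2024-06-23",
    "time_to_first_token": 0.42,  # AVG over the day, in seconds
}

# Same-day window: one row per request, with the raw TTFT and its start time.
same_day_row = {
    "api_base": "https://example-resource.openai.azure.com",  # hypothetical
    "model_group": "gpt-4",
    "model": "azure/gpt-4",
    "startTime": "2024-06-23T14:05:11Z",
    "request_id": "chatcmpl-abc123",  # hypothetical
    "time_to_first_token": 0.37,  # per-request, in seconds
}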
@@ -9611,10 +9639,19 @@ async def model_streaming_metrics(
     for model_data in db_response:
         _api_base = model_data["api_base"]
         _model = model_data["model"]
-        _day = model_data["day"]
         time_to_first_token = model_data["time_to_first_token"]
-        if _day not in _daily_entries:
-            _daily_entries[_day] = {}
+        unique_key = ""
+        if is_same_day:
+            _request_id = model_data["request_id"]
+            unique_key = _request_id
+            if _request_id not in _daily_entries:
+                _daily_entries[_request_id] = {}
+        else:
+            _day = model_data["day"]
+            unique_key = _day
+            time_to_first_token = model_data["time_to_first_token"]
+            if _day not in _daily_entries:
+                _daily_entries[_day] = {}
         _combined_model_name = str(_model)
         if "https://" in _api_base:
             _combined_model_name = str(_api_base)
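The loop above (it continues into the next hunk) keys the accumulator by request_id for a same-day window and by day otherwise, with the inner dict mapping the combined model/api_base name to its TTFT value. A hedged illustration of the two resulting shapes; the keys and values are made up:

# Same-day window: one entry per request_id.
_daily_entries_same_day = {
    "chatcmpl-abc123": {"azure/gpt-4": 0.37},  # hypothetical request_id
    "chatcmpl-def456": {"azure/gpt-4": 0.51},
}

# Multi-day window: one entry per day (the previous behaviour).
_daily_entries_multi_day = {
    "2024-06-22": {"azure/gpt-4": 0.44},
    "2024-06-23": {"azure/gpt-4": 0.42},
}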
@@ -9622,7 +9659,8 @@ async def model_streaming_metrics(
             _combined_model_name = _combined_model_name.split("/openai/")[0]

         _all_api_bases.add(_combined_model_name)
-        _daily_entries[_day][_combined_model_name] = time_to_first_token
+
+        _daily_entries[unique_key][_combined_model_name] = time_to_first_token

     """
     each entry needs to be like this: