forked from phoenix/litellm-mirror
(feat) track - api_key in spendLogs
This commit is contained in:
parent
5b54bcc712
commit
73938080f2
5 changed files with 92 additions and 5 deletions
|
@ -317,14 +317,15 @@ class LiteLLM_UserTable(LiteLLMBase):
|
||||||
|
|
||||||
class LiteLLM_SpendLogs(LiteLLMBase):
    """Pydantic view of one row of the LiteLLM_SpendLogs table.

    NOTE: the annotation types here are load-bearing, not just documentation —
    get_logging_payload() introspects ``__annotations__`` to decide which
    payload values to JSON-encode (Json fields) or stringify (str fields).
    """

    request_id: str
    # stored as a SHA-256 hash of the caller's key, never the raw key
    api_key: str
    model: Optional[str] = ""
    call_type: str
    spend: Optional[float] = 0.0
    startTime: Union[str, datetime, None]
    endTime: Union[str, datetime, None]
    user: Optional[str] = ""
    # pydantic deep-copies these mutable defaults per instance, so sharing is safe
    modelParameters: Optional[Json] = {}
    messages: Optional[Json] = []
    response: Optional[Json] = {}
    usage: Optional[Json] = {}
    metadata: Optional[Json] = {}
    # FIX: get_logging_payload() emits "cache_hit" and the DB schema has a
    # cache_hit String column, but the field was missing here — so the
    # str-field coercion never converted the bool to a string. Declaring it
    # (with a backward-compatible default) closes that gap.
    cache_hit: Optional[str] = ""
|
||||||
|
|
|
@ -36,13 +36,14 @@ model LiteLLM_Config {
|
||||||
// Spend-tracking log: one row per proxied completion/embedding call.
model LiteLLM_SpendLogs {
  request_id      String   @unique
  call_type       String
  // SHA-256 hash of the caller's API key (never the raw key)
  api_key         String   @default("")
  spend           Float    @default(0.0)
  startTime       DateTime // call start time
  endTime         DateTime // call end time
  model           String   @default("")
  user            String   @default("")
  modelParameters Json     @default("{}") // optional_params of the call
  messages        Json     @default("[]")
  response        Json     @default("{}")
  usage           Json     @default("{}")
  metadata        Json     @default("{}")
  // FIX: get_logging_payload() writes "cache_hit" and the sibling schema file
  // already declares it — added here so inserts don't fail on an unknown column.
  cache_hit       String   @default("")
}
|
||||||
|
|
|
@ -779,3 +779,85 @@ async def send_email(sender_name, sender_email, receiver_email, subject, html):
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print_verbose("An error occurred while sending the email:", str(e))
|
print_verbose("An error occurred while sending the email:", str(e))
|
||||||
|
|
||||||
|
|
||||||
|
def hash_token(token: str):
    """Return the hex-encoded SHA-256 digest of *token*.

    Used so API keys are stored/logged only in hashed form.
    """
    import hashlib

    digest = hashlib.sha256(token.encode())
    return digest.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def get_logging_payload(kwargs, response_obj, start_time, end_time):
    """Standardize a completion/embedding call into a SpendLogs-shaped dict.

    Shared across s3, dynamoDB and langfuse logging. The caller's API key is
    SHA-256 hashed before it is stored; values are then coerced so that every
    field annotated ``Json`` on LiteLLM_SpendLogs becomes a JSON string and
    every ``str``-annotated field becomes a str.

    Args:
        kwargs: the litellm call kwargs (may be None).
        response_obj: provider response; must support ``["usage"]`` and ``.get("id")``.
        start_time: call start (str or datetime — stored as-is).
        end_time: call end (str or datetime — stored as-is).

    Returns:
        dict ready to be written to the LiteLLM_SpendLogs table.
    """
    from litellm.proxy._types import LiteLLM_SpendLogs
    from pydantic import Json
    import uuid

    if kwargs is None:
        kwargs = {}
    litellm_params = kwargs.get("litellm_params", {})
    # litellm_params["metadata"] may be explicitly None — fall back to {}
    metadata = litellm_params.get("metadata", {}) or {}
    messages = kwargs.get("messages")
    optional_params = kwargs.get("optional_params", {})
    call_type = kwargs.get("call_type", "litellm.completion")
    cache_hit = kwargs.get("cache_hit", False)
    usage = response_obj["usage"]
    id = response_obj.get("id", str(uuid.uuid4()))
    api_key = metadata.get("user_api_key", "")
    if api_key is not None and isinstance(api_key, str):
        # never store the raw key — store its SHA-256 hash
        api_key = hash_token(api_key)

    payload = {
        "request_id": id,
        "call_type": call_type,
        "api_key": api_key,
        "cache_hit": cache_hit,
        "startTime": start_time,
        "endTime": end_time,
        "model": kwargs.get("model", ""),
        "user": kwargs.get("user", ""),
        "modelParameters": optional_params,
        "messages": messages,
        "response": response_obj,
        "usage": usage,
        "metadata": metadata,
    }

    # Field lists derived from the pydantic model's annotations, so schema
    # changes on LiteLLM_SpendLogs automatically flow into the coercion below.
    json_fields = [
        field
        for field, field_type in LiteLLM_SpendLogs.__annotations__.items()
        if field_type == Json or field_type == Optional[Json]
    ]
    str_fields = [
        field
        for field, field_type in LiteLLM_SpendLogs.__annotations__.items()
        if field_type == str or field_type == Optional[str]
    ]

    for param in json_fields:
        if param in payload and type(payload[param]) != Json:
            # BUG FIX: this was two disconnected if-chains, so a ModelResponse
            # was model_dump_json()'d by the first `if` and then — now a str —
            # fell into the trailing `else` and was json.dumps()'d a second
            # time, producing a double-encoded JSON string.
            if isinstance(
                payload[param],
                (litellm.ModelResponse, litellm.EmbeddingResponse, litellm.Usage),
            ):
                payload[param] = payload[param].model_dump_json()
            else:
                payload[param] = json.dumps(payload[param])

    for param in str_fields:
        if param in payload and type(payload[param]) != str:
            payload[param] = str(payload[param])

    return payload
|
||||||
|
|
|
@ -8442,10 +8442,12 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
|
||||||
cache_hit = kwargs.get("cache_hit", False)
|
cache_hit = kwargs.get("cache_hit", False)
|
||||||
usage = response_obj["usage"]
|
usage = response_obj["usage"]
|
||||||
id = response_obj.get("id", str(uuid.uuid4()))
|
id = response_obj.get("id", str(uuid.uuid4()))
|
||||||
|
api_key = metadata.get("user_api_key", "")
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"request_id": id,
|
"request_id": id,
|
||||||
"call_type": call_type,
|
"call_type": call_type,
|
||||||
|
"api_key": api_key,
|
||||||
"cache_hit": cache_hit,
|
"cache_hit": cache_hit,
|
||||||
"startTime": start_time,
|
"startTime": start_time,
|
||||||
"endTime": end_time,
|
"endTime": end_time,
|
||||||
|
|
|
@ -35,16 +35,17 @@ model LiteLLM_Config {
|
||||||
|
|
||||||
// Spend-tracking log: one row per proxied completion/embedding call.
model LiteLLM_SpendLogs {
  request_id      String   @unique
  // SHA-256 hash of the caller's API key (never the raw key).
  // FIX: `@default ("")` → `@default("")` — every other attribute in this
  // schema uses the no-space form; keep the attribute syntax consistent.
  api_key         String   @default("")
  call_type       String
  spend           Float    @default(0.0)
  startTime       DateTime // call start time
  endTime         DateTime // call end time
  model           String   @default("")
  user            String   @default("")
  modelParameters Json     @default("{}") // optional_params of the call
  messages        Json     @default("[]")
  response        Json     @default("{}")
  usage           Json     @default("{}")
  metadata        Json     @default("{}")
  cache_hit       String   @default("")
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue