Merge pull request #1634 from BerriAI/litellm_stop_logging_model_params_spendlogs

[Fix] SpendLogs stop logging model params
This commit is contained in:
Ishaan Jaff 2024-01-26 10:28:54 -08:00 committed by GitHub
commit 12f569ad60
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 14 additions and 11 deletions

View file

@@ -346,11 +346,12 @@ class LiteLLM_SpendLogs(LiteLLMBase):
model: Optional[str] = ""
call_type: str
spend: Optional[float] = 0.0
total_tokens: Optional[float] = 0.0
prompt_tokens: Optional[float] = 0.0
completion_tokens: Optional[float] = 0.0
startTime: Union[str, datetime, None]
endTime: Union[str, datetime, None]
user: Optional[str] = ""
modelParameters: Optional[Json] = {}
usage: Optional[Json] = {}
metadata: Optional[Json] = {}
cache_hit: Optional[str] = "False"
cache_key: Optional[str] = None

View file

@@ -50,12 +50,13 @@ model LiteLLM_SpendLogs {
call_type String
api_key String @default ("")
spend Float @default(0.0)
total_tokens Float @default(0.0)
prompt_tokens Float @default(0.0)
completion_tokens Float @default(0.0)
startTime DateTime // Assuming start_time is a DateTime field
endTime DateTime // Assuming end_time is a DateTime field
model String @default("")
user String @default("")
modelParameters Json @default("{}")// Assuming optional_params is a JSON field
usage Json @default("{}")
metadata Json @default("{}")
cache_hit String @default("")
cache_key String @default("")

View file

@@ -1059,10 +1059,11 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
metadata = (
litellm_params.get("metadata", {}) or {}
) # if litellm_params['metadata'] == None
optional_params = kwargs.get("optional_params", {})
call_type = kwargs.get("call_type", "litellm.completion")
cache_hit = kwargs.get("cache_hit", False)
usage = response_obj["usage"]
if type(usage) == litellm.Usage:
usage = dict(usage)
id = response_obj.get("id", str(uuid.uuid4()))
api_key = metadata.get("user_api_key", "")
if api_key is not None and isinstance(api_key, str) and api_key.startswith("sk-"):
@@ -1090,10 +1091,11 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
"endTime": end_time,
"model": kwargs.get("model", ""),
"user": kwargs.get("user", ""),
"modelParameters": optional_params,
"usage": usage,
"metadata": metadata,
"cache_key": cache_key,
"total_tokens": usage.get("total_tokens", 0),
"prompt_tokens": usage.get("prompt_tokens", 0),
"completion_tokens": usage.get("completion_tokens", 0),
}
json_fields = [
@@ -1118,8 +1120,6 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
payload[param] = payload[param].model_dump_json()
if type(payload[param]) == litellm.EmbeddingResponse:
payload[param] = payload[param].model_dump_json()
elif type(payload[param]) == litellm.Usage:
payload[param] = payload[param].model_dump_json()
else:
payload[param] = json.dumps(payload[param])

View file

@@ -53,12 +53,13 @@ model LiteLLM_SpendLogs {
call_type String
api_key String @default ("")
spend Float @default(0.0)
total_tokens Float @default(0.0)
prompt_tokens Float @default(0.0)
completion_tokens Float @default(0.0)
startTime DateTime // Assuming start_time is a DateTime field
endTime DateTime // Assuming end_time is a DateTime field
model String @default("")
user String @default("")
modelParameters Json @default("{}")// Assuming optional_params is a JSON field
usage Json @default("{}")
metadata Json @default("{}")
cache_hit String @default("")
cache_key String @default("")