(feat) add cache_key in spend_log

This commit is contained in:
ishaan-jaff 2024-01-24 17:56:00 -08:00
parent d694993703
commit 2130a61b6e
5 changed files with 12 additions and 2 deletions

View file

@@ -343,8 +343,7 @@ class LiteLLM_SpendLogs(LiteLLMBase):
endTime: Union[str, datetime, None]
user: Optional[str] = ""
modelParameters: Optional[Json] = {}
messages: Optional[Json] = []
response: Optional[Json] = {}
usage: Optional[Json] = {}
metadata: Optional[Json] = {}
cache_hit: Optional[str] = "False"
cache_key: Optional[str] = None

View file

@@ -58,4 +58,5 @@ model LiteLLM_SpendLogs {
usage Json @default("{}")
metadata Json @default("{}")
cache_hit String @default("")
cache_key String @default("")
}

View file

@@ -995,6 +995,10 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
if api_key is not None and isinstance(api_key, str) and api_key.startswith("sk-"):
# hash the api_key
api_key = hash_token(api_key)
from litellm.caching import Cache
c = Cache()
cache_key = c.get_cache_key(**kwargs)
if "headers" in metadata and "authorization" in metadata["headers"]:
metadata["headers"].pop(
@@ -1013,6 +1017,7 @@ def get_logging_payload(kwargs, response_obj, start_time, end_time):
"modelParameters": optional_params,
"usage": usage,
"metadata": metadata,
"cache_key": cache_key,
}
json_fields = [

View file

@@ -763,6 +763,10 @@ def test_call_with_key_over_budget(prisma_client):
assert spend_log.request_id == request_id
assert spend_log.spend == float("2e-05")
assert spend_log.model == "chatgpt-v-2"
assert (
spend_log.cache_key
== "a61ae14fe4a8b8014a61e6ae01a100c8bc6770ac37c293242afed954bc69207d"
)
# use generated key to auth in
result = await user_api_key_auth(request=request, api_key=bearer_token)

View file

@@ -61,4 +61,5 @@ model LiteLLM_SpendLogs {
usage Json @default("{}")
metadata Json @default("{}")
cache_hit String @default("")
cache_key String @default("")
}