fix(types/utils.py): support passing prompt cache usage stats in usage object

Passes DeepSeek prompt-caching values through to the end user.
This commit is contained in:
Krrish Dholakia 2024-08-02 09:30:50 -07:00
parent cd073d5ad3
commit 0a30ba9674
3 changed files with 40 additions and 9 deletions

View file

@@ -5825,6 +5825,8 @@ def convert_to_model_response_object(
model_response_object.usage.completion_tokens = response_object["usage"].get("completion_tokens", 0) # type: ignore
model_response_object.usage.prompt_tokens = response_object["usage"].get("prompt_tokens", 0) # type: ignore
model_response_object.usage.total_tokens = response_object["usage"].get("total_tokens", 0) # type: ignore
model_response_object.usage.prompt_cache_hit_tokens = response_object["usage"].get("prompt_cache_hit_tokens", None) # type: ignore
model_response_object.usage.prompt_cache_miss_tokens = response_object["usage"].get("prompt_cache_miss_tokens", None) # type: ignore
if "created" in response_object:
model_response_object.created = response_object["created"] or int(