Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
(fix) add usage tracking in callback
commit 098e399931
parent 4b8926a0f5
1 changed file with 6 additions and 7 deletions
@@ -1888,11 +1888,11 @@ def stream_chunk_builder(chunks: list):
                 "finish_reason": finish_reason,
             }
         ],
-        # "usage": {
-        # "prompt_tokens": 0, # Modify as needed
-        # "completion_tokens": 0, # Modify as needed
-        # "total_tokens": 0 # Modify as needed
-        # }
+        "usage": {
+            "prompt_tokens": 0, # Modify as needed
+            "completion_tokens": 0, # Modify as needed
+            "total_tokens": 0 # Modify as needed
+        }
     }
 
     # Extract the "content" strings from the nested dictionaries within "choices"
@@ -1939,6 +1939,5 @@ def stream_chunk_builder(chunks: list):
 
 
     # # Update usage information if needed
-    # response["usage"]["completion_tokens"] = token
-
+    response["usage"]["completion_tokens"] = litellm.utils.token_counter(model=model, text=combined_content)
     return response
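For reference, the call introduced in the second hunk can be tried on its own. This is a minimal sketch, not part of the commit: the sample text and model name are illustrative, while litellm.utils.token_counter(model=..., text=...) is the call the diff actually adds.

import litellm

# Stand-in for the combined content joined from the streamed chunks.
combined_content = "Hello! How can I help you today?"

# token_counter estimates the token count of a string for the given model;
# this value is what now populates response["usage"]["completion_tokens"].
completion_tokens = litellm.utils.token_counter(
    model="gpt-3.5-turbo",  # illustrative model name
    text=combined_content,
)
print(completion_tokens)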
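Taken together, the two hunks mean a response rebuilt from streamed chunks now carries a computed completion token count. A rough end-to-end sketch under stated assumptions: the prompt and model name are illustrative and provider credentials are assumed to be configured, while completion and stream_chunk_builder are existing litellm entry points.

import litellm

# Stream a completion and collect the chunks as they arrive.
chunks = []
for chunk in litellm.completion(
    model="gpt-3.5-turbo",  # illustrative model name
    messages=[{"role": "user", "content": "Say hi"}],
    stream=True,
):
    chunks.append(chunk)

# Rebuild a single response from the chunks; after this fix the builder
# fills usage["completion_tokens"] by token-counting the combined content.
response = litellm.stream_chunk_builder(chunks)
print(response["usage"]["completion_tokens"])

Note that prompt_tokens and total_tokens remain placeholder zeros in this change; only completion_tokens is computed from the combined content.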