fix(main.py): fix stream_chunk_builder usage calc
Closes https://github.com/BerriAI/litellm/issues/4496
parent e34ba00a51
commit 27e9f96380

3 changed files with 57 additions and 12 deletions
@@ -5022,10 +5022,9 @@ def stream_chunk_builder(
     for chunk in chunks:
         if "usage" in chunk:
             if "prompt_tokens" in chunk["usage"]:
-                prompt_tokens += chunk["usage"].get("prompt_tokens", 0) or 0
+                prompt_tokens = chunk["usage"].get("prompt_tokens", 0) or 0
             if "completion_tokens" in chunk["usage"]:
-                completion_tokens += chunk["usage"].get("completion_tokens", 0) or 0
-
+                completion_tokens = chunk["usage"].get("completion_tokens", 0) or 0
     try:
         response["usage"]["prompt_tokens"] = prompt_tokens or token_counter(
             model=model, messages=messages
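
As the diff implies, when a provider attaches a usage block to streaming chunks, each block carries running totals for the stream rather than per-chunk deltas, so accumulating with += double-counted tokens whenever more than one chunk reported usage; plain assignment keeps the last (final) totals. Below is a minimal standalone sketch of the corrected aggregation logic, not litellm's actual implementation; aggregate_usage and the sample chunks are hypothetical, and the token_counter fallback from the diff is omitted.

    # Sketch only: take the latest reported usage totals instead of summing them.
    def aggregate_usage(chunks: list[dict]) -> dict:
        prompt_tokens = 0
        completion_tokens = 0
        for chunk in chunks:
            usage = chunk.get("usage") or {}
            if "prompt_tokens" in usage:
                # assignment, not "+=": usage totals are cumulative per chunk
                prompt_tokens = usage.get("prompt_tokens", 0) or 0
            if "completion_tokens" in usage:
                completion_tokens = usage.get("completion_tokens", 0) or 0
        return {
            "prompt_tokens": prompt_tokens,
            "completion_tokens": completion_tokens,
            "total_tokens": prompt_tokens + completion_tokens,
        }

    # Hypothetical stream where two chunks both report (cumulative) usage.
    chunks = [
        {"choices": [{"delta": {"content": "Hel"}}],
         "usage": {"prompt_tokens": 12, "completion_tokens": 1}},
        {"choices": [{"delta": {"content": "lo"}}],
         "usage": {"prompt_tokens": 12, "completion_tokens": 2}},
    ]
    print(aggregate_usage(chunks))
    # {'prompt_tokens': 12, 'completion_tokens': 2, 'total_tokens': 14}
    # With the old "+=" logic this would have been 24 / 3 / 27.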