Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00

fix(test_text_completion.py): fix test

parent ff12e023ae
commit fd6ccfca7d

3 changed files with 14 additions and 8 deletions
@@ -3164,8 +3164,7 @@ def stream_chunk_builder_text_completion(chunks: list, messages: Optional[List]=
    else:
        completion_output = ""
    # # Update usage information if needed
    print(f"INSIDE TEXT COMPLETION STREAM CHUNK BUILDER")
    _usage = litellm.Usage
    _usage = litellm.Usage()
    print(f"messages: {messages}")
    _usage.prompt_tokens = token_counter(
        model=model, messages=messages, count_response_tokens=True
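Both the old and the new spelling of the usage object are visible in this hunk: `_usage = litellm.Usage` binds the class itself, while `_usage = litellm.Usage()` creates an instance. A minimal sketch of the difference, assuming `Usage` accepts attribute assignment the way the surrounding lines use it (the sample token counts are made up):

import litellm

# Without parentheses, _usage would be the Usage class object, so the
# attribute writes that follow in the function would land on the class,
# shared across every call, rather than on a per-response object.
usage_class = litellm.Usage

# With parentheses, _usage is a fresh instance whose token counts belong
# to this one response; this is what response["usage"] should hold.
usage = litellm.Usage()
usage.prompt_tokens = 12
usage.completion_tokens = 7
usage.total_tokens = usage.prompt_tokens + usage.completion_tokens
print(usage)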
@@ -3180,6 +3179,7 @@ def stream_chunk_builder_text_completion(chunks: list, messages: Optional[List]=
        _usage.prompt_tokens + _usage.completion_tokens
    )
    response["usage"] = _usage
    print(f"final usage: {_usage}")
    return litellm.TextCompletionResponse(**response)


def stream_chunk_builder(chunks: list, messages: Optional[list] = None):
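Read together, the two hunks outline the usage bookkeeping at the end of `stream_chunk_builder_text_completion`. The sketch below is reconstructed from the visible diff lines and is not the exact committed code: the helper name is just for illustration, the completion-token counting sits between the two hunks and is assumed here, and `model`, `messages`, `completion_output`, and `response` are taken from earlier in the function.

import litellm
from litellm import token_counter

def finalize_text_completion_usage(model, messages, completion_output, response):
    # Build a per-response Usage object (the fix: instantiate, don't bind the class).
    _usage = litellm.Usage()
    _usage.prompt_tokens = token_counter(
        model=model, messages=messages, count_response_tokens=True
    )
    # Assumed step: count completion tokens from the reassembled streamed text;
    # this part is not visible in the hunks above.
    _usage.completion_tokens = token_counter(
        model=model, text=completion_output, count_response_tokens=True
    )
    _usage.total_tokens = (
        _usage.prompt_tokens + _usage.completion_tokens
    )
    response["usage"] = _usage
    return litellm.TextCompletionResponse(**response)

In this shape, `response` is the dict built from the streamed chunks, and the final `TextCompletionResponse(**response)` matches the return visible in the second hunk.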