Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
fix(main.py): fix streaming_chunk_builder to return usage

parent: 8edfcd8e5d
commit: 4a5dae3941
6 changed files with 133 additions and 117 deletions
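
For callers, the practical effect of this fix is that a streamed completion rebuilt into a full response now carries a usage block. Below is a minimal sketch using the same public calls the updated test exercises; the placeholder prompt is illustrative and a valid `OPENAI_API_KEY` is assumed:

```python
import os
import litellm
from litellm import completion, stream_chunk_builder

litellm.api_key = os.environ["OPENAI_API_KEY"]
messages = [{"role": "user", "content": "Say hello"}]  # placeholder prompt, not the test's

# Manual path: collect the streamed chunks and rebuild them yourself.
chunks = list(completion(model="gpt-3.5-turbo", messages=messages, stream=True))
rebuilt = stream_chunk_builder(chunks)

# Convenience path used by the new test body: completion() rebuilds under the hood.
response = completion(
    model="gpt-3.5-turbo",
    messages=messages,
    stream=True,
    complete_response=True,  # runs stream_chunk_builder under-the-hood
)
print(response["usage"])  # the usage block this commit makes available
```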
Diff of the stream_chunk_builder test:

```diff
@@ -1,3 +1,9 @@
+import sys, os, time
+import traceback, asyncio
+import pytest
+sys.path.insert(
+    0, os.path.abspath("../..")
+)  # Adds the parent directory to the system path
 from litellm import completion, stream_chunk_builder
 import litellm
 import os, dotenv
@@ -24,40 +30,21 @@ function_schema = {
     },
 }
 
-@pytest.mark.skip
 def test_stream_chunk_builder():
-    litellm.set_verbose = False
-    litellm.api_key = os.environ["OPENAI_API_KEY"]
-    response = completion(
-        model="gpt-3.5-turbo",
-        messages=messages,
-        functions=[function_schema],
-        stream=True,
-    )
+    try:
+        litellm.set_verbose = False
+        litellm.api_key = os.environ["OPENAI_API_KEY"]
+        response = completion(
+            model="gpt-3.5-turbo",
+            messages=messages,
+            functions=[function_schema],
+            stream=True,
+            complete_response=True  # runs stream_chunk_builder under-the-hood
+        )
 
-    chunks = []
-
-    for chunk in response:
-        # print(chunk)
-        chunks.append(chunk)
-
-    try:
-        print(f"chunks: {chunks}")
-        rebuilt_response = stream_chunk_builder(chunks)
-
-        # exract the response from the rebuilt response
-        rebuilt_response["id"]
-        rebuilt_response["object"]
-        rebuilt_response["created"]
-        rebuilt_response["model"]
-        rebuilt_response["choices"]
-        rebuilt_response["choices"][0]["index"]
-        choices = rebuilt_response["choices"][0]
-        message = choices["message"]
-        role = message["role"]
-        content = message["content"]
-        finish_reason = choices["finish_reason"]
-        print(role, content, finish_reason)
-    except Exception as e:
-        raise Exception("stream_chunk_builder failed to rebuild response", e)
+        print(f"response: {response}")
+        print(f"response usage: {response['usage']}")
+    except Exception as e:
+        pytest.fail(f"An exception occurred - {str(e)}")
+
 test_stream_chunk_builder()
```
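
For intuition only, here is a toy sketch of the kind of aggregation a chunk builder has to perform so that the rebuilt response can report usage. The helper name, the dict-shaped chunks, and the injected token counter are assumptions for illustration, not litellm's actual main.py code:

```python
# Toy sketch, not litellm's implementation: rebuild one choice from
# OpenAI-style streaming chunk dicts and attach a usage block.
def rebuild_with_usage(chunks, count_tokens):
    # Concatenate the streamed content deltas into the final message.
    content = "".join(
        chunk["choices"][0]["delta"].get("content") or "" for chunk in chunks
    )
    first, last = chunks[0], chunks[-1]
    completion_tokens = count_tokens(content)
    prompt_tokens = 0  # a real builder would also count the request messages
    return {
        "id": first["id"],
        "object": "chat.completion",
        "created": first["created"],
        "model": first["model"],
        "choices": [{
            "index": 0,
            "message": {"role": "assistant", "content": content},
            "finish_reason": last["choices"][0].get("finish_reason"),
        }],
        "usage": {
            "prompt_tokens": prompt_tokens,
            "completion_tokens": completion_tokens,
            "total_tokens": prompt_tokens + completion_tokens,
        },
    }
```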