test(test_streaming.py): fix test

Krrish Dholakia 2024-04-25 20:22:18 -07:00
parent 5307510592
commit 9eb75cc159
2 changed files with 16 additions and 5 deletions


@@ -2723,8 +2723,18 @@ def test_aamazing_unit_test_custom_stream_wrapper_n():
     chunk_list = []
     for chunk in chunks:
-        _chunk = litellm.ModelResponse(**chunk, stream=True)
-        chunk_list.append(_chunk)
+        new_chunk = litellm.ModelResponse(stream=True, id=chunk["id"])
+        if "choices" in chunk and isinstance(chunk["choices"], list):
+            print("INSIDE CHUNK CHOICES!")
+            new_choices = []
+            for choice in chunk["choices"]:
+                if isinstance(choice, litellm.utils.StreamingChoices):
+                    _new_choice = choice
+                elif isinstance(choice, dict):
+                    _new_choice = litellm.utils.StreamingChoices(**choice)
+                new_choices.append(_new_choice)
+            new_chunk.choices = new_choices
+        chunk_list.append(new_chunk)
     completion_stream = ModelResponseListIterator(model_responses=chunk_list)
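The rebuilt loop no longer unpacks the raw chunk dict straight into ModelResponse; it constructs the response from the chunk id and converts each choice dict into a typed StreamingChoices object first. Below is a minimal standalone sketch of the same idea; the sample chunk payload (id and delta contents) is assumed for illustration and is not taken from the test fixtures.

import litellm
from litellm.utils import StreamingChoices

# Illustrative chunk shape; the real test builds its chunks from recorded streaming responses.
chunk = {
    "id": "chatcmpl-123",
    "choices": [{"index": 0, "delta": {"content": "Hello"}, "finish_reason": None}],
}

# Build the ModelResponse from the id only, then attach StreamingChoices objects,
# mirroring the conversion loop added in the test above.
new_chunk = litellm.ModelResponse(stream=True, id=chunk["id"])
if "choices" in chunk and isinstance(chunk["choices"], list):
    new_chunk.choices = [
        choice if isinstance(choice, StreamingChoices) else StreamingChoices(**choice)
        for choice in chunk["choices"]
    ]

print(new_chunk)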


@@ -7103,6 +7103,7 @@ def convert_to_model_response_object(
         model_response_object.model = response_object["model"]
         if start_time is not None and end_time is not None:
+            if isinstance(start_time, type(end_time)):
                 model_response_object._response_ms = (  # type: ignore
                     end_time - start_time
                 ).total_seconds() * 1000
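The second change guards the latency calculation: the response time is only derived when start_time and end_time are the same type, so a float timestamp paired with a datetime no longer hits the subtraction. A standalone sketch of that guard follows; response_ms is a hypothetical helper used for illustration, not the library function, and it assumes datetime inputs when both values are set (the difference must expose total_seconds()).

from datetime import datetime

def response_ms(start_time, end_time):
    # Mirror of the guarded block: both values present and of the same type.
    if start_time is not None and end_time is not None:
        if isinstance(start_time, type(end_time)):
            return (end_time - start_time).total_seconds() * 1000
    return None

start = datetime(2024, 4, 25, 20, 0, 0)
end = datetime(2024, 4, 25, 20, 0, 2)
print(response_ms(start, end))         # 2000.0 (datetimes: timedelta -> milliseconds)
print(response_ms(1714075200.0, end))  # None (mismatched types are skipped by the guard)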