fix(utils.py): fix stream chunk builder for sync/async success

Krrish Dholakia 2023-12-13 07:52:51 -08:00
parent a64bd2ca1e
commit d1aef59fbc
2 changed files with 6 additions and 4 deletions


@@ -78,6 +78,7 @@ class TmpFunction:
     async def async_test_logging_fn(self, kwargs, completion_obj, start_time, end_time):
         print(f"ON ASYNC LOGGING")
         self.async_success = True
+        print(f'kwargs.get("complete_streaming_response"): {kwargs.get("complete_streaming_response")}')
         self.complete_streaming_response_in_callback = kwargs.get("complete_streaming_response")
@@ -102,7 +103,8 @@ def test_async_chat_openai_stream():
         complete_streaming_response = complete_streaming_response.strip("'")
         response1 = tmp_function.complete_streaming_response_in_callback["choices"][0]["message"]["content"]
         response2 = complete_streaming_response
-        assert [ord(c) for c in response1] == [ord(c) for c in response2]
+        # assert [ord(c) for c in response1] == [ord(c) for c in response2]
+        assert response1 == response2
         assert tmp_function.async_success == True
     except Exception as e:
         print(e)
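
For context, the callback pattern this test exercises looks roughly like the sketch below: an async success callback receives the assembled complete_streaming_response via kwargs. The signature and the "complete_streaming_response" key mirror the TmpFunction callback above; the standalone function name and the registration via litellm.success_callback are illustrative assumptions, not part of this commit.

import litellm

# Hedged sketch of an async success callback; assumes async callables are
# accepted in litellm.success_callback, as the TmpFunction callback above suggests.
async def log_complete_streaming_response(kwargs, completion_obj, start_time, end_time):
    complete_response = kwargs.get("complete_streaming_response")
    if complete_response is not None:
        # the assembled response mirrors a normal (non-streaming) completion
        print(complete_response["choices"][0]["message"]["content"])

litellm.success_callback = [log_complete_streaming_response]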


@@ -830,9 +830,9 @@ class Logging:
             complete_streaming_response = None
             if self.stream == True and self.model_call_details.get("litellm_params", {}).get("acompletion", False) == True:
                 # if it's acompletion == True, chunks are built/appended in async_success_handler
-                self.streaming_chunks.append(result)
                 if result.choices[0].finish_reason is not None:  # if it's the last chunk
-                    complete_streaming_response = litellm.stream_chunk_builder(self.streaming_chunks, messages=self.model_call_details.get("messages", None))
+                    streaming_chunks = self.streaming_chunks + [result]
+                    complete_streaming_response = litellm.stream_chunk_builder(streaming_chunks, messages=self.model_call_details.get("messages", None))
             else:
                 # this is a completion() call
                 if self.stream == True:
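
This hunk is the core of the fix: for acompletion() calls, the synchronous success_handler no longer appends the final chunk to self.streaming_chunks (the async_success_handler in the next hunk owns that buffer); it builds a temporary list instead, so the last chunk is not counted twice before stream_chunk_builder runs. A minimal standalone sketch of the pattern, with hypothetical names (StreamLogger, is_last_chunk) rather than the real Logging class:

class StreamLogger:
    def __init__(self):
        self.streaming_chunks = []  # buffer shared by both handlers

    def sync_success_handler(self, result, is_last_chunk):
        # combine without mutating, so the async handler still appends
        # the last chunk exactly once
        if is_last_chunk:
            streaming_chunks = self.streaming_chunks + [result]
            return "".join(streaming_chunks)
        return None

    async def async_success_handler(self, result, is_last_chunk):
        # this handler owns the buffer and does the appending
        self.streaming_chunks.append(result)
        if is_last_chunk:
            return "".join(self.streaming_chunks)
        return None

With the previous behavior (both handlers appending), the final chunk appeared twice in the shared buffer before the complete response was built.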
@@ -1053,7 +1053,7 @@ class Logging:
             if self.stream:
                 if result.choices[0].finish_reason is not None:  # if it's the last chunk
                     self.streaming_chunks.append(result)
-                    print_verbose(f"final set of received chunks: {self.streaming_chunks}")
+                    # print_verbose(f"final set of received chunks: {self.streaming_chunks}")
                     try:
                         complete_streaming_response = litellm.stream_chunk_builder(self.streaming_chunks, messages=self.model_call_details.get("messages", None))
                     except:
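
Both paths hand the collected chunks to litellm.stream_chunk_builder(chunks, messages=...), which reassembles OpenAI-style streaming deltas into a single completion response, i.e. the complete_streaming_response the test reads from kwargs. A much-simplified conceptual sketch, assuming dict-shaped chunks (the real builder also handles roles, tool/function calls, and token usage):

def build_complete_response(chunks):
    # concatenate the incremental "delta" content from each chunk
    content = "".join(
        (chunk["choices"][0]["delta"].get("content") or "")
        for chunk in chunks
    )
    return {
        "choices": [{
            "message": {"role": "assistant", "content": content},
            "finish_reason": chunks[-1]["choices"][0]["finish_reason"],
        }]
    }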