From 6e87d1ca18fb53777db8ac4a7a981c6a9663d41d Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 12 Dec 2023 12:33:34 -0800
Subject: [PATCH] fix(utils.py): safe fail complete streaming response

---
 litellm/utils.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index ccb0450e6..55c7d76c3 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -1044,7 +1044,11 @@ class Logging:
         if self.stream:
             if result.choices[0].finish_reason is not None: # if it's the last chunk
                 self.streaming_chunks.append(result)
-                complete_streaming_response = litellm.stream_chunk_builder(self.streaming_chunks, messages=self.model_call_details.get("messages", None))
+                print_verbose(f"final set of received chunks: {self.streaming_chunks}")
+                try:
+                    complete_streaming_response = litellm.stream_chunk_builder(self.streaming_chunks, messages=self.model_call_details.get("messages", None))
+                except:
+                    complete_streaming_response = None
             else:
                 self.streaming_chunks.append(result)
         if complete_streaming_response:
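
As a follow-up note, here is a minimal, self-contained sketch of the safe-fail pattern this patch applies: the chunk-aggregation call is wrapped in try/except so that a malformed chunk makes the complete response degrade to None instead of raising inside the logging path. The helper names (build_complete_response, safe_complete_response) and the chunk shape are hypothetical stand-ins for illustration, not litellm's actual API.

from typing import List, Optional


def build_complete_response(chunks: List[dict]) -> str:
    # Hypothetical stand-in for litellm.stream_chunk_builder: joins the
    # text of each chunk, raising KeyError if a chunk is malformed.
    return "".join(chunk["text"] for chunk in chunks)


def safe_complete_response(chunks: List[dict]) -> Optional[str]:
    # Safe-fail wrapper: a logging/bookkeeping step should never crash
    # the request, so aggregation errors are swallowed and None is
    # returned for the caller to check before use.
    try:
        return build_complete_response(chunks)
    except Exception:
        return None


print(safe_complete_response([{"text": "Hello, "}, {"text": "world"}]))  # -> Hello, world
print(safe_complete_response([{"oops": 1}]))                             # -> None

One design difference worth noting: the sketch catches Exception rather than using a bare except: as the patch does; a bare except also swallows KeyboardInterrupt and SystemExit, which is rarely what a logging path wants.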