Bug fix - SSE "data:" prefix was stripped from the entire content (not just the prefix) in streamed Gemini responses (#9070)

* _strip_sse_data_from_chunk

* use _strip_sse_data_from_chunk

* use _strip_sse_data_from_chunk

* use _strip_sse_data_from_chunk

* _strip_sse_data_from_chunk

* test_strip_sse_data_from_chunk

* _strip_sse_data_from_chunk

* testing

* _strip_sse_data_from_chunk
This commit is contained in:
Ishaan Jaff 2025-03-07 21:06:39 -08:00 committed by GitHub
parent 2163d5c3e9
commit e2d612efd9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 213 additions and 8 deletions

View file

@ -3,6 +3,7 @@ from typing import AsyncIterator, Iterator, List, Optional, Union
import httpx
import litellm
from litellm import verbose_logger
from litellm.llms.base_llm.chat.transformation import BaseLLMException
from litellm.types.utils import GenericStreamingChunk as GChunk
@ -78,7 +79,11 @@ class AWSEventStreamDecoder:
message = self._parse_message_from_event(event)
if message:
# remove data: prefix and "\n\n" at the end
message = message.replace("data:", "").replace("\n\n", "")
message = (
litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message)
or ""
)
message = message.replace("\n\n", "")
# Accumulate JSON data
accumulated_json += message
@ -127,7 +132,11 @@ class AWSEventStreamDecoder:
if message:
verbose_logger.debug("sagemaker parsed chunk bytes %s", message)
# remove data: prefix and "\n\n" at the end
message = message.replace("data:", "").replace("\n\n", "")
message = (
litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message)
or ""
)
message = message.replace("\n\n", "")
# Accumulate JSON data
accumulated_json += message