Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
Bug fix - the string "data:" was stripped from the entire content in streamed Gemini responses (#9070)
* _strip_sse_data_from_chunk
* use _strip_sse_data_from_chunk
* use _strip_sse_data_from_chunk
* use _strip_sse_data_from_chunk
* _strip_sse_data_from_chunk
* test_strip_sse_data_from_chunk
* _strip_sse_data_from_chunk
* testing
* _strip_sse_data_from_chunk
This commit is contained in:
parent: 2163d5c3e9
commit: e2d612efd9

7 changed files with 213 additions and 8 deletions
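The root cause: the streaming decoder removed every occurrence of the literal string `data:` from a chunk via `message.replace("data:", "")`, so any response whose content legitimately contained `data:` was silently corrupted. The fix delegates to `litellm.CustomStreamWrapper._strip_sse_data_from_chunk`, which removes only the SSE framing prefix at the start of a chunk. The helper below is a minimal illustrative sketch of that idea, not the shipped implementation (the real code is a static method on `CustomStreamWrapper`):

```python
from typing import Optional


def _strip_sse_data_from_chunk(chunk: Optional[str]) -> Optional[str]:
    """Illustrative sketch (not the shipped code): strip only the SSE
    "data:" framing prefix from the start of a chunk.

    Unlike chunk.replace("data:", ""), this leaves any "data:" that
    appears inside the actual message content untouched.
    """
    if chunk is None:
        return None
    if chunk.startswith("data: "):
        return chunk[len("data: "):]
    if chunk.startswith("data:"):
        return chunk[len("data:"):]
    return chunk
```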
Changes to the SageMaker `AWSEventStreamDecoder`:

@@ -3,6 +3,7 @@ from typing import AsyncIterator, Iterator, List, Optional, Union
 
 import httpx
 
+import litellm
 from litellm import verbose_logger
 from litellm.llms.base_llm.chat.transformation import BaseLLMException
 from litellm.types.utils import GenericStreamingChunk as GChunk
@@ -78,7 +79,11 @@ class AWSEventStreamDecoder:
             message = self._parse_message_from_event(event)
             if message:
                 # remove data: prefix and "\n\n" at the end
-                message = message.replace("data:", "").replace("\n\n", "")
+                message = (
+                    litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message)
+                    or ""
+                )
+                message = message.replace("\n\n", "")
 
                 # Accumulate JSON data
                 accumulated_json += message
@@ -127,7 +132,11 @@ class AWSEventStreamDecoder:
             if message:
                 verbose_logger.debug("sagemaker parsed chunk bytes %s", message)
                 # remove data: prefix and "\n\n" at the end
-                message = message.replace("data:", "").replace("\n\n", "")
+                message = (
+                    litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message)
+                    or ""
+                )
+                message = message.replace("\n\n", "")
 
                 # Accumulate JSON data
                 accumulated_json += message
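A quick before/after comparison of the two approaches, using a hypothetical chunk (made up for illustration) whose content itself contains the word `data:`:

```python
# Hypothetical streamed chunk whose content itself contains "data:".
chunk = 'data: {"text": "Our data: pipeline is healthy"}\n\n'

# Old behaviour: every occurrence of "data:" is removed, corrupting the content.
old = chunk.replace("data:", "").replace("\n\n", "")
print(repr(old))  # ' {"text": "Our  pipeline is healthy"}'

# New behaviour: strip only the SSE framing prefix at the start of the chunk
# (same idea as _strip_sse_data_from_chunk in the sketch above).
new = chunk[len("data: "):] if chunk.startswith("data: ") else chunk
new = new.replace("\n\n", "")
print(repr(new))  # '{"text": "Our data: pipeline is healthy"}'
```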