Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
Litellm dev 04 22 2025 p1 (#10206)
* fix(openai.py): initial commit adding generic event type for openai responses api streaming. Ensures handling for undocumented event types, e.g. "response.reasoning_summary_part.added" (see the event-fallback sketch below)
* fix(transformation.py): handle unknown openai response type
* fix(datadog_llm_observability.py): handle dict[str, any] -> dict[str, str] conversion. Fixes https://github.com/BerriAI/litellm/issues/9494 (see the conversion sketch below)
* test: add more unit testing
* test: add unit test
* fix(common_utils.py): fix message with content list
* test: update testing
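The first fix is about tolerating event types that the OpenAI Responses API emits but does not document, such as "response.reasoning_summary_part.added". A minimal sketch of that fallback pattern, assuming a hypothetical GenericEvent catch-all model and KNOWN_EVENTS registry (the names and fields here are illustrative, not litellm's actual definitions):

from typing import Any, Dict, Type
from pydantic import BaseModel, ConfigDict

class GenericEvent(BaseModel):
    """Catch-all for undocumented streaming event types (illustrative)."""
    model_config = ConfigDict(extra="allow")  # keep unknown fields instead of raising
    type: str

# Hypothetical registry; real code would map every documented "type"
# string to its concrete event model.
KNOWN_EVENTS: Dict[str, Type[BaseModel]] = {}

def parse_event(payload: Dict[str, Any]) -> BaseModel:
    """Route a raw event dict to its model, falling back to GenericEvent."""
    model_cls = KNOWN_EVENTS.get(payload.get("type", ""), GenericEvent)
    return model_cls(**payload)

With this shape, an undocumented event such as {"type": "response.reasoning_summary_part.added", "part": {}} parses into a GenericEvent instead of raising a validation error, so the stream keeps flowing.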
Parent: f670ebeb2f
Commit: 217681eb5e
12 changed files with 165 additions and 10 deletions
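The Datadog fix in the commit message flattens arbitrary metadata (dict[str, Any]) into the dict[str, str] shape expected downstream. A minimal sketch of such a conversion, with a hypothetical helper name (this is not necessarily the code path litellm uses):

import json
from typing import Any, Dict

def stringify_values(metadata: Dict[str, Any]) -> Dict[str, str]:
    """Coerce every metadata value to a string (illustrative helper)."""
    out: Dict[str, str] = {}
    for key, value in metadata.items():
        if isinstance(value, str):
            out[key] = value
        else:
            try:
                # JSON keeps dicts/lists/numbers readable after conversion
                out[key] = json.dumps(value)
            except (TypeError, ValueError):
                out[key] = str(value)  # last resort for non-serializable objects
    return out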
@@ -44,12 +44,12 @@ class BaseResponsesAPIStreamingIterator:
         self.responses_api_provider_config = responses_api_provider_config
         self.completed_response: Optional[ResponsesAPIStreamingResponse] = None
         self.start_time = datetime.now()
 
         # set request kwargs
         self.litellm_metadata = litellm_metadata
         self.custom_llm_provider = custom_llm_provider
 
-    def _process_chunk(self, chunk):
+    def _process_chunk(self, chunk) -> Optional[ResponsesAPIStreamingResponse]:
         """Process a single chunk of data from the stream"""
         if not chunk:
             return None
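The hunk above narrows _process_chunk from an implicit return type to an explicit Optional[ResponsesAPIStreamingResponse]. A minimal sketch of what that contract buys a caller (the surrounding loop is illustrative, not litellm's iterator code):

from typing import Iterable

def last_typed_event(iterator, raw_chunks: Iterable[str]):
    """Drain a stream; each _process_chunk result is a typed event or None."""
    last = None
    for raw in raw_chunks:
        event = iterator._process_chunk(raw)
        if event is None:
            # empty/keep-alive chunks are skipped per the annotated contract
            continue
        last = event
    return last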