Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 02:34:29 +00:00
fixes streaming iterator

commit e91bdff9f4
parent 0b2f68ddd2

1 changed file with 13 additions and 5 deletions
@@ -5,12 +5,11 @@ from litellm.main import stream_chunk_builder
 from litellm.responses.litellm_completion_transformation.transformation import (
     LiteLLMCompletionResponsesConfig,
 )
-from litellm.responses.streaming_iterator import (
-    ResponsesAPIStreamingIterator,
-    SyncResponsesAPIStreamingIterator,
-)
+from litellm.responses.streaming_iterator import ResponsesAPIStreamingIterator
 from litellm.types.llms.openai import (
     ResponseCompletedEvent,
+    ResponseInputParam,
+    ResponsesAPIOptionalRequestParams,
     ResponsesAPIStreamEvents,
     ResponsesAPIStreamingResponse,
 )
@@ -29,10 +28,16 @@ class LiteLLMCompletionStreamingIterator(ResponsesAPIStreamingIterator):
     def __init__(
         self,
         litellm_custom_stream_wrapper: litellm.CustomStreamWrapper,
+        request_input: Union[str, ResponseInputParam],
+        responses_api_request: ResponsesAPIOptionalRequestParams,
     ):
         self.litellm_custom_stream_wrapper: litellm.CustomStreamWrapper = (
             litellm_custom_stream_wrapper
         )
+        self.request_input: Union[str, ResponseInputParam] = request_input
+        self.responses_api_request: ResponsesAPIOptionalRequestParams = (
+            responses_api_request
+        )
         self.collected_chunks: List[ModelResponseStream] = []
         self.finished: bool = False
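
The hunk above widens the constructor so the iterator keeps the original request alongside the wrapped stream. A minimal sketch of how it might now be instantiated; the wrapper object, prompt string, and request params below are illustrative assumptions, not taken from the commit:

    # Sketch only: assumes `stream_wrapper` is an existing litellm.CustomStreamWrapper
    # from a streaming chat completion call, and that LiteLLMCompletionStreamingIterator
    # is imported from the changed module (its path is not shown in this view).
    iterator = LiteLLMCompletionStreamingIterator(
        litellm_custom_stream_wrapper=stream_wrapper,
        request_input="What is the capital of France?",  # str or ResponseInputParam
        responses_api_request={"temperature": 0.2},      # ResponsesAPIOptionalRequestParams fields
    )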
@@ -65,10 +70,13 @@ class LiteLLMCompletionStreamingIterator(ResponsesAPIStreamingIterator):
             Union[ModelResponse, TextCompletionResponse]
         ] = stream_chunk_builder(chunks=self.collected_chunks)
         if litellm_model_response and isinstance(litellm_model_response, ModelResponse):
             return ResponseCompletedEvent(
                 type=ResponsesAPIStreamEvents.RESPONSE_COMPLETED,
                 response=LiteLLMCompletionResponsesConfig.transform_chat_completion_response_to_responses_api_response(
-                    litellm_model_response
+                    request_input=self.request_input,
+                    chat_completion_response=litellm_model_response,
+                    responses_api_request=self.responses_api_request,
                 ),
             )
         else:
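
The final hunk replaces the positional argument with keyword arguments so the transformation sees the original request when it builds the completed event. A consumption sketch, assuming the iterator is driven asynchronously like its ResponsesAPIStreamingIterator base class; the loop below is illustrative, not part of the commit:

    # Sketch only: assumes async iteration; `iterator` is the instance built above.
    async def collect_final_response(iterator):
        async for event in iterator:
            if event.type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
                # event.response is the chat completion result transformed into the
                # Responses API shape, built from request_input and
                # responses_api_request passed to the constructor.
                return event.response
        return None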