fix stream=True

This commit is contained in:
Ishaan Jaff 2025-04-17 23:01:24 -07:00
parent e8b9b4f68b
commit a5c526a2cd
2 changed files with 11 additions and 1 deletion

View file

@@ -5,6 +5,9 @@ Handler for transforming responses api requests to litellm.completion requests
from typing import Any, Coroutine, Optional, Union
import litellm
from litellm.responses.litellm_completion_transformation.streaming_iterator import (
LiteLLMCompletionStreamingIterator,
)
from litellm.responses.litellm_completion_transformation.transformation import (
LiteLLMCompletionResponsesConfig,
)
@@ -26,6 +29,7 @@ class LiteLLMCompletionTransformationHandler:
responses_api_request: ResponsesAPIOptionalRequestParams,
custom_llm_provider: Optional[str] = None,
_is_async: bool = False,
stream: Optional[bool] = None,
**kwargs,
) -> Union[
ResponsesAPIResponse,
@@ -40,6 +44,8 @@ class LiteLLMCompletionTransformationHandler:
input=input,
responses_api_request=responses_api_request,
custom_llm_provider=custom_llm_provider,
stream=stream,
**kwargs,
)
)
@@ -88,4 +94,7 @@ class LiteLLMCompletionTransformationHandler:
return responses_api_response
raise ValueError("litellm_completion_response is not a ModelResponse")
elif isinstance(litellm_completion_response, litellm.CustomStreamWrapper):
return LiteLLMCompletionStreamingIterator(
litellm_custom_stream_wrapper=litellm_completion_response,
)