add fake_stream to llm http handler

Ishaan Jaff 2025-03-20 09:55:59 -07:00
parent 1567e52185
commit 6608770e64
2 changed files with 5 additions and 0 deletions


@@ -978,6 +978,7 @@ class BaseLLMHTTPHandler:
         timeout: Optional[Union[float, httpx.Timeout]] = None,
         client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
         _is_async: bool = False,
+        fake_stream: bool = False,
     ) -> Union[
         ResponsesAPIResponse,
         BaseResponsesAPIStreamingIterator,
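
The diff only threads the new fake_stream flag into the handler signatures; it does not show how the handler acts on it. As a rough, hedged sketch (the FakeStreamIterator name and shape below are made up for illustration, not part of this commit), faking a stream typically means issuing one ordinary, non-streaming request and then replaying the finished response as if it were a single streamed chunk:

# Hypothetical sketch only -- illustrative, not code from this diff.
from typing import Any


class FakeStreamIterator:
    """Replays one already-complete response as a single streamed chunk."""

    def __init__(self, response: Any) -> None:
        self._response = response
        self._emitted = False

    def __iter__(self) -> "FakeStreamIterator":
        return self

    def __next__(self) -> Any:
        # Yield the whole response exactly once, then stop the "stream".
        if self._emitted:
            raise StopIteration
        self._emitted = True
        return self._response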
@@ -1100,6 +1101,7 @@ class BaseLLMHTTPHandler:
         extra_body: Optional[Dict[str, Any]] = None,
         timeout: Optional[Union[float, httpx.Timeout]] = None,
         client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
+        fake_stream: bool = False,
     ) -> Union[ResponsesAPIResponse, BaseResponsesAPIStreamingIterator]:
         """
         Async version of the responses API handler.


@@ -232,6 +232,9 @@ def responses(
         timeout=timeout or request_timeout,
         _is_async=_is_async,
         client=kwargs.get("client"),
+        fake_stream=responses_api_provider_config.should_fake_stream(
+            model=model, stream=stream, custom_llm_provider=custom_llm_provider
+        ),
     )
     return response
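
The call site passes model, stream, and custom_llm_provider into responses_api_provider_config.should_fake_stream(); that method's body is not included in this commit. A minimal sketch of the kind of decision such a method could make, assuming the provider config knows which models lack native streaming support (the class name and model set below are hypothetical):

# Hypothetical sketch only -- should_fake_stream() is called above, but its
# implementation is not part of this diff. Class and model set are illustrative.
from typing import Optional


class ExampleResponsesAPIConfig:
    # Assumed lookup of models that cannot stream natively (illustrative).
    MODELS_WITHOUT_NATIVE_STREAMING = {"example-non-streaming-model"}

    def should_fake_stream(
        self,
        model: Optional[str],
        stream: Optional[bool],
        custom_llm_provider: Optional[str] = None,
    ) -> bool:
        # Faking is only relevant when the caller actually asked to stream.
        if not stream:
            return False
        # Fake the stream for models that lack native streaming support.
        return model in self.MODELS_WITHOUT_NATIVE_STREAMING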