fixes for responses API

This commit is contained in:
Ishaan Jaff 2025-04-17 09:53:47 -07:00
parent 7859365416
commit 93d44ee013

View file

@@ -462,7 +462,7 @@ class BaseLLMHTTPHandler:
                )
            if fake_stream is True:
-                model_response: (ModelResponse) = provider_config.transform_response(
+                model_response: ModelResponse = provider_config.transform_response(
                    model=model,
                    raw_response=response,
                    model_response=litellm.ModelResponse(),
@@ -595,7 +595,7 @@ class BaseLLMHTTPHandler:
                )
            if fake_stream is True:
-                model_response: (ModelResponse) = provider_config.transform_response(
+                model_response: ModelResponse = provider_config.transform_response(
                    model=model,
                    raw_response=response,
                    model_response=litellm.ModelResponse(),
@@ -1055,9 +1055,16 @@ class BaseLLMHTTPHandler:
        if extra_headers:
            headers.update(extra_headers)

+        # Check if streaming is requested
+        stream = response_api_optional_request_params.get("stream", False)
+
        api_base = responses_api_provider_config.get_complete_url(
            api_base=litellm_params.api_base,
+            api_key=litellm_params.api_key,
            model=model,
+            optional_params=response_api_optional_request_params,
+            litellm_params=dict(litellm_params),
+            stream=stream,
        )

        data = responses_api_provider_config.transform_responses_api_request(
@ -1079,9 +1086,6 @@ class BaseLLMHTTPHandler:
}, },
) )
# Check if streaming is requested
stream = response_api_optional_request_params.get("stream", False)
try: try:
if stream: if stream:
# For streaming, use stream=True in the request # For streaming, use stream=True in the request
@ -1170,9 +1174,16 @@ class BaseLLMHTTPHandler:
if extra_headers: if extra_headers:
headers.update(extra_headers) headers.update(extra_headers)
# Check if streaming is requested
stream = response_api_optional_request_params.get("stream", False)
api_base = responses_api_provider_config.get_complete_url( api_base = responses_api_provider_config.get_complete_url(
api_base=litellm_params.api_base, api_base=litellm_params.api_base,
api_key=litellm_params.api_key,
model=model, model=model,
optional_params=response_api_optional_request_params,
litellm_params=dict(litellm_params),
stream=stream,
) )
data = responses_api_provider_config.transform_responses_api_request( data = responses_api_provider_config.transform_responses_api_request(
@@ -1193,8 +1204,6 @@ class BaseLLMHTTPHandler:
                "headers": headers,
            },
        )

-        # Check if streaming is requested
-        stream = response_api_optional_request_params.get("stream", False)
        try:
            if stream: