mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fixes for responses API
This commit is contained in:
parent 7859365416
commit 93d44ee013

1 changed file with 16 additions and 7 deletions
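In short: the first two hunks drop the stray parentheses around the ModelResponse annotation in the fake-stream paths, and the two responses-API handlers now resolve the stream flag before calling get_complete_url() (which also gains api_key, optional_params, litellm_params, and stream arguments), so the now-redundant stream check after the request is logged can be deleted.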
@@ -462,7 +462,7 @@ class BaseLLMHTTPHandler:
             )

             if fake_stream is True:
-                model_response: (ModelResponse) = provider_config.transform_response(
+                model_response: ModelResponse = provider_config.transform_response(
                     model=model,
                     raw_response=response,
                     model_response=litellm.ModelResponse(),
@@ -595,7 +595,7 @@ class BaseLLMHTTPHandler:
             )

             if fake_stream is True:
-                model_response: (ModelResponse) = provider_config.transform_response(
+                model_response: ModelResponse = provider_config.transform_response(
                     model=model,
                     raw_response=response,
                     model_response=litellm.ModelResponse(),
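The annotation change in the two hunks above is purely syntactic. A minimal, runnable sketch of the difference; the ModelResponse and transform_response below are stand-ins for illustration, not litellm's actual classes:

from dataclasses import dataclass


@dataclass
class ModelResponse:
    # Stand-in for litellm.ModelResponse, only so this sketch runs.
    text: str = ""


def transform_response(raw: str) -> ModelResponse:
    # Stand-in for provider_config.transform_response(...).
    return ModelResponse(text=raw)


# Before: Python parses a parenthesized annotation fine, so this was not a
# runtime bug, but it is unidiomatic and trips up linters and readers.
model_response: (ModelResponse) = transform_response("ok")

# After: the conventional spelling of the same annotation.
model_response: ModelResponse = transform_response("ok")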
@@ -1055,9 +1055,16 @@ class BaseLLMHTTPHandler:
         if extra_headers:
             headers.update(extra_headers)

+        # Check if streaming is requested
+        stream = response_api_optional_request_params.get("stream", False)
+
         api_base = responses_api_provider_config.get_complete_url(
             api_base=litellm_params.api_base,
+            api_key=litellm_params.api_key,
             model=model,
+            optional_params=response_api_optional_request_params,
+            litellm_params=dict(litellm_params),
+            stream=stream,
         )

         data = responses_api_provider_config.transform_responses_api_request(
@@ -1079,9 +1086,6 @@ class BaseLLMHTTPHandler:
             },
         )

-        # Check if streaming is requested
-        stream = response_api_optional_request_params.get("stream", False)
-
         try:
             if stream:
                 # For streaming, use stream=True in the request
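To see why the reordering in these two hunks matters, here is a minimal sketch, not litellm's actual implementation: if a provider's endpoint differs for streaming, get_complete_url must already know the flag, so reading stream only after the URL is built (as the removed lines did) is too late. The provider URL scheme below is hypothetical.

from typing import Optional


def get_complete_url(api_base: str, model: str, stream: Optional[bool] = None) -> str:
    # Hypothetical provider whose streaming endpoint uses a different suffix.
    suffix = "streamGenerateContent" if stream else "generateContent"
    return f"{api_base}/models/{model}:{suffix}"


optional_params = {"stream": True, "temperature": 0.0}

# After the fix: resolve the flag first, then build the URL with it.
stream = optional_params.get("stream", False)
url = get_complete_url("https://example.invalid/v1", "my-model", stream=stream)
assert url.endswith(":streamGenerateContent")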
@@ -1170,9 +1174,16 @@ class BaseLLMHTTPHandler:
         if extra_headers:
             headers.update(extra_headers)

+        # Check if streaming is requested
+        stream = response_api_optional_request_params.get("stream", False)
+
         api_base = responses_api_provider_config.get_complete_url(
             api_base=litellm_params.api_base,
+            api_key=litellm_params.api_key,
             model=model,
+            optional_params=response_api_optional_request_params,
+            litellm_params=dict(litellm_params),
+            stream=stream,
         )

         data = responses_api_provider_config.transform_responses_api_request(
@@ -1193,8 +1204,6 @@ class BaseLLMHTTPHandler:
                 "headers": headers,
             },
         )
-        # Check if streaming is requested
-        stream = response_api_optional_request_params.get("stream", False)

         try:
             if stream:
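The last two hunks repeat the same reordering for the second responses-API code path in the same class (judging only by the line numbers, likely the async counterpart of the handler patched above), so both paths now compute stream before building the URL.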