[Feat] Unified Responses API - Add Azure Responses API support (#10116)

* initial commit for azure responses api support

* update get complete url

* fixes for responses API

* working azure responses API

* working responses API

* test suite for responses API

* azure responses API test suite

* fix test with complete url

* fix test refactor

* test fix metadata checks

* fix code quality check
Ishaan Jaff, 2025-04-17 16:47:59 -07:00 (committed by GitHub)
parent 72c820e8c8
commit 0f31729e6b
11 changed files with 428 additions and 191 deletions

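For context on what this feature enables, here is a minimal usage sketch of the unified Responses API against an Azure deployment. The deployment name, endpoint, and key are placeholders, not values from this change; parameter routing follows litellm's usual kwargs:

import litellm

# Non-streaming: the "azure/" prefix routes the call to the Azure
# responses provider config added in this change.
response = litellm.responses(
    model="azure/my-deployment",                      # hypothetical deployment
    input="Say hello from the unified Responses API.",
    api_base="https://my-resource.openai.azure.com",  # hypothetical endpoint
    api_key="<AZURE_API_KEY>",                        # placeholder
)
print(response)

# Streaming: this exercises the handler paths below, where the stream
# flag is now read before the request URL is built.
for event in litellm.responses(
    model="azure/my-deployment",
    input="Stream one short sentence.",
    api_base="https://my-resource.openai.azure.com",
    api_key="<AZURE_API_KEY>",
    stream=True,
):
    print(event)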
@@ -462,7 +462,7 @@ class BaseLLMHTTPHandler:
             )
         if fake_stream is True:
-            model_response: (ModelResponse) = provider_config.transform_response(
+            model_response: ModelResponse = provider_config.transform_response(
                 model=model,
                 raw_response=response,
                 model_response=litellm.ModelResponse(),
@@ -595,7 +595,7 @@ class BaseLLMHTTPHandler:
             )
         if fake_stream is True:
-            model_response: (ModelResponse) = provider_config.transform_response(
+            model_response: ModelResponse = provider_config.transform_response(
                 model=model,
                 raw_response=response,
                 model_response=litellm.ModelResponse(),
@@ -1055,9 +1055,16 @@ class BaseLLMHTTPHandler:
         if extra_headers:
             headers.update(extra_headers)
+        # Check if streaming is requested
+        stream = response_api_optional_request_params.get("stream", False)
         api_base = responses_api_provider_config.get_complete_url(
             api_base=litellm_params.api_base,
+            api_key=litellm_params.api_key,
             model=model,
+            optional_params=response_api_optional_request_params,
+            litellm_params=dict(litellm_params),
+            stream=stream,
         )
         data = responses_api_provider_config.transform_responses_api_request(
@@ -1079,9 +1086,6 @@ class BaseLLMHTTPHandler:
             },
         )
-        # Check if streaming is requested
-        stream = response_api_optional_request_params.get("stream", False)
         try:
             if stream:
                 # For streaming, use stream=True in the request
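The expanded call site above implies a wider get_complete_url signature on the responses provider config, now taking the API key, the optional params, the litellm params, and the stream flag. A hypothetical sketch of a provider config matching that call shape follows; the Azure URL layout and api-version default are assumptions, not code from this commit:

from typing import Optional


class HypotheticalAzureResponsesConfig:
    def get_complete_url(
        self,
        api_base: Optional[str],
        api_key: Optional[str],
        model: str,
        optional_params: dict,
        litellm_params: dict,
        stream: Optional[bool] = None,
    ) -> str:
        # `stream` is accepted so providers whose streaming URL differs
        # from the non-streaming one can branch on it.
        if api_base is None:
            raise ValueError("api_base is required for Azure responses API calls")
        # Assumed URL shape: Azure serves the Responses API under /openai
        # and requires an api-version query parameter.
        api_version = litellm_params.get("api_version") or "2025-03-01-preview"
        return f"{api_base.rstrip('/')}/openai/responses?api-version={api_version}"

The same change is mirrored in the async path below.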
@@ -1170,9 +1174,16 @@ class BaseLLMHTTPHandler:
         if extra_headers:
             headers.update(extra_headers)
+        # Check if streaming is requested
+        stream = response_api_optional_request_params.get("stream", False)
         api_base = responses_api_provider_config.get_complete_url(
             api_base=litellm_params.api_base,
+            api_key=litellm_params.api_key,
             model=model,
+            optional_params=response_api_optional_request_params,
+            litellm_params=dict(litellm_params),
+            stream=stream,
         )
         data = responses_api_provider_config.transform_responses_api_request(
@@ -1193,8 +1204,6 @@ class BaseLLMHTTPHandler:
                 "headers": headers,
             },
         )
-        # Check if streaming is requested
-        stream = response_api_optional_request_params.get("stream", False)
         try:
             if stream:
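For reference, a simplified sketch of what the try/if stream branch amounts to, assuming an httpx-based client; this is illustrative, not the handler's literal code:

import httpx


def post_responses_api(url: str, data: dict, headers: dict, stream: bool):
    """Yield SSE lines when streaming, or a single parsed body otherwise."""
    client = httpx.Client()
    try:
        if stream:
            # For streaming, keep the connection open and surface events
            # incrementally instead of buffering the whole body.
            with client.stream("POST", url, json=data, headers=headers) as resp:
                resp.raise_for_status()
                for line in resp.iter_lines():
                    if line:
                        yield line
        else:
            resp = client.post(url, json=data, headers=headers)
            resp.raise_for_status()
            yield resp.json()
    finally:
        client.close()

Computing `stream` once, before get_complete_url, lets the URL builder and the request branch share the same flag instead of re-reading it after the request payload is already assembled.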