Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
Add exception mapping for responses API
commit c2ed7add37
parent f632e93dd1
2 changed files with 99 additions and 72 deletions
@@ -128,7 +128,6 @@ def exception_type( # type: ignore # noqa: PLR0915
     extra_kwargs: Optional[dict] = {},
 ):
     """Maps an LLM Provider Exception to OpenAI Exception Format"""
-
     if any(
         isinstance(original_exception, exc_type)
         for exc_type in litellm.LITELLM_EXCEPTION_TYPES
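For orientation: `litellm.LITELLM_EXCEPTION_TYPES` (taken verbatim from the hunk above) is the collection of LiteLLM's OpenAI-style exception classes, and the `any(...)` guard checks whether the incoming error is already one of them, presumably so already-mapped exceptions pass through unchanged. A minimal standalone sketch of that check, with only the helper name invented here:

```python
# Sketch of the membership test shown above; litellm.LITELLM_EXCEPTION_TYPES
# comes from the diff, the helper name is illustrative.
import litellm

def is_already_mapped(original_exception: Exception) -> bool:
    """True when the error is already a LiteLLM (OpenAI-style) exception."""
    return any(
        isinstance(original_exception, exc_type)
        for exc_type in litellm.LITELLM_EXCEPTION_TYPES
    )
```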
@@ -58,15 +58,24 @@ async def aresponses(
     extra_query: Optional[Dict[str, Any]] = None,
     extra_body: Optional[Dict[str, Any]] = None,
     timeout: Optional[Union[float, httpx.Timeout]] = None,
+    # LiteLLM specific params,
+    custom_llm_provider: Optional[str] = None,
     **kwargs,
 ) -> Union[ResponsesAPIResponse, BaseResponsesAPIStreamingIterator]:
     """
     Async: Handles responses API requests by reusing the synchronous function
     """
+    local_vars = locals()
     try:
         loop = asyncio.get_event_loop()
         kwargs["aresponses"] = True
 
+        # get custom llm provider so we can use this for mapping exceptions
+        if custom_llm_provider is None:
+            _, custom_llm_provider, _, _ = litellm.get_llm_provider(
+                model=model, api_base=local_vars.get("base_url", None)
+            )
+
         func = partial(
             responses,
             input=input,
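The new block resolves `custom_llm_provider` up front so the `except` handler added further down can pass a concrete provider name to `litellm.exception_type`. The 4-tuple return order used here matches the unpacking that appears later in the synchronous `responses()` hunk (`model, custom_llm_provider, dynamic_api_key, dynamic_api_base`). A small sketch, with the model string chosen purely for illustration:

```python
# Illustrative call to litellm.get_llm_provider; the 4-tuple unpacking mirrors
# the diff, the "anthropic/..." model string is only an example.
import litellm

model, custom_llm_provider, dynamic_api_key, dynamic_api_base = (
    litellm.get_llm_provider(model="anthropic/claude-3-5-sonnet-20240620")
)
print(custom_llm_provider)  # expected: "anthropic" for this prefix
```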
@@ -91,6 +100,7 @@ async def aresponses(
             extra_query=extra_query,
             extra_body=extra_body,
             timeout=timeout,
+            custom_llm_provider=custom_llm_provider,
             **kwargs,
         )
 
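For context on why `custom_llm_provider` has to be threaded through the `partial`: `aresponses` builds a bound call to the synchronous `responses()` and hands it to the event loop, so any parameter the sync path needs must be baked into `func`. A generic sketch of that wrap-a-sync-function pattern, assuming a plain `run_in_executor` dispatch (litellm's actual dispatch code is not shown in this diff):

```python
# Generic "async wrapper reuses the sync function" pattern, sketched with
# made-up functions; only the loop + partial idea comes from the diff.
import asyncio
from functools import partial

def sync_call(prompt: str, *, provider: str = "openai") -> str:
    return f"[{provider}] {prompt}"

async def async_call(prompt: str, provider: str = "openai") -> str:
    loop = asyncio.get_event_loop()
    func = partial(sync_call, prompt, provider=provider)
    return await loop.run_in_executor(None, func)

print(asyncio.run(async_call("ping", provider="azure")))  # -> "[azure] ping"
```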
@@ -104,7 +114,13 @@ async def aresponses(
             response = init_response
         return response
     except Exception as e:
-        raise e
+        raise litellm.exception_type(
+            model=model,
+            custom_llm_provider=custom_llm_provider,
+            original_exception=e,
+            completion_kwargs=local_vars,
+            extra_kwargs=kwargs,
+        )
 
 
 @client
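The practical effect for callers: failures inside the async Responses API path are now routed through `litellm.exception_type`, so they surface as LiteLLM's OpenAI-style exception classes instead of raw provider errors. A hedged caller-side sketch (the model name and the specific exception classes shown are illustrative choices, not taken from this diff):

```python
# Caller-side sketch: errors from aresponses are caught as LiteLLM's mapped
# exception types. Model name is illustrative only.
import asyncio
import litellm

async def main() -> None:
    try:
        response = await litellm.aresponses(
            model="openai/gpt-4o",
            input="Write a one-line haiku about retries.",
        )
        print(response)
    except litellm.RateLimitError:
        print("provider returned a 429; back off and retry")
    except litellm.APIConnectionError:
        print("could not reach the provider")

asyncio.run(main())
```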
@@ -133,12 +149,16 @@ def responses(
     extra_query: Optional[Dict[str, Any]] = None,
     extra_body: Optional[Dict[str, Any]] = None,
     timeout: Optional[Union[float, httpx.Timeout]] = None,
+    # LiteLLM specific params,
+    custom_llm_provider: Optional[str] = None,
     **kwargs,
 ):
     """
     Synchronous version of the Responses API.
     Uses the synchronous HTTP handler to make requests.
     """
+    local_vars = locals()
+    try:
         litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj")  # type: ignore
         litellm_call_id: Optional[str] = kwargs.get("litellm_call_id", None)
         _is_async = kwargs.pop("aresponses", False) is True
@@ -148,7 +168,7 @@ def responses(
         model, custom_llm_provider, dynamic_api_key, dynamic_api_base = (
             litellm.get_llm_provider(
                 model=model,
-                custom_llm_provider=kwargs.get("custom_llm_provider", None),
+                custom_llm_provider=custom_llm_provider,
                 api_base=litellm_params.api_base,
                 api_key=litellm_params.api_key,
             )
@@ -169,12 +189,12 @@ def responses(
                 message=f"Responses API not available for custom_llm_provider={custom_llm_provider}, model: {model}",
             )
 
-        # Get all parameters using locals() and combine with kwargs
-        local_vars = locals()
         local_vars.update(kwargs)
         # Get ResponsesAPIOptionalRequestParams with only valid parameters
         response_api_optional_params: ResponsesAPIOptionalRequestParams = (
-            ResponsesAPIRequestUtils.get_requested_response_api_optional_param(local_vars)
+            ResponsesAPIRequestUtils.get_requested_response_api_optional_param(
+                local_vars
+            )
         )
 
         # Get optional parameters for the responses API
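Moving `local_vars = locals()` to the top of `responses()` serves two purposes: the snapshot still feeds `get_requested_response_api_optional_param`, and it is now also in scope for the new `except` block, where it is passed as `completion_kwargs`. A generic sketch of the snapshot-and-filter idea with a hypothetical function (the allow-list and names below are not litellm's):

```python
# Generic locals()-snapshot pattern; function, parameters and allow-list are
# hypothetical, only the capture-early-then-filter idea mirrors the diff.
from typing import Any, Dict, Optional

SUPPORTED_PARAMS = {"model", "input", "temperature"}

def build_request(
    model: str, input: str, temperature: Optional[float] = None, **kwargs
) -> Dict[str, Any]:
    local_vars = locals()      # snapshot of every named argument
    local_vars.update(kwargs)  # fold extra kwargs into the same dict
    return {
        k: v
        for k, v in local_vars.items()
        if k in SUPPORTED_PARAMS and v is not None
    }

print(build_request(model="demo", input="hi", extra_header="ignored"))
# -> {'model': 'demo', 'input': 'hi'}
```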
@@ -215,3 +235,11 @@ def responses(
         )
 
         return response
+    except Exception as e:
+        raise litellm.exception_type(
+            model=model,
+            custom_llm_provider=custom_llm_provider,
+            original_exception=e,
+            completion_kwargs=local_vars,
+            extra_kwargs=kwargs,
+        )
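The synchronous path gets the same treatment, so sync callers see identical exception types; a brief sketch under the same illustrative assumptions as the async example above:

```python
# Synchronous counterpart of the earlier caller sketch; model name is
# illustrative only.
import litellm

try:
    response = litellm.responses(
        model="openai/gpt-4o",
        input="Summarize this commit in one sentence.",
    )
    print(response)
except litellm.AuthenticationError:
    print("missing or invalid API key (mapped from the provider's 401)")
except litellm.APIError as err:
    print(f"request failed after exception mapping: {err}")
```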