Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)

Commit ffa4978f8a (parent 58acf23c3e): ResponsesAPIRequestUtils

This commit moves the Responses API request helpers out of module scope, groups them under a new ResponsesAPIRequestUtils class in litellm/responses/utils.py, and updates the aresponses()/responses() call sites to match.

2 changed files with 67 additions and 59 deletions
litellm/responses/main.py

@@ -9,7 +9,7 @@ import litellm
 from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
 from litellm.llms.custom_httpx.llm_http_handler import BaseLLMHTTPHandler
-from litellm.responses.utils import get_optional_params_responses_api
+from litellm.responses.utils import ResponsesAPIRequestUtils
 from litellm.types.llms.openai import (
     Reasoning,
     ResponseIncludable,
@@ -31,23 +31,6 @@ base_llm_http_handler = BaseLLMHTTPHandler()
 #################################################
 
 
-def get_requested_response_api_optional_param(
-    params: Dict[str, Any]
-) -> ResponsesAPIOptionalRequestParams:
-    """
-    Filter parameters to only include those defined in ResponsesAPIOptionalRequestParams.
-
-    Args:
-        params: Dictionary of parameters to filter
-
-    Returns:
-        ResponsesAPIOptionalRequestParams instance with only the valid parameters
-    """
-    valid_keys = get_type_hints(ResponsesAPIOptionalRequestParams).keys()
-    filtered_params = {k: v for k, v in params.items() if k in valid_keys}
-    return ResponsesAPIOptionalRequestParams(**filtered_params)
-
-
 @client
 async def aresponses(
     input: Union[str, ResponseInputParam],
@@ -190,14 +173,16 @@ def responses(
     local_vars.update(kwargs)
     # Get ResponsesAPIOptionalRequestParams with only valid parameters
     response_api_optional_params: ResponsesAPIOptionalRequestParams = (
-        get_requested_response_api_optional_param(local_vars)
+        ResponsesAPIRequestUtils.get_requested_response_api_optional_param(local_vars)
     )
 
     # Get optional parameters for the responses API
-    responses_api_request_params: Dict = get_optional_params_responses_api(
-        model=model,
-        responses_api_provider_config=responses_api_provider_config,
-        response_api_optional_params=response_api_optional_params,
+    responses_api_request_params: Dict = (
+        ResponsesAPIRequestUtils.get_optional_params_responses_api(
+            model=model,
+            responses_api_provider_config=responses_api_provider_config,
+            response_api_optional_params=response_api_optional_params,
+        )
     )
 
     # Pre Call logging
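Taken together, the updated call site in responses() is a two-step pipeline: first filter the caller's kwargs down to the keys declared on ResponsesAPIOptionalRequestParams, then validate those keys against the provider's supported params and map them to the provider's format. Below is a minimal sketch of that flow, assuming litellm at this commit; StubResponsesAPIConfig, the kwargs dict, and the model name are illustrative stand-ins, not litellm code:

from typing import Dict, List

from litellm.responses.utils import ResponsesAPIRequestUtils


class StubResponsesAPIConfig:
    # Illustrative stand-in for a BaseResponsesAPIConfig subclass;
    # real provider configs live elsewhere in litellm.
    def get_supported_openai_params(self, model: str) -> List[str]:
        return ["temperature", "max_output_tokens"]

    def map_openai_params(
        self, response_api_optional_params, model, drop_params
    ) -> Dict:
        # Real configs translate OpenAI-style params into the provider's
        # wire format; this stub passes them through unchanged.
        return dict(response_api_optional_params)


# Step 1: keep only keys declared on ResponsesAPIOptionalRequestParams.
optional_params = ResponsesAPIRequestUtils.get_requested_response_api_optional_param(
    {"temperature": 0.1, "some_internal_kwarg": True}  # hypothetical caller kwargs
)

# Step 2: reject unsupported params, then map to the provider format.
request_params: Dict = ResponsesAPIRequestUtils.get_optional_params_responses_api(
    model="example-model",  # hypothetical model name
    responses_api_provider_config=StubResponsesAPIConfig(),  # type: ignore[arg-type]
    response_api_optional_params=optional_params,
)

Because get_optional_params_responses_api only duck-types the config, any object exposing get_supported_openai_params and map_openai_params works for a quick experiment like this; real providers subclass BaseResponsesAPIConfig.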
litellm/responses/utils.py

@@ -1,56 +1,79 @@
-import json
-from typing import Any, Dict
+from typing import Any, Dict, get_type_hints
 
 import litellm
 from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
 from litellm.types.llms.openai import (
     ResponseAPIUsage,
     ResponsesAPIOptionalRequestParams,
-    ResponsesAPIRequestParams,
 )
 from litellm.types.utils import Usage
 
 
-def get_optional_params_responses_api(
-    model: str,
-    responses_api_provider_config: BaseResponsesAPIConfig,
-    response_api_optional_params: ResponsesAPIOptionalRequestParams,
-) -> Dict:
-    """
-    Get optional parameters for the responses API.
-
-    Args:
-        params: Dictionary of all parameters
-        model: The model name
-        responses_api_provider_config: The provider configuration for responses API
-
-    Returns:
-        A dictionary of supported parameters for the responses API
-    """
-    # Remove None values and internal parameters
-
-    # Get supported parameters for the model
-    supported_params = responses_api_provider_config.get_supported_openai_params(model)
-
-    # Check for unsupported parameters
-    unsupported_params = [
-        param for param in response_api_optional_params if param not in supported_params
-    ]
-
-    if unsupported_params:
-        raise litellm.UnsupportedParamsError(
-            model=model,
-            message=f"The following parameters are not supported for model {model}: {', '.join(unsupported_params)}",
-        )
-
-    # Map parameters to provider-specific format
-    mapped_params = responses_api_provider_config.map_openai_params(
-        response_api_optional_params=response_api_optional_params,
-        model=model,
-        drop_params=litellm.drop_params,
-    )
-
-    return mapped_params
+class ResponsesAPIRequestUtils:
+    """Helper utils for constructing ResponseAPI requests"""
+
+    @staticmethod
+    def get_optional_params_responses_api(
+        model: str,
+        responses_api_provider_config: BaseResponsesAPIConfig,
+        response_api_optional_params: ResponsesAPIOptionalRequestParams,
+    ) -> Dict:
+        """
+        Get optional parameters for the responses API.
+
+        Args:
+            params: Dictionary of all parameters
+            model: The model name
+            responses_api_provider_config: The provider configuration for responses API
+
+        Returns:
+            A dictionary of supported parameters for the responses API
+        """
+        # Remove None values and internal parameters
+
+        # Get supported parameters for the model
+        supported_params = responses_api_provider_config.get_supported_openai_params(
+            model
+        )
+
+        # Check for unsupported parameters
+        unsupported_params = [
+            param
+            for param in response_api_optional_params
+            if param not in supported_params
+        ]
+
+        if unsupported_params:
+            raise litellm.UnsupportedParamsError(
+                model=model,
+                message=f"The following parameters are not supported for model {model}: {', '.join(unsupported_params)}",
+            )
+
+        # Map parameters to provider-specific format
+        mapped_params = responses_api_provider_config.map_openai_params(
+            response_api_optional_params=response_api_optional_params,
+            model=model,
+            drop_params=litellm.drop_params,
+        )
+
+        return mapped_params
+
+    @staticmethod
+    def get_requested_response_api_optional_param(
+        params: Dict[str, Any]
+    ) -> ResponsesAPIOptionalRequestParams:
+        """
+        Filter parameters to only include those defined in ResponsesAPIOptionalRequestParams.
+
+        Args:
+            params: Dictionary of parameters to filter
+
+        Returns:
+            ResponsesAPIOptionalRequestParams instance with only the valid parameters
+        """
+        valid_keys = get_type_hints(ResponsesAPIOptionalRequestParams).keys()
+        filtered_params = {k: v for k, v in params.items() if k in valid_keys}
+        return ResponsesAPIOptionalRequestParams(**filtered_params)
 
 
 class ResponseAPILoggingUtils:
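The get_type_hints trick at the heart of get_requested_response_api_optional_param is a general way to allowlist a kwargs dict against a TypedDict schema. Here is a self-contained sketch of the same pattern, using a hypothetical ExampleParams TypedDict rather than litellm's real type:

from typing import Any, Dict, TypedDict, get_type_hints


class ExampleParams(TypedDict, total=False):
    # Hypothetical request-param schema, used only for this demo.
    temperature: float
    max_output_tokens: int


def filter_to_schema(params: Dict[str, Any]) -> ExampleParams:
    # get_type_hints() returns the TypedDict's declared fields,
    # so any key outside the schema is silently dropped.
    valid_keys = get_type_hints(ExampleParams).keys()
    return ExampleParams(**{k: v for k, v in params.items() if k in valid_keys})


print(filter_to_schema({"temperature": 0.2, "internal_flag": True}))
# -> {'temperature': 0.2}

One caveat worth noting: this filter drops unknown keys silently; the loud failure for known-but-unsupported params happens later, in get_optional_params_responses_api, which raises litellm.UnsupportedParamsError.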