Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)

Commit 5dac3a5d3b (parent c063c4b090): working basic openai response api request
5 changed files with 66 additions and 51 deletions
Changed file 1 of 5:

@@ -1,6 +1,6 @@
 import types
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Union

 import httpx

@@ -53,10 +53,10 @@ class BaseResponsesAPIConfig(ABC):
     @abstractmethod
     def map_openai_params(
         self,
-        optional_params: dict,
+        response_api_optional_params: ResponsesAPIOptionalRequestParams,
         model: str,
         drop_params: bool,
-    ) -> ResponsesAPIOptionalRequestParams:
+    ) -> Dict:

         pass

@@ -92,7 +92,7 @@ class BaseResponsesAPIConfig(ABC):
         self,
         model: str,
         input: Union[str, ResponseInputParam],
-        response_api_optional_request_params: ResponsesAPIOptionalRequestParams,
+        response_api_optional_request_params: Dict,
         litellm_params: GenericLiteLLMParams,
         headers: dict,
     ) -> ResponsesAPIRequestParams:
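Note on the base config change above: map_openai_params now receives the already-typed ResponsesAPIOptionalRequestParams and returns a plain, provider-ready Dict. A minimal sketch of a provider config implementing the new contract (the class name below is illustrative and not part of this commit; the pass-through body mirrors the OpenAI config later in this diff):

from typing import Dict

from litellm.types.llms.openai import ResponsesAPIOptionalRequestParams


class ExampleResponsesAPIConfig:  # illustrative stand-in, not a real LiteLLM class
    def map_openai_params(
        self,
        response_api_optional_params: ResponsesAPIOptionalRequestParams,
        model: str,
        drop_params: bool,
    ) -> Dict:
        # Simplest valid mapping: forward the already-filtered params as a plain dict.
        # A provider that renames or rejects keys would do that here.
        return dict(response_api_optional_params)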
Changed file 2 of 5:

@@ -1,6 +1,6 @@
 import io
 import json
-from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union

 import httpx  # type: ignore

@@ -966,10 +966,13 @@ class BaseLLMHTTPHandler:
         custom_llm_provider: str,
         input: Union[str, ResponseInputParam],
         responses_api_provider_config: BaseResponsesAPIConfig,
-        response_api_optional_request_params: ResponsesAPIOptionalRequestParams,
+        response_api_optional_request_params: Dict,
         logging_obj: LiteLLMLoggingObj,
         litellm_params: GenericLiteLLMParams,
         client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
+        extra_headers: Optional[Dict[str, Any]] = None,
+        extra_body: Optional[Dict[str, Any]] = None,
+        timeout: Optional[Union[float, httpx.Timeout]] = None,
     ) -> ResponsesAPIResponse:
         if client is None or not isinstance(client, AsyncHTTPHandler):
             async_httpx_client = get_async_httpx_client(
@@ -1020,11 +1023,9 @@ class BaseLLMHTTPHandler:
             provider_config=responses_api_provider_config,
         )

-        base_response_api_response = ResponsesAPIResponse()
         return responses_api_provider_config.transform_response_api_response(
             model=model,
             raw_response=response,
-            model_response=base_response_api_response,
             logging_obj=logging_obj,
         )

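Note on the handler change above: async_response_api_handler now accepts extra_headers, extra_body, and a timeout typed Optional[Union[float, httpx.Timeout]], and the raw response is handed straight to transform_response_api_response instead of pre-building an empty ResponsesAPIResponse. A small sketch of the two timeout forms the new parameter accepts, written against plain httpx rather than LiteLLM's async HTTP handler (the function below is illustrative only):

import httpx

# Either form satisfies Optional[Union[float, httpx.Timeout]]:
simple_timeout = 600.0
granular_timeout = httpx.Timeout(600.0, connect=5.0)


async def post_with_timeout(url: str, body: dict, headers: dict) -> httpx.Response:
    # Illustrative only: the real request goes through LiteLLM's AsyncHTTPHandler.
    async with httpx.AsyncClient(timeout=granular_timeout) as client:
        return await client.post(url, json=body, headers=headers)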
Changed file 3 of 5:

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Union

 import httpx

@@ -55,39 +55,17 @@ class OpenAIResponsesAPIConfig(BaseResponsesAPIConfig):

     def map_openai_params(
         self,
-        optional_params: dict,
+        response_api_optional_params: ResponsesAPIOptionalRequestParams,
         model: str,
         drop_params: bool,
-    ) -> ResponsesAPIOptionalRequestParams:
-        return ResponsesAPIOptionalRequestParams(
-            include=optional_params.get("include"),
-            instructions=optional_params.get("instructions"),
-            max_output_tokens=optional_params.get("max_output_tokens"),
-            metadata=optional_params.get("metadata"),
-            parallel_tool_calls=optional_params.get("parallel_tool_calls"),
-            previous_response_id=optional_params.get("previous_response_id"),
-            reasoning=optional_params.get("reasoning"),
-            store=optional_params.get("store"),
-            stream=optional_params.get("stream"),
-            temperature=optional_params.get("temperature"),
-            text=optional_params.get("text"),
-            tool_choice=optional_params.get("tool_choice"),
-            tools=optional_params.get("tools"),
-            top_p=optional_params.get("top_p"),
-            truncation=optional_params.get("truncation"),
-            user=optional_params.get("user"),
-            extra_headers=optional_params.get("extra_headers"),
-            extra_query=optional_params.get("extra_query"),
-            extra_body=optional_params.get("extra_body"),
-            timeout=optional_params.get("timeout"),
-        )
+    ) -> Dict:
+        return dict(response_api_optional_params)

     def transform_responses_api_request(
         self,
         model: str,
         input: Union[str, ResponseInputParam],
-        response_api_optional_request_params: ResponsesAPIOptionalRequestParams,
+        response_api_optional_request_params: Dict,
         litellm_params: GenericLiteLLMParams,
         headers: dict,
     ) -> ResponsesAPIRequestParams:
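Note on the OpenAI config simplification above: because the optional params are already narrowed to valid keys before reaching map_openai_params, the long field-by-field copy collapses to a single dict(...) call. A self-contained sketch of why that works, assuming ResponsesAPIOptionalRequestParams behaves like a TypedDict with total=False (an assumption consistent with how this commit constructs and iterates it):

from typing import TypedDict


class _OptionalParams(TypedDict, total=False):  # stand-in for ResponsesAPIOptionalRequestParams
    temperature: float
    top_p: float
    stream: bool


params = _OptionalParams(temperature=0.1, stream=True)

# dict() keeps only the keys that were actually provided, so params the caller
# never set do not show up as explicit None values in the outgoing request.
print(dict(params))  # {'temperature': 0.1, 'stream': True}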
Changed file 4 of 5:

@@ -1,8 +1,9 @@
-from typing import Any, Dict, Iterable, List, Literal, Optional, Union
+from typing import Any, Dict, Iterable, List, Literal, Optional, Union, get_type_hints

 import httpx

 import litellm
+from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
 from litellm.llms.custom_httpx.llm_http_handler import BaseLLMHTTPHandler
 from litellm.responses.utils import (
@@ -13,12 +14,13 @@ from litellm.types.llms.openai import (
     Reasoning,
     ResponseIncludable,
     ResponseInputParam,
+    ResponsesAPIOptionalRequestParams,
     ResponseTextConfigParam,
     ToolChoice,
     ToolParam,
 )
 from litellm.types.router import GenericLiteLLMParams
-from litellm.utils import ProviderConfigManager
+from litellm.utils import ProviderConfigManager, client

 ####### ENVIRONMENT VARIABLES ###################
 # Initialize any necessary instances or variables here
@@ -26,6 +28,24 @@ base_llm_http_handler = BaseLLMHTTPHandler()
 #################################################


+def get_requested_response_api_optional_param(
+    params: Dict[str, Any]
+) -> ResponsesAPIOptionalRequestParams:
+    """
+    Filter parameters to only include those defined in ResponsesAPIOptionalRequestParams.
+
+    Args:
+        params: Dictionary of parameters to filter
+
+    Returns:
+        ResponsesAPIOptionalRequestParams instance with only the valid parameters
+    """
+    valid_keys = get_type_hints(ResponsesAPIOptionalRequestParams).keys()
+    filtered_params = {k: v for k, v in params.items() if k in valid_keys}
+    return ResponsesAPIOptionalRequestParams(**filtered_params)
+
+
+@client
 async def aresponses(
     input: Union[str, ResponseInputParam],
     model: str,
@@ -53,6 +73,8 @@ async def aresponses(
     timeout: Optional[Union[float, httpx.Timeout]] = None,
     **kwargs,
 ):
+    litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj") # type: ignore
+    litellm_call_id: Optional[str] = kwargs.get("litellm_call_id", None)

     # get llm provider logic
     litellm_params = GenericLiteLLMParams(**kwargs)
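Note on the new helper above: get_requested_response_api_optional_param relies on typing.get_type_hints to enumerate the fields of ResponsesAPIOptionalRequestParams, then keeps only those keys from the combined locals()/kwargs dict. The same technique in isolation, with an illustrative stand-in TypedDict:

from typing import Any, Dict, TypedDict, get_type_hints


class _OptionalParams(TypedDict, total=False):  # stand-in for ResponsesAPIOptionalRequestParams
    temperature: float
    max_output_tokens: int
    user: str


def filter_to_valid_keys(params: Dict[str, Any]) -> _OptionalParams:
    # get_type_hints() returns {field_name: type} for the TypedDict, so its keys
    # are exactly the parameter names the request type declares.
    valid_keys = get_type_hints(_OptionalParams).keys()
    return _OptionalParams(**{k: v for k, v in params.items() if k in valid_keys})


# Keys not declared on the TypedDict ("model" here) are silently dropped.
print(filter_to_valid_keys({"model": "gpt-4o", "temperature": 0.2, "user": "abc"}))
# {'temperature': 0.2, 'user': 'abc'}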
@@ -81,22 +103,31 @@ async def aresponses(
     )

     # Get all parameters using locals() and combine with kwargs
-    all_params = {**locals(), **kwargs}
+    local_vars = locals()
+    local_vars.update(kwargs)
+    # Get ResponsesAPIOptionalRequestParams with only valid parameters
+    response_api_optional_params: ResponsesAPIOptionalRequestParams = (
+        get_requested_response_api_optional_param(local_vars)
+    )

     # Get optional parameters for the responses API
-    responses_api_request_params: ResponsesAPIRequestParams = (
-        get_optional_params_responses_api(
-            model=model,
-            responses_api_provider_config=responses_api_provider_config,
-            optional_params={**locals(), **kwargs},
-        )
-    )
+    responses_api_request_params: Dict = get_optional_params_responses_api(
+        model=model,
+        responses_api_provider_config=responses_api_provider_config,
+        response_api_optional_params=response_api_optional_params,
+    )

     response = await base_llm_http_handler.async_response_api_handler(
         model=model,
         input=input,
         responses_api_provider_config=responses_api_provider_config,
-        responses_api_request_params=responses_api_request_params,
+        response_api_optional_request_params=responses_api_request_params,
+        custom_llm_provider=custom_llm_provider,
+        litellm_params=litellm_params,
+        logging_obj=litellm_logging_obj,
+        extra_headers=extra_headers,
+        extra_body=extra_body,
+        timeout=timeout,
     )
     return response

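Taken together with the handler changes, the hunk above wires up the end-to-end path this commit is titled after: collect locals() plus kwargs, narrow them to the typed optional params, map them for the provider, and forward everything to the HTTP handler. A hedged usage sketch follows; it assumes aresponses is re-exported at the top level of the litellm package (as other entrypoints are) and that a plain OpenAI model name with OPENAI_API_KEY set is enough to route the request:

import asyncio

import litellm  # assumes litellm.aresponses is exported at the package root


async def main() -> None:
    # temperature is one of the optional params filtered into
    # ResponsesAPIOptionalRequestParams by the new helper.
    response = await litellm.aresponses(
        model="gpt-4o",
        input="Say hello in one short sentence.",
        temperature=0.2,
    )
    print(response)


asyncio.run(main())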
Changed file 5 of 5:

@@ -1,15 +1,19 @@
+import json
 from typing import Any, Dict

 import litellm
 from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
-from litellm.types.llms.openai import ResponsesAPIRequestParams
+from litellm.types.llms.openai import (
+    ResponsesAPIOptionalRequestParams,
+    ResponsesAPIRequestParams,
+)


 def get_optional_params_responses_api(
     model: str,
     responses_api_provider_config: BaseResponsesAPIConfig,
-    optional_params: Dict[str, Any],
-) -> ResponsesAPIRequestParams:
+    response_api_optional_params: ResponsesAPIOptionalRequestParams,
+) -> Dict:
     """
     Get optional parameters for the responses API.

@@ -22,14 +26,13 @@ def get_optional_params_responses_api(
         A dictionary of supported parameters for the responses API
     """
     # Remove None values and internal parameters
-    filtered_params = {k: v for k, v in optional_params.items() if v is not None}

     # Get supported parameters for the model
     supported_params = responses_api_provider_config.get_supported_openai_params(model)

     # Check for unsupported parameters
     unsupported_params = [
-        param for param in filtered_params if param not in supported_params
+        param for param in response_api_optional_params if param not in supported_params
     ]

     if unsupported_params:
@@ -40,7 +43,9 @@ def get_optional_params_responses_api(

     # Map parameters to provider-specific format
     mapped_params = responses_api_provider_config.map_openai_params(
-        optional_params=filtered_params, model=model, drop_params=litellm.drop_params
+        response_api_optional_params=response_api_optional_params,
+        model=model,
+        drop_params=litellm.drop_params,
     )

     return mapped_params
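Note on the utility above: get_optional_params_responses_api now takes the pre-filtered typed params, rejects anything the provider does not support, and delegates the final renaming to the provider config. A self-contained restatement of that check-then-map pipeline, independent of LiteLLM internals (the function name and error type below are illustrative):

from typing import Any, Dict, List


def map_with_validation(requested: Dict[str, Any], supported_params: List[str]) -> Dict[str, Any]:
    # Mirror of the check in get_optional_params_responses_api: iterate the
    # requested keys and fail fast on anything the provider does not support.
    unsupported = [param for param in requested if param not in supported_params]
    if unsupported:
        # LiteLLM raises its own error type here; ValueError stands in for it.
        raise ValueError(f"Unsupported parameters for this provider: {unsupported}")
    # For OpenAI, map_openai_params is an identity copy (see the config change above).
    return dict(requested)


print(map_with_validation({"temperature": 0.2}, ["temperature", "top_p"]))
# {'temperature': 0.2}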