working transform

This commit is contained in:
Ishaan Jaff 2025-03-11 15:24:42 -07:00
parent 2fbcf88fda
commit 8dfd1dc136
3 changed files with 90 additions and 3 deletions

View file

@@ -2,6 +2,7 @@ import types
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Optional
from litellm.types.llms.openai import ResponsesAPIRequestParams
from litellm.types.utils import ModelInfo
from ..chat.transformation import BaseLLMException
@@ -44,11 +45,11 @@ class BaseResponsesAPIConfig(ABC):
@abstractmethod
def map_openai_params(
    self,
    non_default_params: dict,
    optional_params: dict,
    model: str,
    drop_params: bool,
) -> ResponsesAPIRequestParams:
    """Map OpenAI-style optional params into a ResponsesAPIRequestParams payload.

    Args:
        non_default_params: Params the caller explicitly set (non-defaults).
        optional_params: Provider-agnostic optional params to translate.
        model: Target model name; providers may vary support per model.
        drop_params: If True, silently drop params the provider doesn't support.

    Returns:
        A ResponsesAPIRequestParams TypedDict ready to send to the provider.

    NOTE(review): the concrete OpenAIResponsesAPIConfig.map_openai_params in
    this commit omits `non_default_params` — confirm which signature callers
    use and align the override with this abstract declaration.
    """
    pass
@abstractmethod

View file

@@ -1,5 +1,63 @@
from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
from litellm.types.llms.openai import ResponsesAPIRequestParams
class OpenAIResponsesAPIConfig(BaseResponsesAPIConfig):
    """Param mapping for OpenAI's native Responses API (pass-through provider)."""

    def get_supported_openai_params(self, model: str) -> list:
        """
        All OpenAI Responses API params are supported
        """
        return [
            "input",
            "model",
            "include",
            "instructions",
            "max_output_tokens",
            "metadata",
            "parallel_tool_calls",
            "previous_response_id",
            "reasoning",
            "store",
            "stream",
            "temperature",
            "text",
            "tool_choice",
            "tools",
            "top_p",
            "truncation",
            "user",
            "extra_headers",
            "extra_query",
            "extra_body",
            "timeout",
        ]

    def map_openai_params(
        self,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> ResponsesAPIRequestParams:
        """Build the request payload from the params the caller actually set.

        The previous implementation called `optional_params.get(key)` for every
        supported key, which stuffed an explicit `None` into the result for
        every absent param — defeating `total=False` on the TypedDict and
        sending `include=None`, `temperature=None`, ... to the API. Only
        forward keys that are present.
        """
        supported = set(self.get_supported_openai_params(model))
        # "input" and "model" are positioned by the caller, not mapped here
        # (the original never set them either).
        supported.discard("input")
        supported.discard("model")
        return ResponsesAPIRequestParams(
            **{k: v for k, v in optional_params.items() if k in supported}
        )

View file

@@ -1,6 +1,7 @@
from os import PathLike
from typing import IO, Any, Iterable, List, Literal, Mapping, Optional, Tuple, Union
import httpx
from openai._legacy_response import (
HttpxBinaryResponseContent as _HttpxBinaryResponseContent,
)
@@ -692,3 +693,30 @@ OpenAIAudioTranscriptionOptionalParams = Literal[
# Optional params accepted by OpenAI's image-variation endpoint.
OpenAIImageVariationOptionalParams = Literal["n", "size", "response_format", "user"]
class ResponsesAPIRequestParams(TypedDict, total=False):
    """TypedDict for parameters supported by the responses API.

    total=False: every key is optional, so builders should only set keys the
    caller actually supplied — an explicit None is NOT the same as absence.
    # NOTE(review): `Dict` and `TypedDict` are not on the visible typing
    # import line of this file — presumably imported further up; verify.
    """

    # Core request fields.
    input: Union[str, ResponseInputParam]
    model: str
    # Optional tuning/behavior fields mirroring OpenAI's Responses API schema.
    include: Optional[List[ResponseIncludable]]
    instructions: Optional[str]
    max_output_tokens: Optional[int]
    metadata: Optional[Dict[str, Any]]
    parallel_tool_calls: Optional[bool]
    previous_response_id: Optional[str]
    reasoning: Optional[Reasoning]
    store: Optional[bool]
    stream: Optional[bool]
    temperature: Optional[float]
    text: Optional[ResponseTextConfigParam]
    tool_choice: Optional[ToolChoice]
    tools: Optional[Iterable[ToolParam]]
    top_p: Optional[float]
    truncation: Optional[Literal["auto", "disabled"]]
    user: Optional[str]
    # SDK plumbing passed through to the HTTP client, not model params.
    extra_headers: Optional[Dict[str, Any]]
    extra_query: Optional[Dict[str, Any]]
    extra_body: Optional[Dict[str, Any]]
    timeout: Optional[Union[float, httpx.Timeout]]