diff --git a/litellm/llms/base_llm/responses/transformation.py b/litellm/llms/base_llm/responses/transformation.py
index 649b91226f..2013bdfe10 100644
--- a/litellm/llms/base_llm/responses/transformation.py
+++ b/litellm/llms/base_llm/responses/transformation.py
@@ -1,6 +1,6 @@
 import types
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
 
 import httpx
 
@@ -10,6 +10,7 @@ from litellm.types.llms.openai import (
     ResponsesAPIResponse,
     ResponsesAPIStreamingResponse,
 )
+from litellm.types.responses.main import *
 from litellm.types.router import GenericLiteLLMParams
 
 if TYPE_CHECKING:
@@ -73,11 +74,6 @@ class BaseResponsesAPIConfig(ABC):
     def get_complete_url(
         self,
         api_base: Optional[str],
-        api_key: Optional[str],
-        model: str,
-        optional_params: dict,
-        litellm_params: dict,
-        stream: Optional[bool] = None,
     ) -> str:
         """
         OPTIONAL
@@ -122,6 +118,31 @@ class BaseResponsesAPIConfig(ABC):
         """
         pass
 
+    #########################################################
+    ########## DELETE RESPONSE API TRANSFORMATION ##############
+    #########################################################
+    @abstractmethod
+    def transform_delete_response_api_request(
+        self,
+        response_id: str,
+        api_base: str,
+        litellm_params: GenericLiteLLMParams,
+        headers: dict,
+    ) -> Tuple[str, Dict]:
+        pass
+
+    @abstractmethod
+    def transform_delete_response_api_response(
+        self,
+        raw_response: httpx.Response,
+        logging_obj: LiteLLMLoggingObj,
+    ) -> DeleteResponseResult:
+        pass
+
+    #########################################################
+    ########## END DELETE RESPONSE API TRANSFORMATION ##########
+    #########################################################
+
     def get_error_class(
         self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
     ) -> BaseLLMException:
diff --git a/litellm/types/responses/main.py b/litellm/types/responses/main.py
index 63a548bbfd..cf62f0d863 100644
--- a/litellm/types/responses/main.py
+++ b/litellm/types/responses/main.py
@@ -46,3 +46,19 @@ class GenericResponseOutputItem(BaseLiteLLMOpenAIResponseObject):
     status: str  # "completed", "in_progress", etc.
     role: str  # "assistant", "user", etc.
     content: List[OutputText]
+
+
+class DeleteResponseResult(BaseLiteLLMOpenAIResponseObject):
+    """
+    Result of a delete response request
+
+    {
+        "id": "resp_6786a1bec27481909a17d673315b29f6",
+        "object": "response",
+        "deleted": true
+    }
+    """
+
+    id: Optional[str]
+    object: Optional[str]
+    deleted: Optional[bool]
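
For reviewers, a minimal sketch of how a concrete provider config might satisfy the two new abstract hooks. It assumes an OpenAI-style `DELETE {api_base}/{response_id}` endpoint that returns the JSON shape documented in the `DeleteResponseResult` docstring; the `ExampleResponsesAPIConfig` class and the local `DeleteResponseResult` stand-in below are illustrative only and not part of this diff.

```python
# Hypothetical sketch -- not part of this PR. It mirrors the new abstract
# hooks under the assumption of an OpenAI-style DELETE {api_base}/{response_id}
# endpoint returning {"id": ..., "object": "response", "deleted": true}.
from typing import Dict, Optional, Tuple

import httpx
from pydantic import BaseModel


class DeleteResponseResult(BaseModel):
    """Local stand-in for litellm.types.responses.main.DeleteResponseResult."""

    id: Optional[str] = None
    object: Optional[str] = None
    deleted: Optional[bool] = None


class ExampleResponsesAPIConfig:
    """Illustrative provider config implementing only the two delete hooks."""

    def transform_delete_response_api_request(
        self,
        response_id: str,
        api_base: str,
        litellm_params: dict,  # GenericLiteLLMParams in the real signature
        headers: dict,
    ) -> Tuple[str, Dict]:
        # Build the DELETE URL and forward the prepared headers unchanged.
        url = f"{api_base.rstrip('/')}/{response_id}"
        return url, dict(headers)

    def transform_delete_response_api_response(
        self,
        raw_response: httpx.Response,
        logging_obj: object = None,  # LiteLLMLoggingObj in the real signature
    ) -> DeleteResponseResult:
        # Surface HTTP errors, then map the provider JSON onto the typed result.
        raw_response.raise_for_status()
        return DeleteResponseResult(**raw_response.json())
```

Splitting the delete flow into a request transform (URL plus headers) and a response transform (typed result) keeps the HTTP handler provider-agnostic, in line with how the other abstract transforms on `BaseResponsesAPIConfig` are structured.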