From be9bf68246d90cfa5c77d5f683a03f657bf7b722 Mon Sep 17 00:00:00 2001 From: Krzysztof Malczuk <2000krzysztof@gmail.com> Date: Mon, 30 Jun 2025 10:28:02 +0100 Subject: [PATCH] feat: Add webmethod for deleting openai responses (#2160) # What does this PR do? This PR creates a webmethod for deleting open AI responses, adds and implementation for it and makes an integration test for the OpenAI delete response method. [//]: # (If resolving an issue, uncomment and update the line below) # (Closes #2077) ## Test Plan Ran the standard tests and the pre-commit hooks and the unit tests. # (## Documentation) For this pr I made the routes and implementation based on the current get and create methods. The unit tests were not able to handle this test due to the mock interface in use, which did not allow for effective CRUD to be tested. I instead created an integration test to match the existing ones in the test_openai_responses. --- docs/_static/llama-stack-spec.html | 151 +++++++++++++----- docs/_static/llama-stack-spec.yaml | 106 ++++++++---- docs/openapi_generator/pyopenapi/utility.py | 2 +- llama_stack/apis/agents/agents.py | 10 ++ llama_stack/apis/agents/openai_responses.py | 7 + .../inline/agents/meta_reference/agents.py | 3 + .../agents/meta_reference/openai_responses.py | 4 + .../utils/responses/responses_store.py | 8 + .../agents/test_openai_responses.py | 9 +- 9 files changed, 225 insertions(+), 75 deletions(-) diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html index f9e4bb38e..ae9ad5d4c 100644 --- a/docs/_static/llama-stack-spec.html +++ b/docs/_static/llama-stack-spec.html @@ -817,6 +817,90 @@ ] } }, + "/v1/openai/v1/responses/{response_id}": { + "get": { + "responses": { + "200": { + "description": "An OpenAIResponseObject.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "description": "Retrieve an OpenAI response by its ID.", + "parameters": [ + { + "name": "response_id", + "in": "path", + "description": "The ID of the OpenAI response to retrieve.", + "required": true, + "schema": { + "type": "string" + } + } + ] + }, + "delete": { + "responses": { + "200": { + "description": "An OpenAIDeleteResponseObject", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIDeleteResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "description": "Delete an OpenAI response by its ID.", + "parameters": [ + { + "name": "response_id", + "in": "path", + "description": "The ID of the OpenAI response to delete.", + "required": true, + "schema": { + "type": "string" + } + } + ] + } + }, "/v1/inference/embeddings": { "post": { "responses": { @@ -1284,49 +1368,6 @@ ] } }, - "/v1/openai/v1/responses/{response_id}": { - "get": { - "responses": { - "200": { - "description": "An OpenAIResponseObject.", - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/OpenAIResponseObject" - } - } - } - }, - "400": { - "$ref": "#/components/responses/BadRequest400" - }, - "429": { - "$ref": "#/components/responses/TooManyRequests429" - }, - "500": { - "$ref": "#/components/responses/InternalServerError500" - }, - "default": { - "$ref": "#/components/responses/DefaultError" - } - }, - "tags": [ - "Agents" - ], - "description": "Retrieve an OpenAI response by its ID.", - "parameters": [ - { - "name": "response_id", - "in": "path", - "description": "The ID of the OpenAI response to retrieve.", - "required": true, - "schema": { - "type": "string" - } - } - ] - } - }, "/v1/scoring-functions/{scoring_fn_id}": { "get": { "responses": { @@ -9063,6 +9104,30 @@ ], "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" }, + "OpenAIDeleteResponseObject": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "object": { + "type": "string", + "const": "response", + "default": "response" + }, + "deleted": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "deleted" + ], + "title": "OpenAIDeleteResponseObject" + }, "EmbeddingsRequest": { "type": "object", "properties": { diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml index 9175c97fc..48cefe12b 100644 --- a/docs/_static/llama-stack-spec.yaml +++ b/docs/_static/llama-stack-spec.yaml @@ -558,6 +558,64 @@ paths: required: true schema: type: string + /v1/openai/v1/responses/{response_id}: + get: + responses: + '200': + description: An OpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + description: Retrieve an OpenAI response by its ID. + parameters: + - name: response_id + in: path + description: >- + The ID of the OpenAI response to retrieve. + required: true + schema: + type: string + delete: + responses: + '200': + description: An OpenAIDeleteResponseObject + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIDeleteResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + description: Delete an OpenAI response by its ID. + parameters: + - name: response_id + in: path + description: The ID of the OpenAI response to delete. + required: true + schema: + type: string /v1/inference/embeddings: post: responses: @@ -883,36 +941,6 @@ paths: required: true schema: type: string - /v1/openai/v1/responses/{response_id}: - get: - responses: - '200': - description: An OpenAIResponseObject. - content: - application/json: - schema: - $ref: '#/components/schemas/OpenAIResponseObject' - '400': - $ref: '#/components/responses/BadRequest400' - '429': - $ref: >- - #/components/responses/TooManyRequests429 - '500': - $ref: >- - #/components/responses/InternalServerError500 - default: - $ref: '#/components/responses/DefaultError' - tags: - - Agents - description: Retrieve an OpenAI response by its ID. - parameters: - - name: response_id - in: path - description: >- - The ID of the OpenAI response to retrieve. 
-          required: true
-          schema:
-            type: string
   /v1/scoring-functions/{scoring_fn_id}:
     get:
       responses:
@@ -6404,6 +6432,24 @@ components:
         - type
       title: >-
         OpenAIResponseObjectStreamResponseWebSearchCallSearching
+    OpenAIDeleteResponseObject:
+      type: object
+      properties:
+        id:
+          type: string
+        object:
+          type: string
+          const: response
+          default: response
+        deleted:
+          type: boolean
+          default: true
+      additionalProperties: false
+      required:
+        - id
+        - object
+        - deleted
+      title: OpenAIDeleteResponseObject
     EmbeddingsRequest:
       type: object
       properties:
diff --git a/docs/openapi_generator/pyopenapi/utility.py b/docs/openapi_generator/pyopenapi/utility.py
index 7e54c6fbb..57f92403d 100644
--- a/docs/openapi_generator/pyopenapi/utility.py
+++ b/docs/openapi_generator/pyopenapi/utility.py
@@ -156,7 +156,7 @@ def _validate_api_delete_method_returns_none(method) -> str | None:
 
     # Allow OpenAI endpoints to return response objects since they follow OpenAI specification
     method_name = getattr(method, '__name__', '')
-    if method_name.startswith('openai_'):
+    if method_name.__contains__('openai_'):
         return None
 
     if return_type is not None and return_type is not type(None):
diff --git a/llama_stack/apis/agents/agents.py b/llama_stack/apis/agents/agents.py
index 9bd46a3a6..64b162e9e 100644
--- a/llama_stack/apis/agents/agents.py
+++ b/llama_stack/apis/agents/agents.py
@@ -32,6 +32,7 @@ from llama_stack.schema_utils import json_schema_type, register_schema, webmetho
 from .openai_responses import (
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseInputTool,
     OpenAIResponseObject,
@@ -647,3 +648,12 @@ class Agents(Protocol):
         :returns: An ListOpenAIResponseInputItem.
         """
         ...
+
+    @webmethod(route="/openai/v1/responses/{response_id}", method="DELETE")
+    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
+        """Delete an OpenAI response by its ID.
+
+        :param response_id: The ID of the OpenAI response to delete.
+        :returns: An OpenAIDeleteResponseObject
+        """
+        ...
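For reference, the new route can be exercised with any OpenAI-compatible client. The sketch below is illustrative only and is not part of the patch; the base URL, API key, and model id are placeholders for a locally running Llama Stack server.

```python
# Illustrative usage sketch (not part of this patch). The base URL, API key,
# and model id are placeholders for a locally running Llama Stack server.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Create a response, then delete it through the new DELETE webmethod.
response = client.responses.create(
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model id
    input="Say hello.",
)
deleted = client.responses.delete(response.id)
# The route returns an OpenAIDeleteResponseObject:
#   {"id": "<response_id>", "object": "response", "deleted": true}
```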
diff --git a/llama_stack/apis/agents/openai_responses.py b/llama_stack/apis/agents/openai_responses.py
index 27b85e2d6..10843a3fe 100644
--- a/llama_stack/apis/agents/openai_responses.py
+++ b/llama_stack/apis/agents/openai_responses.py
@@ -229,6 +229,13 @@ class OpenAIResponseObject(BaseModel):
     user: str | None = None
 
 
+@json_schema_type
+class OpenAIDeleteResponseObject(BaseModel):
+    id: str
+    object: Literal["response"] = "response"
+    deleted: bool = True
+
+
 @json_schema_type
 class OpenAIResponseObjectStreamResponseCreated(BaseModel):
     response: OpenAIResponseObject
diff --git a/llama_stack/providers/inline/agents/meta_reference/agents.py b/llama_stack/providers/inline/agents/meta_reference/agents.py
index 89fadafb4..4d0c429bd 100644
--- a/llama_stack/providers/inline/agents/meta_reference/agents.py
+++ b/llama_stack/providers/inline/agents/meta_reference/agents.py
@@ -359,3 +359,6 @@ class MetaReferenceAgentsImpl(Agents):
         return await self.openai_responses_impl.list_openai_response_input_items(
             response_id, after, before, include, limit, order
         )
+
+    async def delete_openai_response(self, response_id: str) -> None:
+        return await self.openai_responses_impl.delete_openai_response(response_id)
diff --git a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
index f291593f4..240e6a213 100644
--- a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
@@ -19,6 +19,7 @@ from llama_stack.apis.agents.openai_responses import (
     AllowedToolsFilter,
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseInputFunctionToolCallOutput,
     OpenAIResponseInputMessageContent,
@@ -574,6 +575,9 @@ class OpenAIResponsesImpl:
             input=input,
         )
 
+    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
+        return await self.responses_store.delete_response_object(response_id)
+
     async def _convert_response_tools_to_chat_tools(
         self, tools: list[OpenAIResponseInputTool]
     ) -> tuple[
diff --git a/llama_stack/providers/utils/responses/responses_store.py b/llama_stack/providers/utils/responses/responses_store.py
index 36151d1c3..1b1cb66d4 100644
--- a/llama_stack/providers/utils/responses/responses_store.py
+++ b/llama_stack/providers/utils/responses/responses_store.py
@@ -9,6 +9,7 @@ from llama_stack.apis.agents import (
 from llama_stack.apis.agents.openai_responses import (
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseObject,
     OpenAIResponseObjectWithInput,
@@ -114,6 +115,13 @@ class ResponsesStore:
 
         return OpenAIResponseObjectWithInput(**row["response_object"])
 
+    async def delete_response_object(self, response_id: str) -> OpenAIDeleteResponseObject:
+        row = await self.sql_store.fetch_one("openai_responses", where={"id": response_id})
+        if not row:
+            raise ValueError(f"Response with id {response_id} not found")
+        await self.sql_store.delete("openai_responses", where={"id": response_id})
+        return OpenAIDeleteResponseObject(id=response_id)
+
     async def list_response_input_items(
         self,
         response_id: str,
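The store returns the same `OpenAIDeleteResponseObject` defined in `openai_responses.py` above. A minimal sketch of the payload shape, assuming the field defaults from that definition:

```python
# Illustrative only: the payload produced by delete_response_object and
# returned from DELETE /v1/openai/v1/responses/{response_id}.
from llama_stack.apis.agents.openai_responses import OpenAIDeleteResponseObject

obj = OpenAIDeleteResponseObject(id="resp_123")  # placeholder response id
print(obj.model_dump())
# {'id': 'resp_123', 'object': 'response', 'deleted': True}
```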
diff --git a/tests/integration/agents/test_openai_responses.py b/tests/integration/agents/test_openai_responses.py
index b0b123c45..7ae48913b 100644
--- a/tests/integration/agents/test_openai_responses.py
+++ b/tests/integration/agents/test_openai_responses.py
@@ -4,7 +4,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 import pytest
-from openai import OpenAI
+from openai import BadRequestError, OpenAI
 
 from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
 
@@ -92,6 +92,13 @@ def test_responses_store(openai_client, client_with_models, text_model_id, strea
     if output_type == "message":
         assert retrieved_response.output[0].content[0].text == content
 
+    # Delete the response
+    delete_response = client.responses.delete(response_id)
+    assert delete_response is None
+
+    with pytest.raises(BadRequestError):
+        client.responses.retrieve(response_id)
+
 
 def test_list_response_input_items(openai_client, client_with_models, text_model_id):
     """Test the new list_openai_response_input_items endpoint."""
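As the integration test asserts, retrieving a response after it has been deleted raises `BadRequestError` from the OpenAI client (the store's `ValueError` surfaces as a 400). A client-side sketch, reusing the placeholder server settings from the earlier example:

```python
# Illustrative only: handling the not-found case after a response is deleted.
from openai import BadRequestError, OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

try:
    client.responses.retrieve("resp_already_deleted")  # placeholder id
except BadRequestError:
    print("response not found (deleted or never created)")
```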