Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-28 02:53:30 +00:00)
Add delete_openai_response route, define the OpenAI delete response schema, and add an integration test
Parent: 5639ad7466
Commit: b45c650063
9 changed files with 225 additions and 75 deletions
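Before the file-by-file diff, here is a minimal sketch of how the new DELETE route is meant to be exercised from a client. It mirrors the integration test added at the end of this commit; the base URL, port, API key, and model name are illustrative assumptions, not values taken from the diff.

from openai import BadRequestError, OpenAI

# Assumed local Llama Stack server exposing the OpenAI-compatible prefix; adjust for your deployment.
client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Create a response, then delete it via DELETE /v1/openai/v1/responses/{response_id}.
response = client.responses.create(model="meta-llama/Llama-3.1-8B-Instruct", input="say hello")
deleted = client.responses.delete(response.id)
assert deleted is None  # the integration test below asserts the client returns None on delete

# Retrieving a deleted response fails; the store's ValueError surfaces as a 400 to the client.
try:
    client.responses.retrieve(response.id)
except BadRequestError as err:
    print("already deleted:", err)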
docs/_static/llama-stack-spec.html (vendored): 151 lines changed
@@ -817,6 +817,90 @@
                 ]
             }
         },
+        "/v1/openai/v1/responses/{response_id}": {
+            "get": {
+                "responses": {
+                    "200": {
+                        "description": "An OpenAIResponseObject.",
+                        "content": {
+                            "application/json": {
+                                "schema": {
+                                    "$ref": "#/components/schemas/OpenAIResponseObject"
+                                }
+                            }
+                        }
+                    },
+                    "400": {
+                        "$ref": "#/components/responses/BadRequest400"
+                    },
+                    "429": {
+                        "$ref": "#/components/responses/TooManyRequests429"
+                    },
+                    "500": {
+                        "$ref": "#/components/responses/InternalServerError500"
+                    },
+                    "default": {
+                        "$ref": "#/components/responses/DefaultError"
+                    }
+                },
+                "tags": [
+                    "Agents"
+                ],
+                "description": "Retrieve an OpenAI response by its ID.",
+                "parameters": [
+                    {
+                        "name": "response_id",
+                        "in": "path",
+                        "description": "The ID of the OpenAI response to retrieve.",
+                        "required": true,
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                ]
+            },
+            "delete": {
+                "responses": {
+                    "200": {
+                        "description": "An OpenAIDeleteResponseObject",
+                        "content": {
+                            "application/json": {
+                                "schema": {
+                                    "$ref": "#/components/schemas/OpenAIDeleteResponseObject"
+                                }
+                            }
+                        }
+                    },
+                    "400": {
+                        "$ref": "#/components/responses/BadRequest400"
+                    },
+                    "429": {
+                        "$ref": "#/components/responses/TooManyRequests429"
+                    },
+                    "500": {
+                        "$ref": "#/components/responses/InternalServerError500"
+                    },
+                    "default": {
+                        "$ref": "#/components/responses/DefaultError"
+                    }
+                },
+                "tags": [
+                    "Agents"
+                ],
+                "description": "Delete an OpenAI response by its ID.",
+                "parameters": [
+                    {
+                        "name": "response_id",
+                        "in": "path",
+                        "description": "The ID of the OpenAI response to delete.",
+                        "required": true,
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                ]
+            }
+        },
         "/v1/inference/embeddings": {
             "post": {
                 "responses": {
@@ -1284,49 +1368,6 @@
                 ]
             }
         },
-        "/v1/openai/v1/responses/{response_id}": {
-            "get": {
-                "responses": {
-                    "200": {
-                        "description": "An OpenAIResponseObject.",
-                        "content": {
-                            "application/json": {
-                                "schema": {
-                                    "$ref": "#/components/schemas/OpenAIResponseObject"
-                                }
-                            }
-                        }
-                    },
-                    "400": {
-                        "$ref": "#/components/responses/BadRequest400"
-                    },
-                    "429": {
-                        "$ref": "#/components/responses/TooManyRequests429"
-                    },
-                    "500": {
-                        "$ref": "#/components/responses/InternalServerError500"
-                    },
-                    "default": {
-                        "$ref": "#/components/responses/DefaultError"
-                    }
-                },
-                "tags": [
-                    "Agents"
-                ],
-                "description": "Retrieve an OpenAI response by its ID.",
-                "parameters": [
-                    {
-                        "name": "response_id",
-                        "in": "path",
-                        "description": "The ID of the OpenAI response to retrieve.",
-                        "required": true,
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                ]
-            }
-        },
         "/v1/scoring-functions/{scoring_fn_id}": {
             "get": {
                 "responses": {
@@ -8170,6 +8211,30 @@
                 ],
                 "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching"
             },
+            "OpenAIDeleteResponseObject": {
+                "type": "object",
+                "properties": {
+                    "id": {
+                        "type": "string"
+                    },
+                    "object": {
+                        "type": "string",
+                        "const": "response",
+                        "default": "response"
+                    },
+                    "deleted": {
+                        "type": "boolean",
+                        "default": true
+                    }
+                },
+                "additionalProperties": false,
+                "required": [
+                    "id",
+                    "object",
+                    "deleted"
+                ],
+                "title": "OpenAIDeleteResponseObject"
+            },
             "EmbeddingsRequest": {
                 "type": "object",
                 "properties": {
docs/_static/llama-stack-spec.yaml (vendored): 106 lines changed
@@ -558,6 +558,64 @@ paths:
           required: true
           schema:
             type: string
+  /v1/openai/v1/responses/{response_id}:
+    get:
+      responses:
+        '200':
+          description: An OpenAIResponseObject.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/OpenAIResponseObject'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - Agents
+      description: Retrieve an OpenAI response by its ID.
+      parameters:
+        - name: response_id
+          in: path
+          description: >-
+            The ID of the OpenAI response to retrieve.
+          required: true
+          schema:
+            type: string
+    delete:
+      responses:
+        '200':
+          description: An OpenAIDeleteResponseObject
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/OpenAIDeleteResponseObject'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - Agents
+      description: Delete an OpenAI response by its ID.
+      parameters:
+        - name: response_id
+          in: path
+          description: The ID of the OpenAI response to delete.
+          required: true
+          schema:
+            type: string
   /v1/inference/embeddings:
     post:
       responses:
@@ -883,36 +941,6 @@ paths:
           required: true
           schema:
             type: string
-  /v1/openai/v1/responses/{response_id}:
-    get:
-      responses:
-        '200':
-          description: An OpenAIResponseObject.
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/OpenAIResponseObject'
-        '400':
-          $ref: '#/components/responses/BadRequest400'
-        '429':
-          $ref: >-
-            #/components/responses/TooManyRequests429
-        '500':
-          $ref: >-
-            #/components/responses/InternalServerError500
-        default:
-          $ref: '#/components/responses/DefaultError'
-      tags:
-        - Agents
-      description: Retrieve an OpenAI response by its ID.
-      parameters:
-        - name: response_id
-          in: path
-          description: >-
-            The ID of the OpenAI response to retrieve.
-          required: true
-          schema:
-            type: string
   /v1/scoring-functions/{scoring_fn_id}:
     get:
       responses:
@@ -5782,6 +5810,24 @@ components:
         - type
       title: >-
         OpenAIResponseObjectStreamResponseWebSearchCallSearching
+    OpenAIDeleteResponseObject:
+      type: object
+      properties:
+        id:
+          type: string
+        object:
+          type: string
+          const: response
+          default: response
+        deleted:
+          type: boolean
+          default: true
+      additionalProperties: false
+      required:
+        - id
+        - object
+        - deleted
+      title: OpenAIDeleteResponseObject
     EmbeddingsRequest:
       type: object
       properties:
@@ -156,7 +156,7 @@ def _validate_api_delete_method_returns_none(method) -> str | None:
 
     # Allow OpenAI endpoints to return response objects since they follow OpenAI specification
     method_name = getattr(method, '__name__', '')
-    if method_name.startswith('openai_'):
+    if method_name.__contains__('openai_'):
         return None
 
     if return_type is not None and return_type is not type(None):
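The validator exemption above is loosened from a prefix check to a substring check, presumably because the new protocol method is named delete_openai_response, which contains "openai_" but does not start with it. A two-line illustration in plain Python:

method_name = "delete_openai_response"
print(method_name.startswith("openai_"))  # False: the old prefix check would not exempt this method
print("openai_" in method_name)           # True: equivalent to method_name.__contains__("openai_")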
@@ -33,6 +33,7 @@ from llama_stack.schema_utils import json_schema_type, register_schema, webmethod
 from .openai_responses import (
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseInputTool,
     OpenAIResponseObject,
@@ -656,3 +657,12 @@ class Agents(Protocol):
         :returns: An ListOpenAIResponseInputItem.
         """
         ...
+
+    @webmethod(route="/openai/v1/responses/{response_id}", method="DELETE")
+    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
+        """Delete an OpenAI response by its ID.
+
+        :param response_id: The ID of the OpenAI response to delete.
+        :returns: An OpenAIDeleteResponseObject
+        """
+        ...
@@ -173,6 +173,13 @@ class OpenAIResponseObject(BaseModel):
     user: str | None = None
 
 
+@json_schema_type
+class OpenAIDeleteResponseObject(BaseModel):
+    id: str
+    object: Literal["response"] = "response"
+    deleted: bool = True
+
+
 @json_schema_type
 class OpenAIResponseObjectStreamResponseCreated(BaseModel):
     response: OpenAIResponseObject
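For reference, the wire shape implied by the model above: constructing it with only an id fills in the object and deleted defaults. A standalone sketch (the model is re-declared here so the snippet runs on its own, and the response ID is illustrative):

from typing import Literal

from pydantic import BaseModel


class OpenAIDeleteResponseObject(BaseModel):  # standalone copy of the model added above
    id: str
    object: Literal["response"] = "response"
    deleted: bool = True


print(OpenAIDeleteResponseObject(id="resp_abc123").model_dump_json())
# {"id":"resp_abc123","object":"response","deleted":true}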
@@ -358,3 +358,6 @@ class MetaReferenceAgentsImpl(Agents):
         return await self.openai_responses_impl.list_openai_response_input_items(
             response_id, after, before, include, limit, order
         )
+
+    async def delete_openai_response(self, response_id: str) -> None:
+        return await self.openai_responses_impl.delete_openai_response(response_id)
@@ -18,6 +18,7 @@ from llama_stack.apis.agents.openai_responses import (
     AllowedToolsFilter,
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseInputFunctionToolCallOutput,
     OpenAIResponseInputMessageContent,
@@ -564,6 +565,9 @@ class OpenAIResponsesImpl:
             input=input,
         )
 
+    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
+        return await self.responses_store.delete_response_object(response_id)
+
     async def _convert_response_tools_to_chat_tools(
         self, tools: list[OpenAIResponseInputTool]
     ) -> tuple[
@@ -9,6 +9,7 @@ from llama_stack.apis.agents import (
 from llama_stack.apis.agents.openai_responses import (
     ListOpenAIResponseInputItem,
     ListOpenAIResponseObject,
+    OpenAIDeleteResponseObject,
     OpenAIResponseInput,
     OpenAIResponseObject,
     OpenAIResponseObjectWithInput,
@@ -98,6 +99,13 @@ class ResponsesStore:
             raise ValueError(f"Response with id {response_id} not found") from None
         return OpenAIResponseObjectWithInput(**row["response_object"])
 
+    async def delete_response_object(self, response_id: str) -> OpenAIDeleteResponseObject:
+        row = await self.sql_store.fetch_one("openai_responses", where={"id": response_id})
+        if not row:
+            raise ValueError(f"Response with id {response_id} not found")
+        await self.sql_store.delete("openai_responses", where={"id": response_id})
+        return OpenAIDeleteResponseObject(id=response_id)
+
     async def list_response_input_items(
         self,
         response_id: str,
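A self-contained sketch of the delete semantics added to ResponsesStore above, with a plain dict standing in for the SQL-backed "openai_responses" table. The seed row and IDs are illustrative; this is not llama-stack code.

import asyncio

_TABLE = {"resp_123": {"response_object": {"id": "resp_123"}}}  # stand-in for the openai_responses table


async def delete_response_object(response_id: str) -> dict:
    row = _TABLE.get(response_id)  # stands in for sql_store.fetch_one(...)
    if not row:
        raise ValueError(f"Response with id {response_id} not found")
    del _TABLE[response_id]  # stands in for sql_store.delete(...)
    # Mirrors the OpenAIDeleteResponseObject shape returned by the real method.
    return {"id": response_id, "object": "response", "deleted": True}


async def main():
    print(await delete_response_object("resp_123"))
    try:
        await delete_response_object("resp_123")  # second delete: row is gone
    except ValueError as err:
        print(err)  # the HTTP layer surfaces this as a 400, which the integration test below expects


asyncio.run(main())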
@@ -4,7 +4,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 import pytest
-from openai import OpenAI
+from openai import BadRequestError, OpenAI
 
 from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
 
@@ -92,6 +92,13 @@ def test_responses_store(openai_client, client_with_models, text_model_id, stream
     if output_type == "message":
         assert retrieved_response.output[0].content[0].text == content
 
+    # Delete the response
+    delete_response = client.responses.delete(response_id)
+    assert delete_response is None
+
+    with pytest.raises(BadRequestError):
+        client.responses.retrieve(response_id)
+
 
 def test_list_response_input_items(openai_client, client_with_models, text_model_id):
     """Test the new list_openai_response_input_items endpoint."""