Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-27 18:50:41 +00:00)

Merge b45c650063 into 40fdce79b3
This commit is contained in: commit c7e5704d37

9 changed files with 225 additions and 75 deletions
docs/_static/llama-stack-spec.html (vendored): 151 lines changed
@@ -817,6 +817,90 @@
                ]
            }
        },
        "/v1/openai/v1/responses/{response_id}": {
            "get": {
                "responses": {
                    "200": {
                        "description": "An OpenAIResponseObject.",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "$ref": "#/components/schemas/OpenAIResponseObject"
                                }
                            }
                        }
                    },
                    "400": {
                        "$ref": "#/components/responses/BadRequest400"
                    },
                    "429": {
                        "$ref": "#/components/responses/TooManyRequests429"
                    },
                    "500": {
                        "$ref": "#/components/responses/InternalServerError500"
                    },
                    "default": {
                        "$ref": "#/components/responses/DefaultError"
                    }
                },
                "tags": [
                    "Agents"
                ],
                "description": "Retrieve an OpenAI response by its ID.",
                "parameters": [
                    {
                        "name": "response_id",
                        "in": "path",
                        "description": "The ID of the OpenAI response to retrieve.",
                        "required": true,
                        "schema": {
                            "type": "string"
                        }
                    }
                ]
            },
            "delete": {
                "responses": {
                    "200": {
                        "description": "An OpenAIDeleteResponseObject",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "$ref": "#/components/schemas/OpenAIDeleteResponseObject"
                                }
                            }
                        }
                    },
                    "400": {
                        "$ref": "#/components/responses/BadRequest400"
                    },
                    "429": {
                        "$ref": "#/components/responses/TooManyRequests429"
                    },
                    "500": {
                        "$ref": "#/components/responses/InternalServerError500"
                    },
                    "default": {
                        "$ref": "#/components/responses/DefaultError"
                    }
                },
                "tags": [
                    "Agents"
                ],
                "description": "Delete an OpenAI response by its ID.",
                "parameters": [
                    {
                        "name": "response_id",
                        "in": "path",
                        "description": "The ID of the OpenAI response to delete.",
                        "required": true,
                        "schema": {
                            "type": "string"
                        }
                    }
                ]
            }
        },
        "/v1/inference/embeddings": {
            "post": {
                "responses": {
@@ -1284,49 +1368,6 @@
                ]
            }
        },
        "/v1/openai/v1/responses/{response_id}": {
            "get": {
                "responses": {
                    "200": {
                        "description": "An OpenAIResponseObject.",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "$ref": "#/components/schemas/OpenAIResponseObject"
                                }
                            }
                        }
                    },
                    "400": {
                        "$ref": "#/components/responses/BadRequest400"
                    },
                    "429": {
                        "$ref": "#/components/responses/TooManyRequests429"
                    },
                    "500": {
                        "$ref": "#/components/responses/InternalServerError500"
                    },
                    "default": {
                        "$ref": "#/components/responses/DefaultError"
                    }
                },
                "tags": [
                    "Agents"
                ],
                "description": "Retrieve an OpenAI response by its ID.",
                "parameters": [
                    {
                        "name": "response_id",
                        "in": "path",
                        "description": "The ID of the OpenAI response to retrieve.",
                        "required": true,
                        "schema": {
                            "type": "string"
                        }
                    }
                ]
            }
        },
        "/v1/scoring-functions/{scoring_fn_id}": {
            "get": {
                "responses": {
@@ -9063,6 +9104,30 @@
                ],
                "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching"
            },
            "OpenAIDeleteResponseObject": {
                "type": "object",
                "properties": {
                    "id": {
                        "type": "string"
                    },
                    "object": {
                        "type": "string",
                        "const": "response",
                        "default": "response"
                    },
                    "deleted": {
                        "type": "boolean",
                        "default": true
                    }
                },
                "additionalProperties": false,
                "required": [
                    "id",
                    "object",
                    "deleted"
                ],
                "title": "OpenAIDeleteResponseObject"
            },
            "EmbeddingsRequest": {
                "type": "object",
                "properties": {
docs/_static/llama-stack-spec.yaml (vendored): 106 lines changed
@@ -558,6 +558,64 @@ paths:
          required: true
          schema:
            type: string
  /v1/openai/v1/responses/{response_id}:
    get:
      responses:
        '200':
          description: An OpenAIResponseObject.
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/OpenAIResponseObject'
        '400':
          $ref: '#/components/responses/BadRequest400'
        '429':
          $ref: >-
            #/components/responses/TooManyRequests429
        '500':
          $ref: >-
            #/components/responses/InternalServerError500
        default:
          $ref: '#/components/responses/DefaultError'
      tags:
        - Agents
      description: Retrieve an OpenAI response by its ID.
      parameters:
        - name: response_id
          in: path
          description: >-
            The ID of the OpenAI response to retrieve.
          required: true
          schema:
            type: string
    delete:
      responses:
        '200':
          description: An OpenAIDeleteResponseObject
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/OpenAIDeleteResponseObject'
        '400':
          $ref: '#/components/responses/BadRequest400'
        '429':
          $ref: >-
            #/components/responses/TooManyRequests429
        '500':
          $ref: >-
            #/components/responses/InternalServerError500
        default:
          $ref: '#/components/responses/DefaultError'
      tags:
        - Agents
      description: Delete an OpenAI response by its ID.
      parameters:
        - name: response_id
          in: path
          description: The ID of the OpenAI response to delete.
          required: true
          schema:
            type: string
  /v1/inference/embeddings:
    post:
      responses:
@@ -883,36 +941,6 @@ paths:
          required: true
          schema:
            type: string
  /v1/openai/v1/responses/{response_id}:
    get:
      responses:
        '200':
          description: An OpenAIResponseObject.
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/OpenAIResponseObject'
        '400':
          $ref: '#/components/responses/BadRequest400'
        '429':
          $ref: >-
            #/components/responses/TooManyRequests429
        '500':
          $ref: >-
            #/components/responses/InternalServerError500
        default:
          $ref: '#/components/responses/DefaultError'
      tags:
        - Agents
      description: Retrieve an OpenAI response by its ID.
      parameters:
        - name: response_id
          in: path
          description: >-
            The ID of the OpenAI response to retrieve.
          required: true
          schema:
            type: string
  /v1/scoring-functions/{scoring_fn_id}:
    get:
      responses:
@@ -6404,6 +6432,24 @@ components:
        - type
      title: >-
        OpenAIResponseObjectStreamResponseWebSearchCallSearching
    OpenAIDeleteResponseObject:
      type: object
      properties:
        id:
          type: string
        object:
          type: string
          const: response
          default: response
        deleted:
          type: boolean
          default: true
      additionalProperties: false
      required:
        - id
        - object
        - deleted
      title: OpenAIDeleteResponseObject
    EmbeddingsRequest:
      type: object
      properties:
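Both spec files now document GET and DELETE on /v1/openai/v1/responses/{response_id}. For illustration, a minimal sketch of exercising the new route over plain HTTP with httpx; the base URL (llama-stack's default port 8321) and the response id are assumptions for the example, not values taken from this diff.

    # Hedged sketch: GET then DELETE on /v1/openai/v1/responses/{response_id}.
    # Assumes a Llama Stack server is already running at localhost:8321.
    import httpx

    BASE = "http://localhost:8321/v1/openai/v1"   # assumed base URL
    response_id = "resp_123"                      # placeholder id

    # Retrieve: a 200 body should match the OpenAIResponseObject schema.
    r = httpx.get(f"{BASE}/responses/{response_id}")
    print(r.status_code, r.json())

    # Delete: a 200 body should match OpenAIDeleteResponseObject,
    # e.g. {"id": "resp_123", "object": "response", "deleted": true}.
    r = httpx.delete(f"{BASE}/responses/{response_id}")
    print(r.status_code, r.json())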
@@ -156,7 +156,7 @@ def _validate_api_delete_method_returns_none(method) -> str | None:

    # Allow OpenAI endpoints to return response objects since they follow OpenAI specification
    method_name = getattr(method, '__name__', '')
-    if method_name.startswith('openai_'):
+    if method_name.__contains__('openai_'):
        return None

    if return_type is not None and return_type is not type(None):
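The switch to a containment check matters here because the new DELETE method is named delete_openai_response, which does not start with the openai_ prefix but still follows the OpenAI convention of returning a response object. A small sketch of the difference; the method names come from this PR, the helper functions are just for illustration:

    # Illustration only: why a prefix check misses the new method name.
    def is_openai_endpoint_prefix(name: str) -> bool:
        return name.startswith('openai_')

    def is_openai_endpoint_contains(name: str) -> bool:
        return 'openai_' in name  # same result as name.__contains__('openai_')

    print(is_openai_endpoint_prefix('delete_openai_response'))    # False -> would be flagged by the validator
    print(is_openai_endpoint_contains('delete_openai_response'))  # True  -> exempt from the None-return rule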
@@ -32,6 +32,7 @@ from llama_stack.schema_utils import json_schema_type, register_schema, webmethod
from .openai_responses import (
    ListOpenAIResponseInputItem,
    ListOpenAIResponseObject,
    OpenAIDeleteResponseObject,
    OpenAIResponseInput,
    OpenAIResponseInputTool,
    OpenAIResponseObject,
@@ -647,3 +648,12 @@ class Agents(Protocol):
        :returns: An ListOpenAIResponseInputItem.
        """
        ...

    @webmethod(route="/openai/v1/responses/{response_id}", method="DELETE")
    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
        """Delete an OpenAI response by its ID.

        :param response_id: The ID of the OpenAI response to delete.
        :returns: An OpenAIDeleteResponseObject
        """
        ...
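Any provider implementing the Agents protocol now has to supply this method. A minimal, hypothetical in-memory stub that satisfies the new signature, useful for unit tests; it is not part of the PR, and the not-found behaviour simply mirrors the reference store further down.

    # Hypothetical stub satisfying the new protocol method (illustration only).
    from llama_stack.apis.agents.openai_responses import OpenAIDeleteResponseObject

    class _InMemoryResponses:
        def __init__(self) -> None:
            self._responses: dict[str, dict] = {}

        async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
            if response_id not in self._responses:
                raise ValueError(f"Response with id {response_id} not found")
            del self._responses[response_id]
            # Defaults fill in object="response" and deleted=True.
            return OpenAIDeleteResponseObject(id=response_id)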
@@ -229,6 +229,13 @@ class OpenAIResponseObject(BaseModel):
    user: str | None = None


@json_schema_type
class OpenAIDeleteResponseObject(BaseModel):
    id: str
    object: Literal["response"] = "response"
    deleted: bool = True


@json_schema_type
class OpenAIResponseObjectStreamResponseCreated(BaseModel):
    response: OpenAIResponseObject
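A quick sketch of what the new model serializes to: only the id is required, the other two fields come from the defaults. The class is re-declared locally just to keep the snippet self-contained; it mirrors the model added above.

    # Self-contained sketch of the model's serialized shape (pydantic v2).
    from typing import Literal
    from pydantic import BaseModel

    class OpenAIDeleteResponseObject(BaseModel):
        id: str
        object: Literal["response"] = "response"
        deleted: bool = True

    print(OpenAIDeleteResponseObject(id="resp_123").model_dump())
    # {'id': 'resp_123', 'object': 'response', 'deleted': True}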
@@ -359,3 +359,6 @@ class MetaReferenceAgentsImpl(Agents):
        return await self.openai_responses_impl.list_openai_response_input_items(
            response_id, after, before, include, limit, order
        )

    async def delete_openai_response(self, response_id: str) -> None:
        return await self.openai_responses_impl.delete_openai_response(response_id)
|
|||
AllowedToolsFilter,
|
||||
ListOpenAIResponseInputItem,
|
||||
ListOpenAIResponseObject,
|
||||
OpenAIDeleteResponseObject,
|
||||
OpenAIResponseInput,
|
||||
OpenAIResponseInputFunctionToolCallOutput,
|
||||
OpenAIResponseInputMessageContent,
|
||||
|
@@ -574,6 +575,9 @@ class OpenAIResponsesImpl:
            input=input,
        )

    async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject:
        return await self.responses_store.delete_response_object(response_id)

    async def _convert_response_tools_to_chat_tools(
        self, tools: list[OpenAIResponseInputTool]
    ) -> tuple[
@@ -9,6 +9,7 @@ from llama_stack.apis.agents import (
from llama_stack.apis.agents.openai_responses import (
    ListOpenAIResponseInputItem,
    ListOpenAIResponseObject,
    OpenAIDeleteResponseObject,
    OpenAIResponseInput,
    OpenAIResponseObject,
    OpenAIResponseObjectWithInput,
@@ -114,6 +115,13 @@ class ResponsesStore:

        return OpenAIResponseObjectWithInput(**row["response_object"])

    async def delete_response_object(self, response_id: str) -> OpenAIDeleteResponseObject:
        row = await self.sql_store.fetch_one("openai_responses", where={"id": response_id})
        if not row:
            raise ValueError(f"Response with id {response_id} not found")
        await self.sql_store.delete("openai_responses", where={"id": response_id})
        return OpenAIDeleteResponseObject(id=response_id)

    async def list_response_input_items(
        self,
        response_id: str,
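The store raises ValueError when the id is unknown; the integration test below expects that to surface as a client-side BadRequestError, consistent with the 400 documented in the spec. A hedged sketch of calling the store directly, where the store argument is assumed to be an already-configured ResponsesStore:

    # Hedged usage sketch; `store` is assumed to be a configured ResponsesStore.
    async def delete_if_present(store, response_id: str):
        try:
            return await store.delete_response_object(response_id)
        except ValueError:
            # No row with this id in the "openai_responses" table.
            return None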
@@ -4,7 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import pytest
-from openai import OpenAI
+from openai import BadRequestError, OpenAI

from llama_stack.distribution.library_client import LlamaStackAsLibraryClient

@@ -92,6 +92,13 @@ def test_responses_store(openai_client, client_with_models, text_model_id, strea
    if output_type == "message":
        assert retrieved_response.output[0].content[0].text == content

    # Delete the response
    delete_response = client.responses.delete(response_id)
    assert delete_response is None

    with pytest.raises(BadRequestError):
        client.responses.retrieve(response_id)


def test_list_response_input_items(openai_client, client_with_models, text_model_id):
    """Test the new list_openai_response_input_items endpoint."""
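The test drives the new route through the standard OpenAI client. Outside the test harness the same flow looks roughly like this; the base_url, api_key and model name are placeholders for the example, not values taken from this PR.

    # Hedged sketch: same flow as the test, against a running Llama Stack server.
    from openai import BadRequestError, OpenAI

    client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")  # assumed URL and key

    created = client.responses.create(model="my-model", input="say hello")  # placeholder model id
    client.responses.delete(created.id)        # returns None on success, as the test asserts

    try:
        client.responses.retrieve(created.id)  # deleted ids are now rejected
    except BadRequestError:
        pass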