diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html
index 4693d39e0..96e97035f 100644
--- a/docs/static/llama-stack-spec.html
+++ b/docs/static/llama-stack-spec.html
@@ -252,6 +252,483 @@
"deprecated": false
}
},
+ "/v1/conversations": {
+ "post": {
+ "responses": {
+ "200": {
+ "description": "The created conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Create a conversation.",
+ "description": "Create a conversation.",
+ "parameters": [],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateConversationRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "The conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Get a conversation with the given ID.",
+ "description": "Get a conversation with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "post": {
+ "responses": {
+ "200": {
+ "description": "The updated conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Update a conversation's metadata with the given ID.",
+ "description": "Update a conversation's metadata with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UpdateConversationRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ },
+ "delete": {
+ "responses": {
+ "200": {
+ "description": "The deleted conversation resource.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationDeletedResource"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Delete a conversation with the given ID.",
+ "description": "Delete a conversation with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}/items": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "List of conversation items.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemList"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "List items in the conversation.",
+ "description": "List items in the conversation.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "after",
+ "in": "query",
+ "description": "An item ID to list items after, used in pagination.",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "include",
+ "in": "query",
+ "description": "Specify additional output data to include in the response.",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "code_interpreter_call.outputs",
+ "computer_call_output.output.image_url",
+ "file_search_call.results",
+ "message.input_image.image_url",
+ "message.output_text.logprobs",
+ "reasoning.encrypted_content"
+ ]
+ }
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "description": "A limit on the number of objects to be returned (1-100, default 20).",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "order",
+ "in": "query",
+ "description": "The order to return items in (asc or desc, default desc).",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "string",
+ "enum": [
+ "asc",
+ "desc"
+ ]
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "post": {
+ "responses": {
+ "200": {
+ "description": "List of created items.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemList"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Create items in the conversation.",
+ "description": "Create items in the conversation.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/AddItemsRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}/items/{item_id}": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "The conversation item.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItem"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Retrieve a conversation item.",
+ "description": "Retrieve a conversation item.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "item_id",
+ "in": "path",
+ "description": "The item identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "delete": {
+ "responses": {
+ "200": {
+ "description": "The deleted item resource.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemDeletedResource"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Delete a conversation item.",
+ "description": "Delete a conversation item.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "item_id",
+ "in": "path",
+ "description": "The item identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ }
+ },
"/v1/embeddings": {
"post": {
"responses": {
@@ -5111,6 +5588,819 @@
"title": "OpenAICompletionChoice",
"description": "A choice from an OpenAI-compatible completion response."
},
+ "ConversationItem": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseMessage"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "message": "#/components/schemas/OpenAIResponseMessage",
+ "function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall",
+ "file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall",
+ "web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall",
+ "mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall",
+ "mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools"
+ }
+ }
+ },
+ "OpenAIResponseAnnotationCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "url_citation",
+ "default": "url_citation",
+ "description": "Annotation type identifier, always \"url_citation\""
+ },
+ "end_index": {
+ "type": "integer",
+ "description": "End position of the citation span in the content"
+ },
+ "start_index": {
+ "type": "integer",
+ "description": "Start position of the citation span in the content"
+ },
+ "title": {
+ "type": "string",
+ "description": "Title of the referenced web resource"
+ },
+ "url": {
+ "type": "string",
+ "description": "URL of the referenced web resource"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "end_index",
+ "start_index",
+ "title",
+ "url"
+ ],
+ "title": "OpenAIResponseAnnotationCitation",
+ "description": "URL citation annotation for referencing external web resources."
+ },
+ "OpenAIResponseAnnotationContainerFileCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "container_file_citation",
+ "default": "container_file_citation"
+ },
+ "container_id": {
+ "type": "string"
+ },
+ "end_index": {
+ "type": "integer"
+ },
+ "file_id": {
+ "type": "string"
+ },
+ "filename": {
+ "type": "string"
+ },
+ "start_index": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "container_id",
+ "end_index",
+ "file_id",
+ "filename",
+ "start_index"
+ ],
+ "title": "OpenAIResponseAnnotationContainerFileCitation"
+ },
+ "OpenAIResponseAnnotationFileCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "file_citation",
+ "default": "file_citation",
+ "description": "Annotation type identifier, always \"file_citation\""
+ },
+ "file_id": {
+ "type": "string",
+ "description": "Unique identifier of the referenced file"
+ },
+ "filename": {
+ "type": "string",
+ "description": "Name of the referenced file"
+ },
+ "index": {
+ "type": "integer",
+ "description": "Position index of the citation within the content"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "file_id",
+ "filename",
+ "index"
+ ],
+ "title": "OpenAIResponseAnnotationFileCitation",
+ "description": "File citation annotation for referencing specific files in response content."
+ },
+ "OpenAIResponseAnnotationFilePath": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "file_path",
+ "default": "file_path"
+ },
+ "file_id": {
+ "type": "string"
+ },
+ "index": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "file_id",
+ "index"
+ ],
+ "title": "OpenAIResponseAnnotationFilePath"
+ },
+ "OpenAIResponseAnnotations": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationFileCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationFilePath"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "file_citation": "#/components/schemas/OpenAIResponseAnnotationFileCitation",
+ "url_citation": "#/components/schemas/OpenAIResponseAnnotationCitation",
+ "container_file_citation": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation",
+ "file_path": "#/components/schemas/OpenAIResponseAnnotationFilePath"
+ }
+ }
+ },
+ "OpenAIResponseInputMessageContent": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContentText"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
+ "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+ }
+ }
+ },
+ "OpenAIResponseInputMessageContentImage": {
+ "type": "object",
+ "properties": {
+ "detail": {
+ "oneOf": [
+ {
+ "type": "string",
+ "const": "low"
+ },
+ {
+ "type": "string",
+ "const": "high"
+ },
+ {
+ "type": "string",
+ "const": "auto"
+ }
+ ],
+ "default": "auto",
+ "description": "Level of detail for image processing, can be \"low\", \"high\", or \"auto\""
+ },
+ "type": {
+ "type": "string",
+ "const": "input_image",
+ "default": "input_image",
+ "description": "Content type identifier, always \"input_image\""
+ },
+ "image_url": {
+ "type": "string",
+ "description": "(Optional) URL of the image content"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "detail",
+ "type"
+ ],
+ "title": "OpenAIResponseInputMessageContentImage",
+ "description": "Image content for input messages in OpenAI response format."
+ },
+ "OpenAIResponseInputMessageContentText": {
+ "type": "object",
+ "properties": {
+ "text": {
+ "type": "string",
+ "description": "The text content of the input message"
+ },
+ "type": {
+ "type": "string",
+ "const": "input_text",
+ "default": "input_text",
+ "description": "Content type identifier, always \"input_text\""
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "text",
+ "type"
+ ],
+ "title": "OpenAIResponseInputMessageContentText",
+ "description": "Text content for input messages in OpenAI response format."
+ },
+ "OpenAIResponseMessage": {
+ "type": "object",
+ "properties": {
+ "content": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageContent"
+ }
+ }
+ ]
+ },
+ "role": {
+ "oneOf": [
+ {
+ "type": "string",
+ "const": "system"
+ },
+ {
+ "type": "string",
+ "const": "developer"
+ },
+ {
+ "type": "string",
+ "const": "user"
+ },
+ {
+ "type": "string",
+ "const": "assistant"
+ }
+ ]
+ },
+ "type": {
+ "type": "string",
+ "const": "message",
+ "default": "message"
+ },
+ "id": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "content",
+ "role",
+ "type"
+ ],
+ "title": "OpenAIResponseMessage",
+ "description": "Corresponds to the various Message types in the Responses API. They are all under one type because the Responses API gives them all the same \"type\" value, and there is no way to tell them apart in certain scenarios."
+ },
+ "OpenAIResponseOutputMessageContent": {
+ "type": "object",
+ "properties": {
+ "text": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "const": "output_text",
+ "default": "output_text"
+ },
+ "annotations": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotations"
+ }
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "text",
+ "type",
+ "annotations"
+ ],
+ "title": "OpenAIResponseOutputMessageContentOutputText"
+ },
+ "OpenAIResponseOutputMessageFileSearchToolCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this tool call"
+ },
+ "queries": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "List of search queries executed"
+ },
+ "status": {
+ "type": "string",
+ "description": "Current status of the file search operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "file_search_call",
+ "default": "file_search_call",
+ "description": "Tool call type identifier, always \"file_search_call\""
+ },
+ "results": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "attributes": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) Key-value attributes associated with the file"
+ },
+ "file_id": {
+ "type": "string",
+ "description": "Unique identifier of the file containing the result"
+ },
+ "filename": {
+ "type": "string",
+ "description": "Name of the file containing the result"
+ },
+ "score": {
+ "type": "number",
+ "description": "Relevance score for this search result (between 0 and 1)"
+ },
+ "text": {
+ "type": "string",
+ "description": "Text content of the search result"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "attributes",
+ "file_id",
+ "filename",
+ "score",
+ "text"
+ ],
+ "title": "OpenAIResponseOutputMessageFileSearchToolCallResults",
+ "description": "Search results returned by the file search operation."
+ },
+ "description": "(Optional) Search results returned by the file search operation"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "queries",
+ "status",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageFileSearchToolCall",
+ "description": "File search tool call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageFunctionToolCall": {
+ "type": "object",
+ "properties": {
+ "call_id": {
+ "type": "string",
+ "description": "Unique identifier for the function call"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the function being called"
+ },
+ "arguments": {
+ "type": "string",
+ "description": "JSON string containing the function arguments"
+ },
+ "type": {
+ "type": "string",
+ "const": "function_call",
+ "default": "function_call",
+ "description": "Tool call type identifier, always \"function_call\""
+ },
+ "id": {
+ "type": "string",
+ "description": "(Optional) Additional identifier for the tool call"
+ },
+ "status": {
+ "type": "string",
+ "description": "(Optional) Current status of the function call execution"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "call_id",
+ "name",
+ "arguments",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageFunctionToolCall",
+ "description": "Function tool call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageMCPCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this MCP call"
+ },
+ "type": {
+ "type": "string",
+ "const": "mcp_call",
+ "default": "mcp_call",
+ "description": "Tool call type identifier, always \"mcp_call\""
+ },
+ "arguments": {
+ "type": "string",
+ "description": "JSON string containing the MCP call arguments"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the MCP method being called"
+ },
+ "server_label": {
+ "type": "string",
+ "description": "Label identifying the MCP server handling the call"
+ },
+ "error": {
+ "type": "string",
+ "description": "(Optional) Error message if the MCP call failed"
+ },
+ "output": {
+ "type": "string",
+ "description": "(Optional) Output result from the successful MCP call"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "type",
+ "arguments",
+ "name",
+ "server_label"
+ ],
+ "title": "OpenAIResponseOutputMessageMCPCall",
+ "description": "Model Context Protocol (MCP) call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageMCPListTools": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this MCP list tools operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "mcp_list_tools",
+ "default": "mcp_list_tools",
+ "description": "Tool call type identifier, always \"mcp_list_tools\""
+ },
+ "server_label": {
+ "type": "string",
+ "description": "Label identifying the MCP server providing the tools"
+ },
+ "tools": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "JSON schema defining the tool's input parameters"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the tool"
+ },
+ "description": {
+ "type": "string",
+ "description": "(Optional) Description of what the tool does"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "input_schema",
+ "name"
+ ],
+ "title": "MCPListToolsTool",
+ "description": "Tool definition returned by MCP list tools operation."
+ },
+ "description": "List of available tools provided by the MCP server"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "type",
+ "server_label",
+ "tools"
+ ],
+ "title": "OpenAIResponseOutputMessageMCPListTools",
+ "description": "MCP list tools output message containing available tools from an MCP server."
+ },
+ "OpenAIResponseOutputMessageWebSearchToolCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this tool call"
+ },
+ "status": {
+ "type": "string",
+ "description": "Current status of the web search operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "web_search_call",
+ "default": "web_search_call",
+ "description": "Tool call type identifier, always \"web_search_call\""
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "status",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageWebSearchToolCall",
+ "description": "Web search tool call output message for OpenAI responses."
+ },
+ "CreateConversationRequest": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ },
+ "description": "Initial items to include in the conversation context."
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "description": "Set of key-value pairs that can be attached to an object."
+ }
+ },
+ "additionalProperties": false,
+ "title": "CreateConversationRequest"
+ },
+ "Conversation": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "const": "conversation",
+ "default": "conversation"
+ },
+ "created_at": {
+ "type": "integer"
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "title": "dict",
+              "description": "(Optional) Items in the conversation, each represented as a free-form JSON object."
+ }
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "created_at"
+ ],
+ "title": "Conversation",
+ "description": "OpenAI-compatible conversation object."
+ },
+ "UpdateConversationRequest": {
+ "type": "object",
+ "properties": {
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "description": "Set of key-value pairs that can be attached to an object."
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "metadata"
+ ],
+ "title": "UpdateConversationRequest"
+ },
+ "ConversationDeletedResource": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "default": "conversation.deleted"
+ },
+ "deleted": {
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "deleted"
+ ],
+ "title": "ConversationDeletedResource",
+ "description": "Response for deleted conversation."
+ },
+ "ConversationItemList": {
+ "type": "object",
+ "properties": {
+ "object": {
+ "type": "string",
+ "default": "list"
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ }
+ },
+ "first_id": {
+ "type": "string"
+ },
+ "last_id": {
+ "type": "string"
+ },
+ "has_more": {
+ "type": "boolean",
+ "default": false
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "object",
+ "data",
+ "has_more"
+ ],
+ "title": "ConversationItemList",
+ "description": "List of conversation items with pagination."
+ },
+ "AddItemsRequest": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ },
+ "description": "Items to include in the conversation context."
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "items"
+ ],
+ "title": "AddItemsRequest"
+ },
+ "ConversationItemDeletedResource": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "default": "conversation.item.deleted"
+ },
+ "deleted": {
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "deleted"
+ ],
+ "title": "ConversationItemDeletedResource",
+ "description": "Response for deleted conversation item."
+ },
"OpenaiEmbeddingsRequest": {
"type": "object",
"properties": {
@@ -5995,158 +7285,6 @@
"title": "ListOpenAIResponseObject",
"description": "Paginated list of OpenAI response objects with navigation metadata."
},
- "OpenAIResponseAnnotationCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "url_citation",
- "default": "url_citation",
- "description": "Annotation type identifier, always \"url_citation\""
- },
- "end_index": {
- "type": "integer",
- "description": "End position of the citation span in the content"
- },
- "start_index": {
- "type": "integer",
- "description": "Start position of the citation span in the content"
- },
- "title": {
- "type": "string",
- "description": "Title of the referenced web resource"
- },
- "url": {
- "type": "string",
- "description": "URL of the referenced web resource"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "end_index",
- "start_index",
- "title",
- "url"
- ],
- "title": "OpenAIResponseAnnotationCitation",
- "description": "URL citation annotation for referencing external web resources."
- },
- "OpenAIResponseAnnotationContainerFileCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "container_file_citation",
- "default": "container_file_citation"
- },
- "container_id": {
- "type": "string"
- },
- "end_index": {
- "type": "integer"
- },
- "file_id": {
- "type": "string"
- },
- "filename": {
- "type": "string"
- },
- "start_index": {
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "container_id",
- "end_index",
- "file_id",
- "filename",
- "start_index"
- ],
- "title": "OpenAIResponseAnnotationContainerFileCitation"
- },
- "OpenAIResponseAnnotationFileCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "file_citation",
- "default": "file_citation",
- "description": "Annotation type identifier, always \"file_citation\""
- },
- "file_id": {
- "type": "string",
- "description": "Unique identifier of the referenced file"
- },
- "filename": {
- "type": "string",
- "description": "Name of the referenced file"
- },
- "index": {
- "type": "integer",
- "description": "Position index of the citation within the content"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "file_id",
- "filename",
- "index"
- ],
- "title": "OpenAIResponseAnnotationFileCitation",
- "description": "File citation annotation for referencing specific files in response content."
- },
- "OpenAIResponseAnnotationFilePath": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "file_path",
- "default": "file_path"
- },
- "file_id": {
- "type": "string"
- },
- "index": {
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "file_id",
- "index"
- ],
- "title": "OpenAIResponseAnnotationFilePath"
- },
- "OpenAIResponseAnnotations": {
- "oneOf": [
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationFileCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationFilePath"
- }
- ],
- "discriminator": {
- "propertyName": "type",
- "mapping": {
- "file_citation": "#/components/schemas/OpenAIResponseAnnotationFileCitation",
- "url_citation": "#/components/schemas/OpenAIResponseAnnotationCitation",
- "container_file_citation": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation",
- "file_path": "#/components/schemas/OpenAIResponseAnnotationFilePath"
- }
- }
- },
"OpenAIResponseError": {
"type": "object",
"properties": {
@@ -6222,85 +7360,6 @@
"title": "OpenAIResponseInputFunctionToolCallOutput",
"description": "This represents the output of a function call that gets passed back to the model."
},
- "OpenAIResponseInputMessageContent": {
- "oneOf": [
- {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContentText"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
- }
- ],
- "discriminator": {
- "propertyName": "type",
- "mapping": {
- "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
- "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
- }
- }
- },
- "OpenAIResponseInputMessageContentImage": {
- "type": "object",
- "properties": {
- "detail": {
- "oneOf": [
- {
- "type": "string",
- "const": "low"
- },
- {
- "type": "string",
- "const": "high"
- },
- {
- "type": "string",
- "const": "auto"
- }
- ],
- "default": "auto",
- "description": "Level of detail for image processing, can be \"low\", \"high\", or \"auto\""
- },
- "type": {
- "type": "string",
- "const": "input_image",
- "default": "input_image",
- "description": "Content type identifier, always \"input_image\""
- },
- "image_url": {
- "type": "string",
- "description": "(Optional) URL of the image content"
- }
- },
- "additionalProperties": false,
- "required": [
- "detail",
- "type"
- ],
- "title": "OpenAIResponseInputMessageContentImage",
- "description": "Image content for input messages in OpenAI response format."
- },
- "OpenAIResponseInputMessageContentText": {
- "type": "object",
- "properties": {
- "text": {
- "type": "string",
- "description": "The text content of the input message"
- },
- "type": {
- "type": "string",
- "const": "input_text",
- "default": "input_text",
- "description": "Content type identifier, always \"input_text\""
- }
- },
- "additionalProperties": false,
- "required": [
- "text",
- "type"
- ],
- "title": "OpenAIResponseInputMessageContentText",
- "description": "Text content for input messages in OpenAI response format."
- },
"OpenAIResponseMCPApprovalRequest": {
"type": "object",
"properties": {
@@ -6363,69 +7422,6 @@
"title": "OpenAIResponseMCPApprovalResponse",
"description": "A response to an MCP approval request."
},
- "OpenAIResponseMessage": {
- "type": "object",
- "properties": {
- "content": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
- }
- },
- {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseOutputMessageContent"
- }
- }
- ]
- },
- "role": {
- "oneOf": [
- {
- "type": "string",
- "const": "system"
- },
- {
- "type": "string",
- "const": "developer"
- },
- {
- "type": "string",
- "const": "user"
- },
- {
- "type": "string",
- "const": "assistant"
- }
- ]
- },
- "type": {
- "type": "string",
- "const": "message",
- "default": "message"
- },
- "id": {
- "type": "string"
- },
- "status": {
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "content",
- "role",
- "type"
- ],
- "title": "OpenAIResponseMessage",
- "description": "Corresponds to the various Message types in the Responses API. They are all under one type because the Responses API gives them all the same \"type\" value, and there is no way to tell them apart in certain scenarios."
- },
"OpenAIResponseObjectWithInput": {
"type": "object",
"properties": {
@@ -6547,318 +7543,6 @@
}
}
},
- "OpenAIResponseOutputMessageContent": {
- "type": "object",
- "properties": {
- "text": {
- "type": "string"
- },
- "type": {
- "type": "string",
- "const": "output_text",
- "default": "output_text"
- },
- "annotations": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseAnnotations"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "text",
- "type",
- "annotations"
- ],
- "title": "OpenAIResponseOutputMessageContentOutputText"
- },
- "OpenAIResponseOutputMessageFileSearchToolCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this tool call"
- },
- "queries": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "description": "List of search queries executed"
- },
- "status": {
- "type": "string",
- "description": "Current status of the file search operation"
- },
- "type": {
- "type": "string",
- "const": "file_search_call",
- "default": "file_search_call",
- "description": "Tool call type identifier, always \"file_search_call\""
- },
- "results": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "attributes": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "(Optional) Key-value attributes associated with the file"
- },
- "file_id": {
- "type": "string",
- "description": "Unique identifier of the file containing the result"
- },
- "filename": {
- "type": "string",
- "description": "Name of the file containing the result"
- },
- "score": {
- "type": "number",
- "description": "Relevance score for this search result (between 0 and 1)"
- },
- "text": {
- "type": "string",
- "description": "Text content of the search result"
- }
- },
- "additionalProperties": false,
- "required": [
- "attributes",
- "file_id",
- "filename",
- "score",
- "text"
- ],
- "title": "OpenAIResponseOutputMessageFileSearchToolCallResults",
- "description": "Search results returned by the file search operation."
- },
- "description": "(Optional) Search results returned by the file search operation"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "queries",
- "status",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageFileSearchToolCall",
- "description": "File search tool call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageFunctionToolCall": {
- "type": "object",
- "properties": {
- "call_id": {
- "type": "string",
- "description": "Unique identifier for the function call"
- },
- "name": {
- "type": "string",
- "description": "Name of the function being called"
- },
- "arguments": {
- "type": "string",
- "description": "JSON string containing the function arguments"
- },
- "type": {
- "type": "string",
- "const": "function_call",
- "default": "function_call",
- "description": "Tool call type identifier, always \"function_call\""
- },
- "id": {
- "type": "string",
- "description": "(Optional) Additional identifier for the tool call"
- },
- "status": {
- "type": "string",
- "description": "(Optional) Current status of the function call execution"
- }
- },
- "additionalProperties": false,
- "required": [
- "call_id",
- "name",
- "arguments",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageFunctionToolCall",
- "description": "Function tool call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageMCPCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this MCP call"
- },
- "type": {
- "type": "string",
- "const": "mcp_call",
- "default": "mcp_call",
- "description": "Tool call type identifier, always \"mcp_call\""
- },
- "arguments": {
- "type": "string",
- "description": "JSON string containing the MCP call arguments"
- },
- "name": {
- "type": "string",
- "description": "Name of the MCP method being called"
- },
- "server_label": {
- "type": "string",
- "description": "Label identifying the MCP server handling the call"
- },
- "error": {
- "type": "string",
- "description": "(Optional) Error message if the MCP call failed"
- },
- "output": {
- "type": "string",
- "description": "(Optional) Output result from the successful MCP call"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "type",
- "arguments",
- "name",
- "server_label"
- ],
- "title": "OpenAIResponseOutputMessageMCPCall",
- "description": "Model Context Protocol (MCP) call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageMCPListTools": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this MCP list tools operation"
- },
- "type": {
- "type": "string",
- "const": "mcp_list_tools",
- "default": "mcp_list_tools",
- "description": "Tool call type identifier, always \"mcp_list_tools\""
- },
- "server_label": {
- "type": "string",
- "description": "Label identifying the MCP server providing the tools"
- },
- "tools": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "input_schema": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "JSON schema defining the tool's input parameters"
- },
- "name": {
- "type": "string",
- "description": "Name of the tool"
- },
- "description": {
- "type": "string",
- "description": "(Optional) Description of what the tool does"
- }
- },
- "additionalProperties": false,
- "required": [
- "input_schema",
- "name"
- ],
- "title": "MCPListToolsTool",
- "description": "Tool definition returned by MCP list tools operation."
- },
- "description": "List of available tools provided by the MCP server"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "type",
- "server_label",
- "tools"
- ],
- "title": "OpenAIResponseOutputMessageMCPListTools",
- "description": "MCP list tools output message containing available tools from an MCP server."
- },
- "OpenAIResponseOutputMessageWebSearchToolCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this tool call"
- },
- "status": {
- "type": "string",
- "description": "Current status of the web search operation"
- },
- "type": {
- "type": "string",
- "const": "web_search_call",
- "default": "web_search_call",
- "description": "Tool call type identifier, always \"web_search_call\""
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "status",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageWebSearchToolCall",
- "description": "Web search tool call output message for OpenAI responses."
- },
"OpenAIResponseText": {
"type": "object",
"properties": {
@@ -12188,6 +12872,11 @@
"description": "APIs for creating and interacting with agentic systems.\n\n## Responses API\n\nThe Responses API provides OpenAI-compatible functionality with enhanced capabilities for dynamic, stateful interactions.\n\n> **✅ STABLE**: This API is production-ready with backward compatibility guarantees. Recommended for production applications.\n\n### ✅ Supported Tools\n\nThe Responses API supports the following tool types:\n\n- **`web_search`**: Search the web for current information and real-time data\n- **`file_search`**: Search through uploaded files and vector stores\n - Supports dynamic `vector_store_ids` per call\n - Compatible with OpenAI file search patterns\n- **`function`**: Call custom functions with JSON schema validation\n- **`mcp_tool`**: Model Context Protocol integration\n\n### ✅ Supported Fields & Features\n\n**Core Capabilities:**\n- **Dynamic Configuration**: Switch models, vector stores, and tools per request without pre-configuration\n- **Conversation Branching**: Use `previous_response_id` to branch conversations and explore different paths\n- **Rich Annotations**: Automatic file citations, URL citations, and container file citations\n- **Status Tracking**: Monitor tool call execution status and handle failures gracefully\n\n### 🚧 Work in Progress\n\n- Full real-time response streaming support\n- `tool_choice` parameter\n- `max_tool_calls` parameter\n- Built-in tools (code interpreter, containers API)\n- Safety & guardrails\n- `reasoning` capabilities\n- `service_tier`\n- `logprobs`\n- `max_output_tokens`\n- `metadata` handling\n- `instructions`\n- `incomplete_details`\n- `background`",
"x-displayName": "Agents"
},
+ {
+ "name": "Conversations",
+        "description": "Protocol for conversation management operations.",
+        "x-displayName": "Conversations"
+ },
{
"name": "Files",
"description": ""
@@ -12261,6 +12950,7 @@
"name": "Operations",
"tags": [
"Agents",
+ "Conversations",
"Files",
"Inference",
"Inspect",
diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml
index 7d275a221..b9e03d614 100644
--- a/docs/static/llama-stack-spec.yaml
+++ b/docs/static/llama-stack-spec.yaml
@@ -167,6 +167,420 @@ paths:
$ref: '#/components/schemas/OpenaiCompletionRequest'
required: true
deprecated: false
+ /v1/conversations:
+ post:
+ responses:
+ '200':
+ description: The created conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Create a conversation.
+ description: Create a conversation.
+ parameters: []
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateConversationRequest'
+ required: true
+ deprecated: false
+ /v1/conversations/{conversation_id}:
+ get:
+ responses:
+ '200':
+ description: The conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Get a conversation with the given ID.
+ description: Get a conversation with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ post:
+ responses:
+ '200':
+ description: The updated conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: >-
+ Update a conversation's metadata with the given ID.
+ description: >-
+ Update a conversation's metadata with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UpdateConversationRequest'
+ required: true
+ deprecated: false
+ delete:
+ responses:
+ '200':
+ description: The deleted conversation resource.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationDeletedResource'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Delete a conversation with the given ID.
+ description: Delete a conversation with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ /v1/conversations/{conversation_id}/items:
+ get:
+ responses:
+ '200':
+ description: List of conversation items.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemList'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: List items in the conversation.
+ description: List items in the conversation.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: after
+ in: query
+ description: >-
+ An item ID to list items after, used in pagination.
+ required: true
+ schema:
+ oneOf:
+ - type: string
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: include
+ in: query
+ description: >-
+ Specify additional output data to include in the response.
+ required: true
+ schema:
+ oneOf:
+ - type: array
+ items:
+ type: string
+ enum:
+ - code_interpreter_call.outputs
+ - computer_call_output.output.image_url
+ - file_search_call.results
+ - message.input_image.image_url
+ - message.output_text.logprobs
+ - reasoning.encrypted_content
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: limit
+ in: query
+ description: >-
+ A limit on the number of objects to be returned (1-100, default 20).
+ required: true
+ schema:
+ oneOf:
+ - type: integer
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: order
+ in: query
+ description: >-
+ The order to return items in (asc or desc, default desc).
+ required: true
+ schema:
+ oneOf:
+ - type: string
+ enum:
+ - asc
+ - desc
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ deprecated: false
+ post:
+ responses:
+ '200':
+ description: List of created items.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemList'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Create items in the conversation.
+ description: Create items in the conversation.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AddItemsRequest'
+ required: true
+ deprecated: false
+ /v1/conversations/{conversation_id}/items/{item_id}:
+ get:
+ responses:
+ '200':
+ description: The conversation item.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItem'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Retrieve a conversation item.
+ description: Retrieve a conversation item.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: item_id
+ in: path
+ description: The item identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ delete:
+ responses:
+ '200':
+ description: The deleted item resource.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemDeletedResource'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Delete a conversation item.
+ description: Delete a conversation item.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: item_id
+ in: path
+ description: The item identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
/v1/embeddings:
post:
responses:
@@ -3756,6 +4170,633 @@ components:
title: OpenAICompletionChoice
description: >-
A choice from an OpenAI-compatible completion response.
+ ConversationItem:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseMessage'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
+ discriminator:
+ propertyName: type
+ mapping:
+ message: '#/components/schemas/OpenAIResponseMessage'
+ function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
+ file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
+ web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
+ mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
+ mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
+ OpenAIResponseAnnotationCitation:
+ type: object
+ properties:
+ type:
+ type: string
+ const: url_citation
+ default: url_citation
+ description: >-
+ Annotation type identifier, always "url_citation"
+ end_index:
+ type: integer
+ description: >-
+ End position of the citation span in the content
+ start_index:
+ type: integer
+ description: >-
+ Start position of the citation span in the content
+ title:
+ type: string
+ description: Title of the referenced web resource
+ url:
+ type: string
+ description: URL of the referenced web resource
+ additionalProperties: false
+ required:
+ - type
+ - end_index
+ - start_index
+ - title
+ - url
+ title: OpenAIResponseAnnotationCitation
+ description: >-
+ URL citation annotation for referencing external web resources.
+ "OpenAIResponseAnnotationContainerFileCitation":
+ type: object
+ properties:
+ type:
+ type: string
+ const: container_file_citation
+ default: container_file_citation
+ container_id:
+ type: string
+ end_index:
+ type: integer
+ file_id:
+ type: string
+ filename:
+ type: string
+ start_index:
+ type: integer
+ additionalProperties: false
+ required:
+ - type
+ - container_id
+ - end_index
+ - file_id
+ - filename
+ - start_index
+ title: >-
+ OpenAIResponseAnnotationContainerFileCitation
+ OpenAIResponseAnnotationFileCitation:
+ type: object
+ properties:
+ type:
+ type: string
+ const: file_citation
+ default: file_citation
+ description: >-
+ Annotation type identifier, always "file_citation"
+ file_id:
+ type: string
+ description: Unique identifier of the referenced file
+ filename:
+ type: string
+ description: Name of the referenced file
+ index:
+ type: integer
+ description: >-
+ Position index of the citation within the content
+ additionalProperties: false
+ required:
+ - type
+ - file_id
+ - filename
+ - index
+ title: OpenAIResponseAnnotationFileCitation
+ description: >-
+ File citation annotation for referencing specific files in response content.
+ OpenAIResponseAnnotationFilePath:
+ type: object
+ properties:
+ type:
+ type: string
+ const: file_path
+ default: file_path
+ file_id:
+ type: string
+ index:
+ type: integer
+ additionalProperties: false
+ required:
+ - type
+ - file_id
+ - index
+ title: OpenAIResponseAnnotationFilePath
+ OpenAIResponseAnnotations:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath'
+ discriminator:
+ propertyName: type
+ mapping:
+ file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
+ url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation'
+ container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
+ file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath'
+ OpenAIResponseInputMessageContent:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText'
+ - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage'
+ discriminator:
+ propertyName: type
+ mapping:
+ input_text: '#/components/schemas/OpenAIResponseInputMessageContentText'
+ input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage'
+ OpenAIResponseInputMessageContentImage:
+ type: object
+ properties:
+ detail:
+ oneOf:
+ - type: string
+ const: low
+ - type: string
+ const: high
+ - type: string
+ const: auto
+ default: auto
+ description: >-
+ Level of detail for image processing, can be "low", "high", or "auto"
+ type:
+ type: string
+ const: input_image
+ default: input_image
+ description: >-
+ Content type identifier, always "input_image"
+ image_url:
+ type: string
+ description: (Optional) URL of the image content
+ additionalProperties: false
+ required:
+ - detail
+ - type
+ title: OpenAIResponseInputMessageContentImage
+ description: >-
+ Image content for input messages in OpenAI response format.
+ OpenAIResponseInputMessageContentText:
+ type: object
+ properties:
+ text:
+ type: string
+ description: The text content of the input message
+ type:
+ type: string
+ const: input_text
+ default: input_text
+ description: >-
+ Content type identifier, always "input_text"
+ additionalProperties: false
+ required:
+ - text
+ - type
+ title: OpenAIResponseInputMessageContentText
+ description: >-
+ Text content for input messages in OpenAI response format.
+ OpenAIResponseMessage:
+ type: object
+ properties:
+ content:
+ oneOf:
+ - type: string
+ - type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseInputMessageContent'
+ - type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseOutputMessageContent'
+ role:
+ oneOf:
+ - type: string
+ const: system
+ - type: string
+ const: developer
+ - type: string
+ const: user
+ - type: string
+ const: assistant
+ type:
+ type: string
+ const: message
+ default: message
+ id:
+ type: string
+ status:
+ type: string
+ additionalProperties: false
+ required:
+ - content
+ - role
+ - type
+ title: OpenAIResponseMessage
+ description: >-
+ Corresponds to the various Message types in the Responses API. They are all
+ under one type because the Responses API gives them all the same "type" value,
+ and there is no way to tell them apart in certain scenarios.
+ OpenAIResponseOutputMessageContent:
+ type: object
+ properties:
+ text:
+ type: string
+ type:
+ type: string
+ const: output_text
+ default: output_text
+ annotations:
+ type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseAnnotations'
+ additionalProperties: false
+ required:
+ - text
+ - type
+ - annotations
+ title: >-
+ OpenAIResponseOutputMessageContentOutputText
+ "OpenAIResponseOutputMessageFileSearchToolCall":
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this tool call
+ queries:
+ type: array
+ items:
+ type: string
+ description: List of search queries executed
+ status:
+ type: string
+ description: >-
+ Current status of the file search operation
+ type:
+ type: string
+ const: file_search_call
+ default: file_search_call
+ description: >-
+ Tool call type identifier, always "file_search_call"
+ results:
+ type: array
+ items:
+ type: object
+ properties:
+ attributes:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) Key-value attributes associated with the file
+ file_id:
+ type: string
+ description: >-
+ Unique identifier of the file containing the result
+ filename:
+ type: string
+ description: Name of the file containing the result
+ score:
+ type: number
+ description: >-
+ Relevance score for this search result (between 0 and 1)
+ text:
+ type: string
+ description: Text content of the search result
+ additionalProperties: false
+ required:
+ - attributes
+ - file_id
+ - filename
+ - score
+ - text
+ title: >-
+ OpenAIResponseOutputMessageFileSearchToolCallResults
+ description: >-
+ Search results returned by the file search operation.
+ description: >-
+ (Optional) Search results returned by the file search operation
+ additionalProperties: false
+ required:
+ - id
+ - queries
+ - status
+ - type
+ title: >-
+ OpenAIResponseOutputMessageFileSearchToolCall
+ description: >-
+ File search tool call output message for OpenAI responses.
+ "OpenAIResponseOutputMessageFunctionToolCall":
+ type: object
+ properties:
+ call_id:
+ type: string
+ description: Unique identifier for the function call
+ name:
+ type: string
+ description: Name of the function being called
+ arguments:
+ type: string
+ description: >-
+ JSON string containing the function arguments
+ type:
+ type: string
+ const: function_call
+ default: function_call
+ description: >-
+ Tool call type identifier, always "function_call"
+ id:
+ type: string
+ description: >-
+ (Optional) Additional identifier for the tool call
+ status:
+ type: string
+ description: >-
+ (Optional) Current status of the function call execution
+ additionalProperties: false
+ required:
+ - call_id
+ - name
+ - arguments
+ - type
+ title: >-
+ OpenAIResponseOutputMessageFunctionToolCall
+ description: >-
+ Function tool call output message for OpenAI responses.
+ OpenAIResponseOutputMessageMCPCall:
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this MCP call
+ type:
+ type: string
+ const: mcp_call
+ default: mcp_call
+ description: >-
+ Tool call type identifier, always "mcp_call"
+ arguments:
+ type: string
+ description: >-
+ JSON string containing the MCP call arguments
+ name:
+ type: string
+ description: Name of the MCP method being called
+ server_label:
+ type: string
+ description: >-
+ Label identifying the MCP server handling the call
+ error:
+ type: string
+ description: >-
+ (Optional) Error message if the MCP call failed
+ output:
+ type: string
+ description: >-
+ (Optional) Output result from the successful MCP call
+ additionalProperties: false
+ required:
+ - id
+ - type
+ - arguments
+ - name
+ - server_label
+ title: OpenAIResponseOutputMessageMCPCall
+ description: >-
+ Model Context Protocol (MCP) call output message for OpenAI responses.
+ OpenAIResponseOutputMessageMCPListTools:
+ type: object
+ properties:
+ id:
+ type: string
+ description: >-
+ Unique identifier for this MCP list tools operation
+ type:
+ type: string
+ const: mcp_list_tools
+ default: mcp_list_tools
+ description: >-
+ Tool call type identifier, always "mcp_list_tools"
+ server_label:
+ type: string
+ description: >-
+ Label identifying the MCP server providing the tools
+ tools:
+ type: array
+ items:
+ type: object
+ properties:
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ JSON schema defining the tool's input parameters
+ name:
+ type: string
+ description: Name of the tool
+ description:
+ type: string
+ description: >-
+ (Optional) Description of what the tool does
+ additionalProperties: false
+ required:
+ - input_schema
+ - name
+ title: MCPListToolsTool
+ description: >-
+ Tool definition returned by MCP list tools operation.
+ description: >-
+ List of available tools provided by the MCP server
+ additionalProperties: false
+ required:
+ - id
+ - type
+ - server_label
+ - tools
+ title: OpenAIResponseOutputMessageMCPListTools
+ description: >-
+ MCP list tools output message containing available tools from an MCP server.
+ "OpenAIResponseOutputMessageWebSearchToolCall":
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this tool call
+ status:
+ type: string
+ description: >-
+ Current status of the web search operation
+ type:
+ type: string
+ const: web_search_call
+ default: web_search_call
+ description: >-
+ Tool call type identifier, always "web_search_call"
+ additionalProperties: false
+ required:
+ - id
+ - status
+ - type
+ title: >-
+ OpenAIResponseOutputMessageWebSearchToolCall
+ description: >-
+ Web search tool call output message for OpenAI responses.
+ CreateConversationRequest:
+ type: object
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ description: >-
+ Initial items to include in the conversation context.
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ description: >-
+ Set of key-value pairs that can be attached to an object.
+ additionalProperties: false
+ title: CreateConversationRequest
+ Conversation:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ const: conversation
+ default: conversation
+ created_at:
+ type: integer
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ items:
+ type: array
+ items:
+ type: object
+ title: dict
+ description: >-
+ An arbitrary key-value object representing a conversation item.
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - created_at
+ title: Conversation
+ description: OpenAI-compatible conversation object.
+ UpdateConversationRequest:
+ type: object
+ properties:
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ description: >-
+ Set of key-value pairs that can be attached to an object.
+ additionalProperties: false
+ required:
+ - metadata
+ title: UpdateConversationRequest
+ ConversationDeletedResource:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ default: conversation.deleted
+ deleted:
+ type: boolean
+ default: true
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - deleted
+ title: ConversationDeletedResource
+ description: Response for deleted conversation.
+ ConversationItemList:
+ type: object
+ properties:
+ object:
+ type: string
+ default: list
+ data:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ first_id:
+ type: string
+ last_id:
+ type: string
+ has_more:
+ type: boolean
+ default: false
+ additionalProperties: false
+ required:
+ - object
+ - data
+ - has_more
+ title: ConversationItemList
+ description: >-
+ List of conversation items with pagination.
+ AddItemsRequest:
+ type: object
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ description: >-
+ Items to include in the conversation context.
+ additionalProperties: false
+ required:
+ - items
+ title: AddItemsRequest
+ ConversationItemDeletedResource:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ default: conversation.item.deleted
+ deleted:
+ type: boolean
+ default: true
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - deleted
+ title: ConversationItemDeletedResource
+ description: Response for deleted conversation item.
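The request-body schemas above are easiest to read by example. The sketch below builds a hypothetical CreateConversationRequest and UpdateConversationRequest as plain Python dicts; the field names follow the schemas, while the values are illustrative placeholders.

```python
# Illustrative construction of the request bodies defined by the schemas
# above. Field names mirror CreateConversationRequest, ConversationItem
# (as an OpenAIResponseMessage), and UpdateConversationRequest; the values
# are placeholders.
import json

user_message = {
    "type": "message",        # discriminator value for OpenAIResponseMessage
    "role": "user",
    "content": [
        # OpenAIResponseInputMessageContentText
        {"type": "input_text", "text": "Hello!"}
    ],
}

create_conversation_request = {
    "items": [user_message],              # initial conversation items
    "metadata": {"topic": "greetings"},   # string-to-string map
}

update_conversation_request = {
    "metadata": {"topic": "greetings", "status": "active"},
}

print(json.dumps(create_conversation_request, indent=2))
print(json.dumps(update_conversation_request, indent=2))
```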
OpenaiEmbeddingsRequest:
type: object
properties:
@@ -4450,124 +5491,6 @@ components:
title: ListOpenAIResponseObject
description: >-
Paginated list of OpenAI response objects with navigation metadata.
- OpenAIResponseAnnotationCitation:
- type: object
- properties:
- type:
- type: string
- const: url_citation
- default: url_citation
- description: >-
- Annotation type identifier, always "url_citation"
- end_index:
- type: integer
- description: >-
- End position of the citation span in the content
- start_index:
- type: integer
- description: >-
- Start position of the citation span in the content
- title:
- type: string
- description: Title of the referenced web resource
- url:
- type: string
- description: URL of the referenced web resource
- additionalProperties: false
- required:
- - type
- - end_index
- - start_index
- - title
- - url
- title: OpenAIResponseAnnotationCitation
- description: >-
- URL citation annotation for referencing external web resources.
- "OpenAIResponseAnnotationContainerFileCitation":
- type: object
- properties:
- type:
- type: string
- const: container_file_citation
- default: container_file_citation
- container_id:
- type: string
- end_index:
- type: integer
- file_id:
- type: string
- filename:
- type: string
- start_index:
- type: integer
- additionalProperties: false
- required:
- - type
- - container_id
- - end_index
- - file_id
- - filename
- - start_index
- title: >-
- OpenAIResponseAnnotationContainerFileCitation
- OpenAIResponseAnnotationFileCitation:
- type: object
- properties:
- type:
- type: string
- const: file_citation
- default: file_citation
- description: >-
- Annotation type identifier, always "file_citation"
- file_id:
- type: string
- description: Unique identifier of the referenced file
- filename:
- type: string
- description: Name of the referenced file
- index:
- type: integer
- description: >-
- Position index of the citation within the content
- additionalProperties: false
- required:
- - type
- - file_id
- - filename
- - index
- title: OpenAIResponseAnnotationFileCitation
- description: >-
- File citation annotation for referencing specific files in response content.
- OpenAIResponseAnnotationFilePath:
- type: object
- properties:
- type:
- type: string
- const: file_path
- default: file_path
- file_id:
- type: string
- index:
- type: integer
- additionalProperties: false
- required:
- - type
- - file_id
- - index
- title: OpenAIResponseAnnotationFilePath
- OpenAIResponseAnnotations:
- oneOf:
- - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath'
- discriminator:
- propertyName: type
- mapping:
- file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
- url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation'
- container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
- file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath'
OpenAIResponseError:
type: object
properties:
@@ -4620,64 +5543,6 @@ components:
description: >-
This represents the output of a function call that gets passed back to the
model.
- OpenAIResponseInputMessageContent:
- oneOf:
- - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText'
- - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage'
- discriminator:
- propertyName: type
- mapping:
- input_text: '#/components/schemas/OpenAIResponseInputMessageContentText'
- input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage'
- OpenAIResponseInputMessageContentImage:
- type: object
- properties:
- detail:
- oneOf:
- - type: string
- const: low
- - type: string
- const: high
- - type: string
- const: auto
- default: auto
- description: >-
- Level of detail for image processing, can be "low", "high", or "auto"
- type:
- type: string
- const: input_image
- default: input_image
- description: >-
- Content type identifier, always "input_image"
- image_url:
- type: string
- description: (Optional) URL of the image content
- additionalProperties: false
- required:
- - detail
- - type
- title: OpenAIResponseInputMessageContentImage
- description: >-
- Image content for input messages in OpenAI response format.
- OpenAIResponseInputMessageContentText:
- type: object
- properties:
- text:
- type: string
- description: The text content of the input message
- type:
- type: string
- const: input_text
- default: input_text
- description: >-
- Content type identifier, always "input_text"
- additionalProperties: false
- required:
- - text
- - type
- title: OpenAIResponseInputMessageContentText
- description: >-
- Text content for input messages in OpenAI response format.
OpenAIResponseMCPApprovalRequest:
type: object
properties:
@@ -4725,46 +5590,6 @@ components:
- type
title: OpenAIResponseMCPApprovalResponse
description: A response to an MCP approval request.
- OpenAIResponseMessage:
- type: object
- properties:
- content:
- oneOf:
- - type: string
- - type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseInputMessageContent'
- - type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseOutputMessageContent'
- role:
- oneOf:
- - type: string
- const: system
- - type: string
- const: developer
- - type: string
- const: user
- - type: string
- const: assistant
- type:
- type: string
- const: message
- default: message
- id:
- type: string
- status:
- type: string
- additionalProperties: false
- required:
- - content
- - role
- - type
- title: OpenAIResponseMessage
- description: >-
- Corresponds to the various Message types in the Responses API. They are all
- under one type because the Responses API gives them all the same "type" value,
- and there is no way to tell them apart in certain scenarios.
OpenAIResponseObjectWithInput:
type: object
properties:
@@ -4862,263 +5687,6 @@ components:
mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest'
- OpenAIResponseOutputMessageContent:
- type: object
- properties:
- text:
- type: string
- type:
- type: string
- const: output_text
- default: output_text
- annotations:
- type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseAnnotations'
- additionalProperties: false
- required:
- - text
- - type
- - annotations
- title: >-
- OpenAIResponseOutputMessageContentOutputText
- "OpenAIResponseOutputMessageFileSearchToolCall":
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this tool call
- queries:
- type: array
- items:
- type: string
- description: List of search queries executed
- status:
- type: string
- description: >-
- Current status of the file search operation
- type:
- type: string
- const: file_search_call
- default: file_search_call
- description: >-
- Tool call type identifier, always "file_search_call"
- results:
- type: array
- items:
- type: object
- properties:
- attributes:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Key-value attributes associated with the file
- file_id:
- type: string
- description: >-
- Unique identifier of the file containing the result
- filename:
- type: string
- description: Name of the file containing the result
- score:
- type: number
- description: >-
- Relevance score for this search result (between 0 and 1)
- text:
- type: string
- description: Text content of the search result
- additionalProperties: false
- required:
- - attributes
- - file_id
- - filename
- - score
- - text
- title: >-
- OpenAIResponseOutputMessageFileSearchToolCallResults
- description: >-
- Search results returned by the file search operation.
- description: >-
- (Optional) Search results returned by the file search operation
- additionalProperties: false
- required:
- - id
- - queries
- - status
- - type
- title: >-
- OpenAIResponseOutputMessageFileSearchToolCall
- description: >-
- File search tool call output message for OpenAI responses.
- "OpenAIResponseOutputMessageFunctionToolCall":
- type: object
- properties:
- call_id:
- type: string
- description: Unique identifier for the function call
- name:
- type: string
- description: Name of the function being called
- arguments:
- type: string
- description: >-
- JSON string containing the function arguments
- type:
- type: string
- const: function_call
- default: function_call
- description: >-
- Tool call type identifier, always "function_call"
- id:
- type: string
- description: >-
- (Optional) Additional identifier for the tool call
- status:
- type: string
- description: >-
- (Optional) Current status of the function call execution
- additionalProperties: false
- required:
- - call_id
- - name
- - arguments
- - type
- title: >-
- OpenAIResponseOutputMessageFunctionToolCall
- description: >-
- Function tool call output message for OpenAI responses.
- OpenAIResponseOutputMessageMCPCall:
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this MCP call
- type:
- type: string
- const: mcp_call
- default: mcp_call
- description: >-
- Tool call type identifier, always "mcp_call"
- arguments:
- type: string
- description: >-
- JSON string containing the MCP call arguments
- name:
- type: string
- description: Name of the MCP method being called
- server_label:
- type: string
- description: >-
- Label identifying the MCP server handling the call
- error:
- type: string
- description: >-
- (Optional) Error message if the MCP call failed
- output:
- type: string
- description: >-
- (Optional) Output result from the successful MCP call
- additionalProperties: false
- required:
- - id
- - type
- - arguments
- - name
- - server_label
- title: OpenAIResponseOutputMessageMCPCall
- description: >-
- Model Context Protocol (MCP) call output message for OpenAI responses.
- OpenAIResponseOutputMessageMCPListTools:
- type: object
- properties:
- id:
- type: string
- description: >-
- Unique identifier for this MCP list tools operation
- type:
- type: string
- const: mcp_list_tools
- default: mcp_list_tools
- description: >-
- Tool call type identifier, always "mcp_list_tools"
- server_label:
- type: string
- description: >-
- Label identifying the MCP server providing the tools
- tools:
- type: array
- items:
- type: object
- properties:
- input_schema:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- JSON schema defining the tool's input parameters
- name:
- type: string
- description: Name of the tool
- description:
- type: string
- description: >-
- (Optional) Description of what the tool does
- additionalProperties: false
- required:
- - input_schema
- - name
- title: MCPListToolsTool
- description: >-
- Tool definition returned by MCP list tools operation.
- description: >-
- List of available tools provided by the MCP server
- additionalProperties: false
- required:
- - id
- - type
- - server_label
- - tools
- title: OpenAIResponseOutputMessageMCPListTools
- description: >-
- MCP list tools output message containing available tools from an MCP server.
- "OpenAIResponseOutputMessageWebSearchToolCall":
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this tool call
- status:
- type: string
- description: >-
- Current status of the web search operation
- type:
- type: string
- const: web_search_call
- default: web_search_call
- description: >-
- Tool call type identifier, always "web_search_call"
- additionalProperties: false
- required:
- - id
- - status
- - type
- title: >-
- OpenAIResponseOutputMessageWebSearchToolCall
- description: >-
- Web search tool call output message for OpenAI responses.
OpenAIResponseText:
type: object
properties:
@@ -9152,6 +9720,10 @@ tags:
- `background`
x-displayName: Agents
+ - name: Conversations
+ description: >-
+ Protocol for conversation management operations.
+ x-displayName: Conversations
- name: Files
description: ''
- name: Inference
@@ -9202,6 +9774,7 @@ x-tagGroups:
- name: Operations
tags:
- Agents
+ - Conversations
- Files
- Inference
- Inspect
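Before the same additions are mirrored into the Stainless spec below, here is a rough end-to-end sketch of the conversation lifecycle the new paths describe: create, update metadata, delete. It assumes a locally running server at a placeholder address and uses Python's `requests` library; field names follow the Conversation, UpdateConversationRequest, and ConversationDeletedResource schemas.

```python
# Rough end-to-end sketch against the Conversations endpoints documented
# above: create a conversation, update its metadata, then delete it.
# The server address is an assumption; field names follow the spec schemas.
import requests

BASE_URL = "http://localhost:8321"  # assumed server address

# POST /v1/conversations with a CreateConversationRequest body.
conv = requests.post(
    f"{BASE_URL}/v1/conversations",
    json={"metadata": {"project": "demo"}},
    timeout=30,
).json()
print(conv["id"], conv["object"], conv["created_at"])

# POST /v1/conversations/{conversation_id} with an UpdateConversationRequest body.
updated = requests.post(
    f"{BASE_URL}/v1/conversations/{conv['id']}",
    json={"metadata": {"project": "demo", "stage": "review"}},
    timeout=30,
).json()
print(updated["metadata"])

# DELETE /v1/conversations/{conversation_id} returns a ConversationDeletedResource.
deleted = requests.delete(
    f"{BASE_URL}/v1/conversations/{conv['id']}", timeout=30
).json()
print(deleted["deleted"])
```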
diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html
index 1ae477e7e..7ec48ef74 100644
--- a/docs/static/stainless-llama-stack-spec.html
+++ b/docs/static/stainless-llama-stack-spec.html
@@ -252,6 +252,483 @@
"deprecated": false
}
},
+ "/v1/conversations": {
+ "post": {
+ "responses": {
+ "200": {
+ "description": "The created conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Create a conversation.",
+ "description": "Create a conversation.",
+ "parameters": [],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateConversationRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "The conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Get a conversation with the given ID.",
+ "description": "Get a conversation with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "post": {
+ "responses": {
+ "200": {
+ "description": "The updated conversation object.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Conversation"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Update a conversation's metadata with the given ID.",
+ "description": "Update a conversation's metadata with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UpdateConversationRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ },
+ "delete": {
+ "responses": {
+ "200": {
+ "description": "The deleted conversation resource.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationDeletedResource"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Delete a conversation with the given ID.",
+ "description": "Delete a conversation with the given ID.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}/items": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "List of conversation items.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemList"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "List items in the conversation.",
+ "description": "List items in the conversation.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "after",
+ "in": "query",
+ "description": "An item ID to list items after, used in pagination.",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "include",
+ "in": "query",
+ "description": "Specify additional output data to include in the response.",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "code_interpreter_call.outputs",
+ "computer_call_output.output.image_url",
+ "file_search_call.results",
+ "message.input_image.image_url",
+ "message.output_text.logprobs",
+ "reasoning.encrypted_content"
+ ]
+ }
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "description": "A limit on the number of objects to be returned (1-100, default 20).",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ },
+ {
+ "name": "order",
+ "in": "query",
+ "description": "The order to return items in (asc or desc, default desc).",
+ "required": true,
+ "schema": {
+ "oneOf": [
+ {
+ "type": "string",
+ "enum": [
+ "asc",
+ "desc"
+ ]
+ },
+ {
+ "type": "object",
+ "title": "NotGiven",
+ "description": "A sentinel singleton class used to distinguish omitted keyword arguments from those passed in with the value None (which may have different behavior).\nFor example:\n\n```py\ndef get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...\n\n\nget(timeout=1) # 1s timeout\nget(timeout=None) # No timeout\nget() # Default timeout behavior, which may not be statically known at the method definition.\n```"
+ }
+ ]
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "post": {
+ "responses": {
+ "200": {
+ "description": "List of created items.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemList"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Create items in the conversation.",
+ "description": "Create items in the conversation.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/AddItemsRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "deprecated": false
+ }
+ },
+ "/v1/conversations/{conversation_id}/items/{item_id}": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "The conversation item.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItem"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Retrieve a conversation item.",
+ "description": "Retrieve a conversation item.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "item_id",
+ "in": "path",
+ "description": "The item identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ },
+ "delete": {
+ "responses": {
+ "200": {
+ "description": "The deleted item resource.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ConversationItemDeletedResource"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Conversations"
+ ],
+ "summary": "Delete a conversation item.",
+ "description": "Delete a conversation item.",
+ "parameters": [
+ {
+ "name": "conversation_id",
+ "in": "path",
+ "description": "The conversation identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "item_id",
+ "in": "path",
+ "description": "The item identifier.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "deprecated": false
+ }
+ },
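The list endpoint above exposes `after`, `limit`, and `order` query parameters together with the paginated ConversationItemList response. A possible pagination loop, with a placeholder server address and conversation ID, might look like this:

```python
# Sketch of paging through /v1/conversations/{conversation_id}/items using
# the query parameters defined above (after, limit, order). The base URL and
# conversation ID are placeholders; data, last_id, and has_more come from
# the ConversationItemList schema later in this spec.
import requests

BASE_URL = "http://localhost:8321"   # assumed server address
conversation_id = "conv_123"         # hypothetical identifier

after = None
while True:
    params = {"limit": 20, "order": "asc"}
    if after is not None:
        params["after"] = after
    page = requests.get(
        f"{BASE_URL}/v1/conversations/{conversation_id}/items",
        params=params,
        timeout=30,
    ).json()
    for item in page["data"]:
        print(item["type"])          # each item is a ConversationItem variant
    if not page["has_more"]:
        break
    after = page["last_id"]          # cursor for the next page
```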
"/v1/embeddings": {
"post": {
"responses": {
@@ -7120,6 +7597,819 @@
"title": "OpenAICompletionChoice",
"description": "A choice from an OpenAI-compatible completion response."
},
+ "ConversationItem": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseMessage"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPCall"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "message": "#/components/schemas/OpenAIResponseMessage",
+ "function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall",
+ "file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall",
+ "web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall",
+ "mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall",
+ "mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools"
+ }
+ }
+ },
+ "OpenAIResponseAnnotationCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "url_citation",
+ "default": "url_citation",
+ "description": "Annotation type identifier, always \"url_citation\""
+ },
+ "end_index": {
+ "type": "integer",
+ "description": "End position of the citation span in the content"
+ },
+ "start_index": {
+ "type": "integer",
+ "description": "Start position of the citation span in the content"
+ },
+ "title": {
+ "type": "string",
+ "description": "Title of the referenced web resource"
+ },
+ "url": {
+ "type": "string",
+ "description": "URL of the referenced web resource"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "end_index",
+ "start_index",
+ "title",
+ "url"
+ ],
+ "title": "OpenAIResponseAnnotationCitation",
+ "description": "URL citation annotation for referencing external web resources."
+ },
+ "OpenAIResponseAnnotationContainerFileCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "container_file_citation",
+ "default": "container_file_citation"
+ },
+ "container_id": {
+ "type": "string"
+ },
+ "end_index": {
+ "type": "integer"
+ },
+ "file_id": {
+ "type": "string"
+ },
+ "filename": {
+ "type": "string"
+ },
+ "start_index": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "container_id",
+ "end_index",
+ "file_id",
+ "filename",
+ "start_index"
+ ],
+ "title": "OpenAIResponseAnnotationContainerFileCitation"
+ },
+ "OpenAIResponseAnnotationFileCitation": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "file_citation",
+ "default": "file_citation",
+ "description": "Annotation type identifier, always \"file_citation\""
+ },
+ "file_id": {
+ "type": "string",
+ "description": "Unique identifier of the referenced file"
+ },
+ "filename": {
+ "type": "string",
+ "description": "Name of the referenced file"
+ },
+ "index": {
+ "type": "integer",
+ "description": "Position index of the citation within the content"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "file_id",
+ "filename",
+ "index"
+ ],
+ "title": "OpenAIResponseAnnotationFileCitation",
+ "description": "File citation annotation for referencing specific files in response content."
+ },
+ "OpenAIResponseAnnotationFilePath": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "const": "file_path",
+ "default": "file_path"
+ },
+ "file_id": {
+ "type": "string"
+ },
+ "index": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "file_id",
+ "index"
+ ],
+ "title": "OpenAIResponseAnnotationFilePath"
+ },
+ "OpenAIResponseAnnotations": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationFileCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotationFilePath"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "file_citation": "#/components/schemas/OpenAIResponseAnnotationFileCitation",
+ "url_citation": "#/components/schemas/OpenAIResponseAnnotationCitation",
+ "container_file_citation": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation",
+ "file_path": "#/components/schemas/OpenAIResponseAnnotationFilePath"
+ }
+ }
+ },
+ "OpenAIResponseInputMessageContent": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContentText"
+ },
+ {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
+ "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+ }
+ }
+ },
+ "OpenAIResponseInputMessageContentImage": {
+ "type": "object",
+ "properties": {
+ "detail": {
+ "oneOf": [
+ {
+ "type": "string",
+ "const": "low"
+ },
+ {
+ "type": "string",
+ "const": "high"
+ },
+ {
+ "type": "string",
+ "const": "auto"
+ }
+ ],
+ "default": "auto",
+ "description": "Level of detail for image processing, can be \"low\", \"high\", or \"auto\""
+ },
+ "type": {
+ "type": "string",
+ "const": "input_image",
+ "default": "input_image",
+ "description": "Content type identifier, always \"input_image\""
+ },
+ "image_url": {
+ "type": "string",
+ "description": "(Optional) URL of the image content"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "detail",
+ "type"
+ ],
+ "title": "OpenAIResponseInputMessageContentImage",
+ "description": "Image content for input messages in OpenAI response format."
+ },
+ "OpenAIResponseInputMessageContentText": {
+ "type": "object",
+ "properties": {
+ "text": {
+ "type": "string",
+ "description": "The text content of the input message"
+ },
+ "type": {
+ "type": "string",
+ "const": "input_text",
+ "default": "input_text",
+ "description": "Content type identifier, always \"input_text\""
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "text",
+ "type"
+ ],
+ "title": "OpenAIResponseInputMessageContentText",
+ "description": "Text content for input messages in OpenAI response format."
+ },
+ "OpenAIResponseMessage": {
+ "type": "object",
+ "properties": {
+ "content": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
+ }
+ },
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseOutputMessageContent"
+ }
+ }
+ ]
+ },
+ "role": {
+ "oneOf": [
+ {
+ "type": "string",
+ "const": "system"
+ },
+ {
+ "type": "string",
+ "const": "developer"
+ },
+ {
+ "type": "string",
+ "const": "user"
+ },
+ {
+ "type": "string",
+ "const": "assistant"
+ }
+ ]
+ },
+ "type": {
+ "type": "string",
+ "const": "message",
+ "default": "message"
+ },
+ "id": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "content",
+ "role",
+ "type"
+ ],
+ "title": "OpenAIResponseMessage",
+ "description": "Corresponds to the various Message types in the Responses API. They are all under one type because the Responses API gives them all the same \"type\" value, and there is no way to tell them apart in certain scenarios."
+ },
+ "OpenAIResponseOutputMessageContent": {
+ "type": "object",
+ "properties": {
+ "text": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string",
+ "const": "output_text",
+ "default": "output_text"
+ },
+ "annotations": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/OpenAIResponseAnnotations"
+ }
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "text",
+ "type",
+ "annotations"
+ ],
+ "title": "OpenAIResponseOutputMessageContentOutputText"
+ },
+ "OpenAIResponseOutputMessageFileSearchToolCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this tool call"
+ },
+ "queries": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "List of search queries executed"
+ },
+ "status": {
+ "type": "string",
+ "description": "Current status of the file search operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "file_search_call",
+ "default": "file_search_call",
+ "description": "Tool call type identifier, always \"file_search_call\""
+ },
+ "results": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "attributes": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) Key-value attributes associated with the file"
+ },
+ "file_id": {
+ "type": "string",
+ "description": "Unique identifier of the file containing the result"
+ },
+ "filename": {
+ "type": "string",
+ "description": "Name of the file containing the result"
+ },
+ "score": {
+ "type": "number",
+ "description": "Relevance score for this search result (between 0 and 1)"
+ },
+ "text": {
+ "type": "string",
+ "description": "Text content of the search result"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "attributes",
+ "file_id",
+ "filename",
+ "score",
+ "text"
+ ],
+ "title": "OpenAIResponseOutputMessageFileSearchToolCallResults",
+ "description": "Search results returned by the file search operation."
+ },
+ "description": "(Optional) Search results returned by the file search operation"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "queries",
+ "status",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageFileSearchToolCall",
+ "description": "File search tool call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageFunctionToolCall": {
+ "type": "object",
+ "properties": {
+ "call_id": {
+ "type": "string",
+ "description": "Unique identifier for the function call"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the function being called"
+ },
+ "arguments": {
+ "type": "string",
+ "description": "JSON string containing the function arguments"
+ },
+ "type": {
+ "type": "string",
+ "const": "function_call",
+ "default": "function_call",
+ "description": "Tool call type identifier, always \"function_call\""
+ },
+ "id": {
+ "type": "string",
+ "description": "(Optional) Additional identifier for the tool call"
+ },
+ "status": {
+ "type": "string",
+ "description": "(Optional) Current status of the function call execution"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "call_id",
+ "name",
+ "arguments",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageFunctionToolCall",
+ "description": "Function tool call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageMCPCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this MCP call"
+ },
+ "type": {
+ "type": "string",
+ "const": "mcp_call",
+ "default": "mcp_call",
+ "description": "Tool call type identifier, always \"mcp_call\""
+ },
+ "arguments": {
+ "type": "string",
+ "description": "JSON string containing the MCP call arguments"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the MCP method being called"
+ },
+ "server_label": {
+ "type": "string",
+ "description": "Label identifying the MCP server handling the call"
+ },
+ "error": {
+ "type": "string",
+ "description": "(Optional) Error message if the MCP call failed"
+ },
+ "output": {
+ "type": "string",
+ "description": "(Optional) Output result from the successful MCP call"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "type",
+ "arguments",
+ "name",
+ "server_label"
+ ],
+ "title": "OpenAIResponseOutputMessageMCPCall",
+ "description": "Model Context Protocol (MCP) call output message for OpenAI responses."
+ },
+ "OpenAIResponseOutputMessageMCPListTools": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this MCP list tools operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "mcp_list_tools",
+ "default": "mcp_list_tools",
+ "description": "Tool call type identifier, always \"mcp_list_tools\""
+ },
+ "server_label": {
+ "type": "string",
+ "description": "Label identifying the MCP server providing the tools"
+ },
+ "tools": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "JSON schema defining the tool's input parameters"
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the tool"
+ },
+ "description": {
+ "type": "string",
+ "description": "(Optional) Description of what the tool does"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "input_schema",
+ "name"
+ ],
+ "title": "MCPListToolsTool",
+ "description": "Tool definition returned by MCP list tools operation."
+ },
+ "description": "List of available tools provided by the MCP server"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "type",
+ "server_label",
+ "tools"
+ ],
+ "title": "OpenAIResponseOutputMessageMCPListTools",
+ "description": "MCP list tools output message containing available tools from an MCP server."
+ },
+ "OpenAIResponseOutputMessageWebSearchToolCall": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for this tool call"
+ },
+ "status": {
+ "type": "string",
+ "description": "Current status of the web search operation"
+ },
+ "type": {
+ "type": "string",
+ "const": "web_search_call",
+ "default": "web_search_call",
+ "description": "Tool call type identifier, always \"web_search_call\""
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "status",
+ "type"
+ ],
+ "title": "OpenAIResponseOutputMessageWebSearchToolCall",
+ "description": "Web search tool call output message for OpenAI responses."
+ },
+ "CreateConversationRequest": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ },
+ "description": "Initial items to include in the conversation context."
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "description": "Set of key-value pairs that can be attached to an object."
+ }
+ },
+ "additionalProperties": false,
+ "title": "CreateConversationRequest"
+ },
+ "Conversation": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "const": "conversation",
+ "default": "conversation"
+ },
+ "created_at": {
+ "type": "integer"
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "title": "dict",
+ "description": "dict() -> new empty dictionary dict(mapping) -> new dictionary initialized from a mapping object's (key, value) pairs dict(iterable) -> new dictionary initialized as if via: d = {} for k, v in iterable: d[k] = v dict(**kwargs) -> new dictionary initialized with the name=value pairs in the keyword argument list. For example: dict(one=1, two=2)"
+ }
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "created_at"
+ ],
+ "title": "Conversation",
+ "description": "OpenAI-compatible conversation object."
+ },
+ "UpdateConversationRequest": {
+ "type": "object",
+ "properties": {
+ "metadata": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "description": "Set of key-value pairs that can be attached to an object."
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "metadata"
+ ],
+ "title": "UpdateConversationRequest"
+ },
+ "ConversationDeletedResource": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "default": "conversation.deleted"
+ },
+ "deleted": {
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "deleted"
+ ],
+ "title": "ConversationDeletedResource",
+ "description": "Response for deleted conversation."
+ },
+ "ConversationItemList": {
+ "type": "object",
+ "properties": {
+ "object": {
+ "type": "string",
+ "default": "list"
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ }
+ },
+ "first_id": {
+ "type": "string"
+ },
+ "last_id": {
+ "type": "string"
+ },
+ "has_more": {
+ "type": "boolean",
+ "default": false
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "object",
+ "data",
+ "has_more"
+ ],
+ "title": "ConversationItemList",
+ "description": "List of conversation items with pagination."
+ },
+ "AddItemsRequest": {
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ConversationItem"
+ },
+ "description": "Items to include in the conversation context."
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "items"
+ ],
+ "title": "AddItemsRequest"
+ },
+ "ConversationItemDeletedResource": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "object": {
+ "type": "string",
+ "default": "conversation.item.deleted"
+ },
+ "deleted": {
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "object",
+ "deleted"
+ ],
+ "title": "ConversationItemDeletedResource",
+ "description": "Response for deleted conversation item."
+ },
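Since ConversationItem is a discriminated union keyed on `type`, clients typically dispatch on that field. The toy dispatcher below mirrors the discriminator mapping defined above; the handlers are placeholders that only print a short summary of each variant.

```python
# Toy dispatcher mirroring the ConversationItem discriminator mapping above:
# the "type" field selects which schema a given item follows. Handlers here
# just print; a real client would parse each variant's fields.
ITEM_HANDLERS = {
    "message": lambda item: print("message from", item.get("role")),
    "function_call": lambda item: print("function call", item.get("name")),
    "file_search_call": lambda item: print("file search", item.get("queries")),
    "web_search_call": lambda item: print("web search", item.get("status")),
    "mcp_call": lambda item: print("MCP call", item.get("name")),
    "mcp_list_tools": lambda item: print("MCP tools from", item.get("server_label")),
}

def handle_item(item: dict) -> None:
    handler = ITEM_HANDLERS.get(item.get("type"))
    if handler is None:
        raise ValueError(f"unknown conversation item type: {item.get('type')}")
    handler(item)

# Example usage with placeholder items.
handle_item({"type": "message", "role": "user", "content": "hi"})
handle_item({"type": "mcp_call", "id": "call_1", "name": "search",
             "arguments": "{}", "server_label": "docs"})
```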
"OpenaiEmbeddingsRequest": {
"type": "object",
"properties": {
@@ -8004,158 +9294,6 @@
"title": "ListOpenAIResponseObject",
"description": "Paginated list of OpenAI response objects with navigation metadata."
},
- "OpenAIResponseAnnotationCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "url_citation",
- "default": "url_citation",
- "description": "Annotation type identifier, always \"url_citation\""
- },
- "end_index": {
- "type": "integer",
- "description": "End position of the citation span in the content"
- },
- "start_index": {
- "type": "integer",
- "description": "Start position of the citation span in the content"
- },
- "title": {
- "type": "string",
- "description": "Title of the referenced web resource"
- },
- "url": {
- "type": "string",
- "description": "URL of the referenced web resource"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "end_index",
- "start_index",
- "title",
- "url"
- ],
- "title": "OpenAIResponseAnnotationCitation",
- "description": "URL citation annotation for referencing external web resources."
- },
- "OpenAIResponseAnnotationContainerFileCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "container_file_citation",
- "default": "container_file_citation"
- },
- "container_id": {
- "type": "string"
- },
- "end_index": {
- "type": "integer"
- },
- "file_id": {
- "type": "string"
- },
- "filename": {
- "type": "string"
- },
- "start_index": {
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "container_id",
- "end_index",
- "file_id",
- "filename",
- "start_index"
- ],
- "title": "OpenAIResponseAnnotationContainerFileCitation"
- },
- "OpenAIResponseAnnotationFileCitation": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "file_citation",
- "default": "file_citation",
- "description": "Annotation type identifier, always \"file_citation\""
- },
- "file_id": {
- "type": "string",
- "description": "Unique identifier of the referenced file"
- },
- "filename": {
- "type": "string",
- "description": "Name of the referenced file"
- },
- "index": {
- "type": "integer",
- "description": "Position index of the citation within the content"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "file_id",
- "filename",
- "index"
- ],
- "title": "OpenAIResponseAnnotationFileCitation",
- "description": "File citation annotation for referencing specific files in response content."
- },
- "OpenAIResponseAnnotationFilePath": {
- "type": "object",
- "properties": {
- "type": {
- "type": "string",
- "const": "file_path",
- "default": "file_path"
- },
- "file_id": {
- "type": "string"
- },
- "index": {
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "file_id",
- "index"
- ],
- "title": "OpenAIResponseAnnotationFilePath"
- },
- "OpenAIResponseAnnotations": {
- "oneOf": [
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationFileCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseAnnotationFilePath"
- }
- ],
- "discriminator": {
- "propertyName": "type",
- "mapping": {
- "file_citation": "#/components/schemas/OpenAIResponseAnnotationFileCitation",
- "url_citation": "#/components/schemas/OpenAIResponseAnnotationCitation",
- "container_file_citation": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation",
- "file_path": "#/components/schemas/OpenAIResponseAnnotationFilePath"
- }
- }
- },
"OpenAIResponseError": {
"type": "object",
"properties": {
@@ -8231,85 +9369,6 @@
"title": "OpenAIResponseInputFunctionToolCallOutput",
"description": "This represents the output of a function call that gets passed back to the model."
},
- "OpenAIResponseInputMessageContent": {
- "oneOf": [
- {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContentText"
- },
- {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
- }
- ],
- "discriminator": {
- "propertyName": "type",
- "mapping": {
- "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
- "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
- }
- }
- },
- "OpenAIResponseInputMessageContentImage": {
- "type": "object",
- "properties": {
- "detail": {
- "oneOf": [
- {
- "type": "string",
- "const": "low"
- },
- {
- "type": "string",
- "const": "high"
- },
- {
- "type": "string",
- "const": "auto"
- }
- ],
- "default": "auto",
- "description": "Level of detail for image processing, can be \"low\", \"high\", or \"auto\""
- },
- "type": {
- "type": "string",
- "const": "input_image",
- "default": "input_image",
- "description": "Content type identifier, always \"input_image\""
- },
- "image_url": {
- "type": "string",
- "description": "(Optional) URL of the image content"
- }
- },
- "additionalProperties": false,
- "required": [
- "detail",
- "type"
- ],
- "title": "OpenAIResponseInputMessageContentImage",
- "description": "Image content for input messages in OpenAI response format."
- },
- "OpenAIResponseInputMessageContentText": {
- "type": "object",
- "properties": {
- "text": {
- "type": "string",
- "description": "The text content of the input message"
- },
- "type": {
- "type": "string",
- "const": "input_text",
- "default": "input_text",
- "description": "Content type identifier, always \"input_text\""
- }
- },
- "additionalProperties": false,
- "required": [
- "text",
- "type"
- ],
- "title": "OpenAIResponseInputMessageContentText",
- "description": "Text content for input messages in OpenAI response format."
- },
"OpenAIResponseMCPApprovalRequest": {
"type": "object",
"properties": {
@@ -8372,69 +9431,6 @@
"title": "OpenAIResponseMCPApprovalResponse",
"description": "A response to an MCP approval request."
},
- "OpenAIResponseMessage": {
- "type": "object",
- "properties": {
- "content": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
- }
- },
- {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseOutputMessageContent"
- }
- }
- ]
- },
- "role": {
- "oneOf": [
- {
- "type": "string",
- "const": "system"
- },
- {
- "type": "string",
- "const": "developer"
- },
- {
- "type": "string",
- "const": "user"
- },
- {
- "type": "string",
- "const": "assistant"
- }
- ]
- },
- "type": {
- "type": "string",
- "const": "message",
- "default": "message"
- },
- "id": {
- "type": "string"
- },
- "status": {
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "content",
- "role",
- "type"
- ],
- "title": "OpenAIResponseMessage",
- "description": "Corresponds to the various Message types in the Responses API. They are all under one type because the Responses API gives them all the same \"type\" value, and there is no way to tell them apart in certain scenarios."
- },
"OpenAIResponseObjectWithInput": {
"type": "object",
"properties": {
@@ -8556,318 +9552,6 @@
}
}
},
- "OpenAIResponseOutputMessageContent": {
- "type": "object",
- "properties": {
- "text": {
- "type": "string"
- },
- "type": {
- "type": "string",
- "const": "output_text",
- "default": "output_text"
- },
- "annotations": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/OpenAIResponseAnnotations"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "text",
- "type",
- "annotations"
- ],
- "title": "OpenAIResponseOutputMessageContentOutputText"
- },
- "OpenAIResponseOutputMessageFileSearchToolCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this tool call"
- },
- "queries": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "description": "List of search queries executed"
- },
- "status": {
- "type": "string",
- "description": "Current status of the file search operation"
- },
- "type": {
- "type": "string",
- "const": "file_search_call",
- "default": "file_search_call",
- "description": "Tool call type identifier, always \"file_search_call\""
- },
- "results": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "attributes": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "(Optional) Key-value attributes associated with the file"
- },
- "file_id": {
- "type": "string",
- "description": "Unique identifier of the file containing the result"
- },
- "filename": {
- "type": "string",
- "description": "Name of the file containing the result"
- },
- "score": {
- "type": "number",
- "description": "Relevance score for this search result (between 0 and 1)"
- },
- "text": {
- "type": "string",
- "description": "Text content of the search result"
- }
- },
- "additionalProperties": false,
- "required": [
- "attributes",
- "file_id",
- "filename",
- "score",
- "text"
- ],
- "title": "OpenAIResponseOutputMessageFileSearchToolCallResults",
- "description": "Search results returned by the file search operation."
- },
- "description": "(Optional) Search results returned by the file search operation"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "queries",
- "status",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageFileSearchToolCall",
- "description": "File search tool call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageFunctionToolCall": {
- "type": "object",
- "properties": {
- "call_id": {
- "type": "string",
- "description": "Unique identifier for the function call"
- },
- "name": {
- "type": "string",
- "description": "Name of the function being called"
- },
- "arguments": {
- "type": "string",
- "description": "JSON string containing the function arguments"
- },
- "type": {
- "type": "string",
- "const": "function_call",
- "default": "function_call",
- "description": "Tool call type identifier, always \"function_call\""
- },
- "id": {
- "type": "string",
- "description": "(Optional) Additional identifier for the tool call"
- },
- "status": {
- "type": "string",
- "description": "(Optional) Current status of the function call execution"
- }
- },
- "additionalProperties": false,
- "required": [
- "call_id",
- "name",
- "arguments",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageFunctionToolCall",
- "description": "Function tool call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageMCPCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this MCP call"
- },
- "type": {
- "type": "string",
- "const": "mcp_call",
- "default": "mcp_call",
- "description": "Tool call type identifier, always \"mcp_call\""
- },
- "arguments": {
- "type": "string",
- "description": "JSON string containing the MCP call arguments"
- },
- "name": {
- "type": "string",
- "description": "Name of the MCP method being called"
- },
- "server_label": {
- "type": "string",
- "description": "Label identifying the MCP server handling the call"
- },
- "error": {
- "type": "string",
- "description": "(Optional) Error message if the MCP call failed"
- },
- "output": {
- "type": "string",
- "description": "(Optional) Output result from the successful MCP call"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "type",
- "arguments",
- "name",
- "server_label"
- ],
- "title": "OpenAIResponseOutputMessageMCPCall",
- "description": "Model Context Protocol (MCP) call output message for OpenAI responses."
- },
- "OpenAIResponseOutputMessageMCPListTools": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this MCP list tools operation"
- },
- "type": {
- "type": "string",
- "const": "mcp_list_tools",
- "default": "mcp_list_tools",
- "description": "Tool call type identifier, always \"mcp_list_tools\""
- },
- "server_label": {
- "type": "string",
- "description": "Label identifying the MCP server providing the tools"
- },
- "tools": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "input_schema": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "JSON schema defining the tool's input parameters"
- },
- "name": {
- "type": "string",
- "description": "Name of the tool"
- },
- "description": {
- "type": "string",
- "description": "(Optional) Description of what the tool does"
- }
- },
- "additionalProperties": false,
- "required": [
- "input_schema",
- "name"
- ],
- "title": "MCPListToolsTool",
- "description": "Tool definition returned by MCP list tools operation."
- },
- "description": "List of available tools provided by the MCP server"
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "type",
- "server_label",
- "tools"
- ],
- "title": "OpenAIResponseOutputMessageMCPListTools",
- "description": "MCP list tools output message containing available tools from an MCP server."
- },
- "OpenAIResponseOutputMessageWebSearchToolCall": {
- "type": "object",
- "properties": {
- "id": {
- "type": "string",
- "description": "Unique identifier for this tool call"
- },
- "status": {
- "type": "string",
- "description": "Current status of the web search operation"
- },
- "type": {
- "type": "string",
- "const": "web_search_call",
- "default": "web_search_call",
- "description": "Tool call type identifier, always \"web_search_call\""
- }
- },
- "additionalProperties": false,
- "required": [
- "id",
- "status",
- "type"
- ],
- "title": "OpenAIResponseOutputMessageWebSearchToolCall",
- "description": "Web search tool call output message for OpenAI responses."
- },
"OpenAIResponseText": {
"type": "object",
"properties": {
@@ -17748,6 +18432,11 @@
"name": "Benchmarks",
"description": ""
},
+ {
+ "name": "Conversations",
+ "description": "",
+ "x-displayName": "Protocol for conversation management operations."
+ },
{
"name": "DatasetIO",
"description": ""
@@ -17839,6 +18528,7 @@
"tags": [
"Agents",
"Benchmarks",
+ "Conversations",
"DatasetIO",
"Datasets",
"Eval",
diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml
index cb2584d8a..3bede159b 100644
--- a/docs/static/stainless-llama-stack-spec.yaml
+++ b/docs/static/stainless-llama-stack-spec.yaml
@@ -170,6 +170,420 @@ paths:
$ref: '#/components/schemas/OpenaiCompletionRequest'
required: true
deprecated: false
+ /v1/conversations:
+ post:
+ responses:
+ '200':
+ description: The created conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Create a conversation.
+ description: Create a conversation.
+ parameters: []
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateConversationRequest'
+ required: true
+ deprecated: false
+ /v1/conversations/{conversation_id}:
+ get:
+ responses:
+ '200':
+ description: The conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Get a conversation with the given ID.
+ description: Get a conversation with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ post:
+ responses:
+ '200':
+ description: The updated conversation object.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Conversation'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: >-
+ Update a conversation's metadata with the given ID.
+ description: >-
+ Update a conversation's metadata with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UpdateConversationRequest'
+ required: true
+ deprecated: false
+ delete:
+ responses:
+ '200':
+ description: The deleted conversation resource.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationDeletedResource'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Delete a conversation with the given ID.
+ description: Delete a conversation with the given ID.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ /v1/conversations/{conversation_id}/items:
+ get:
+ responses:
+ '200':
+ description: List of conversation items.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemList'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: List items in the conversation.
+ description: List items in the conversation.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: after
+ in: query
+ description: >-
+ An item ID to list items after, used in pagination.
+ required: true
+ schema:
+ oneOf:
+ - type: string
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: include
+ in: query
+ description: >-
+ Specify additional output data to include in the response.
+ required: true
+ schema:
+ oneOf:
+ - type: array
+ items:
+ type: string
+ enum:
+ - code_interpreter_call.outputs
+ - computer_call_output.output.image_url
+ - file_search_call.results
+ - message.input_image.image_url
+ - message.output_text.logprobs
+ - reasoning.encrypted_content
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: limit
+ in: query
+ description: >-
+ A limit on the number of objects to be returned (1-100, default 20).
+ required: true
+ schema:
+ oneOf:
+ - type: integer
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ - name: order
+ in: query
+ description: >-
+ The order to return items in (asc or desc, default desc).
+ required: true
+ schema:
+ oneOf:
+ - type: string
+ enum:
+ - asc
+ - desc
+ - type: object
+ title: NotGiven
+ description: >-
+ A sentinel singleton class used to distinguish omitted keyword arguments
+ from those passed in with the value None (which may have different
+ behavior).
+
+ For example:
+
+
+ ```py
+
+ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
+ ...
+
+
+
+ get(timeout=1) # 1s timeout
+
+ get(timeout=None) # No timeout
+
+ get() # Default timeout behavior, which may not be statically known
+ at the method definition.
+
+ ```
+ deprecated: false
+ post:
+ responses:
+ '200':
+ description: List of created items.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemList'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Create items in the conversation.
+ description: Create items in the conversation.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AddItemsRequest'
+ required: true
+ deprecated: false
+ /v1/conversations/{conversation_id}/items/{item_id}:
+ get:
+ responses:
+ '200':
+ description: The conversation item.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItem'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Retrieve a conversation item.
+ description: Retrieve a conversation item.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: item_id
+ in: path
+ description: The item identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
+ delete:
+ responses:
+ '200':
+ description: The deleted item resource.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ConversationItemDeletedResource'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Conversations
+ summary: Delete a conversation item.
+ description: Delete a conversation item.
+ parameters:
+ - name: conversation_id
+ in: path
+ description: The conversation identifier.
+ required: true
+ schema:
+ type: string
+ - name: item_id
+ in: path
+ description: The item identifier.
+ required: true
+ schema:
+ type: string
+ deprecated: false
/v1/embeddings:
post:
responses:
@@ -5201,6 +5615,633 @@ components:
title: OpenAICompletionChoice
description: >-
A choice from an OpenAI-compatible completion response.
+ ConversationItem:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseMessage'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
+ - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
+ discriminator:
+ propertyName: type
+ mapping:
+ message: '#/components/schemas/OpenAIResponseMessage'
+ function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall'
+ file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall'
+ web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall'
+ mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
+ mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
+ OpenAIResponseAnnotationCitation:
+ type: object
+ properties:
+ type:
+ type: string
+ const: url_citation
+ default: url_citation
+ description: >-
+ Annotation type identifier, always "url_citation"
+ end_index:
+ type: integer
+ description: >-
+ End position of the citation span in the content
+ start_index:
+ type: integer
+ description: >-
+ Start position of the citation span in the content
+ title:
+ type: string
+ description: Title of the referenced web resource
+ url:
+ type: string
+ description: URL of the referenced web resource
+ additionalProperties: false
+ required:
+ - type
+ - end_index
+ - start_index
+ - title
+ - url
+ title: OpenAIResponseAnnotationCitation
+ description: >-
+ URL citation annotation for referencing external web resources.
+ "OpenAIResponseAnnotationContainerFileCitation":
+ type: object
+ properties:
+ type:
+ type: string
+ const: container_file_citation
+ default: container_file_citation
+ container_id:
+ type: string
+ end_index:
+ type: integer
+ file_id:
+ type: string
+ filename:
+ type: string
+ start_index:
+ type: integer
+ additionalProperties: false
+ required:
+ - type
+ - container_id
+ - end_index
+ - file_id
+ - filename
+ - start_index
+ title: >-
+ OpenAIResponseAnnotationContainerFileCitation
+ OpenAIResponseAnnotationFileCitation:
+ type: object
+ properties:
+ type:
+ type: string
+ const: file_citation
+ default: file_citation
+ description: >-
+ Annotation type identifier, always "file_citation"
+ file_id:
+ type: string
+ description: Unique identifier of the referenced file
+ filename:
+ type: string
+ description: Name of the referenced file
+ index:
+ type: integer
+ description: >-
+ Position index of the citation within the content
+ additionalProperties: false
+ required:
+ - type
+ - file_id
+ - filename
+ - index
+ title: OpenAIResponseAnnotationFileCitation
+ description: >-
+ File citation annotation for referencing specific files in response content.
+ OpenAIResponseAnnotationFilePath:
+ type: object
+ properties:
+ type:
+ type: string
+ const: file_path
+ default: file_path
+ file_id:
+ type: string
+ index:
+ type: integer
+ additionalProperties: false
+ required:
+ - type
+ - file_id
+ - index
+ title: OpenAIResponseAnnotationFilePath
+ OpenAIResponseAnnotations:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
+ - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath'
+ discriminator:
+ propertyName: type
+ mapping:
+ file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
+ url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation'
+ container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
+ file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath'
+ OpenAIResponseInputMessageContent:
+ oneOf:
+ - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText'
+ - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage'
+ discriminator:
+ propertyName: type
+ mapping:
+ input_text: '#/components/schemas/OpenAIResponseInputMessageContentText'
+ input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage'
+ OpenAIResponseInputMessageContentImage:
+ type: object
+ properties:
+ detail:
+ oneOf:
+ - type: string
+ const: low
+ - type: string
+ const: high
+ - type: string
+ const: auto
+ default: auto
+ description: >-
+ Level of detail for image processing, can be "low", "high", or "auto"
+ type:
+ type: string
+ const: input_image
+ default: input_image
+ description: >-
+ Content type identifier, always "input_image"
+ image_url:
+ type: string
+ description: (Optional) URL of the image content
+ additionalProperties: false
+ required:
+ - detail
+ - type
+ title: OpenAIResponseInputMessageContentImage
+ description: >-
+ Image content for input messages in OpenAI response format.
+ OpenAIResponseInputMessageContentText:
+ type: object
+ properties:
+ text:
+ type: string
+ description: The text content of the input message
+ type:
+ type: string
+ const: input_text
+ default: input_text
+ description: >-
+ Content type identifier, always "input_text"
+ additionalProperties: false
+ required:
+ - text
+ - type
+ title: OpenAIResponseInputMessageContentText
+ description: >-
+ Text content for input messages in OpenAI response format.
+ OpenAIResponseMessage:
+ type: object
+ properties:
+ content:
+ oneOf:
+ - type: string
+ - type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseInputMessageContent'
+ - type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseOutputMessageContent'
+ role:
+ oneOf:
+ - type: string
+ const: system
+ - type: string
+ const: developer
+ - type: string
+ const: user
+ - type: string
+ const: assistant
+ type:
+ type: string
+ const: message
+ default: message
+ id:
+ type: string
+ status:
+ type: string
+ additionalProperties: false
+ required:
+ - content
+ - role
+ - type
+ title: OpenAIResponseMessage
+ description: >-
+ Corresponds to the various Message types in the Responses API. They are all
+ under one type because the Responses API gives them all the same "type" value,
+ and there is no way to tell them apart in certain scenarios.
+ OpenAIResponseOutputMessageContent:
+ type: object
+ properties:
+ text:
+ type: string
+ type:
+ type: string
+ const: output_text
+ default: output_text
+ annotations:
+ type: array
+ items:
+ $ref: '#/components/schemas/OpenAIResponseAnnotations'
+ additionalProperties: false
+ required:
+ - text
+ - type
+ - annotations
+ title: >-
+ OpenAIResponseOutputMessageContentOutputText
+ "OpenAIResponseOutputMessageFileSearchToolCall":
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this tool call
+ queries:
+ type: array
+ items:
+ type: string
+ description: List of search queries executed
+ status:
+ type: string
+ description: >-
+ Current status of the file search operation
+ type:
+ type: string
+ const: file_search_call
+ default: file_search_call
+ description: >-
+ Tool call type identifier, always "file_search_call"
+ results:
+ type: array
+ items:
+ type: object
+ properties:
+ attributes:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) Key-value attributes associated with the file
+ file_id:
+ type: string
+ description: >-
+ Unique identifier of the file containing the result
+ filename:
+ type: string
+ description: Name of the file containing the result
+ score:
+ type: number
+ description: >-
+ Relevance score for this search result (between 0 and 1)
+ text:
+ type: string
+ description: Text content of the search result
+ additionalProperties: false
+ required:
+ - attributes
+ - file_id
+ - filename
+ - score
+ - text
+ title: >-
+ OpenAIResponseOutputMessageFileSearchToolCallResults
+ description: >-
+ Search results returned by the file search operation.
+ description: >-
+ (Optional) Search results returned by the file search operation
+ additionalProperties: false
+ required:
+ - id
+ - queries
+ - status
+ - type
+ title: >-
+ OpenAIResponseOutputMessageFileSearchToolCall
+ description: >-
+ File search tool call output message for OpenAI responses.
+ "OpenAIResponseOutputMessageFunctionToolCall":
+ type: object
+ properties:
+ call_id:
+ type: string
+ description: Unique identifier for the function call
+ name:
+ type: string
+ description: Name of the function being called
+ arguments:
+ type: string
+ description: >-
+ JSON string containing the function arguments
+ type:
+ type: string
+ const: function_call
+ default: function_call
+ description: >-
+ Tool call type identifier, always "function_call"
+ id:
+ type: string
+ description: >-
+ (Optional) Additional identifier for the tool call
+ status:
+ type: string
+ description: >-
+ (Optional) Current status of the function call execution
+ additionalProperties: false
+ required:
+ - call_id
+ - name
+ - arguments
+ - type
+ title: >-
+ OpenAIResponseOutputMessageFunctionToolCall
+ description: >-
+ Function tool call output message for OpenAI responses.
+ OpenAIResponseOutputMessageMCPCall:
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this MCP call
+ type:
+ type: string
+ const: mcp_call
+ default: mcp_call
+ description: >-
+ Tool call type identifier, always "mcp_call"
+ arguments:
+ type: string
+ description: >-
+ JSON string containing the MCP call arguments
+ name:
+ type: string
+ description: Name of the MCP method being called
+ server_label:
+ type: string
+ description: >-
+ Label identifying the MCP server handling the call
+ error:
+ type: string
+ description: >-
+ (Optional) Error message if the MCP call failed
+ output:
+ type: string
+ description: >-
+ (Optional) Output result from the successful MCP call
+ additionalProperties: false
+ required:
+ - id
+ - type
+ - arguments
+ - name
+ - server_label
+ title: OpenAIResponseOutputMessageMCPCall
+ description: >-
+ Model Context Protocol (MCP) call output message for OpenAI responses.
+ OpenAIResponseOutputMessageMCPListTools:
+ type: object
+ properties:
+ id:
+ type: string
+ description: >-
+ Unique identifier for this MCP list tools operation
+ type:
+ type: string
+ const: mcp_list_tools
+ default: mcp_list_tools
+ description: >-
+ Tool call type identifier, always "mcp_list_tools"
+ server_label:
+ type: string
+ description: >-
+ Label identifying the MCP server providing the tools
+ tools:
+ type: array
+ items:
+ type: object
+ properties:
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ JSON schema defining the tool's input parameters
+ name:
+ type: string
+ description: Name of the tool
+ description:
+ type: string
+ description: >-
+ (Optional) Description of what the tool does
+ additionalProperties: false
+ required:
+ - input_schema
+ - name
+ title: MCPListToolsTool
+ description: >-
+ Tool definition returned by MCP list tools operation.
+ description: >-
+ List of available tools provided by the MCP server
+ additionalProperties: false
+ required:
+ - id
+ - type
+ - server_label
+ - tools
+ title: OpenAIResponseOutputMessageMCPListTools
+ description: >-
+ MCP list tools output message containing available tools from an MCP server.
+ "OpenAIResponseOutputMessageWebSearchToolCall":
+ type: object
+ properties:
+ id:
+ type: string
+ description: Unique identifier for this tool call
+ status:
+ type: string
+ description: >-
+ Current status of the web search operation
+ type:
+ type: string
+ const: web_search_call
+ default: web_search_call
+ description: >-
+ Tool call type identifier, always "web_search_call"
+ additionalProperties: false
+ required:
+ - id
+ - status
+ - type
+ title: >-
+ OpenAIResponseOutputMessageWebSearchToolCall
+ description: >-
+ Web search tool call output message for OpenAI responses.
+ CreateConversationRequest:
+ type: object
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ description: >-
+ Initial items to include in the conversation context.
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ description: >-
+ Set of key-value pairs that can be attached to an object.
+ additionalProperties: false
+ title: CreateConversationRequest
+ Conversation:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ const: conversation
+ default: conversation
+ created_at:
+ type: integer
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ items:
+ type: array
+ items:
+ type: object
+ title: dict
+ description: >-
+ dict() -> new empty dictionary dict(mapping) -> new dictionary initialized
+ from a mapping object's (key, value) pairs dict(iterable) -> new
+ dictionary initialized as if via: d = {} for k, v in iterable: d[k]
+ = v dict(**kwargs) -> new dictionary initialized with the name=value
+ pairs in the keyword argument list. For example: dict(one=1, two=2)
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - created_at
+ title: Conversation
+ description: OpenAI-compatible conversation object.
+ UpdateConversationRequest:
+ type: object
+ properties:
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+ description: >-
+ Set of key-value pairs that can be attached to an object.
+ additionalProperties: false
+ required:
+ - metadata
+ title: UpdateConversationRequest
+ ConversationDeletedResource:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ default: conversation.deleted
+ deleted:
+ type: boolean
+ default: true
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - deleted
+ title: ConversationDeletedResource
+ description: Response for deleted conversation.
+ ConversationItemList:
+ type: object
+ properties:
+ object:
+ type: string
+ default: list
+ data:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ first_id:
+ type: string
+ last_id:
+ type: string
+ has_more:
+ type: boolean
+ default: false
+ additionalProperties: false
+ required:
+ - object
+ - data
+ - has_more
+ title: ConversationItemList
+ description: >-
+ List of conversation items with pagination.
+ AddItemsRequest:
+ type: object
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/ConversationItem'
+ description: >-
+ Items to include in the conversation context.
+ additionalProperties: false
+ required:
+ - items
+ title: AddItemsRequest
+ ConversationItemDeletedResource:
+ type: object
+ properties:
+ id:
+ type: string
+ object:
+ type: string
+ default: conversation.item.deleted
+ deleted:
+ type: boolean
+ default: true
+ additionalProperties: false
+ required:
+ - id
+ - object
+ - deleted
+ title: ConversationItemDeletedResource
+ description: Response for deleted conversation item.
OpenaiEmbeddingsRequest:
type: object
properties:
@@ -5895,124 +6936,6 @@ components:
title: ListOpenAIResponseObject
description: >-
Paginated list of OpenAI response objects with navigation metadata.
- OpenAIResponseAnnotationCitation:
- type: object
- properties:
- type:
- type: string
- const: url_citation
- default: url_citation
- description: >-
- Annotation type identifier, always "url_citation"
- end_index:
- type: integer
- description: >-
- End position of the citation span in the content
- start_index:
- type: integer
- description: >-
- Start position of the citation span in the content
- title:
- type: string
- description: Title of the referenced web resource
- url:
- type: string
- description: URL of the referenced web resource
- additionalProperties: false
- required:
- - type
- - end_index
- - start_index
- - title
- - url
- title: OpenAIResponseAnnotationCitation
- description: >-
- URL citation annotation for referencing external web resources.
- "OpenAIResponseAnnotationContainerFileCitation":
- type: object
- properties:
- type:
- type: string
- const: container_file_citation
- default: container_file_citation
- container_id:
- type: string
- end_index:
- type: integer
- file_id:
- type: string
- filename:
- type: string
- start_index:
- type: integer
- additionalProperties: false
- required:
- - type
- - container_id
- - end_index
- - file_id
- - filename
- - start_index
- title: >-
- OpenAIResponseAnnotationContainerFileCitation
- OpenAIResponseAnnotationFileCitation:
- type: object
- properties:
- type:
- type: string
- const: file_citation
- default: file_citation
- description: >-
- Annotation type identifier, always "file_citation"
- file_id:
- type: string
- description: Unique identifier of the referenced file
- filename:
- type: string
- description: Name of the referenced file
- index:
- type: integer
- description: >-
- Position index of the citation within the content
- additionalProperties: false
- required:
- - type
- - file_id
- - filename
- - index
- title: OpenAIResponseAnnotationFileCitation
- description: >-
- File citation annotation for referencing specific files in response content.
- OpenAIResponseAnnotationFilePath:
- type: object
- properties:
- type:
- type: string
- const: file_path
- default: file_path
- file_id:
- type: string
- index:
- type: integer
- additionalProperties: false
- required:
- - type
- - file_id
- - index
- title: OpenAIResponseAnnotationFilePath
- OpenAIResponseAnnotations:
- oneOf:
- - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
- - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath'
- discriminator:
- propertyName: type
- mapping:
- file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation'
- url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation'
- container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation'
- file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath'
OpenAIResponseError:
type: object
properties:
@@ -6065,64 +6988,6 @@ components:
description: >-
This represents the output of a function call that gets passed back to the
model.
- OpenAIResponseInputMessageContent:
- oneOf:
- - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText'
- - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage'
- discriminator:
- propertyName: type
- mapping:
- input_text: '#/components/schemas/OpenAIResponseInputMessageContentText'
- input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage'
- OpenAIResponseInputMessageContentImage:
- type: object
- properties:
- detail:
- oneOf:
- - type: string
- const: low
- - type: string
- const: high
- - type: string
- const: auto
- default: auto
- description: >-
- Level of detail for image processing, can be "low", "high", or "auto"
- type:
- type: string
- const: input_image
- default: input_image
- description: >-
- Content type identifier, always "input_image"
- image_url:
- type: string
- description: (Optional) URL of the image content
- additionalProperties: false
- required:
- - detail
- - type
- title: OpenAIResponseInputMessageContentImage
- description: >-
- Image content for input messages in OpenAI response format.
- OpenAIResponseInputMessageContentText:
- type: object
- properties:
- text:
- type: string
- description: The text content of the input message
- type:
- type: string
- const: input_text
- default: input_text
- description: >-
- Content type identifier, always "input_text"
- additionalProperties: false
- required:
- - text
- - type
- title: OpenAIResponseInputMessageContentText
- description: >-
- Text content for input messages in OpenAI response format.
OpenAIResponseMCPApprovalRequest:
type: object
properties:
@@ -6170,46 +7035,6 @@ components:
- type
title: OpenAIResponseMCPApprovalResponse
description: A response to an MCP approval request.
- OpenAIResponseMessage:
- type: object
- properties:
- content:
- oneOf:
- - type: string
- - type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseInputMessageContent'
- - type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseOutputMessageContent'
- role:
- oneOf:
- - type: string
- const: system
- - type: string
- const: developer
- - type: string
- const: user
- - type: string
- const: assistant
- type:
- type: string
- const: message
- default: message
- id:
- type: string
- status:
- type: string
- additionalProperties: false
- required:
- - content
- - role
- - type
- title: OpenAIResponseMessage
- description: >-
- Corresponds to the various Message types in the Responses API. They are all
- under one type because the Responses API gives them all the same "type" value,
- and there is no way to tell them apart in certain scenarios.
OpenAIResponseObjectWithInput:
type: object
properties:
@@ -6307,263 +7132,6 @@ components:
mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall'
mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools'
mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest'
- OpenAIResponseOutputMessageContent:
- type: object
- properties:
- text:
- type: string
- type:
- type: string
- const: output_text
- default: output_text
- annotations:
- type: array
- items:
- $ref: '#/components/schemas/OpenAIResponseAnnotations'
- additionalProperties: false
- required:
- - text
- - type
- - annotations
- title: >-
- OpenAIResponseOutputMessageContentOutputText
- "OpenAIResponseOutputMessageFileSearchToolCall":
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this tool call
- queries:
- type: array
- items:
- type: string
- description: List of search queries executed
- status:
- type: string
- description: >-
- Current status of the file search operation
- type:
- type: string
- const: file_search_call
- default: file_search_call
- description: >-
- Tool call type identifier, always "file_search_call"
- results:
- type: array
- items:
- type: object
- properties:
- attributes:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Key-value attributes associated with the file
- file_id:
- type: string
- description: >-
- Unique identifier of the file containing the result
- filename:
- type: string
- description: Name of the file containing the result
- score:
- type: number
- description: >-
- Relevance score for this search result (between 0 and 1)
- text:
- type: string
- description: Text content of the search result
- additionalProperties: false
- required:
- - attributes
- - file_id
- - filename
- - score
- - text
- title: >-
- OpenAIResponseOutputMessageFileSearchToolCallResults
- description: >-
- Search results returned by the file search operation.
- description: >-
- (Optional) Search results returned by the file search operation
- additionalProperties: false
- required:
- - id
- - queries
- - status
- - type
- title: >-
- OpenAIResponseOutputMessageFileSearchToolCall
- description: >-
- File search tool call output message for OpenAI responses.
- "OpenAIResponseOutputMessageFunctionToolCall":
- type: object
- properties:
- call_id:
- type: string
- description: Unique identifier for the function call
- name:
- type: string
- description: Name of the function being called
- arguments:
- type: string
- description: >-
- JSON string containing the function arguments
- type:
- type: string
- const: function_call
- default: function_call
- description: >-
- Tool call type identifier, always "function_call"
- id:
- type: string
- description: >-
- (Optional) Additional identifier for the tool call
- status:
- type: string
- description: >-
- (Optional) Current status of the function call execution
- additionalProperties: false
- required:
- - call_id
- - name
- - arguments
- - type
- title: >-
- OpenAIResponseOutputMessageFunctionToolCall
- description: >-
- Function tool call output message for OpenAI responses.
- OpenAIResponseOutputMessageMCPCall:
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this MCP call
- type:
- type: string
- const: mcp_call
- default: mcp_call
- description: >-
- Tool call type identifier, always "mcp_call"
- arguments:
- type: string
- description: >-
- JSON string containing the MCP call arguments
- name:
- type: string
- description: Name of the MCP method being called
- server_label:
- type: string
- description: >-
- Label identifying the MCP server handling the call
- error:
- type: string
- description: >-
- (Optional) Error message if the MCP call failed
- output:
- type: string
- description: >-
- (Optional) Output result from the successful MCP call
- additionalProperties: false
- required:
- - id
- - type
- - arguments
- - name
- - server_label
- title: OpenAIResponseOutputMessageMCPCall
- description: >-
- Model Context Protocol (MCP) call output message for OpenAI responses.
- OpenAIResponseOutputMessageMCPListTools:
- type: object
- properties:
- id:
- type: string
- description: >-
- Unique identifier for this MCP list tools operation
- type:
- type: string
- const: mcp_list_tools
- default: mcp_list_tools
- description: >-
- Tool call type identifier, always "mcp_list_tools"
- server_label:
- type: string
- description: >-
- Label identifying the MCP server providing the tools
- tools:
- type: array
- items:
- type: object
- properties:
- input_schema:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- JSON schema defining the tool's input parameters
- name:
- type: string
- description: Name of the tool
- description:
- type: string
- description: >-
- (Optional) Description of what the tool does
- additionalProperties: false
- required:
- - input_schema
- - name
- title: MCPListToolsTool
- description: >-
- Tool definition returned by MCP list tools operation.
- description: >-
- List of available tools provided by the MCP server
- additionalProperties: false
- required:
- - id
- - type
- - server_label
- - tools
- title: OpenAIResponseOutputMessageMCPListTools
- description: >-
- MCP list tools output message containing available tools from an MCP server.
- "OpenAIResponseOutputMessageWebSearchToolCall":
- type: object
- properties:
- id:
- type: string
- description: Unique identifier for this tool call
- status:
- type: string
- description: >-
- Current status of the web search operation
- type:
- type: string
- const: web_search_call
- default: web_search_call
- description: >-
- Tool call type identifier, always "web_search_call"
- additionalProperties: false
- required:
- - id
- - status
- - type
- title: >-
- OpenAIResponseOutputMessageWebSearchToolCall
- description: >-
- Web search tool call output message for OpenAI responses.
OpenAIResponseText:
type: object
properties:
@@ -13190,6 +13758,10 @@ tags:
x-displayName: Agents
- name: Benchmarks
description: ''
+ - name: Conversations
+ description: ''
+ x-displayName: >-
+ Protocol for conversation management operations.
- name: DatasetIO
description: ''
- name: Datasets
@@ -13251,6 +13823,7 @@ x-tagGroups:
tags:
- Agents
- Benchmarks
+ - Conversations
- DatasetIO
- Datasets
- Eval
diff --git a/llama_stack/apis/conversations/__init__.py b/llama_stack/apis/conversations/__init__.py
new file mode 100644
index 000000000..2d214d27a
--- /dev/null
+++ b/llama_stack/apis/conversations/__init__.py
@@ -0,0 +1,31 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from .conversations import (
+ Conversation,
+ ConversationCreateRequest,
+ ConversationDeletedResource,
+ ConversationItem,
+ ConversationItemCreateRequest,
+ ConversationItemDeletedResource,
+ ConversationItemList,
+ Conversations,
+ ConversationUpdateRequest,
+ Metadata,
+)
+
+__all__ = [
+ "Conversation",
+ "ConversationCreateRequest",
+ "ConversationDeletedResource",
+ "ConversationItem",
+ "ConversationItemCreateRequest",
+ "ConversationItemDeletedResource",
+ "ConversationItemList",
+ "Conversations",
+ "ConversationUpdateRequest",
+ "Metadata",
+]
diff --git a/llama_stack/apis/conversations/conversations.py b/llama_stack/apis/conversations/conversations.py
new file mode 100644
index 000000000..58ae9c35a
--- /dev/null
+++ b/llama_stack/apis/conversations/conversations.py
@@ -0,0 +1,260 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from typing import Annotated, Literal, Protocol, runtime_checkable
+
+from openai import NOT_GIVEN
+from openai._types import NotGiven
+from openai.types.responses.response_includable import ResponseIncludable
+from pydantic import BaseModel, Field
+
+from llama_stack.apis.agents.openai_responses import (
+ OpenAIResponseMessage,
+ OpenAIResponseOutputMessageFileSearchToolCall,
+ OpenAIResponseOutputMessageFunctionToolCall,
+ OpenAIResponseOutputMessageMCPCall,
+ OpenAIResponseOutputMessageMCPListTools,
+ OpenAIResponseOutputMessageWebSearchToolCall,
+)
+from llama_stack.apis.version import LLAMA_STACK_API_V1
+from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
+from llama_stack.schema_utils import json_schema_type, register_schema, webmethod
+
+Metadata = dict[str, str]
+
+
+@json_schema_type
+class Conversation(BaseModel):
+ """OpenAI-compatible conversation object."""
+
+ id: str = Field(..., description="The unique ID of the conversation.")
+ object: Literal["conversation"] = Field(
+ default="conversation", description="The object type, which is always conversation."
+ )
+ created_at: int = Field(
+ ..., description="The time at which the conversation was created, measured in seconds since the Unix epoch."
+ )
+ metadata: Metadata | None = Field(
+ default=None,
+ description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.",
+ )
+ items: list[dict] | None = Field(
+ default=None,
+ description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
+ )
+
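+# Illustrative example (assumption: the id format and values below are made up)
+# of the serialized form this model produces, matching the Conversation schema
+# added to the spec in this diff:
+#
+#   {
+#       "id": "conv_123",
+#       "object": "conversation",
+#       "created_at": 1730000000,
+#       "metadata": {"topic": "demo"}
+#   }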
+
+@json_schema_type
+class ConversationMessage(BaseModel):
+ """OpenAI-compatible message item for conversations."""
+
+ id: str = Field(..., description="Unique identifier for this message")
+ content: list[dict] = Field(..., description="Message content")
+ role: str = Field(..., description="Message role")
+ status: str = Field(..., description="Message status")
+ type: Literal["message"] = "message"
+ object: Literal["message"] = "message"
+
+
+ConversationItem = Annotated[
+ OpenAIResponseMessage
+ | OpenAIResponseOutputMessageFunctionToolCall
+ | OpenAIResponseOutputMessageFileSearchToolCall
+ | OpenAIResponseOutputMessageWebSearchToolCall
+ | OpenAIResponseOutputMessageMCPCall
+ | OpenAIResponseOutputMessageMCPListTools,
+ Field(discriminator="type"),
+]
+register_schema(ConversationItem, name="ConversationItem")
+
+# Using the OpenAI SDK types directly caused issues; some notes for reference:
+# Note that ConversationItem is an Annotated union of the types below:
+# from openai.types.responses import *
+# from openai.types.responses.response_item import *
+# from openai.types.conversations import ConversationItem
+# f = [
+# ResponseFunctionToolCallItem,
+# ResponseFunctionToolCallOutputItem,
+# ResponseFileSearchToolCall,
+# ResponseFunctionWebSearch,
+# ImageGenerationCall,
+# ResponseComputerToolCall,
+# ResponseComputerToolCallOutputItem,
+# ResponseReasoningItem,
+# ResponseCodeInterpreterToolCall,
+# LocalShellCall,
+# LocalShellCallOutput,
+# McpListTools,
+# McpApprovalRequest,
+# McpApprovalResponse,
+# McpCall,
+# ResponseCustomToolCall,
+# ResponseCustomToolCallOutput
+# ]
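+#
+# Illustrative sketch (assumption, not exercised anywhere in this module): a
+# ConversationItem is one member of the discriminated union above, so a plain
+# user message can be constructed from the imported OpenAIResponseMessage type,
+# whose schema in this diff allows string content and a "user" role:
+#
+#   example_item = OpenAIResponseMessage(
+#       role="user",
+#       content="What is the capital of France?",
+#       type="message",
+#   )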
+
+
+@json_schema_type
+class ConversationCreateRequest(BaseModel):
+ """Request body for creating a conversation."""
+
+ items: list[ConversationItem] | None = Field(
+ default=[],
+ description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
+ max_length=20,
+ )
+ metadata: Metadata | None = Field(
+ default={},
+ description="Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information",
+ max_length=16,
+ )
+
+
+@json_schema_type
+class ConversationUpdateRequest(BaseModel):
+ """Request body for updating a conversation."""
+
+ metadata: Metadata = Field(
+ ...,
+ description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.",
+ )
+
+
+@json_schema_type
+class ConversationDeletedResource(BaseModel):
+ """Response for deleted conversation."""
+
+ id: str = Field(..., description="The deleted conversation identifier")
+ object: str = Field(default="conversation.deleted", description="Object type")
+ deleted: bool = Field(default=True, description="Whether the object was deleted")
+
+
+@json_schema_type
+class ConversationItemCreateRequest(BaseModel):
+ """Request body for creating conversation items."""
+
+ items: list[ConversationItem] = Field(
+ ...,
+ description="Items to include in the conversation context. You may add up to 20 items at a time.",
+ max_length=20,
+ )
+
+
+@json_schema_type
+class ConversationItemList(BaseModel):
+ """List of conversation items with pagination."""
+
+ object: str = Field(default="list", description="Object type")
+ data: list[ConversationItem] = Field(..., description="List of conversation items")
+ first_id: str | None = Field(default=None, description="The ID of the first item in the list")
+ last_id: str | None = Field(default=None, description="The ID of the last item in the list")
+ has_more: bool = Field(default=False, description="Whether there are more items available")
+
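+# Illustrative wire shape (assumption: the item IDs below are made up), matching
+# the ConversationItemList schema added to the spec in this diff:
+#
+#   {
+#       "object": "list",
+#       "data": [...],
+#       "first_id": "item_abc",
+#       "last_id": "item_xyz",
+#       "has_more": false
+#   }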
+
+@json_schema_type
+class ConversationItemDeletedResource(BaseModel):
+ """Response for deleted conversation item."""
+
+ id: str = Field(..., description="The deleted item identifier")
+ object: str = Field(default="conversation.item.deleted", description="Object type")
+ deleted: bool = Field(default=True, description="Whether the object was deleted")
+
+
+@runtime_checkable
+@trace_protocol
+class Conversations(Protocol):
+ """Protocol for conversation management operations."""
+
+ @webmethod(route="/conversations", method="POST", level=LLAMA_STACK_API_V1)
+ async def create_conversation(
+ self, items: list[ConversationItem] | None = None, metadata: Metadata | None = None
+ ) -> Conversation:
+ """Create a conversation.
+
+ :param items: Initial items to include in the conversation context.
+ :param metadata: Set of key-value pairs that can be attached to an object.
+ :returns: The created conversation object.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}", method="GET", level=LLAMA_STACK_API_V1)
+ async def get_conversation(self, conversation_id: str) -> Conversation:
+ """Get a conversation with the given ID.
+
+ :param conversation_id: The conversation identifier.
+ :returns: The conversation object.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}", method="POST", level=LLAMA_STACK_API_V1)
+ async def update_conversation(self, conversation_id: str, metadata: Metadata) -> Conversation:
+ """Update a conversation's metadata with the given ID.
+
+ :param conversation_id: The conversation identifier.
+ :param metadata: Set of key-value pairs that can be attached to an object.
+ :returns: The updated conversation object.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}", method="DELETE", level=LLAMA_STACK_API_V1)
+ async def openai_delete_conversation(self, conversation_id: str) -> ConversationDeletedResource:
+ """Delete a conversation with the given ID.
+
+ :param conversation_id: The conversation identifier.
+ :returns: The deleted conversation resource.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}/items", method="POST", level=LLAMA_STACK_API_V1)
+ async def add_items(self, conversation_id: str, items: list[ConversationItem]) -> ConversationItemList:
+ """Create items in the conversation.
+
+ :param conversation_id: The conversation identifier.
+ :param items: Items to include in the conversation context.
+ :returns: List of created items.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}/items/{item_id}", method="GET", level=LLAMA_STACK_API_V1)
+ async def retrieve(self, conversation_id: str, item_id: str) -> ConversationItem:
+ """Retrieve a conversation item.
+
+ :param conversation_id: The conversation identifier.
+ :param item_id: The item identifier.
+ :returns: The conversation item.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}/items", method="GET", level=LLAMA_STACK_API_V1)
+ async def list(
+ self,
+ conversation_id: str,
+ after: str | NotGiven = NOT_GIVEN,
+ include: list[ResponseIncludable] | NotGiven = NOT_GIVEN,
+ limit: int | NotGiven = NOT_GIVEN,
+ order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ ) -> ConversationItemList:
+ """List items in the conversation.
+
+ :param conversation_id: The conversation identifier.
+ :param after: An item ID to list items after, used in pagination.
+ :param include: Specify additional output data to include in the response.
+ :param limit: A limit on the number of objects to be returned (1-100, default 20).
+ :param order: The order to return items in (asc or desc, default desc).
+ :returns: List of conversation items.
+ """
+ ...
+
+ @webmethod(route="/conversations/{conversation_id}/items/{item_id}", method="DELETE", level=LLAMA_STACK_API_V1)
+ async def openai_delete_conversation_item(
+ self, conversation_id: str, item_id: str
+ ) -> ConversationItemDeletedResource:
+ """Delete a conversation item.
+
+ :param conversation_id: The conversation identifier.
+ :param item_id: The item identifier.
+ :returns: The deleted item resource.
+ """
+ ...
diff --git a/llama_stack/apis/datatypes.py b/llama_stack/apis/datatypes.py
index 8d0f2e26d..e522682c6 100644
--- a/llama_stack/apis/datatypes.py
+++ b/llama_stack/apis/datatypes.py
@@ -129,6 +129,7 @@ class Api(Enum, metaclass=DynamicApiMeta):
tool_groups = "tool_groups"
files = "files"
prompts = "prompts"
+ conversations = "conversations"
# built-in API
inspect = "inspect"
diff --git a/llama_stack/core/conversations/__init__.py b/llama_stack/core/conversations/__init__.py
new file mode 100644
index 000000000..756f351d8
--- /dev/null
+++ b/llama_stack/core/conversations/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
diff --git a/llama_stack/core/conversations/conversations.py b/llama_stack/core/conversations/conversations.py
new file mode 100644
index 000000000..bef138e69
--- /dev/null
+++ b/llama_stack/core/conversations/conversations.py
@@ -0,0 +1,306 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+import os
+import secrets
+import time
+from typing import Any
+
+from openai import NOT_GIVEN
+from pydantic import BaseModel, TypeAdapter
+
+from llama_stack.apis.conversations.conversations import (
+ Conversation,
+ ConversationDeletedResource,
+ ConversationItem,
+ ConversationItemDeletedResource,
+ ConversationItemList,
+ Conversations,
+ Metadata,
+)
+from llama_stack.core.datatypes import AccessRule
+from llama_stack.core.utils.config_dirs import DISTRIBS_BASE_DIR
+from llama_stack.log import get_logger
+from llama_stack.providers.utils.sqlstore.api import ColumnDefinition, ColumnType
+from llama_stack.providers.utils.sqlstore.authorized_sqlstore import AuthorizedSqlStore
+from llama_stack.providers.utils.sqlstore.sqlstore import (
+ SqliteSqlStoreConfig,
+ SqlStoreConfig,
+ sqlstore_impl,
+)
+
+logger = get_logger(name=__name__, category="openai::conversations")
+
+
+class ConversationServiceConfig(BaseModel):
+ """Configuration for the built-in conversation service.
+
+ :param conversations_store: SQL store configuration for conversations (defaults to SQLite)
+ :param policy: Access control rules
+ """
+
+ conversations_store: SqlStoreConfig = SqliteSqlStoreConfig(
+ db_path=(DISTRIBS_BASE_DIR / "conversations.db").as_posix()
+ )
+ policy: list[AccessRule] = []
+
+
+async def get_provider_impl(config: ConversationServiceConfig, deps: dict[Any, Any]):
+ """Get the conversation service implementation."""
+ impl = ConversationServiceImpl(config, deps)
+ await impl.initialize()
+ return impl
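+
+# A construction sketch mirroring the unit tests (the db_path below is illustrative):
+#   config = ConversationServiceConfig(
+#       conversations_store=SqliteSqlStoreConfig(db_path="/path/to/conversations.db"),
+#       policy=[],
+#   )
+#   service = ConversationServiceImpl(config, deps={})
+#   await service.initialize()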
+
+
+class ConversationServiceImpl(Conversations):
+ """Built-in conversation service implementation using AuthorizedSqlStore."""
+
+ def __init__(self, config: ConversationServiceConfig, deps: dict[Any, Any]):
+ self.config = config
+ self.deps = deps
+ self.policy = config.policy
+
+ base_sql_store = sqlstore_impl(config.conversations_store)
+ self.sql_store = AuthorizedSqlStore(base_sql_store, self.policy)
+
+ async def initialize(self) -> None:
+ """Initialize the store and create tables."""
+ if isinstance(self.config.conversations_store, SqliteSqlStoreConfig):
+ os.makedirs(os.path.dirname(self.config.conversations_store.db_path), exist_ok=True)
+
+ await self.sql_store.create_table(
+ "openai_conversations",
+ {
+ "id": ColumnDefinition(type=ColumnType.STRING, primary_key=True),
+ "created_at": ColumnType.INTEGER,
+ "items": ColumnType.JSON,
+ "metadata": ColumnType.JSON,
+ },
+ )
+
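+ # Individual items are stored in a separate table, keyed back to their conversation via conversation_id.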
+ await self.sql_store.create_table(
+ "conversation_items",
+ {
+ "id": ColumnDefinition(type=ColumnType.STRING, primary_key=True),
+ "conversation_id": ColumnType.STRING,
+ "created_at": ColumnType.INTEGER,
+ "item_data": ColumnType.JSON,
+ },
+ )
+
+ async def create_conversation(
+ self, items: list[ConversationItem] | None = None, metadata: Metadata | None = None
+ ) -> Conversation:
+ """Create a conversation."""
+ random_bytes = secrets.token_bytes(24)
+ conversation_id = f"conv_{random_bytes.hex()}"
+ created_at = int(time.time())
+
+ record_data = {
+ "id": conversation_id,
+ "created_at": created_at,
+ "items": [],
+ "metadata": metadata,
+ }
+
+ await self.sql_store.insert(
+ table="openai_conversations",
+ data=record_data,
+ )
+
+ if items:
+ item_records = []
+ for item in items:
+ item_dict = item.model_dump()
+ item_id = self._get_or_generate_item_id(item, item_dict)
+
+ item_record = {
+ "id": item_id,
+ "conversation_id": conversation_id,
+ "created_at": created_at,
+ "item_data": item_dict,
+ }
+
+ item_records.append(item_record)
+
+ await self.sql_store.insert(table="conversation_items", data=item_records)
+
+ conversation = Conversation(
+ id=conversation_id,
+ created_at=created_at,
+ metadata=metadata,
+ object="conversation",
+ )
+
+ logger.info(f"Created conversation {conversation_id}")
+ return conversation
+
+ async def get_conversation(self, conversation_id: str) -> Conversation:
+ """Get a conversation with the given ID."""
+ record = await self.sql_store.fetch_one(table="openai_conversations", where={"id": conversation_id})
+
+ if record is None:
+ raise ValueError(f"Conversation {conversation_id} not found")
+
+ return Conversation(
+ id=record["id"], created_at=record["created_at"], metadata=record.get("metadata"), object="conversation"
+ )
+
+ async def update_conversation(self, conversation_id: str, metadata: Metadata) -> Conversation:
+ """Update a conversation's metadata with the given ID"""
+ await self.sql_store.update(
+ table="openai_conversations", data={"metadata": metadata}, where={"id": conversation_id}
+ )
+
+ return await self.get_conversation(conversation_id)
+
+ async def openai_delete_conversation(self, conversation_id: str) -> ConversationDeletedResource:
+ """Delete a conversation with the given ID."""
+ await self.sql_store.delete(table="openai_conversations", where={"id": conversation_id})
+
+ logger.info(f"Deleted conversation {conversation_id}")
+ return ConversationDeletedResource(id=conversation_id)
+
+ def _validate_conversation_id(self, conversation_id: str) -> None:
+ """Validate conversation ID format."""
+ if not conversation_id.startswith("conv_"):
+ raise ValueError(
+ f"Invalid 'conversation_id': '{conversation_id}'. Expected an ID that begins with 'conv_'."
+ )
+
+ def _get_or_generate_item_id(self, item: ConversationItem, item_dict: dict) -> str:
+ """Get existing item ID or generate one if missing."""
+ if item.id is None:
+ random_bytes = secrets.token_bytes(24)
+ if item.type == "message":
+ item_id = f"msg_{random_bytes.hex()}"
+ else:
+ item_id = f"item_{random_bytes.hex()}"
+ item_dict["id"] = item_id
+ return item_id
+ return item.id
+
+ async def _get_validated_conversation(self, conversation_id: str) -> Conversation:
+ """Validate conversation ID and return the conversation if it exists."""
+ self._validate_conversation_id(conversation_id)
+ return await self.get_conversation(conversation_id)
+
+ async def add_items(self, conversation_id: str, items: list[ConversationItem]) -> ConversationItemList:
+ """Create (add) items to a conversation."""
+ await self._get_validated_conversation(conversation_id)
+
+ created_items = []
+ created_at = int(time.time())
+
+ for item in items:
+ item_dict = item.model_dump()
+ item_id = self._get_or_generate_item_id(item, item_dict)
+
+ item_record = {
+ "id": item_id,
+ "conversation_id": conversation_id,
+ "created_at": created_at,
+ "item_data": item_dict,
+ }
+
+ # TODO: Add upsert support to sql_store; until then, try the insert first and fall back to an update if the ID already exists
+ try:
+ await self.sql_store.insert(table="conversation_items", data=item_record)
+ except Exception:
+ # If insert fails due to ID conflict, update existing record
+ await self.sql_store.update(
+ table="conversation_items",
+ data={"created_at": created_at, "item_data": item_dict},
+ where={"id": item_id},
+ )
+
+ created_items.append(item_dict)
+
+ logger.info(f"Created {len(created_items)} items in conversation {conversation_id}")
+
+ # Convert created items (dicts) to proper ConversationItem types
+ adapter: TypeAdapter[ConversationItem] = TypeAdapter(ConversationItem)
+ response_items: list[ConversationItem] = [adapter.validate_python(item_dict) for item_dict in created_items]
+
+ return ConversationItemList(
+ data=response_items,
+ first_id=created_items[0]["id"] if created_items else None,
+ last_id=created_items[-1]["id"] if created_items else None,
+ has_more=False,
+ )
+
+ async def retrieve(self, conversation_id: str, item_id: str) -> ConversationItem:
+ """Retrieve a conversation item."""
+ if not conversation_id:
+ raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}")
+ if not item_id:
+ raise ValueError(f"Expected a non-empty value for `item_id` but received {item_id!r}")
+
+ # Get item from conversation_items table
+ record = await self.sql_store.fetch_one(
+ table="conversation_items", where={"id": item_id, "conversation_id": conversation_id}
+ )
+
+ if record is None:
+ raise ValueError(f"Item {item_id} not found in conversation {conversation_id}")
+
+ adapter: TypeAdapter[ConversationItem] = TypeAdapter(ConversationItem)
+ return adapter.validate_python(record["item_data"])
+
+ async def list(self, conversation_id: str, after=NOT_GIVEN, include=NOT_GIVEN, limit=NOT_GIVEN, order=NOT_GIVEN) -> ConversationItemList:
+ """List items in the conversation."""
+ result = await self.sql_store.fetch_all(table="conversation_items", where={"conversation_id": conversation_id})
+ records = result.data
+
+ if order != NOT_GIVEN and order == "asc":
+ records.sort(key=lambda x: x["created_at"])
+ else:
+ records.sort(key=lambda x: x["created_at"], reverse=True)
+
+ actual_limit = 20
+ if limit != NOT_GIVEN and isinstance(limit, int):
+ actual_limit = limit
+
+ records = records[:actual_limit]
+ items = [record["item_data"] for record in records]
+
+ adapter: TypeAdapter[ConversationItem] = TypeAdapter(ConversationItem)
+ response_items: list[ConversationItem] = [adapter.validate_python(item) for item in items]
+
+ first_id = response_items[0].id if response_items else None
+ last_id = response_items[-1].id if response_items else None
+
+ return ConversationItemList(
+ data=response_items,
+ first_id=first_id,
+ last_id=last_id,
+ has_more=False,
+ )
+
+ async def openai_delete_conversation_item(
+ self, conversation_id: str, item_id: str
+ ) -> ConversationItemDeletedResource:
+ """Delete a conversation item."""
+ if not conversation_id:
+ raise ValueError(f"Expected a non-empty value for `conversation_id` but received {conversation_id!r}")
+ if not item_id:
+ raise ValueError(f"Expected a non-empty value for `item_id` but received {item_id!r}")
+
+ _ = await self._get_validated_conversation(conversation_id)
+
+ record = await self.sql_store.fetch_one(
+ table="conversation_items", where={"id": item_id, "conversation_id": conversation_id}
+ )
+
+ if record is None:
+ raise ValueError(f"Item {item_id} not found in conversation {conversation_id}")
+
+ await self.sql_store.delete(
+ table="conversation_items", where={"id": item_id, "conversation_id": conversation_id}
+ )
+
+ logger.info(f"Deleted item {item_id} from conversation {conversation_id}")
+ return ConversationItemDeletedResource(id=item_id)
diff --git a/llama_stack/core/datatypes.py b/llama_stack/core/datatypes.py
index 930cf2646..10cc87bc2 100644
--- a/llama_stack/core/datatypes.py
+++ b/llama_stack/core/datatypes.py
@@ -475,6 +475,13 @@ InferenceStoreConfig (with queue tuning parameters) or a SqlStoreConfig (depreca
If not specified, a default SQLite store will be used.""",
)
+ conversations_store: SqlStoreConfig | None = Field(
+ default=None,
+ description="""
+Configuration for the persistence store used by the conversations API.
+If not specified, a default SQLite store will be used.""",
+ )
+
# registry of "resources" in the distribution
models: list[ModelInput] = Field(default_factory=list)
shields: list[ShieldInput] = Field(default_factory=list)
diff --git a/llama_stack/core/distribution.py b/llama_stack/core/distribution.py
index 302ecb960..f44967aaf 100644
--- a/llama_stack/core/distribution.py
+++ b/llama_stack/core/distribution.py
@@ -25,7 +25,7 @@ from llama_stack.providers.datatypes import (
logger = get_logger(name=__name__, category="core")
-INTERNAL_APIS = {Api.inspect, Api.providers, Api.prompts}
+INTERNAL_APIS = {Api.inspect, Api.providers, Api.prompts, Api.conversations}
def stack_apis() -> list[Api]:
diff --git a/llama_stack/core/resolver.py b/llama_stack/core/resolver.py
index f421c47ed..0d6f54f9e 100644
--- a/llama_stack/core/resolver.py
+++ b/llama_stack/core/resolver.py
@@ -10,6 +10,7 @@ from typing import Any
from llama_stack.apis.agents import Agents
from llama_stack.apis.batches import Batches
from llama_stack.apis.benchmarks import Benchmarks
+from llama_stack.apis.conversations import Conversations
from llama_stack.apis.datasetio import DatasetIO
from llama_stack.apis.datasets import Datasets
from llama_stack.apis.datatypes import ExternalApiSpec
@@ -96,6 +97,7 @@ def api_protocol_map(external_apis: dict[Api, ExternalApiSpec] | None = None) ->
Api.tool_runtime: ToolRuntime,
Api.files: Files,
Api.prompts: Prompts,
+ Api.conversations: Conversations,
}
if external_apis:
diff --git a/llama_stack/core/server/server.py b/llama_stack/core/server/server.py
index 873335775..32be57880 100644
--- a/llama_stack/core/server/server.py
+++ b/llama_stack/core/server/server.py
@@ -451,6 +451,7 @@ def create_app(
apis_to_serve.add("inspect")
apis_to_serve.add("providers")
apis_to_serve.add("prompts")
+ apis_to_serve.add("conversations")
for api_str in apis_to_serve:
api = Api(api_str)
diff --git a/llama_stack/core/stack.py b/llama_stack/core/stack.py
index 3e14328a3..d5d55319a 100644
--- a/llama_stack/core/stack.py
+++ b/llama_stack/core/stack.py
@@ -15,6 +15,7 @@ import yaml
from llama_stack.apis.agents import Agents
from llama_stack.apis.benchmarks import Benchmarks
+from llama_stack.apis.conversations import Conversations
from llama_stack.apis.datasetio import DatasetIO
from llama_stack.apis.datasets import Datasets
from llama_stack.apis.eval import Eval
@@ -34,6 +35,7 @@ from llama_stack.apis.telemetry import Telemetry
from llama_stack.apis.tools import RAGToolRuntime, ToolGroups, ToolRuntime
from llama_stack.apis.vector_dbs import VectorDBs
from llama_stack.apis.vector_io import VectorIO
+from llama_stack.core.conversations.conversations import ConversationServiceConfig, ConversationServiceImpl
from llama_stack.core.datatypes import Provider, StackRunConfig
from llama_stack.core.distribution import get_provider_registry
from llama_stack.core.inspect import DistributionInspectConfig, DistributionInspectImpl
@@ -73,6 +75,7 @@ class LlamaStack(
RAGToolRuntime,
Files,
Prompts,
+ Conversations,
):
pass
@@ -312,6 +315,12 @@ def add_internal_implementations(impls: dict[Api, Any], run_config: StackRunConf
)
impls[Api.prompts] = prompts_impl
+ conversations_impl = ConversationServiceImpl(
+ ConversationServiceConfig(run_config=run_config),
+ deps=impls,
+ )
+ impls[Api.conversations] = conversations_impl
+
class Stack:
def __init__(self, run_config: StackRunConfig, provider_registry: ProviderRegistry | None = None):
@@ -342,6 +351,8 @@ class Stack:
if Api.prompts in impls:
await impls[Api.prompts].initialize()
+ if Api.conversations in impls:
+ await impls[Api.conversations].initialize()
await register_resources(self.run_config, impls)
diff --git a/llama_stack/providers/utils/sqlstore/api.py b/llama_stack/providers/utils/sqlstore/api.py
index 6bb85ea0c..a61fd1090 100644
--- a/llama_stack/providers/utils/sqlstore/api.py
+++ b/llama_stack/providers/utils/sqlstore/api.py
@@ -4,7 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
from enum import Enum
from typing import Any, Literal, Protocol
@@ -41,9 +41,9 @@ class SqlStore(Protocol):
"""
pass
- async def insert(self, table: str, data: Mapping[str, Any]) -> None:
+ async def insert(self, table: str, data: Mapping[str, Any] | Sequence[Mapping[str, Any]]) -> None:
"""
- Insert a row into a table.
+ Insert a row or batch of rows into a table.
"""
pass
diff --git a/llama_stack/providers/utils/sqlstore/authorized_sqlstore.py b/llama_stack/providers/utils/sqlstore/authorized_sqlstore.py
index ab67f7052..e1da4db6e 100644
--- a/llama_stack/providers/utils/sqlstore/authorized_sqlstore.py
+++ b/llama_stack/providers/utils/sqlstore/authorized_sqlstore.py
@@ -4,7 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
from typing import Any, Literal
from llama_stack.core.access_control.access_control import default_policy, is_action_allowed
@@ -38,6 +38,18 @@ SQL_OPTIMIZED_POLICY = [
]
+def _enhance_item_with_access_control(item: Mapping[str, Any], current_user: User | None) -> Mapping[str, Any]:
+ """Add access control attributes to a data item."""
+ enhanced = dict(item)
+ if current_user:
+ enhanced["owner_principal"] = current_user.principal
+ enhanced["access_attributes"] = current_user.attributes
+ else:
+ enhanced["owner_principal"] = None
+ enhanced["access_attributes"] = None
+ return enhanced
+
+
class SqlRecord(ProtectedResource):
def __init__(self, record_id: str, table_name: str, owner: User):
self.type = f"sql_record::{table_name}"
@@ -102,18 +114,14 @@ class AuthorizedSqlStore:
await self.sql_store.add_column_if_not_exists(table, "access_attributes", ColumnType.JSON)
await self.sql_store.add_column_if_not_exists(table, "owner_principal", ColumnType.STRING)
- async def insert(self, table: str, data: Mapping[str, Any]) -> None:
- """Insert a row with automatic access control attribute capture."""
- enhanced_data = dict(data)
-
+ async def insert(self, table: str, data: Mapping[str, Any] | Sequence[Mapping[str, Any]]) -> None:
+ """Insert a row or batch of rows with automatic access control attribute capture."""
current_user = get_authenticated_user()
- if current_user:
- enhanced_data["owner_principal"] = current_user.principal
- enhanced_data["access_attributes"] = current_user.attributes
+ enhanced_data: Mapping[str, Any] | Sequence[Mapping[str, Any]]
+ if isinstance(data, Mapping):
+ enhanced_data = _enhance_item_with_access_control(data, current_user)
else:
- enhanced_data["owner_principal"] = None
- enhanced_data["access_attributes"] = None
-
+ enhanced_data = [_enhance_item_with_access_control(item, current_user) for item in data]
await self.sql_store.insert(table, enhanced_data)
async def fetch_all(
diff --git a/llama_stack/providers/utils/sqlstore/sqlalchemy_sqlstore.py b/llama_stack/providers/utils/sqlstore/sqlalchemy_sqlstore.py
index 46ed8c1d1..23cd6444e 100644
--- a/llama_stack/providers/utils/sqlstore/sqlalchemy_sqlstore.py
+++ b/llama_stack/providers/utils/sqlstore/sqlalchemy_sqlstore.py
@@ -3,7 +3,7 @@
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
from typing import Any, Literal
from sqlalchemy import (
@@ -116,7 +116,7 @@ class SqlAlchemySqlStoreImpl(SqlStore):
async with engine.begin() as conn:
await conn.run_sync(self.metadata.create_all, tables=[sqlalchemy_table], checkfirst=True)
- async def insert(self, table: str, data: Mapping[str, Any]) -> None:
+ async def insert(self, table: str, data: Mapping[str, Any] | Sequence[Mapping[str, Any]]) -> None:
async with self.async_session() as session:
await session.execute(self.metadata.tables[table].insert(), data)
await session.commit()
diff --git a/llama_stack/strong_typing/schema.py b/llama_stack/strong_typing/schema.py
index 2bfb7033e..f911fc41f 100644
--- a/llama_stack/strong_typing/schema.py
+++ b/llama_stack/strong_typing/schema.py
@@ -484,12 +484,19 @@ class JsonSchemaGenerator:
}
return ret
elif origin_type is Literal:
- if len(typing.get_args(typ)) != 1:
- raise ValueError(f"Literal type {typ} has {len(typing.get_args(typ))} arguments")
- (literal_value,) = typing.get_args(typ) # unpack value of literal type
- schema = self.type_to_schema(type(literal_value))
- schema["const"] = literal_value
- return schema
+ literal_args = typing.get_args(typ)
+ if len(literal_args) == 1:
+ (literal_value,) = literal_args
+ schema = self.type_to_schema(type(literal_value))
+ schema["const"] = literal_value
+ return schema
+ elif len(literal_args) > 1:
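+ # Multi-valued Literal types are emitted as a JSON Schema enum; the schema's base type is taken from the first value.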
+ first_value = literal_args[0]
+ schema = self.type_to_schema(type(first_value))
+ schema["enum"] = list(literal_args)
+ return schema
+ else:
+ return {"enum": []}
elif origin_type is type:
(concrete_type,) = typing.get_args(typ) # unpack single tuple element
return {"const": self.type_to_schema(concrete_type, force_expand=True)}
diff --git a/pyproject.toml b/pyproject.toml
index 8a162e90a..52eb8f7c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ dependencies = [
"jinja2>=3.1.6",
"jsonschema",
"llama-stack-client>=0.2.23",
- "openai>=1.100.0", # for expires_after support
+ "openai>=1.107", # for expires_after support
"prompt-toolkit",
"python-dotenv",
"python-jose[cryptography]",
@@ -49,6 +49,7 @@ dependencies = [
"opentelemetry-exporter-otlp-proto-http>=1.30.0", # server
"aiosqlite>=0.21.0", # server - for metadata store
"asyncpg", # for metadata store
+ "sqlalchemy[asyncio]>=2.0.41", # server - for conversations
]
[project.optional-dependencies]
diff --git a/tests/integration/conversations/test_openai_conversations.py b/tests/integration/conversations/test_openai_conversations.py
new file mode 100644
index 000000000..345e1c00a
--- /dev/null
+++ b/tests/integration/conversations/test_openai_conversations.py
@@ -0,0 +1,135 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+import pytest
+
+
+@pytest.mark.integration
+class TestOpenAIConversations:
+ # TODO: Update to compat_client after client-SDK is generated
+ def test_conversation_create(self, openai_client):
+ conversation = openai_client.conversations.create(
+ metadata={"topic": "demo"}, items=[{"type": "message", "role": "user", "content": "Hello!"}]
+ )
+
+ assert conversation.id.startswith("conv_")
+ assert conversation.object == "conversation"
+ assert conversation.metadata["topic"] == "demo"
+ assert isinstance(conversation.created_at, int)
+
+ def test_conversation_retrieve(self, openai_client):
+ conversation = openai_client.conversations.create(metadata={"topic": "demo"})
+
+ retrieved = openai_client.conversations.retrieve(conversation.id)
+
+ assert retrieved.id == conversation.id
+ assert retrieved.object == "conversation"
+ assert retrieved.metadata["topic"] == "demo"
+ assert retrieved.created_at == conversation.created_at
+
+ def test_conversation_update(self, openai_client):
+ conversation = openai_client.conversations.create(metadata={"topic": "demo"})
+
+ updated = openai_client.conversations.update(conversation.id, metadata={"topic": "project-x"})
+
+ assert updated.id == conversation.id
+ assert updated.metadata["topic"] == "project-x"
+ assert updated.created_at == conversation.created_at
+
+ def test_conversation_delete(self, openai_client):
+ conversation = openai_client.conversations.create(metadata={"topic": "demo"})
+
+ deleted = openai_client.conversations.delete(conversation.id)
+
+ assert deleted.id == conversation.id
+ assert deleted.object == "conversation.deleted"
+ assert deleted.deleted is True
+
+ def test_conversation_items_create(self, openai_client):
+ conversation = openai_client.conversations.create()
+
+ items = openai_client.conversations.items.create(
+ conversation.id,
+ items=[
+ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]},
+ {"type": "message", "role": "user", "content": [{"type": "input_text", "text": "How are you?"}]},
+ ],
+ )
+
+ assert items.object == "list"
+ assert len(items.data) == 2
+ assert items.data[0].content[0].text == "Hello!"
+ assert items.data[1].content[0].text == "How are you?"
+ assert items.first_id == items.data[0].id
+ assert items.last_id == items.data[1].id
+ assert items.has_more is False
+
+ def test_conversation_items_list(self, openai_client):
+ conversation = openai_client.conversations.create()
+
+ openai_client.conversations.items.create(
+ conversation.id,
+ items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
+ )
+
+ items = openai_client.conversations.items.list(conversation.id, limit=10)
+
+ assert items.object == "list"
+ assert len(items.data) >= 1
+ assert items.data[0].type == "message"
+ assert items.data[0].role == "user"
+ assert hasattr(items, "first_id")
+ assert hasattr(items, "last_id")
+ assert hasattr(items, "has_more")
+
+ def test_conversation_item_retrieve(self, openai_client):
+ conversation = openai_client.conversations.create()
+
+ created_items = openai_client.conversations.items.create(
+ conversation.id,
+ items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
+ )
+
+ item_id = created_items.data[0].id
+ item = openai_client.conversations.items.retrieve(item_id, conversation_id=conversation.id)
+
+ assert item.id == item_id
+ assert item.type == "message"
+ assert item.role == "user"
+ assert item.content[0].text == "Hello!"
+
+ def test_conversation_item_delete(self, openai_client):
+ conversation = openai_client.conversations.create()
+
+ created_items = openai_client.conversations.items.create(
+ conversation.id,
+ items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Hello!"}]}],
+ )
+
+ item_id = created_items.data[0].id
+ deleted = openai_client.conversations.items.delete(item_id, conversation_id=conversation.id)
+
+ assert deleted.id == item_id
+ assert deleted.object == "conversation.item.deleted"
+ assert deleted.deleted is True
+
+ def test_full_workflow(self, openai_client):
+ conversation = openai_client.conversations.create(
+ metadata={"topic": "workflow-test"}, items=[{"type": "message", "role": "user", "content": "Hello!"}]
+ )
+
+ openai_client.conversations.items.create(
+ conversation.id,
+ items=[{"type": "message", "role": "user", "content": [{"type": "input_text", "text": "Follow up"}]}],
+ )
+
+ all_items = openai_client.conversations.items.list(conversation.id)
+ assert len(all_items.data) >= 2
+
+ updated = openai_client.conversations.update(conversation.id, metadata={"topic": "workflow-complete"})
+ assert updated.metadata["topic"] == "workflow-complete"
+
+ openai_client.conversations.delete(conversation.id)
diff --git a/tests/unit/conversations/test_api_models.py b/tests/unit/conversations/test_api_models.py
new file mode 100644
index 000000000..0e52778b8
--- /dev/null
+++ b/tests/unit/conversations/test_api_models.py
@@ -0,0 +1,60 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+
+from llama_stack.apis.conversations.conversations import (
+ Conversation,
+ ConversationCreateRequest,
+ ConversationItem,
+ ConversationItemList,
+)
+
+
+def test_conversation_create_request_defaults():
+ request = ConversationCreateRequest()
+ assert request.items == []
+ assert request.metadata == {}
+
+
+def test_conversation_model_defaults():
+ conversation = Conversation(
+ id="conv_123456789",
+ created_at=1234567890,
+ metadata=None,
+ object="conversation",
+ )
+ assert conversation.id == "conv_123456789"
+ assert conversation.object == "conversation"
+ assert conversation.metadata is None
+
+
+def test_openai_client_compatibility():
+ from openai.types.conversations.message import Message
+ from pydantic import TypeAdapter
+
+ openai_message = Message(
+ id="msg_123",
+ content=[{"type": "input_text", "text": "Hello"}],
+ role="user",
+ status="in_progress",
+ type="message",
+ object="message",
+ )
+
+ adapter = TypeAdapter(ConversationItem)
+ validated_item = adapter.validate_python(openai_message.model_dump())
+
+ assert validated_item.id == "msg_123"
+ assert validated_item.type == "message"
+
+
+def test_conversation_item_list():
+ item_list = ConversationItemList(data=[])
+ assert item_list.object == "list"
+ assert item_list.data == []
+ assert item_list.first_id is None
+ assert item_list.last_id is None
+ assert item_list.has_more is False
diff --git a/tests/unit/conversations/test_conversations.py b/tests/unit/conversations/test_conversations.py
new file mode 100644
index 000000000..65c3e2333
--- /dev/null
+++ b/tests/unit/conversations/test_conversations.py
@@ -0,0 +1,132 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+import tempfile
+from pathlib import Path
+
+import pytest
+from openai.types.conversations.conversation import Conversation as OpenAIConversation
+from openai.types.conversations.conversation_item import ConversationItem as OpenAIConversationItem
+from pydantic import TypeAdapter
+
+from llama_stack.apis.agents.openai_responses import (
+ OpenAIResponseInputMessageContentText,
+ OpenAIResponseMessage,
+)
+from llama_stack.core.conversations.conversations import (
+ ConversationServiceConfig,
+ ConversationServiceImpl,
+)
+from llama_stack.providers.utils.sqlstore.sqlstore import SqliteSqlStoreConfig
+
+
+@pytest.fixture
+async def service():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ db_path = Path(tmpdir) / "test_conversations.db"
+
+ config = ConversationServiceConfig(conversations_store=SqliteSqlStoreConfig(db_path=str(db_path)), policy=[])
+ service = ConversationServiceImpl(config, {})
+ await service.initialize()
+ yield service
+
+
+async def test_conversation_lifecycle(service):
+ conversation = await service.create_conversation(metadata={"test": "data"})
+
+ assert conversation.id.startswith("conv_")
+ assert conversation.metadata == {"test": "data"}
+
+ retrieved = await service.get_conversation(conversation.id)
+ assert retrieved.id == conversation.id
+
+ deleted = await service.openai_delete_conversation(conversation.id)
+ assert deleted.id == conversation.id
+
+
+async def test_conversation_items(service):
+ conversation = await service.create_conversation()
+
+ items = [
+ OpenAIResponseMessage(
+ type="message",
+ role="user",
+ content=[OpenAIResponseInputMessageContentText(type="input_text", text="Hello")],
+ id="msg_test123",
+ status="completed",
+ )
+ ]
+ item_list = await service.add_items(conversation.id, items)
+
+ assert len(item_list.data) == 1
+ assert item_list.data[0].id == "msg_test123"
+
+ items = await service.list(conversation.id)
+ assert len(items.data) == 1
+
+
+async def test_invalid_conversation_id(service):
+ with pytest.raises(ValueError, match="Expected an ID that begins with 'conv_'"):
+ await service._get_validated_conversation("invalid_id")
+
+
+async def test_empty_parameter_validation(service):
+ with pytest.raises(ValueError, match="Expected a non-empty value"):
+ await service.retrieve("", "item_123")
+
+
+async def test_openai_type_compatibility(service):
+ conversation = await service.create_conversation(metadata={"test": "value"})
+
+ conversation_dict = conversation.model_dump()
+ openai_conversation = OpenAIConversation.model_validate(conversation_dict)
+
+ for attr in ["id", "object", "created_at", "metadata"]:
+ assert getattr(openai_conversation, attr) == getattr(conversation, attr)
+
+ items = [
+ OpenAIResponseMessage(
+ type="message",
+ role="user",
+ content=[OpenAIResponseInputMessageContentText(type="input_text", text="Hello")],
+ id="msg_test456",
+ status="completed",
+ )
+ ]
+ item_list = await service.add_items(conversation.id, items)
+
+ for attr in ["object", "data", "first_id", "last_id", "has_more"]:
+ assert hasattr(item_list, attr)
+ assert item_list.object == "list"
+
+ items = await service.list(conversation.id)
+ item = await service.retrieve(conversation.id, items.data[0].id)
+ item_dict = item.model_dump()
+
+ openai_item_adapter = TypeAdapter(OpenAIConversationItem)
+ openai_item_adapter.validate_python(item_dict)
+
+
+async def test_policy_configuration():
+ from llama_stack.core.access_control.datatypes import Action, Scope
+ from llama_stack.core.datatypes import AccessRule
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ db_path = Path(tmpdir) / "test_conversations_policy.db"
+
+ restrictive_policy = [
+ AccessRule(forbid=Scope(principal="test_user", actions=[Action.CREATE, Action.READ], resource="*"))
+ ]
+
+ config = ConversationServiceConfig(
+ conversations_store=SqliteSqlStoreConfig(db_path=str(db_path)), policy=restrictive_policy
+ )
+ service = ConversationServiceImpl(config, {})
+ await service.initialize()
+
+ assert service.policy == restrictive_policy
+ assert len(service.policy) == 1
+ assert service.policy[0].forbid is not None
diff --git a/tests/unit/utils/sqlstore/test_sqlstore.py b/tests/unit/utils/sqlstore/test_sqlstore.py
index ba59ec7ec..00669b698 100644
--- a/tests/unit/utils/sqlstore/test_sqlstore.py
+++ b/tests/unit/utils/sqlstore/test_sqlstore.py
@@ -368,6 +368,32 @@ async def test_where_operator_gt_and_update_delete():
assert {r["id"] for r in rows_after} == {1, 3}
+async def test_batch_insert():
+ with TemporaryDirectory() as tmp_dir:
+ db_path = tmp_dir + "/test.db"
+ store = SqlAlchemySqlStoreImpl(SqliteSqlStoreConfig(db_path=db_path))
+
+ await store.create_table(
+ "batch_test",
+ {
+ "id": ColumnType.INTEGER,
+ "name": ColumnType.STRING,
+ "value": ColumnType.INTEGER,
+ },
+ )
+
+ batch_data = [
+ {"id": 1, "name": "first", "value": 10},
+ {"id": 2, "name": "second", "value": 20},
+ {"id": 3, "name": "third", "value": 30},
+ ]
+
+ await store.insert("batch_test", batch_data)
+
+ result = await store.fetch_all("batch_test", order_by=[("id", "asc")])
+ assert result.data == batch_data
+
+
async def test_where_operator_edge_cases():
with TemporaryDirectory() as tmp_dir:
db_path = tmp_dir + "/test.db"
diff --git a/uv.lock b/uv.lock
index 63639ee4a..c1cd7e71c 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1773,6 +1773,7 @@ dependencies = [
{ name = "python-jose", extra = ["cryptography"] },
{ name = "python-multipart" },
{ name = "rich" },
+ { name = "sqlalchemy", extra = ["asyncio"] },
{ name = "starlette" },
{ name = "termcolor" },
{ name = "tiktoken" },
@@ -1887,7 +1888,7 @@ requires-dist = [
{ name = "jsonschema" },
{ name = "llama-stack-client", specifier = ">=0.2.23" },
{ name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.23" },
- { name = "openai", specifier = ">=1.100.0" },
+ { name = "openai", specifier = ">=1.107" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
{ name = "opentelemetry-sdk", specifier = ">=1.30.0" },
{ name = "pandas", marker = "extra == 'ui'" },
@@ -1898,6 +1899,7 @@ requires-dist = [
{ name = "python-jose", extras = ["cryptography"] },
{ name = "python-multipart", specifier = ">=0.0.20" },
{ name = "rich" },
+ { name = "sqlalchemy", extras = ["asyncio"], specifier = ">=2.0.41" },
{ name = "starlette" },
{ name = "streamlit", marker = "extra == 'ui'" },
{ name = "streamlit-option-menu", marker = "extra == 'ui'" },