Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 04:04:14 +00:00)
feat: add Prompts API to Responses API
commit bdc16ea392 (parent 9f6c658f2a)

15 changed files with 526 additions and 4 deletions
docs/static/deprecated-llama-stack-spec.html (vendored): 95 changed lines

@@ -9056,6 +9056,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
@@ -9521,6 +9525,44 @@
         "title": "OpenAIResponseText",
         "description": "Text response configuration for OpenAI responses."
       },
+      "Prompt": {
+        "type": "object",
+        "properties": {
+          "prompt": {
+            "type": "string",
+            "description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
+          },
+          "version": {
+            "type": "integer",
+            "description": "Version (integer starting at 1, incremented on save)"
+          },
+          "prompt_id": {
+            "type": "string",
+            "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
+          },
+          "variables": {
+            "type": "array",
+            "items": {
+              "type": "string"
+            },
+            "description": "List of prompt variable names that can be used in the prompt template"
+          },
+          "is_default": {
+            "type": "boolean",
+            "default": false,
+            "description": "Boolean indicating whether this version is the default version for this prompt"
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "version",
+          "prompt_id",
+          "variables",
+          "is_default"
+        ],
+        "title": "Prompt",
+        "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
+      },
       "OpenAIResponseInputTool": {
         "oneOf": [
           {
@@ -9824,6 +9866,51 @@
         "title": "OpenAIResponseInputToolWebSearch",
         "description": "Web search tool configuration for OpenAI response inputs."
       },
+      "OpenAIResponsePromptParam": {
+        "type": "object",
+        "properties": {
+          "id": {
+            "type": "string",
+            "description": "Unique identifier of the prompt template"
+          },
+          "variables": {
+            "type": "object",
+            "additionalProperties": {
+              "oneOf": [
+                {
+                  "type": "null"
+                },
+                {
+                  "type": "boolean"
+                },
+                {
+                  "type": "number"
+                },
+                {
+                  "type": "string"
+                },
+                {
+                  "type": "array"
+                },
+                {
+                  "type": "object"
+                }
+              ]
+            },
+            "description": "Dictionary of variable names to values for template substitution"
+          },
+          "version": {
+            "type": "string",
+            "description": "Version number of the prompt to use (defaults to latest if not specified)"
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "id"
+        ],
+        "title": "OpenAIResponsePromptParam",
+        "description": "Prompt object that is used for OpenAI responses."
+      },
       "CreateOpenaiResponseRequest": {
         "type": "object",
         "properties": {
@@ -9845,6 +9932,10 @@
             "type": "string",
             "description": "The underlying LLM used for completions."
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePromptParam",
+            "description": "Prompt object with ID, version, and variables."
+          },
           "instructions": {
             "type": "string"
           },
@@ -9929,6 +10020,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
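For orientation, here is a hypothetical instance of the new Prompt schema. Every value is made up (the prompt_id placeholder is left symbolic rather than a real hash), and the {{ language }} placeholder syntax is purely illustrative, since the schema only says the text may contain variable placeholders:

    {
      "prompt_id": "pmpt_<48-digit-hash>",
      "prompt": "You are a helpful assistant. Always answer in {{ language }}.",
      "version": 1,
      "variables": ["language"],
      "is_default": true
    }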
docs/static/deprecated-llama-stack-spec.yaml (vendored): 78 changed lines

@@ -6708,6 +6708,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
@@ -7076,6 +7080,44 @@ components:
       title: OpenAIResponseText
       description: >-
         Text response configuration for OpenAI responses.
+    Prompt:
+      type: object
+      properties:
+        prompt:
+          type: string
+          description: >-
+            The system prompt text with variable placeholders. Variables are only
+            supported when using the Responses API.
+        version:
+          type: integer
+          description: >-
+            Version (integer starting at 1, incremented on save)
+        prompt_id:
+          type: string
+          description: >-
+            Unique identifier formatted as 'pmpt_<48-digit-hash>'
+        variables:
+          type: array
+          items:
+            type: string
+          description: >-
+            List of prompt variable names that can be used in the prompt template
+        is_default:
+          type: boolean
+          default: false
+          description: >-
+            Boolean indicating whether this version is the default version for this
+            prompt
+      additionalProperties: false
+      required:
+        - version
+        - prompt_id
+        - variables
+        - is_default
+      title: Prompt
+      description: >-
+        A prompt resource representing a stored OpenAI Compatible prompt template
+        in Llama Stack.
     OpenAIResponseInputTool:
       oneOf:
         - $ref: '#/components/schemas/OpenAIResponseInputToolWebSearch'
@@ -7286,6 +7328,34 @@ components:
       title: OpenAIResponseInputToolWebSearch
       description: >-
         Web search tool configuration for OpenAI response inputs.
+    OpenAIResponsePromptParam:
+      type: object
+      properties:
+        id:
+          type: string
+          description: Unique identifier of the prompt template
+        variables:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Dictionary of variable names to values for template substitution
+        version:
+          type: string
+          description: >-
+            Version number of the prompt to use (defaults to latest if not specified)
+      additionalProperties: false
+      required:
+        - id
+      title: OpenAIResponsePromptParam
+      description: >-
+        Prompt object that is used for OpenAI responses.
     CreateOpenaiResponseRequest:
       type: object
       properties:
@@ -7299,6 +7369,10 @@ components:
         model:
           type: string
          description: The underlying LLM used for completions.
+        prompt:
+          $ref: '#/components/schemas/OpenAIResponsePromptParam'
+          description: >-
+            Prompt object with ID, version, and variables.
         instructions:
           type: string
         previous_response_id:
@@ -7370,6 +7444,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
docs/static/llama-stack-spec.html (vendored): 57 changed lines

@@ -7463,6 +7463,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
@@ -7919,6 +7923,51 @@
         "title": "OpenAIResponseInputToolWebSearch",
         "description": "Web search tool configuration for OpenAI response inputs."
       },
+      "OpenAIResponsePromptParam": {
+        "type": "object",
+        "properties": {
+          "id": {
+            "type": "string",
+            "description": "Unique identifier of the prompt template"
+          },
+          "variables": {
+            "type": "object",
+            "additionalProperties": {
+              "oneOf": [
+                {
+                  "type": "null"
+                },
+                {
+                  "type": "boolean"
+                },
+                {
+                  "type": "number"
+                },
+                {
+                  "type": "string"
+                },
+                {
+                  "type": "array"
+                },
+                {
+                  "type": "object"
+                }
+              ]
+            },
+            "description": "Dictionary of variable names to values for template substitution"
+          },
+          "version": {
+            "type": "string",
+            "description": "Version number of the prompt to use (defaults to latest if not specified)"
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "id"
+        ],
+        "title": "OpenAIResponsePromptParam",
+        "description": "Prompt object that is used for OpenAI responses."
+      },
       "CreateOpenaiResponseRequest": {
         "type": "object",
         "properties": {
@@ -7940,6 +7989,10 @@
             "type": "string",
             "description": "The underlying LLM used for completions."
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePromptParam",
+            "description": "Prompt object with ID, version, and variables."
+          },
           "instructions": {
             "type": "string"
           },
@@ -8024,6 +8077,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
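To illustrate how the new request field is meant to be used, here is a hypothetical CreateOpenaiResponseRequest body that references a stored prompt by ID and supplies template variables. The model name, prompt ID, and input text are made-up examples, the input field is assumed to follow the usual Responses API request shape, and only the prompt field is introduced by this change:

    {
      "model": "meta-llama/Llama-3.2-3B-Instruct",
      "input": "Summarize the latest deployment report.",
      "prompt": {
        "id": "pmpt_<48-digit-hash>",
        "version": "1",
        "variables": {
          "language": "French"
        }
      }
    }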
docs/static/llama-stack-spec.yaml (vendored): 40 changed lines

@@ -5628,6 +5628,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
@@ -5949,6 +5953,34 @@ components:
       title: OpenAIResponseInputToolWebSearch
       description: >-
         Web search tool configuration for OpenAI response inputs.
+    OpenAIResponsePromptParam:
+      type: object
+      properties:
+        id:
+          type: string
+          description: Unique identifier of the prompt template
+        variables:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Dictionary of variable names to values for template substitution
+        version:
+          type: string
+          description: >-
+            Version number of the prompt to use (defaults to latest if not specified)
+      additionalProperties: false
+      required:
+        - id
+      title: OpenAIResponsePromptParam
+      description: >-
+        Prompt object that is used for OpenAI responses.
     CreateOpenaiResponseRequest:
      type: object
       properties:
@@ -5962,6 +5994,10 @@ components:
         model:
           type: string
           description: The underlying LLM used for completions.
+        prompt:
+          $ref: '#/components/schemas/OpenAIResponsePromptParam'
+          description: >-
+            Prompt object with ID, version, and variables.
         instructions:
           type: string
         previous_response_id:
@@ -6033,6 +6069,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
docs/static/stainless-llama-stack-spec.html (vendored): 57 changed lines

@@ -9472,6 +9472,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
@@ -9928,6 +9932,51 @@
         "title": "OpenAIResponseInputToolWebSearch",
         "description": "Web search tool configuration for OpenAI response inputs."
       },
+      "OpenAIResponsePromptParam": {
+        "type": "object",
+        "properties": {
+          "id": {
+            "type": "string",
+            "description": "Unique identifier of the prompt template"
+          },
+          "variables": {
+            "type": "object",
+            "additionalProperties": {
+              "oneOf": [
+                {
+                  "type": "null"
+                },
+                {
+                  "type": "boolean"
+                },
+                {
+                  "type": "number"
+                },
+                {
+                  "type": "string"
+                },
+                {
+                  "type": "array"
+                },
+                {
+                  "type": "object"
+                }
+              ]
+            },
+            "description": "Dictionary of variable names to values for template substitution"
+          },
+          "version": {
+            "type": "string",
+            "description": "Version number of the prompt to use (defaults to latest if not specified)"
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "id"
+        ],
+        "title": "OpenAIResponsePromptParam",
+        "description": "Prompt object that is used for OpenAI responses."
+      },
       "CreateOpenaiResponseRequest": {
         "type": "object",
         "properties": {
@@ -9949,6 +9998,10 @@
             "type": "string",
             "description": "The underlying LLM used for completions."
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePromptParam",
+            "description": "Prompt object with ID, version, and variables."
+          },
           "instructions": {
             "type": "string"
           },
@@ -10033,6 +10086,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
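On the read side, the response object now embeds the resolved Prompt resource. A hypothetical fragment of a returned response, limited to the fields touched by this diff (the status value shown is a made-up example, and the prompt contents reuse the illustrative Prompt instance from earlier):

    {
      "prompt": {
        "prompt_id": "pmpt_<48-digit-hash>",
        "prompt": "You are a helpful assistant. Always answer in {{ language }}.",
        "version": 1,
        "variables": ["language"],
        "is_default": true
      },
      "status": "completed"
    }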
docs/static/stainless-llama-stack-spec.yaml (vendored): 40 changed lines

@@ -7073,6 +7073,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
@@ -7394,6 +7398,34 @@ components:
       title: OpenAIResponseInputToolWebSearch
       description: >-
         Web search tool configuration for OpenAI response inputs.
+    OpenAIResponsePromptParam:
+      type: object
+      properties:
+        id:
+          type: string
+          description: Unique identifier of the prompt template
+        variables:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Dictionary of variable names to values for template substitution
+        version:
+          type: string
+          description: >-
+            Version number of the prompt to use (defaults to latest if not specified)
+      additionalProperties: false
+      required:
+        - id
+      title: OpenAIResponsePromptParam
+      description: >-
+        Prompt object that is used for OpenAI responses.
     CreateOpenaiResponseRequest:
       type: object
       properties:
@@ -7407,6 +7439,10 @@ components:
         model:
           type: string
           description: The underlying LLM used for completions.
+        prompt:
+          $ref: '#/components/schemas/OpenAIResponsePromptParam'
+          description: >-
+            Prompt object with ID, version, and variables.
         instructions:
           type: string
         previous_response_id:
@@ -7478,6 +7514,10 @@ components:
           type: string
           description: >-
             (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-