Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-11 19:56:03 +00:00
feat(responses)!: introduce OpenAI compatible prompts to Responses API
Parent: d10bfb5121
Commit: d94efaaac4
12 changed files with 593 additions and 8 deletions
docs/static/llama-stack-spec.html (vendored): 81 lines changed
@@ -5696,16 +5696,53 @@
           },
           {
             "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+          },
+          {
+            "$ref": "#/components/schemas/OpenAIResponseInputMessageContentFile"
           }
         ],
         "discriminator": {
           "propertyName": "type",
           "mapping": {
             "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
-            "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
+            "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage",
+            "input_file": "#/components/schemas/OpenAIResponseInputMessageContentFile"
           }
         }
       },
+      "OpenAIResponseInputMessageContentFile": {
+        "type": "object",
+        "properties": {
+          "type": {
+            "type": "string",
+            "const": "input_file",
+            "default": "input_file",
+            "description": "The type of the input item. Always `input_file`."
+          },
+          "file_data": {
+            "type": "string",
+            "description": "The data of the file to be sent to the model."
+          },
+          "file_id": {
+            "type": "string",
+            "description": "(Optional) The ID of the file to be sent to the model."
+          },
+          "file_url": {
+            "type": "string",
+            "description": "The URL of the file to be sent to the model."
+          },
+          "filename": {
+            "type": "string",
+            "description": "The name of the file to be sent to the model."
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "type"
+        ],
+        "title": "OpenAIResponseInputMessageContentFile",
+        "description": "File content for input messages in OpenAI response format."
+      },
       "OpenAIResponseInputMessageContentImage": {
         "type": "object",
         "properties": {
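The hunk above adds OpenAIResponseInputMessageContentFile and wires it into the input-content union via the new "input_file" discriminator value. A minimal sketch of how a client might attach a file to a response request, assuming a Llama Stack server exposing an OpenAI-compatible endpoint; the base URL, model name, and file ID below are placeholders, not values from this commit:

# Sketch: sending an input_file content part, per the schema added above.
# Base URL, model name, and file ID are placeholders for your deployment.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

response = client.responses.create(
    model="meta-llama/Llama-3.2-3B-Instruct",  # placeholder model ID
    input=[
        {
            "role": "user",
            "content": [
                {"type": "input_text", "text": "Summarize the attached document."},
                # One of file_id, file_url, or file_data (+ filename) identifies
                # the file, mirroring the fields of the new schema.
                {"type": "input_file", "file_id": "file-abc123"},
            ],
        }
    ],
)
print(response.output_text)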
@@ -5733,6 +5770,10 @@
             "default": "input_image",
             "description": "Content type identifier, always \"input_image\""
           },
+          "file_id": {
+            "type": "string",
+            "description": "(Optional) The ID of the file to be sent to the model."
+          },
           "image_url": {
             "type": "string",
             "description": "(Optional) URL of the image content"
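OpenAIResponseInputMessageContentImage similarly gains a file_id field, so an image can be referenced by a previously uploaded file instead of a URL. A small sketch of the two forms (the file ID and URL are placeholders):

# Sketch: an image content part referenced by uploaded file vs. by URL.
image_by_file = {"type": "input_image", "file_id": "file-img-123"}          # placeholder ID
image_by_url = {"type": "input_image", "image_url": "https://example.com/chart.png"}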
@@ -7521,6 +7562,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePrompt",
+            "description": "(Optional) Reference to a prompt template and its variables."
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"
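The response object itself now carries an optional prompt field that echoes which template produced it. A sketch of reading it back, assuming the client from the earlier example, a client library that exposes the field as an attribute, and a placeholder response ID:

# Sketch: the prompt reference is echoed on the response object (when one was used).
retrieved = client.responses.retrieve("resp_123")  # placeholder response ID
if retrieved.prompt is not None:
    print(retrieved.prompt.id, retrieved.prompt.version)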
@@ -7616,6 +7661,32 @@
           }
         }
       },
+      "OpenAIResponsePrompt": {
+        "type": "object",
+        "properties": {
+          "id": {
+            "type": "string",
+            "description": "Unique identifier of the prompt template"
+          },
+          "variables": {
+            "type": "object",
+            "additionalProperties": {
+              "$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
+            },
+            "description": "Dictionary of variable names to OpenAIResponseInputMessageContent structure for template substitution. The substitution values can either be strings, or other Response input types like images or files."
+          },
+          "version": {
+            "type": "string",
+            "description": "Version number of the prompt to use (defaults to latest if not specified)"
+          }
+        },
+        "additionalProperties": false,
+        "required": [
+          "id"
+        ],
+        "title": "OpenAIResponsePrompt",
+        "description": "OpenAI compatible Prompt object that is used in OpenAI responses."
+      },
       "OpenAIResponseText": {
         "type": "object",
         "properties": {
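OpenAIResponsePrompt is the piece clients actually construct: a prompt template id, an optional version, and a variables map whose values are input-content parts (text, image, or file). A sketch of such an object with placeholder IDs; per the schema, only id is required:

# Sketch: an OpenAIResponsePrompt payload. All IDs are placeholders.
prompt_param = {
    "id": "pmpt_summarizer",   # required: which prompt template to use
    "version": "2",            # optional: defaults to the latest version
    "variables": {
        # Values are OpenAIResponseInputMessageContent parts: text, image, or file.
        "customer_name": {"type": "input_text", "text": "Alice Example"},
        "contract": {"type": "input_file", "file_id": "file-abc123"},
    },
}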
@@ -7986,6 +8057,10 @@
             "type": "string",
             "description": "The underlying LLM used for completions."
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePrompt",
+            "description": "(Optional) Prompt object with ID, version, and variables."
+          },
           "instructions": {
             "type": "string"
           },
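The create-request schema gains the same optional prompt field, so the template reference is passed at request time. A sketch tying the pieces together, reusing the client and prompt_param from the earlier examples; the model name is still a placeholder, and this assumes a client library recent enough to accept a prompt argument:

# Sketch: creating a response from a stored prompt template plus variables.
response = client.responses.create(
    model="meta-llama/Llama-3.2-3B-Instruct",  # placeholder model ID
    prompt=prompt_param,                        # template id/version/variables
    input="Please keep the summary under 200 words.",
)
print(response.output_text)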
@@ -8074,6 +8149,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
           },
+          "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePrompt",
+            "description": "(Optional) Reference to a prompt template and its variables."
+          },
           "status": {
             "type": "string",
             "description": "Current status of the response generation"