Merge branch 'main' into responses_object

Emilio Garcia 2025-09-17 08:48:08 -04:00 committed by GitHub
commit a666f6df3e
381 changed files with 56349 additions and 12626 deletions

@@ -427,6 +427,58 @@ paths:
schema:
$ref: '#/components/schemas/CreateOpenaiResponseRequest'
required: true
/v1/prompts:
get:
responses:
'200':
description: >-
A ListPromptsResponse containing all prompts.
content:
application/json:
schema:
$ref: '#/components/schemas/ListPromptsResponse'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: List all prompts.
parameters: []
post:
responses:
'200':
description: The created Prompt resource.
content:
application/json:
schema:
$ref: '#/components/schemas/Prompt'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: Create a new prompt.
parameters: []
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/CreatePromptRequest'
required: true
/v1/agents/{agent_id}:
get:
responses:
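
For orientation, here is a minimal sketch of how a client might call the new list/create prompt endpoints over plain HTTP. The base URL, the use of Python's `requests` library, and the `{{ ... }}` placeholder syntax inside the prompt text are assumptions for illustration, not something this diff specifies.

import requests

BASE_URL = "http://localhost:8321"  # assumed local server address; adjust as needed

# Create a prompt (POST /v1/prompts) with a CreatePromptRequest body.
create_resp = requests.post(
    f"{BASE_URL}/v1/prompts",
    json={
        "prompt": "Summarize the following text in {{ style }} style: {{ text }}",
        "variables": ["style", "text"],
    },
)
create_resp.raise_for_status()
created = create_resp.json()  # a Prompt resource: prompt_id, version, variables, is_default

# List all prompts (GET /v1/prompts) -> ListPromptsResponse with a `data` array.
list_resp = requests.get(f"{BASE_URL}/v1/prompts")
list_resp.raise_for_status()
for p in list_resp.json()["data"]:
    print(p["prompt_id"], p["version"], p["is_default"])
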
@@ -616,6 +668,103 @@ paths:
required: true
schema:
type: string
/v1/prompts/{prompt_id}:
get:
responses:
'200':
description: A Prompt resource.
content:
application/json:
schema:
$ref: '#/components/schemas/Prompt'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: >-
Get a prompt by its identifier and optional version.
parameters:
- name: prompt_id
in: path
description: The identifier of the prompt to get.
required: true
schema:
type: string
- name: version
in: query
description: >-
The version of the prompt to get (defaults to latest).
required: false
schema:
type: integer
post:
responses:
'200':
description: >-
The updated Prompt resource with incremented version.
content:
application/json:
schema:
$ref: '#/components/schemas/Prompt'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: >-
Update an existing prompt (increments version).
parameters:
- name: prompt_id
in: path
description: The identifier of the prompt to update.
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/UpdatePromptRequest'
required: true
delete:
responses:
'200':
description: OK
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: Delete a prompt.
parameters:
- name: prompt_id
in: path
description: The identifier of the prompt to delete.
required: true
schema:
type: string
/v1/inference/embeddings:
post:
responses:
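
For the per-prompt routes added above, a rough sketch of a client round-trip: fetch a specific version, post an UpdatePromptRequest (the server responds with the incremented version), then delete. The server address, the prompt ID value, and the placeholder syntax are assumptions for illustration.

import requests

BASE_URL = "http://localhost:8321"      # assumed local server address
PROMPT_ID = "pmpt_<48-digit-hash>"      # placeholder; real IDs follow this format

# Fetch a specific version (GET /v1/prompts/{prompt_id}?version=N); omit `version` for the default.
r = requests.get(f"{BASE_URL}/v1/prompts/{PROMPT_ID}", params={"version": 1})
r.raise_for_status()
current = r.json()

# Update the prompt (POST /v1/prompts/{prompt_id}); the body is an UpdatePromptRequest.
r = requests.post(
    f"{BASE_URL}/v1/prompts/{PROMPT_ID}",
    json={
        "prompt": "Rewrite the following text: {{ text }}",
        "version": current["version"],   # the version being updated
        "variables": ["text"],
        "set_as_default": True,
    },
)
r.raise_for_status()
updated = r.json()                       # Prompt with an incremented version

# Delete the prompt (DELETE /v1/prompts/{prompt_id}).
requests.delete(f"{BASE_URL}/v1/prompts/{PROMPT_ID}").raise_for_status()
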
@@ -805,6 +954,30 @@ paths:
required: true
schema:
type: string
delete:
responses:
'200':
description: OK
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Benchmarks
description: Unregister a benchmark.
parameters:
- name: benchmark_id
in: path
description: The ID of the benchmark to unregister.
required: true
schema:
type: string
/v1/openai/v1/chat/completions/{completion_id}:
get:
responses:
@@ -970,6 +1143,31 @@ paths:
required: true
schema:
type: string
delete:
responses:
'200':
description: OK
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- ScoringFunctions
description: Unregister a scoring function.
parameters:
- name: scoring_fn_id
in: path
description: >-
The ID of the scoring function to unregister.
required: true
schema:
type: string
/v1/shields/{identifier}:
get:
responses:
@@ -1983,6 +2181,37 @@ paths:
required: false
schema:
$ref: '#/components/schemas/Order'
/v1/prompts/{prompt_id}/versions:
get:
responses:
'200':
description: >-
A ListPromptsResponse containing all versions of the prompt.
content:
application/json:
schema:
$ref: '#/components/schemas/ListPromptsResponse'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: List all versions of a specific prompt.
parameters:
- name: prompt_id
in: path
description: >-
The identifier of the prompt to list versions for.
required: true
schema:
type: string
/v1/providers:
get:
responses:
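
A short sketch of the versions listing added above; the response reuses ListPromptsResponse, with one Prompt entry per stored version. The server address and prompt ID are illustrative assumptions.

import requests

BASE_URL = "http://localhost:8321"      # assumed local server address
PROMPT_ID = "pmpt_<48-digit-hash>"      # placeholder

r = requests.get(f"{BASE_URL}/v1/prompts/{PROMPT_ID}/versions")
r.raise_for_status()
versions = r.json()["data"]             # ListPromptsResponse: one Prompt per version
default = next((p for p in versions if p["is_default"]), None)
print([v["version"] for v in versions], "default:", default and default["version"])
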
@@ -2933,6 +3162,10 @@ paths:
- file: The File object (not file name) to be uploaded.
- purpose: The intended purpose of the uploaded file.
- expires_after: Optional form values describing expiration for the file.
Expected expires_after[anchor] = "created_at", expires_after[seconds] = <int>.
Seconds must be between 3600 and 2592000 (1 hour to 30 days).
parameters: []
requestBody:
content:
@@ -2945,9 +3178,19 @@ paths:
format: binary
purpose:
$ref: '#/components/schemas/OpenAIFilePurpose'
expires_after_anchor:
oneOf:
- type: string
- type: 'null'
expires_after_seconds:
oneOf:
- type: integer
- type: 'null'
required:
- file
- purpose
- expires_after_anchor
- expires_after_seconds
required: true
/v1/openai/v1/models:
get:
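
The two hunks above add optional expiration metadata to the multipart upload form. A rough sketch of the resulting request follows; the files route, the `purpose` value, and the use of `requests` are assumptions (the path itself is not shown in these hunks), while the anchor and seconds constraints come from the description above.

import requests

BASE_URL = "http://localhost:8321"                 # assumed local server address
FILES_URL = f"{BASE_URL}/v1/openai/v1/files"       # assumed route; not shown in this hunk

with open("dataset.jsonl", "rb") as f:
    r = requests.post(
        FILES_URL,
        files={"file": f},
        data={
            "purpose": "batch",                    # an OpenAIFilePurpose value; illustrative
            "expires_after_anchor": "created_at",  # only documented anchor
            "expires_after_seconds": 86400,        # must be between 3600 and 2592000
        },
    )
r.raise_for_status()
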
@@ -3532,6 +3775,43 @@ paths:
schema:
$ref: '#/components/schemas/ScoreBatchRequest'
required: true
/v1/prompts/{prompt_id}/set-default-version:
post:
responses:
'200':
description: >-
The prompt with the specified version now set as default.
content:
application/json:
schema:
$ref: '#/components/schemas/Prompt'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
$ref: >-
#/components/responses/TooManyRequests429
'500':
$ref: >-
#/components/responses/InternalServerError500
default:
$ref: '#/components/responses/DefaultError'
tags:
- Prompts
description: >-
Set which version of a prompt should be returned by default from get_prompt (i.e., treated as the latest version).
parameters:
- name: prompt_id
in: path
description: The identifier of the prompt.
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/SetDefaultVersionRequest'
required: true
/v1/post-training/supervised-fine-tune:
post:
responses:
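
A minimal sketch of pinning a default version via the endpoint above, using a SetDefaultVersionRequest body. The server address and prompt ID are illustrative assumptions.

import requests

BASE_URL = "http://localhost:8321"      # assumed local server address
PROMPT_ID = "pmpt_<48-digit-hash>"      # placeholder

r = requests.post(
    f"{BASE_URL}/v1/prompts/{PROMPT_ID}/set-default-version",
    json={"version": 2},                # SetDefaultVersionRequest
)
r.raise_for_status()
prompt = r.json()                       # Prompt; is_default now reflects the chosen version
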
@@ -7394,6 +7674,61 @@ components:
- type
title: >-
OpenAIResponseObjectStreamResponseWebSearchCallSearching
CreatePromptRequest:
type: object
properties:
prompt:
type: string
description: >-
The prompt text content with variable placeholders.
variables:
type: array
items:
type: string
description: >-
List of variable names that can be used in the prompt template.
additionalProperties: false
required:
- prompt
title: CreatePromptRequest
Prompt:
type: object
properties:
prompt:
type: string
description: >-
The system prompt text with variable placeholders. Variables are only
supported when using the Responses API.
version:
type: integer
description: >-
Version (integer starting at 1, incremented on save)
prompt_id:
type: string
description: >-
Unique identifier formatted as 'pmpt_<48-digit-hash>'
variables:
type: array
items:
type: string
description: >-
List of prompt variable names that can be used in the prompt template
is_default:
type: boolean
default: false
description: >-
Boolean indicating whether this version is the default version for this
prompt
additionalProperties: false
required:
- version
- prompt_id
- variables
- is_default
title: Prompt
description: >-
A prompt resource representing a stored OpenAI Compatible prompt template
in Llama Stack.
OpenAIDeleteResponseObject:
type: object
properties:
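
To make the Prompt schema above concrete, here is an illustrative resource written as a Python dict; every value is a placeholder (the prompt_id is a dummy 48-digit hash, and the `{{ ... }}` placeholder syntax is assumed, not specified by this diff).

# Illustrative only: the shape of a Prompt resource per the schema above.
example_prompt = {
    "prompt": "Summarize {{ text }} in {{ style }} style.",  # placeholder syntax assumed
    "version": 2,                       # starts at 1, incremented on save
    "prompt_id": "pmpt_" + "0" * 48,    # dummy value matching 'pmpt_<48-digit-hash>'
    "variables": ["text", "style"],
    "is_default": True,
}
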
@@ -7867,6 +8202,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: benchmark
default: benchmark
description: The resource type, always benchmark
@@ -8353,6 +8689,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: dataset
default: dataset
description: >-
@@ -8465,6 +8802,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: model
default: model
description: >-
@@ -8656,6 +8994,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: scoring_function
default: scoring_function
description: >-
@@ -8732,6 +9071,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: shield
default: shield
description: The resource type, always shield
@@ -8911,6 +9251,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: tool
default: tool
description: Type of resource, always 'tool'
@@ -8969,6 +9310,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: tool_group
default: tool_group
description: Type of resource, always 'tool_group'
@@ -9197,6 +9539,7 @@ components:
- benchmark
- tool
- tool_group
- prompt
const: vector_db
default: vector_db
description: >-
@@ -9882,6 +10225,18 @@ components:
title: OpenAIResponseObjectWithInput
description: >-
OpenAI response object extended with input context information.
ListPromptsResponse:
type: object
properties:
data:
type: array
items:
$ref: '#/components/schemas/Prompt'
additionalProperties: false
required:
- data
title: ListPromptsResponse
description: Response model to list prompts.
ListProvidersResponse:
type: object
properties:
@@ -13027,6 +13382,16 @@ components:
title: ScoreBatchResponse
description: >-
Response from batch scoring operations on datasets.
SetDefaultVersionRequest:
type: object
properties:
version:
type: integer
description: The version to set as default.
additionalProperties: false
required:
- version
title: SetDefaultVersionRequest
AlgorithmConfig:
oneOf:
- $ref: '#/components/schemas/LoraFinetuningConfig'
@@ -13223,6 +13588,32 @@ components:
description: >-
Response from the synthetic data generation. Batch of (prompt, response, score)
tuples that pass the threshold.
UpdatePromptRequest:
type: object
properties:
prompt:
type: string
description: The updated prompt text content.
version:
type: integer
description: >-
The current version of the prompt being updated.
variables:
type: array
items:
type: string
description: >-
Updated list of variable names that can be used in the prompt template.
set_as_default:
type: boolean
description: >-
Set the new version as the default (default=True).
additionalProperties: false
required:
- prompt
- version
- set_as_default
title: UpdatePromptRequest
VersionInfo:
type: object
properties:
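
As a quick illustration of the required/optional split in UpdatePromptRequest above, a small client-side helper that assembles the body from a previously fetched Prompt. The function name and its defaulting behavior are illustrative assumptions.

from typing import Optional

def build_update_request(current: dict, new_text: str,
                         variables: Optional[list] = None,
                         set_as_default: bool = True) -> dict:
    """Assemble an UpdatePromptRequest body from a fetched Prompt resource."""
    body = {
        "prompt": new_text,                 # required
        "version": current["version"],      # required: the version being updated
        "set_as_default": set_as_default,   # required
    }
    if variables is not None:
        body["variables"] = variables       # optional
    return body
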
@@ -13334,6 +13725,9 @@ tags:
- name: Inspect
- name: Models
- name: PostTraining (Coming Soon)
- name: Prompts
x-displayName: >-
Protocol for prompt management operations.
- name: Providers
x-displayName: >-
Providers API for inspecting, listing, and modifying providers and their configurations.
@@ -13361,6 +13755,7 @@ x-tagGroups:
- Inspect
- Models
- PostTraining (Coming Soon)
- Prompts
- Providers
- Safety
- Scoring