Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-12 12:06:04 +00:00
add support for instructions parameter in response object
parent 08cbb69ef7, commit f176e1a74b
10 changed files with 229 additions and 29 deletions
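
The change itself lands in the generated OpenAPI specs below: the OpenAI-compatible response object gains an optional `instructions` field, carried either as a plain string or as a list of `OpenAIResponseInput` items. As a rough usage sketch (not part of this commit), here is how the field might be exercised through the `openai` Python client pointed at a Llama Stack server; the base URL, API key, and model id are assumed placeholders.

from openai import OpenAI

# Assumed local Llama Stack endpoint and placeholder model id; adjust to your deployment.
client = OpenAI(base_url="http://localhost:8321/v1", api_key="not-needed")

resp = client.responses.create(
    model="meta-llama/Llama-3.2-3B-Instruct",      # placeholder model id
    instructions="Answer in one short sentence.",  # system message inserted into the model's context
    input="What is the capital of France?",
)

# With this commit, the response object can carry the instructions back to the caller.
print(resp.instructions)
print(resp.output_text)
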
docs/static/deprecated-llama-stack-spec.html (vendored), 28 additions

@@ -9024,6 +9024,20 @@
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          },
          "input": {
            "type": "array",
            "items": {

@@ -9901,6 +9915,20 @@
          "usage": {
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          }
        },
        "additionalProperties": false,
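
Per the new oneOf above, `instructions` on the response object is either a plain string or an array of `OpenAIResponseInput` items. A minimal sketch of the two shapes as Python literals; the role/content keys in the list form are assumed for illustration rather than taken from the schema.

# Variant 1: instructions as a single system-message string
response_a = {
    "id": "resp_123",
    "instructions": "You are a terse assistant.",
}

# Variant 2: instructions as a list of input items (OpenAIResponseInput);
# the role/content shape of each item is an assumption for illustration.
response_b = {
    "id": "resp_456",
    "instructions": [
        {"role": "system", "content": "You are a terse assistant."},
    ],
}
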

docs/static/deprecated-llama-stack-spec.yaml (vendored), 16 additions

@@ -6734,6 +6734,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
        input:
          type: array
          items:

@@ -7403,6 +7411,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
      additionalProperties: false
      required:
        - created_at
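
The YAML mirrors the same oneOf. A small sanity-check sketch with the `jsonschema` package, using a simplified inline copy of the new property schema in which the `$ref` to `OpenAIResponseInput` is replaced by a generic object type purely for illustration.

from jsonschema import validate

# Simplified copy of the new "instructions" property schema; the real spec
# resolves $ref: '#/components/schemas/OpenAIResponseInput' instead of "object".
instructions_schema = {
    "oneOf": [
        {"type": "string"},
        {"type": "array", "items": {"type": "object"}},
    ]
}

validate("Answer briefly.", instructions_schema)  # string form passes
validate([{"role": "system", "content": "Answer briefly."}], instructions_schema)  # list form passes
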

docs/static/llama-stack-spec.html (vendored), 28 additions

@@ -7600,6 +7600,20 @@
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          },
          "input": {
            "type": "array",
            "items": {

@@ -8148,6 +8162,20 @@
          "usage": {
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          }
        },
        "additionalProperties": false,

docs/static/llama-stack-spec.yaml (vendored), 16 additions

@@ -5815,6 +5815,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
        input:
          type: array
          items:

@@ -6218,6 +6226,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
      additionalProperties: false
      required:
        - created_at

docs/static/stainless-llama-stack-spec.html (vendored), 28 additions

@@ -9272,6 +9272,20 @@
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          },
          "input": {
            "type": "array",
            "items": {

@@ -9820,6 +9834,20 @@
          "usage": {
            "$ref": "#/components/schemas/OpenAIResponseUsage",
            "description": "(Optional) Token usage information for the response"
          },
          "instructions": {
            "oneOf": [
              {
                "type": "string"
              },
              {
                "type": "array",
                "items": {
                  "$ref": "#/components/schemas/OpenAIResponseInput"
                }
              }
            ],
            "description": "(Optional) System message inserted into the model's context"
          }
        },
        "additionalProperties": false,

docs/static/stainless-llama-stack-spec.yaml (vendored), 16 additions

@@ -7028,6 +7028,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
        input:
          type: array
          items:

@@ -7431,6 +7439,14 @@ components:
          $ref: '#/components/schemas/OpenAIResponseUsage'
          description: >-
            (Optional) Token usage information for the response
        instructions:
          oneOf:
            - type: string
            - type: array
              items:
                $ref: '#/components/schemas/OpenAIResponseInput'
          description: >-
            (Optional) System message inserted into the model's context
      additionalProperties: false
      required:
        - created_at