Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-31 02:43:53 +00:00)
OpenAI completion prompt can also be an array
The OpenAI completion prompt field can be a string or an array of strings, so update the request handling to accept and pass through both forms. This also stubs in a basic conversion of OpenAI non-streaming completion requests to Llama Stack completion calls, so that providers without an actual OpenAI backend can still accept requests via the OpenAI APIs.

Signed-off-by: Ben Browning <bbrownin@redhat.com>
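As a rough illustration of the string-or-array handling described in the commit message, the sketch below normalizes the prompt field and fans it out to a per-prompt completion callable. The helper names (normalize_prompt, complete_all, run_completion) are hypothetical and are not the code from this commit, which lives in the changed files listed below.

# Hypothetical sketch only -- not the actual Llama Stack implementation.
from typing import Callable, List, Union


def normalize_prompt(prompt: Union[str, List[str]]) -> List[str]:
    # The OpenAI completions API allows `prompt` to be a single string or a
    # list of strings; always hand downstream code a list.
    if isinstance(prompt, str):
        return [prompt]
    return list(prompt)


def complete_all(prompt: Union[str, List[str]],
                 run_completion: Callable[[str], str]) -> List[str]:
    # Run one non-streaming completion per prompt entry, the way an
    # OpenAI-compatibility shim might bridge to a single-prompt backend.
    return [run_completion(p) for p in normalize_prompt(prompt)]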
commit a6cf8fa12b (parent 24cfa1ef1a)

10 changed files with 95 additions and 12 deletions
docs/_static/llama-stack-spec.html (vendored): 12 changes
@@ -9401,7 +9401,17 @@
           "description": "The identifier of the model to use. The model must be registered with Llama Stack and available via the /models endpoint."
         },
         "prompt": {
-          "type": "string",
+          "oneOf": [
+            {
+              "type": "string"
+            },
+            {
+              "type": "array",
+              "items": {
+                "type": "string"
+              }
+            }
+          ],
           "description": "The prompt to generate a completion for"
         },
         "best_of": {
docs/_static/llama-stack-spec.yaml (vendored): 6 changes
|
@ -6477,7 +6477,11 @@ components:
|
|||
The identifier of the model to use. The model must be registered with
|
||||
Llama Stack and available via the /models endpoint.
|
||||
prompt:
|
||||
type: string
|
||||
oneOf:
|
||||
- type: string
|
||||
- type: array
|
||||
items:
|
||||
type: string
|
||||
description: The prompt to generate a completion for
|
||||
best_of:
|
||||
type: integer
|
||||
|
|
|
|||
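Under the updated schema, an OpenAI-compatible client may pass prompt in either form. A minimal usage sketch with the openai Python client, assuming a Llama Stack server exposing its OpenAI-compatible endpoint; the base URL, API key, and model id are placeholders:

# Usage sketch -- base URL, API key, and model id are placeholders.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Single-string prompt (previously the only accepted form).
client.completions.create(model="example-model", prompt="Hello")

# Array-of-strings prompt (accepted after this change).
client.completions.create(model="example-model", prompt=["Hello", "Goodbye"])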