Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-12 13:57:57 +00:00)
test

# What does this PR do?

## Test Plan
Commit 9f5fdce86e (parent f50ce11a3b)

32 changed files with 652 additions and 892 deletions
docs/static/deprecated-llama-stack-spec.yaml (vendored) — 16 lines changed
```diff
@@ -1167,7 +1167,7 @@ paths:
         content:
           application/json:
             schema:
-              $ref: '#/components/schemas/OpenaiCompletionRequest'
+              $ref: '#/components/schemas/OpenAICompletionRequest'
         required: true
       deprecated: true
   /v1/openai/v1/embeddings:
@@ -5671,6 +5671,8 @@ components:
         - model
         - messages
       title: OpenaiChatCompletionRequest
       description: >-
         Request parameters for OpenAI-compatible chat completion endpoint.
     OpenAIChatCompletion:
       type: object
       properties:
@@ -5824,7 +5826,7 @@ components:
         - model
         - input_messages
       title: OpenAICompletionWithInputMessages
-    OpenaiCompletionRequest:
+    OpenAICompletionRequest:
       type: object
       properties:
         model:
@@ -5916,8 +5918,14 @@ components:
           type: array
           items:
             type: string
           description: >-
             (Optional) vLLM-specific parameter for guided generation with a list of
             choices.
         prompt_logprobs:
           type: integer
           description: >-
             (Optional) vLLM-specific parameter for number of log probabilities to
             return for prompt tokens.
         suffix:
           type: string
           description: >-
@@ -5926,7 +5934,9 @@ components:
       required:
         - model
         - prompt
-      title: OpenaiCompletionRequest
+      title: OpenAICompletionRequest
       description: >-
         Request parameters for OpenAI-compatible completion endpoint.
     OpenAICompletion:
       type: object
       properties:
```
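For context, the `OpenAICompletionRequest` schema above documents `suffix` plus the vLLM-specific `guided_choice` and `prompt_logprobs` fields. Below is a minimal sketch of how a client might exercise those fields against the OpenAI-compatible completions route; the base URL, API key, and model id are illustrative assumptions, not taken from this commit, and the non-standard fields are passed through the OpenAI Python client's `extra_body` since they are not part of the stock OpenAI API surface.

```python
# Hypothetical usage sketch -- assumes a Llama Stack server at localhost:8321
# and the `openai` Python package; model id and credentials are placeholders.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

response = client.completions.create(
    model="meta-llama/Llama-3.2-3B-Instruct",  # hypothetical model id
    prompt="The capital of France is",
    suffix=".",  # standard completions parameter, now listed in the spec
    extra_body={
        # vLLM-specific parameters described in OpenAICompletionRequest
        "guided_choice": ["Paris", "London"],
        "prompt_logprobs": 1,
    },
)
print(response.choices[0].text)
```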