forked from phoenix-oss/llama-stack-mirror
feat: Add temperature support to responses API (#2065)
# What does this PR do?
Add support for temperature to the responses API.
## Test Plan
Manually tested the simple case; unit tests added for the simple case and tool calls.
Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent
f36f68c590
commit
64829947d0
6 changed files with 220 additions and 3 deletions
3
docs/_static/llama-stack-spec.html
vendored
3
docs/_static/llama-stack-spec.html
vendored
|
@@ -6462,6 +6462,9 @@
|
|||
"stream": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"temperature": {
|
||||
"type": "number"
|
||||
},
|
||||
"tools": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
|
|
2
docs/_static/llama-stack-spec.yaml
vendored
2
docs/_static/llama-stack-spec.yaml
vendored
|
@@ -4506,6 +4506,8 @@ components:
|
|||
type: boolean
|
||||
stream:
|
||||
type: boolean
|
||||
temperature:
|
||||
type: number
|
||||
tools:
|
||||
type: array
|
||||
items:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue