mirror of https://github.com/meta-llama/llama-stack.git
feat: Add temperature support to responses API
Signed-off-by: Derek Higgins <derekh@redhat.com>
parent 2c7aba4158
commit 70dde487c3
6 changed files with 220 additions and 3 deletions
@@ -628,6 +628,7 @@ class Agents(Protocol):
         previous_response_id: Optional[str] = None,
         store: Optional[bool] = True,
         stream: Optional[bool] = False,
+        temperature: Optional[float] = None,
         tools: Optional[List[OpenAIResponseInputTool]] = None,
     ) -> Union[OpenAIResponseObject, AsyncIterator[OpenAIResponseObjectStream]]:
        """Create a new OpenAI response.
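For reference, a minimal usage sketch of the new parameter, assuming the Llama Stack server exposes its OpenAI-compatible responses endpoint and is called with the standard openai Python client; the base URL, API key, and model id below are placeholders rather than values taken from this commit:

# Hypothetical client-side sketch; endpoint path, key, and model id are assumptions.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:8321/v1/openai/v1",  # assumed OpenAI-compatible endpoint
    api_key="none",
)

# temperature is the newly added optional sampling parameter; omitting it keeps
# the server-side default, matching the Optional[float] = None signature above.
response = client.responses.create(
    model="meta-llama/Llama-3.2-3B-Instruct",  # placeholder model id
    input="Write a haiku about distributed inference.",
    temperature=0.7,
)
print(response.output_text)

Defaulting the parameter to None at the API layer lets callers omit it entirely and defer to the underlying inference provider's own sampling default.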