Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-03 09:21:45 +00:00
clarifying wording on tool & ipython
This commit is contained in:
parent
d05a4a8734
commit
18c68c4950
1 changed file with 1 addition and 1 deletion
@@ -61,7 +61,7 @@ def usecases(base_model: bool = False) -> List[UseCase | str]:
 - `system`: Sets the context in which to interact with the AI model. It typically includes rules, guidelines, or necessary information that helps the model respond effectively.
 - `user`: Represents the human interacting with the model. It includes the inputs, commands, and questions to the model.
 - `assistant`: Represents the response generated by the AI model based on the context provided in the `system`, `tool` and `user` prompts.
-- `tool`: Represents the output of a tool call when sent back to the model from the executor. (The actual token used by the model is `<|ipython|>`.)
+- `tool`: Represents the output of a tool call when sent back to the model from the executor. Note that the role name used in the prompt template is `ipython`; scroll down to the last example to see how this is used.
 """
 ),
 ]
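For context on the wording change, here is a minimal, hypothetical sketch (not code from this repository) of how the four roles map onto the Llama 3.1 header-style template, with the tool result sent back to the model under the `ipython` role name that the new text calls out. The special tokens (`<|begin_of_text|>`, `<|start_header_id|>`, `<|end_header_id|>`, `<|eot_id|>`) follow the published Llama 3.1 prompt format; verify them against the model card or the repo's prompt-format docs before relying on this.

```python
# Sketch only: illustrates the role-to-header mapping described in the diff above.
# Function names and the example messages are made up for illustration.

def render_message(role: str, content: str) -> str:
    # A tool result is sent back to the model under the role name "ipython",
    # not "tool" -- this is the distinction the doc change clarifies.
    return f"<|start_header_id|>{role}<|end_header_id|>\n\n{content}<|eot_id|>"


def render_prompt(messages: list[dict[str, str]]) -> str:
    prompt = "<|begin_of_text|>"
    for m in messages:
        prompt += render_message(m["role"], m["content"])
    # Open an assistant header to cue the model to generate its next turn.
    prompt += "<|start_header_id|>assistant<|end_header_id|>\n\n"
    return prompt


if __name__ == "__main__":
    print(render_prompt([
        {"role": "system", "content": "Environment: ipython"},
        {"role": "user", "content": "What is the weather in Menlo Park?"},
        # The model's tool call would appear here as an assistant turn;
        # the executor's output then comes back under the "ipython" role.
        {"role": "ipython", "content": '{"temperature_c": 21, "condition": "sunny"}'},
    ]))
```

Printing the rendered string shows the tool output wrapped in an `ipython` header block, which is how the executor's result reaches the model even though the API-level message role is called `tool`.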