mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-15 06:00:48 +00:00
Incorporated suggested code modification
Signed-off-by: Dean Wampler <dean.wampler@ibm.com>
This commit is contained in:
parent
688f42ba3a
commit
3ed082e69a
1 changed file with 8 additions and 6 deletions
|
@@ -52,14 +52,16 @@ agent = Agent(
|
|||
prompt = "How do you do great work?"
|
||||
print("prompt>", prompt)
|
||||
|
||||
use_stream = True
|
||||
response = agent.create_turn(
|
||||
messages=[{"role": "user", "content": prompt}],
|
||||
session_id=agent.create_session("rag_session"),
|
||||
stream=True,
|
||||
stream=use_stream,
|
||||
)
|
||||
|
||||
# NOTE: `AgentEventLogger().log(response)` only works with streaming responses.
|
||||
# So, if you change the value for `stream` to `False` in the `create_turn` call,
|
||||
# comment out the following two lines!
|
||||
for log in AgentEventLogger().log(response):
|
||||
log.print()
|
||||
# Only call `AgentEventLogger().log(response)` for streaming responses.
|
||||
if use_stream:
|
||||
for log in AgentEventLogger().log(response):
|
||||
log.print()
|
||||
else:
|
||||
print(response)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue