Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
agentic system client sdk
This commit is contained in:
parent b7b8f5c2c3
commit 8c378fadcc

1 changed file with 9 additions and 7 deletions
@@ -34,15 +34,17 @@ def main(host: str, port: int):
 
     # TODO(xiyan): This does not work with current server, need to wrap it in a request (similar to AgentConfig?)
     response = client.agentic_system.turns.create(
-        agent_id=agentic_system_create_response.agent_id,
-        session_id=agentic_system_create_session_response.session_id,
-        messages=[
-            UserMessage(content="What is the capital of France?", role="user"),
-        ],
-        stream=True,
+        request={
+            "agent_id": agentic_system_create_response.agent_id,
+            "session_id": agentic_system_create_session_response.session_id,
+            "messages": [
+                UserMessage(content="What is the capital of France?", role="user"),
+            ],
+            "stream": False,
+        }
     )
 
-    # print(response)
+    print(response)
 
 
 if __name__ == "__main__":
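For context, a hedged sketch of how a caller might exercise the updated call shape. Only the client.agentic_system.turns.create(request=...) call and the UserMessage message are taken from the diff above; the helper name, its signature, the idea of passing a pre-configured client plus pre-created agent and session IDs, and the omission of the UserMessage import path are illustrative assumptions, not part of this commit.

# Sketch under assumptions: only the turns.create(request=...) call and the
# UserMessage message come from the diff above. The helper name, its signature,
# and the pre-created agent/session IDs are hypothetical; UserMessage's import
# path is not shown in the diff, so it is omitted here.
def ask_capital(client, agent_id: str, session_id: str):
    # The updated SDK call wraps every argument in a single "request" dict
    # and sets "stream": False so a single response object comes back.
    response = client.agentic_system.turns.create(
        request={
            "agent_id": agent_id,
            "session_id": session_id,
            "messages": [
                UserMessage(content="What is the capital of France?", role="user"),
            ],
            "stream": False,
        }
    )
    print(response)
    return response

Compared with the previous keyword-argument form (which used stream=True), the wrapped request with "stream": False asks for one complete turn rather than a stream of events.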