Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-29 15:23:51 +00:00
agentic system client sdk

parent b7b8f5c2c3
commit 8c378fadcc

1 changed file with 9 additions and 7 deletions
@@ -34,15 +34,17 @@ def main(host: str, port: int):
     # TODO(xiyan): This does not work with current server, need to wrap it in a request (similar to AgentConfig?)
     response = client.agentic_system.turns.create(
-        agent_id=agentic_system_create_response.agent_id,
-        session_id=agentic_system_create_session_response.session_id,
-        messages=[
-            UserMessage(content="What is the capital of France?", role="user"),
-        ],
-        stream=True,
+        request={
+            "agent_id": agentic_system_create_response.agent_id,
+            "session_id": agentic_system_create_session_response.session_id,
+            "messages": [
+                UserMessage(content="What is the capital of France?", role="user"),
+            ],
+            "stream": False,
+        }
     )

-    # print(response)
+    print(response)


 if __name__ == "__main__":
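For reference, a minimal sketch of how the updated call reads in context after this commit. It only assembles the lines added by this hunk; names such as client, UserMessage, agentic_system_create_response, and agentic_system_create_session_response are assumed to be defined earlier in the example file (its client construction and imports are not part of this diff).

    # Inside main(host: str, port: int) of the client SDK example.
    # The turn-creation arguments are now wrapped in a single `request` dict,
    # and streaming is turned off so the full response prints at once.
    response = client.agentic_system.turns.create(
        request={
            "agent_id": agentic_system_create_response.agent_id,
            "session_id": agentic_system_create_session_response.session_id,
            "messages": [
                UserMessage(content="What is the capital of France?", role="user"),
            ],
            "stream": False,
        }
    )
    print(response)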