From 8c378fadcc6a6ef5322bdf6c2c04dbb65b2c1923 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Mon, 9 Sep 2024 11:46:08 -0700
Subject: [PATCH] agentic system client sdk

---
 llama_toolchain/agentic_system/client_sdk.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/llama_toolchain/agentic_system/client_sdk.py b/llama_toolchain/agentic_system/client_sdk.py
index 18c83e83c..f4c68d717 100644
--- a/llama_toolchain/agentic_system/client_sdk.py
+++ b/llama_toolchain/agentic_system/client_sdk.py
@@ -34,15 +34,17 @@ def main(host: str, port: int):
 
     # TODO(xiyan): This does not work with current server, need to wrap it in a request (similar to AgentConfig?)
     response = client.agentic_system.turns.create(
-        agent_id=agentic_system_create_response.agent_id,
-        session_id=agentic_system_create_session_response.session_id,
-        messages=[
-            UserMessage(content="What is the capital of France?", role="user"),
-        ],
-        stream=True,
+        request={
+            "agent_id": agentic_system_create_response.agent_id,
+            "session_id": agentic_system_create_session_response.session_id,
+            "messages": [
+                UserMessage(content="What is the capital of France?", role="user"),
+            ],
+            "stream": False,
+        }
     )
-    # print(response)
+    print(response)
 
 
 if __name__ == "__main__":