forked from phoenix-oss/llama-stack-mirror

docs: update test_agents to use new Agent SDK API (#1402)

# Summary

The new Agent SDK API was added in https://github.com/meta-llama/llama-stack-client-python/pull/178. Update the docs and tests to reflect this.

Closes https://github.com/meta-llama/llama-stack/issues/1365

# Test Plan

```bash
py.test -v -s --nbval-lax ./docs/getting_started.ipynb

LLAMA_STACK_CONFIG=fireworks \
  pytest -s -v tests/integration/agents/test_agents.py \
  --safety-shield meta-llama/Llama-Guard-3-8B --text-model meta-llama/Llama-3.1-8B-Instruct
```

parent 3d71e5a036
commit ca2910d27a

13 changed files with 121 additions and 206 deletions
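For context, every hunk in the diff below applies the same change: the `AgentConfig(...)` + `Agent(client, agent_config)` construction is replaced by passing the configuration directly to the `Agent` constructor. A minimal before/after sketch, assuming a locally running distribution (the `base_url` and `model_id` values here are placeholders, not part of this commit):

```python
from llama_stack_client import LlamaStackClient
from llama_stack_client.lib.agents.agent import Agent

# Assumed endpoint and model; substitute whatever your distribution serves.
client = LlamaStackClient(base_url="http://localhost:8321")
model_id = "meta-llama/Llama-3.1-8B-Instruct"

# Old style (removed by this commit):
#   agent_config = AgentConfig(
#       model=model_id,
#       instructions="You are a helpful assistant",
#       toolgroups=["builtin::websearch"],
#       input_shields=[],
#       output_shields=[],
#       enable_session_persistence=False,
#   )
#   agent = Agent(client, agent_config)

# New style (what the updated notebook cells use):
agent = Agent(
    client,
    model=model_id,
    instructions="You are a helpful assistant",
    tools=["builtin::websearch"],
)
```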
```diff
@@ -1635,18 +1635,14 @@
 "source": [
 "from llama_stack_client.lib.agents.agent import Agent\n",
 "from llama_stack_client.lib.agents.event_logger import EventLogger\n",
-"from llama_stack_client.types.agent_create_params import AgentConfig\n",
 "from termcolor import cprint\n",
 "\n",
-"agent_config = AgentConfig(\n",
+"agent = Agent(\n",
+" client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
-" toolgroups=[\"builtin::websearch\"],\n",
-" input_shields=[],\n",
-" output_shields=[],\n",
-" enable_session_persistence=False,\n",
+" tools=[\"builtin::websearch\"],\n",
 ")\n",
-"agent = Agent(client, agent_config)\n",
 "user_prompts = [\n",
 " \"Hello\",\n",
 " \"Which teams played in the NBA western conference finals of 2024\",\n",
```
```diff
@@ -1815,7 +1811,6 @@
 "import uuid\n",
 "from llama_stack_client.lib.agents.agent import Agent\n",
 "from llama_stack_client.lib.agents.event_logger import EventLogger\n",
-"from llama_stack_client.types.agent_create_params import AgentConfig\n",
 "from termcolor import cprint\n",
 "from llama_stack_client.types import Document\n",
 "\n",
```
```diff
@@ -1841,11 +1836,11 @@
 " vector_db_id=vector_db_id,\n",
 " chunk_size_in_tokens=512,\n",
 ")\n",
-"agent_config = AgentConfig(\n",
+"rag_agent = Agent(\n",
+" client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
-" enable_session_persistence=False,\n",
-" toolgroups = [\n",
+" tools = [\n",
 " {\n",
 " \"name\": \"builtin::rag/knowledge_search\",\n",
 " \"args\" : {\n",
```
```diff
@@ -1854,7 +1849,6 @@
 " }\n",
 " ],\n",
 ")\n",
-"rag_agent = Agent(client, agent_config)\n",
 "session_id = rag_agent.create_session(\"test-session\")\n",
 "user_prompts = [\n",
 " \"What are the top 5 topics that were explained? Only list succinct bullet points.\",\n",
```
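Taken together, the two RAG hunks above imply a cell roughly like the sketch below after the change. It assumes the `client`, `model_id`, and a registered, populated `vector_db_id` from earlier in the notebook; the body of `"args"` is not shown in the hunk, so the `vector_db_ids` parameter is included here only for illustration:

```python
from llama_stack_client.lib.agents.agent import Agent

# New-style RAG agent: the knowledge_search tool and its arguments are passed
# directly to Agent(...) instead of through AgentConfig.toolgroups.
rag_agent = Agent(
    client,
    model=model_id,
    instructions="You are a helpful assistant",
    tools=[
        {
            "name": "builtin::rag/knowledge_search",
            # The "args" body is elided in the diff; vector_db_ids is an assumption.
            "args": {"vector_db_ids": [vector_db_id]},
        }
    ],
)
session_id = rag_agent.create_session("test-session")
```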
```diff
@@ -1978,23 +1972,19 @@
 "source": [
 "from llama_stack_client.types.agents.turn_create_params import Document\n",
 "\n",
-"agent_config = AgentConfig(\n",
+"codex_agent = Agent(\n",
+" client, \n",
+" model=\"meta-llama/Llama-3.1-8B-Instruct\",\n",
+" instructions=\"You are a helpful assistant\",\n",
+" tools=[\n",
+" \"builtin::code_interpreter\",\n",
+" \"builtin::websearch\"\n",
+" ],\n",
 " sampling_params = {\n",
 " \"max_tokens\" : 4096,\n",
 " \"temperature\": 0.0\n",
 " },\n",
-" model=\"meta-llama/Llama-3.1-8B-Instruct\",\n",
-" instructions=\"You are a helpful assistant\",\n",
-" toolgroups=[\n",
-" \"builtin::code_interpreter\",\n",
-" \"builtin::websearch\"\n",
-" ],\n",
-" tool_choice=\"auto\",\n",
-" input_shields=[],\n",
-" output_shields=[],\n",
-" enable_session_persistence=False,\n",
 ")\n",
-"codex_agent = Agent(client, agent_config)\n",
 "session_id = codex_agent.create_session(\"test-session\")\n",
 "\n",
 "\n",
```
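The hunk above also moves `sampling_params` onto the new constructor. A sketch of the resulting cell, reusing the `client` from the earlier sketch:

```python
from llama_stack_client.lib.agents.agent import Agent

# Code-interpreter + websearch agent in the new style; sampling_params goes
# straight to Agent(...) rather than into AgentConfig.
codex_agent = Agent(
    client,
    model="meta-llama/Llama-3.1-8B-Instruct",
    instructions="You are a helpful assistant",
    tools=[
        "builtin::code_interpreter",
        "builtin::websearch",
    ],
    sampling_params={
        "max_tokens": 4096,
        "temperature": 0.0,
    },
)
session_id = codex_agent.create_session("test-session")
```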
```diff
@@ -2904,18 +2894,14 @@
 "# NBVAL_SKIP\n",
 "from llama_stack_client.lib.agents.agent import Agent\n",
 "from llama_stack_client.lib.agents.event_logger import EventLogger\n",
-"from llama_stack_client.types.agent_create_params import AgentConfig\n",
 "from termcolor import cprint\n",
 "\n",
-"agent_config = AgentConfig(\n",
+"agent = Agent(\n",
+" client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
-" toolgroups=[\"mcp::filesystem\"],\n",
-" input_shields=[],\n",
-" output_shields=[],\n",
-" enable_session_persistence=False,\n",
+" tools=[\"mcp::filesystem\"],\n",
 ")\n",
-"agent = Agent(client, agent_config)\n",
 "user_prompts = [\n",
 " \"Hello\",\n",
 " \"list all the files /content\",\n",
```
```diff
@@ -3010,17 +2996,13 @@
 "source": [
 "from llama_stack_client.lib.agents.agent import Agent\n",
 "from llama_stack_client.lib.agents.event_logger import EventLogger\n",
-"from llama_stack_client.types.agent_create_params import AgentConfig\n",
 "\n",
-"agent_config = AgentConfig(\n",
+"agent = Agent(\n",
+" client, \n",
 " model=\"meta-llama/Llama-3.3-70B-Instruct\",\n",
 " instructions=\"You are a helpful assistant. Use search tool to answer the questions. \",\n",
-" toolgroups=[\"builtin::websearch\"],\n",
-" input_shields=[],\n",
-" output_shields=[],\n",
-" enable_session_persistence=False,\n",
+" tools=[\"builtin::websearch\"],\n",
 ")\n",
-"agent = Agent(client, agent_config)\n",
 "user_prompts = [\n",
 " \"Which teams played in the NBA western conference finals of 2024. Search the web for the answer.\",\n",
 " \"In which episode and season of South Park does Bill Cosby (BSM-471) first appear? Give me the number and title. Search the web for the answer.\",\n",
```
```diff
@@ -4346,16 +4328,11 @@
 }
 ],
 "source": [
-"from llama_stack_client.types.agent_create_params import AgentConfig\n",
-"\n",
-"agent_config = AgentConfig(\n",
+"agent = Agent(\n",
+" client, \n",
 " model=vision_model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
-" enable_session_persistence=False,\n",
-" toolgroups=[],\n",
 ")\n",
-"\n",
-"agent = Agent(client, agent_config)\n",
 "session_id = agent.create_session(\"test-session\")\n",
 "\n",
 "response = agent.create_turn(\n",
```
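The last hunk cuts off right at `create_turn`. For completeness, this is roughly how a turn is driven and logged with this client elsewhere in the notebook; the prompt content and the streaming `EventLogger` loop are illustrative assumptions (the actual vision cell attaches an image, whose message format is omitted here):

```python
from llama_stack_client.lib.agents.agent import Agent
from llama_stack_client.lib.agents.event_logger import EventLogger

# Vision agent in the new style (no tools), then one streamed turn.
agent = Agent(
    client,
    model=vision_model_id,
    instructions="You are a helpful assistant",
)
session_id = agent.create_session("test-session")

response = agent.create_turn(
    messages=[{"role": "user", "content": "Hello"}],  # placeholder prompt
    session_id=session_id,
)
for log in EventLogger().log(response):
    log.print()
```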