feat: allow specifying specific tool within toolgroup (#1239)

Summary:

E.g. `builtin::rag/knowledge_search` selects just the `knowledge_search` tool from the `builtin::rag` toolgroup.
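
For illustration, here is a minimal sketch (not part of the diff) of how the new `toolgroup/tool` form can sit alongside the existing whole-group form in an agent config, mirroring the updated tests below; `vector_db_id` is a hypothetical placeholder for a previously registered vector DB:

```python
# Minimal sketch of the toolgroup entries an agent config can now carry.
# `vector_db_id` is a hypothetical placeholder; registering the vector DB
# and constructing the agent are outside the scope of this change.
vector_db_id = "my-vector-db"

agent_config = {
    "toolgroups": [
        # New: pick a single tool out of a toolgroup via "<toolgroup>/<tool>"
        dict(
            name="builtin::rag/knowledge_search",
            args={"vector_db_ids": [vector_db_id]},
        ),
        # Still supported: enable a whole toolgroup by name
        "builtin::code_interpreter",
    ],
}
```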

Test Plan:
```
LLAMA_STACK_CONFIG=fireworks pytest -s -v tests/client-sdk/agents/ --safety-shield meta-llama/Llama-Guard-3-8B
```
ehhuang authored on 2025-02-26 14:07:05 -08:00, committed by GitHub
parent 657efc67bc
commit c8a20b8ed0
7 changed files with 80 additions and 64 deletions


@@ -441,7 +441,8 @@ def xtest_override_system_message_behavior(llama_stack_client, agent_config):
     assert "get_boiling_point" in logs_str
 
 
-def test_rag_agent(llama_stack_client, agent_config):
+@pytest.mark.parametrize("rag_tool_name", ["builtin::rag/knowledge_search", "builtin::rag"])
+def test_rag_agent(llama_stack_client, agent_config, rag_tool_name):
     urls = ["chat.rst", "llama3.rst", "memory_optimizations.rst", "lora_finetune.rst"]
     documents = [
         Document(
@@ -469,7 +470,7 @@ def test_rag_agent(llama_stack_client, agent_config):
         **agent_config,
         "toolgroups": [
             dict(
-                name="builtin::rag",
+                name=rag_tool_name,
                 args={
                     "vector_db_ids": [vector_db_id],
                 },
@@ -483,10 +484,6 @@ def test_rag_agent(llama_stack_client, agent_config):
             "Instead of the standard multi-head attention, what attention type does Llama3-8B use?",
             "grouped",
         ),
-        (
-            "What `tune` command to use for getting access to Llama3-8B-Instruct ?",
-            "download",
-        ),
     ]
     for prompt, expected_kw in user_prompts:
         response = rag_agent.create_turn(
@@ -541,7 +538,7 @@ def test_rag_and_code_agent(llama_stack_client, agent_config):
         **agent_config,
         "toolgroups": [
             dict(
-                name="builtin::rag",
+                name="builtin::rag/knowledge_search",
                 args={"vector_db_ids": [vector_db_id]},
             ),
             "builtin::code_interpreter",