mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-23 22:13:56 +00:00)
record tool_runtime tests
parent 579ff826f7
commit 136451feee
8 changed files with 782 additions and 9 deletions
@@ -24,7 +24,7 @@ def mcp_server():
     yield mcp_server_info


-def test_mcp_invocation(llama_stack_client, mcp_server):
+def test_mcp_invocation(llama_stack_client, text_model_id, mcp_server):
     if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
         pytest.skip("The local MCP server only reliably reachable from library client.")

@@ -69,14 +69,10 @@ def test_mcp_invocation(llama_stack_client, mcp_server):
     assert content[0].type == "text"
     assert content[0].text == "Hello, world!"

-    models = [
-        m for m in llama_stack_client.models.list() if m.model_type == ModelType.llm and "guard" not in m.identifier
-    ]
-    model_id = models[0].identifier
-    print(f"Using model: {model_id}")
+    print(f"Using model: {text_model_id}")
     agent = Agent(
         client=llama_stack_client,
-        model=model_id,
+        model=text_model_id,
         instructions="You are a helpful assistant.",
         tools=[test_toolgroup_id],
     )
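The net effect of the change is that the test no longer selects an LLM by filtering llama_stack_client.models.list() inline; it instead receives a text_model_id fixture from the test harness. As a rough illustration only, the sketch below shows what an equivalent conftest.py fixture could look like if it simply reproduced the removed selection logic. The actual fixture used by llama-stack is not part of this diff and may be defined differently (for example, driven by a command-line option such as a --text-model flag).

# Hypothetical conftest.py sketch -- NOT the fixture shipped with llama-stack.
# It only mirrors the model-selection logic removed from the test body above.
import pytest


@pytest.fixture
def text_model_id(llama_stack_client):
    """Return the identifier of the first non-safety LLM registered with the stack."""
    models = [
        m
        for m in llama_stack_client.models.list()
        # ModelType.llm is a string-valued enum, so comparing against the raw
        # value "llm" matches the original ModelType.llm check without the import.
        if m.model_type == "llm" and "guard" not in m.identifier
    ]
    if not models:
        pytest.skip("no text model registered with the stack")
    return models[0].identifier

Centralizing the choice in a fixture keeps the per-test code shorter and lets the same model selection (or skip behavior) be reused by every test that needs a text model.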