llama-stack-mirror/tests/integration/tool_runtime/test_mcp.py
Omar Abdelwahab 84baa5c406 feat: unify MCP authentication across Responses and Tool Runtime APIs
- Add authorization parameter to Tool Runtime API signatures (list_runtime_tools, invoke_tool)
- Update MCP provider implementation to use authorization from request body instead of provider-data
- Deprecate mcp_authorization and mcp_headers from provider-data (MCPProviderDataValidator now empty)
- Update all Tool Runtime tests to pass authorization as request body parameter
- Responses API already uses request body authorization (no changes needed)

This provides a single, consistent way to pass MCP authentication tokens across both APIs, addressing reviewer feedback about avoiding multiple configuration paths.
2025-11-12 14:41:00 -08:00

118 lines
3.9 KiB
Python

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import pytest
from llama_stack_client.lib.agents.agent import Agent
from llama_stack_client.lib.agents.turn_events import StepCompleted, StepProgress, ToolCallIssuedDelta
from llama_stack.core.library_client import LlamaStackAsLibraryClient
AUTH_TOKEN = "test-token"
from tests.common.mcp import MCP_TOOLGROUP_ID, make_mcp_server
@pytest.fixture(scope="function")
def mcp_server():
    """Yield connection info for a local MCP server that requires AUTH_TOKEN.

    Function-scoped so each test gets a freshly started server.
    """
    with make_mcp_server(required_auth_token=AUTH_TOKEN) as server_info:
        yield server_info
def test_mcp_invocation(llama_stack_client, text_model_id, mcp_server):
    """End-to-end MCP flow: toolgroup registration, request-body authorization,
    direct tool invocation, and an agent turn that uses the MCP toolgroup.
    """
    if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
        pytest.skip("The local MCP server only reliably reachable from library client.")

    toolgroup_id = MCP_TOOLGROUP_ID
    server_url = mcp_server["server_url"]

    # Registration no longer requires the auth token; drop any stale
    # registration first so repeated runs start from a clean slate.
    try:
        llama_stack_client.toolgroups.unregister(toolgroup_id=toolgroup_id)
    except Exception:
        pass
    llama_stack_client.toolgroups.register(
        toolgroup_id=toolgroup_id,
        provider_id="model-context-protocol",
        mcp_endpoint=dict(uri=server_url),
    )

    # Without authorization in the request body, listing must be rejected.
    with pytest.raises(Exception, match="Unauthorized"):
        llama_stack_client.tools.list(toolgroup_id=toolgroup_id)

    # With the token supplied as a request-body parameter, listing succeeds.
    discovered = llama_stack_client.tools.list(
        toolgroup_id=toolgroup_id,
        authorization=AUTH_TOKEN,
    )
    assert len(discovered) == 2
    assert {tool.name for tool in discovered} == {"greet_everyone", "get_boiling_point"}

    # Direct invocation takes the same request-body authorization parameter.
    invocation = llama_stack_client.tool_runtime.invoke_tool(
        tool_name="greet_everyone",
        kwargs=dict(url="https://www.google.com"),
        authorization=AUTH_TOKEN,
    )
    result_content = invocation.content
    assert len(result_content) == 1
    assert result_content[0].type == "text"
    assert result_content[0].text == "Hello, world!"

    print(f"Using model: {text_model_id}")

    mcp_tool_config = [
        {
            "type": "mcp",
            "server_url": server_url,
            "server_label": toolgroup_id,
            "require_approval": "never",
            "allowed_tools": [tool.name for tool in discovered],
            "authorization": AUTH_TOKEN,
        }
    ]
    agent = Agent(
        client=llama_stack_client,
        model=text_model_id,
        instructions="You are a helpful assistant.",
        tools=mcp_tool_config,
    )
    session_id = agent.create_session("test-session")

    # Stream the full turn and collect every chunk for inspection below.
    turn_chunks = [
        chunk
        for chunk in agent.create_turn(
            session_id=session_id,
            messages=[
                {
                    "type": "message",
                    "role": "user",
                    "content": [
                        {
                            "type": "input_text",
                            "text": "Say hi to the world. Use tools to do so.",
                        }
                    ],
                }
            ],
            stream=True,
        )
    ]
    turn_events = [chunk.event for chunk in turn_chunks]
    final_response = next(
        (chunk.response for chunk in reversed(turn_chunks) if chunk.response), None
    )
    assert final_response is not None

    # The agent must have actually issued a call to the MCP greet tool...
    issued = [
        ev
        for ev in turn_events
        if isinstance(ev, StepProgress) and isinstance(ev.delta, ToolCallIssuedDelta)
    ]
    assert issued
    assert issued[-1].delta.tool_name == "greet_everyone"

    # ...and the completed tool-execution step must report that same tool.
    executions = [
        ev
        for ev in turn_events
        if isinstance(ev, StepCompleted) and ev.step_type == "tool_execution"
    ]
    assert executions
    assert executions[-1].result.tool_calls[0].tool_name == "greet_everyone"
    assert "hello" in final_response.output_text.lower()