Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-06 22:10:41 +00:00)
fix: only load mcp when enabled in tool_group (#2621)
# What does this PR do?

The agent code is currently importing MCP modules even when MCP isn't enabled. Do we consider this worth fixing, or are we treating MCP as a first-class dependency? I believe we should treat it as such. If everyone agrees, let's go ahead and close this.

Note: the current setup breaks if someone builds a distro without including MCP in tool_group but still serves the agent API.

Also, we should bump the MCP version to support streamable responses, as SSE is being deprecated.

Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
parent
c4349f532b
commit
df6ce8befa
3 changed files with 7 additions and 4 deletions
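Before the diff itself, here is a minimal, hedged sketch of the pattern the description above asks for (hypothetical function names, not the repo's code): move the import of an optional dependency from module level into the branch that uses it, so code paths that never touch MCP keep working even when the `mcp` package is not installed.

```python
def invoke_tool(tool_type: str, name: str) -> str:
    """Dispatch a tool call, importing MCP support only when it is needed."""
    if tool_type == "mcp":
        # Deferred import: evaluated only when an MCP tool is actually used,
        # so a distro built without the "mcp" package never reaches this line.
        import mcp  # noqa: F401

        return f"would call MCP tool {name!r}"
    return f"handled built-in tool {name!r} without importing mcp"


if __name__ == "__main__":
    # The built-in path never triggers the mcp import.
    print(invoke_tool("builtin", "web_search"))
```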
@@ -74,7 +74,6 @@ from llama_stack.log import get_logger
 from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
 from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
 from llama_stack.providers.utils.responses.responses_store import ResponsesStore
-from llama_stack.providers.utils.tools.mcp import invoke_mcp_tool, list_mcp_tools
 
 logger = get_logger(name=__name__, category="openai_responses")
 
@@ -627,6 +626,8 @@ class OpenAIResponsesImpl:
                     raise ValueError(f"Tool {tool_name} not found")
                 chat_tools.append(make_openai_tool(tool_name, tool))
             elif input_tool.type == "mcp":
+                from llama_stack.providers.utils.tools.mcp import list_mcp_tools
+
                 always_allowed = None
                 never_allowed = None
                 if input_tool.allowed_tools:
@@ -760,7 +761,9 @@ class OpenAIResponsesImpl:
         error_exc = None
         result = None
         try:
-            if function.name in ctx.mcp_tool_to_server:
+            if ctx.mcp_tool_to_server and function.name in ctx.mcp_tool_to_server:
+                from llama_stack.providers.utils.tools.mcp import invoke_mcp_tool
+
                 mcp_tool = ctx.mcp_tool_to_server[function.name]
                 result = await invoke_mcp_tool(
                     endpoint=mcp_tool.server_url,
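The hunk above combines two guards: the deferred import and an explicit check that `ctx.mcp_tool_to_server` is non-empty before the membership test. A hedged, self-contained sketch of that shape (the `ToolContext` and `dispatch` names are illustrative, not the actual implementation):

```python
from dataclasses import dataclass, field
from importlib import import_module


@dataclass
class ToolContext:
    # Maps tool name -> MCP server URL; empty when no MCP tool_group is enabled.
    mcp_tool_to_server: dict[str, str] = field(default_factory=dict)


def dispatch(ctx: ToolContext, tool_name: str) -> str:
    if ctx.mcp_tool_to_server and tool_name in ctx.mcp_tool_to_server:
        # MCP support is needed only here, so it is imported only here.
        import_module("mcp")
        return f"dispatching {tool_name!r} to {ctx.mcp_tool_to_server[tool_name]}"
    return f"{tool_name!r} is not an MCP tool; the mcp package was never imported"


if __name__ == "__main__":
    # With an empty mapping, no MCP code is loaded at all.
    print(dispatch(ToolContext(), "web_search"))
```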
@@ -23,7 +23,7 @@ def available_providers() -> list[ProviderSpec]:
             "pillow",
             "pandas",
             "scikit-learn",
-            "mcp",
+            "mcp>=1.8.1",
         ]
         + kvstore_dependencies(),  # TODO make this dynamic based on the kvstore config
         module="llama_stack.providers.inline.agents.meta_reference",
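The `mcp>=1.8.1` floor above is what enables the streamable-response support mentioned in the description (SSE is being deprecated). A hedged, stdlib-only sketch of how a distro could verify the installed package meets that floor (illustrative helper, not part of the repo):

```python
from importlib.metadata import PackageNotFoundError, version


def mcp_meets_floor(floor: tuple[int, ...] = (1, 8, 1)) -> bool:
    """Return True if the installed "mcp" package is at least the given version."""
    try:
        installed = version("mcp")
    except PackageNotFoundError:
        return False  # mcp is not installed at all
    # Naive numeric comparison; a real check would use packaging.version.
    parts = tuple(int(p) for p in installed.split(".")[:3] if p.isdigit())
    return parts >= floor


if __name__ == "__main__":
    print("mcp >= 1.8.1 available:", mcp_meets_floor())
```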
@@ -85,7 +85,7 @@ def available_providers() -> list[ProviderSpec]:
             adapter_type="model-context-protocol",
             module="llama_stack.providers.remote.tool_runtime.model_context_protocol",
             config_class="llama_stack.providers.remote.tool_runtime.model_context_protocol.config.MCPProviderConfig",
-            pip_packages=["mcp"],
+            pip_packages=["mcp>=1.8.1"],
             provider_data_validator="llama_stack.providers.remote.tool_runtime.model_context_protocol.config.MCPProviderDataValidator",
             description="Model Context Protocol (MCP) tool for standardized tool calling and context management.",
         ),