diff --git a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
index 240e6a213..7eb2b3897 100644
--- a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
@@ -74,7 +74,6 @@ from llama_stack.log import get_logger
 from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
 from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
 from llama_stack.providers.utils.responses.responses_store import ResponsesStore
-from llama_stack.providers.utils.tools.mcp import invoke_mcp_tool, list_mcp_tools
 
 logger = get_logger(name=__name__, category="openai_responses")
 
@@ -627,6 +626,8 @@ class OpenAIResponsesImpl:
                     raise ValueError(f"Tool {tool_name} not found")
                 chat_tools.append(make_openai_tool(tool_name, tool))
             elif input_tool.type == "mcp":
+                from llama_stack.providers.utils.tools.mcp import list_mcp_tools
+
                 always_allowed = None
                 never_allowed = None
                 if input_tool.allowed_tools:
@@ -760,7 +761,9 @@ class OpenAIResponsesImpl:
         error_exc = None
         result = None
         try:
-            if function.name in ctx.mcp_tool_to_server:
+            if ctx.mcp_tool_to_server and function.name in ctx.mcp_tool_to_server:
+                from llama_stack.providers.utils.tools.mcp import invoke_mcp_tool
+
                 mcp_tool = ctx.mcp_tool_to_server[function.name]
                 result = await invoke_mcp_tool(
                     endpoint=mcp_tool.server_url,
diff --git a/llama_stack/providers/registry/agents.py b/llama_stack/providers/registry/agents.py
index 6f8c05a67..57110d129 100644
--- a/llama_stack/providers/registry/agents.py
+++ b/llama_stack/providers/registry/agents.py
@@ -23,7 +23,7 @@ def available_providers() -> list[ProviderSpec]:
                 "pillow",
                 "pandas",
                 "scikit-learn",
-                "mcp",
+                "mcp>=1.8.1",
             ]
             + kvstore_dependencies(),  # TODO make this dynamic based on the kvstore config
             module="llama_stack.providers.inline.agents.meta_reference",
diff --git a/llama_stack/providers/registry/tool_runtime.py b/llama_stack/providers/registry/tool_runtime.py
index 0dc880408..661851443 100644
--- a/llama_stack/providers/registry/tool_runtime.py
+++ b/llama_stack/providers/registry/tool_runtime.py
@@ -85,7 +85,7 @@ def available_providers() -> list[ProviderSpec]:
             adapter_type="model-context-protocol",
             module="llama_stack.providers.remote.tool_runtime.model_context_protocol",
             config_class="llama_stack.providers.remote.tool_runtime.model_context_protocol.config.MCPProviderConfig",
-            pip_packages=["mcp"],
+            pip_packages=["mcp>=1.8.1"],
             provider_data_validator="llama_stack.providers.remote.tool_runtime.model_context_protocol.config.MCPProviderDataValidator",
             description="Model Context Protocol (MCP) tool for standardized tool calling and context management.",
         ),