use llama_stack.log

This commit is contained in:
Ashwin Bharambe 2025-09-27 11:15:13 -07:00
parent 371de50875
commit 0098d24762

View file

@@ -156,7 +156,6 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
:param required_auth_token: Optional auth token required for access
:param tools: Dictionary of tool_name -> tool_function. If None, uses default tools.
"""
import logging
import threading
import time
@@ -168,6 +167,8 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
from starlette.responses import Response
from starlette.routing import Mount, Route
from llama_stack.log import get_logger
server = FastMCP("FastMCP Test Server", log_level="WARNING")
tools = tools or default_tools()
@@ -212,7 +213,7 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
return sock.getsockname()[1]
port = get_open_port()
logger = logging.getLogger(__name__)
logger = get_logger(__name__, category="tests::mcp")
# make uvicorn logs be less verbose
config = uvicorn.Config(app, host="0.0.0.0", port=port, log_level="warning")