Mirror of https://github.com/meta-llama/llama-stack.git
Synced 2025-12-08 19:10:56 +00:00
fix(ci): enable responses tests in CI; suppress expected MCP auth error logs (#3889)
Let us enable the responses suite in CI now. Also a minor fix: the MCP tool tests intentionally trigger authentication failures to verify error handling, but the resulting error logs clutter the test output, so they are now suppressed around the expected failure.
This commit is contained in:
parent 7b90e0e9c8
commit 7918188f1e

2 changed files with 14 additions and 9 deletions
.github/workflows/integration-tests.yml (vendored, 2 changes)
@@ -61,7 +61,7 @@ jobs:
             && fromJSON('[{"setup": "vllm", "suite": "base"}]')
           || github.event.inputs.test-setup == 'ollama-vision'
             && fromJSON('[{"setup": "ollama-vision", "suite": "vision"}]')
-          || fromJSON('[{"setup": "ollama", "suite": "base"}, {"setup": "ollama-vision", "suite": "vision"}]')
+          || fromJSON('[{"setup": "ollama", "suite": "base"}, {"setup": "ollama-vision", "suite": "vision"}, {"setup": "gpt", "suite": "responses"}]')
           }}

     steps:
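The changed expression relies on GitHub Actions' short-circuit operators: each `condition && fromJSON(...)` yields its list when the condition holds, and the final `|| fromJSON(...)` is the default matrix, which now gains the gpt/responses entry. Below is a rough Python rendering of that selection logic; it is a sketch, and since the vllm branch's condition sits above the visible hunk, gating it on test-setup == 'vllm' here is an assumption.

# Sketch of the workflow's matrix selection logic, not the actual CI code.
def pick_matrix(test_setup: str | None) -> list[dict]:
    if test_setup == "vllm":  # assumed condition; it is cut off above the hunk
        return [{"setup": "vllm", "suite": "base"}]
    if test_setup == "ollama-vision":
        return [{"setup": "ollama-vision", "suite": "vision"}]
    # Default matrix: this commit appends the responses suite so it runs
    # against the gpt setup on every ordinary CI invocation.
    return [
        {"setup": "ollama", "suite": "base"},
        {"setup": "ollama-vision", "suite": "vision"},
        {"setup": "gpt", "suite": "responses"},
    ]

print(pick_matrix(None))  # a plain CI run now includes the responses suite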
Second changed file:

@@ -5,6 +5,7 @@
 # the root directory of this source tree.

 import json
+import logging  # allow-direct-logging
 import os

 import httpx
@@ -198,7 +199,7 @@ def test_response_sequential_file_search(


 @pytest.mark.parametrize("case", mcp_tool_test_cases)
-def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case):
+def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, caplog):
     if not isinstance(compat_client, LlamaStackAsLibraryClient):
         pytest.skip("in-process MCP server is only supported in library client")

@@ -245,13 +246,17 @@ def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case):
         if isinstance(compat_client, LlamaStackAsLibraryClient)
         else (httpx.HTTPStatusError, openai.AuthenticationError)
     )
-    with pytest.raises(exc_type):
-        compat_client.responses.create(
-            model=text_model_id,
-            input=case.input,
-            tools=tools,
-            stream=False,
-        )
+    # Suppress expected auth error logs only for the failing auth attempt
+    with caplog.at_level(
+        logging.CRITICAL, logger="llama_stack.providers.inline.agents.meta_reference.responses.streaming"
+    ):
+        with pytest.raises(exc_type):
+            compat_client.responses.create(
+                model=text_model_id,
+                input=case.input,
+                tools=tools,
+                stream=False,
+            )

     for tool in tools:
         if tool["type"] == "mcp":
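For reference, here is a minimal, self-contained sketch of the suppression technique this hunk applies, using a hypothetical logger name and a stand-in for the failing client call:

import logging
import pytest

# Hypothetical logger, standing in for the meta_reference streaming logger
# named in the diff above.
log = logging.getLogger("example.responses.streaming")

def create_response_with_bad_token():
    # Stand-in for the responses.create() call that intentionally fails auth.
    log.error("MCP authentication failed")  # the noise this commit silences
    raise RuntimeError("401 Unauthorized")

def test_expected_auth_failure_is_quiet(caplog):
    # caplog.at_level() raises the named logger's threshold for the duration
    # of the block and restores it on exit; at CRITICAL, the expected ERROR
    # record is filtered before it reaches any handler or the test output.
    # pytest.raises accepts a tuple of types, the same pattern exc_type uses
    # above for the library-client vs. HTTP-client paths.
    with caplog.at_level(logging.CRITICAL, logger="example.responses.streaming"):
        with pytest.raises((RuntimeError, PermissionError)):
            create_response_with_bad_token()
    assert not caplog.records  # the expected error was filtered, not captured

Note the scoping choice mirrored from the diff: the level is raised only around the call that is expected to fail, so unexpected errors elsewhere in the test still surface normally.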