From bf8d76f19b7ab622e8a2a0928a08831636ed5c16 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Tue, 27 May 2025 12:58:44 -0700
Subject: [PATCH] Remove debug prints and fix OpenAI API verification tests

Three small fixes:

- Drop two leftover debug print() calls from the meta-reference
  responses implementation.
- In the verification conftest, check --model before --provider so an
  explicitly requested model takes precedence over provider-based
  parametrization.
- In the non-streaming MCP tool test, also accept
  openai.AuthenticationError, since the OpenAI client raises its own
  exception type for authentication failures rather than a bare
  httpx.HTTPStatusError.

---
 .../inline/agents/meta_reference/openai_responses.py |  2 --
 tests/verifications/openai_api/conftest.py           | 10 +++++-----
 tests/verifications/openai_api/test_responses.py     |  3 ++-
 3 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
index 5d5f9ef94..3a56d41ef 100644
--- a/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
+++ b/llama_stack/providers/inline/agents/meta_reference/openai_responses.py
@@ -359,8 +359,6 @@ class OpenAIResponsesImpl:
             temperature=temperature,
         )
 
-        print(f"chat_tools: {chat_tools}")
-        print(f"messages: {messages}")
         inference_result = await self.inference_api.openai_chat_completion(
             model=model,
             messages=messages,
diff --git a/tests/verifications/openai_api/conftest.py b/tests/verifications/openai_api/conftest.py
index b55a5d11a..9d773b8de 100644
--- a/tests/verifications/openai_api/conftest.py
+++ b/tests/verifications/openai_api/conftest.py
@@ -10,17 +10,17 @@ from tests.verifications.openai_api.fixtures.fixtures import _load_all_verificat
 def pytest_generate_tests(metafunc):
     """Dynamically parametrize tests based on the selected provider and config."""
     if "model" in metafunc.fixturenames:
+        model = metafunc.config.getoption("model")
+        if model:
+            metafunc.parametrize("model", [model])
+            return
+
         provider = metafunc.config.getoption("provider")
         if not provider:
             print("Warning: --provider not specified. Skipping model parametrization.")
             metafunc.parametrize("model", [])
             return
 
-        model = metafunc.config.getoption("model")
-        if model:
-            metafunc.parametrize("model", [model])
-            return
-
         try:
             config_data = _load_all_verification_configs()
         except (OSError, FileNotFoundError) as e:
diff --git a/tests/verifications/openai_api/test_responses.py b/tests/verifications/openai_api/test_responses.py
index 356e456e4..2ce0a3e9c 100644
--- a/tests/verifications/openai_api/test_responses.py
+++ b/tests/verifications/openai_api/test_responses.py
@@ -7,6 +7,7 @@
 import json
 
 import httpx
+import openai
 import pytest
 
 from llama_stack import LlamaStackAsLibraryClient
@@ -306,7 +307,7 @@ def test_response_non_streaming_mcp_tool(request, openai_client, model, provider
     exc_type = (
         AuthenticationRequiredError
         if isinstance(openai_client, LlamaStackAsLibraryClient)
-        else httpx.HTTPStatusError
+        else (httpx.HTTPStatusError, openai.AuthenticationError)
     )
     with pytest.raises(exc_type):
         openai_client.responses.create(
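
A note for reviewers on the conftest.py change: pytest_generate_tests runs
once per collected test, and the function returns after its first
metafunc.parametrize() call, so whichever option is checked first wins.
Moving the --model check ahead of the --provider check is what lets an
explicitly requested model override provider-based parametrization. Below is
a minimal standalone sketch of the pattern, assuming nothing beyond pytest
itself; the option registration and the fallback model table are
illustrative, not the project's actual code:

    # conftest.py -- illustrative sketch only, not the project's file
    def pytest_addoption(parser):
        parser.addoption("--provider", default=None, help="provider whose models to test")
        parser.addoption("--model", default=None, help="explicit single-model override")


    def pytest_generate_tests(metafunc):
        if "model" not in metafunc.fixturenames:
            return
        # An explicit --model short-circuits provider-based parametrization.
        model = metafunc.config.getoption("model")
        if model:
            metafunc.parametrize("model", [model])
            return
        # Otherwise derive the model list from the provider (made-up table).
        provider_models = {"openai": ["gpt-4o", "gpt-4o-mini"]}
        provider = metafunc.config.getoption("provider")
        metafunc.parametrize("model", provider_models.get(provider or "", []))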
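
Similarly, on the test_responses.py change: pytest.raises accepts a tuple of
exception types just like a bare except clause does, and the assertion
passes if the raised exception is an instance of any listed type. That is
why the remote-client branch can expect either httpx.HTTPStatusError or
openai.AuthenticationError. A self-contained sketch of that behavior, where
DummyAuthError and _fail_auth are hypothetical stand-ins rather than part of
any library:

    import pytest


    class DummyAuthError(Exception):
        """Hypothetical stand-in for openai.AuthenticationError."""


    def _fail_auth():
        # Hypothetical helper simulating a 401 from a remote server.
        raise DummyAuthError("401: invalid API key")


    def test_raises_accepts_a_tuple_of_types():
        # Passes because DummyAuthError is one of the listed types.
        with pytest.raises((ValueError, DummyAuthError)):
            _fail_auth()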