From 639e3f45703c6bbc98ba67443f195294ba9701e5 Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Mon, 18 Aug 2025 19:20:56 -0400
Subject: [PATCH] move server requirement directly to openai_client

---
 tests/integration/files/test_files.py | 16 +---------------
 tests/integration/fixtures/common.py  | 15 ++++++++++++++-
 2 files changed, 15 insertions(+), 16 deletions(-)

diff --git a/tests/integration/files/test_files.py b/tests/integration/files/test_files.py
index 545ed08cf..c47e47f0c 100644
--- a/tests/integration/files/test_files.py
+++ b/tests/integration/files/test_files.py
@@ -10,7 +10,6 @@ from unittest.mock import patch
 import pytest
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
 # a fixture to skip all these tests if a files provider is not available
@@ -20,20 +19,7 @@ def skip_if_no_files_provider(llama_stack_client):
         pytest.skip("No files providers found")
 
 
-@pytest.fixture(scope="session")
-def skip_if_no_server_running(llama_stack_client):
-    """
-    Skip test if no server is running.
-
-    We use the llama_stack_client to tell if a server was started or not.
-
-    We use this with openai_client because it relies on a running server.
-    """
-    if isinstance(llama_stack_client, LlamaStackAsLibraryClient):
-        pytest.skip("No server running")
-
-
-def test_openai_client_basic_operations(openai_client, skip_if_no_server_running):
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
     from openai import NotFoundError
 
diff --git a/tests/integration/fixtures/common.py b/tests/integration/fixtures/common.py
index 6c38e8941..9cf56f6f5 100644
--- a/tests/integration/fixtures/common.py
+++ b/tests/integration/fixtures/common.py
@@ -263,7 +263,20 @@ def instantiate_llama_stack_client(session):
 
 
 @pytest.fixture(scope="session")
-def openai_client(llama_stack_client):
+def require_server(llama_stack_client):
+    """
+    Skip test if no server is running.
+
+    We use the llama_stack_client to tell if a server was started or not.
+
+    We use this with openai_client because it relies on a running server.
+    """
+    if isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+        pytest.skip("No server running")
+
+
+@pytest.fixture(scope="session")
+def openai_client(llama_stack_client, require_server):
     base_url = f"{llama_stack_client.base_url}/v1/openai/v1"
     return OpenAI(base_url=base_url, api_key="fake")
 
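
Note on the resulting usage pattern: because openai_client now depends on
require_server, any test that requests openai_client is skipped automatically
when the suite runs against LlamaStackAsLibraryClient (library mode, no HTTP
server), without the test having to name a skip fixture itself. A minimal
sketch, assuming the fixtures above are registered via
tests/integration/fixtures/common.py; the test name and uploaded bytes are
hypothetical, not part of this patch:

    from io import BytesIO


    def test_upload_roundtrip(openai_client):
        # openai_client pulls in require_server transitively, so in
        # library-client mode this test is skipped rather than failing
        # to connect to a nonexistent server.
        created = openai_client.files.create(
            file=("hello.txt", BytesIO(b"hello world")),
            purpose="assistants",
        )
        assert created.id
        # clean up the uploaded file
        openai_client.files.delete(created.id)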