move server requirement directly to openai_client

Matthew Farrellee 2025-08-18 19:20:56 -04:00
parent a01596a5fe
commit 639e3f4570
2 changed files with 15 additions and 16 deletions


@@ -10,7 +10,6 @@ from unittest.mock import patch
 import pytest
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 # a fixture to skip all these tests if a files provider is not available
@@ -20,20 +19,7 @@ def skip_if_no_files_provider(llama_stack_client):
         pytest.skip("No files providers found")
 
 
-@pytest.fixture(scope="session")
-def skip_if_no_server_running(llama_stack_client):
-    """
-    Skip test if no server is running.
-    We use the llama_stack_client to tell if a server was started or not.
-    We use this with openai_client because it relies on a running server.
-    """
-    if isinstance(llama_stack_client, LlamaStackAsLibraryClient):
-        pytest.skip("No server running")
-
-
-def test_openai_client_basic_operations(openai_client, skip_if_no_server_running):
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
     from openai import NotFoundError
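
For context (not part of this diff): with the explicit skip fixture removed, a hypothetical test elsewhere in the suite that only requests openai_client would now be skipped automatically whenever no server is running, for example:

def test_openai_file_upload(openai_client):
    # hypothetical example; the file name, contents, and purpose are made up
    uploaded = openai_client.files.create(
        file=("notes.txt", b"hello"),
        purpose="assistants",
    )
    assert uploaded.id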


@@ -263,7 +263,20 @@ def instantiate_llama_stack_client(session):
 @pytest.fixture(scope="session")
-def openai_client(llama_stack_client):
+def require_server(llama_stack_client):
+    """
+    Skip test if no server is running.
+    We use the llama_stack_client to tell if a server was started or not.
+    We use this with openai_client because it relies on a running server.
+    """
+    if isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+        pytest.skip("No server running")
+
+
+@pytest.fixture(scope="session")
+def openai_client(llama_stack_client, require_server):
     base_url = f"{llama_stack_client.base_url}/v1/openai/v1"
     return OpenAI(base_url=base_url, api_key="fake")
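
A minimal, standalone sketch (not code from this repository) of the fixture-chaining pattern the commit relies on: a requirement fixture that calls pytest.skip, declared as a dependency of the client fixture so every test using the client inherits the skip. All names below (InProcessClient, HttpClient, backend_client, api_client, STACK_URL) are invented for illustration.

import os
import pytest

class InProcessClient:                      # stand-in for LlamaStackAsLibraryClient
    base_url = None

class HttpClient:                           # stand-in for a client talking to a real server
    base_url = "http://localhost:8321"

@pytest.fixture(scope="session")
def backend_client():
    # pick a backend the way a conftest might (the env var here is invented)
    return HttpClient() if os.environ.get("STACK_URL") else InProcessClient()

@pytest.fixture(scope="session")
def require_server(backend_client):
    # skip, rather than fail, when there is no running server to talk to
    if isinstance(backend_client, InProcessClient):
        pytest.skip("No server running")

@pytest.fixture(scope="session")
def api_client(backend_client, require_server):
    # only constructed when require_server did not skip
    return f"{backend_client.base_url}/v1"

def test_uses_api(api_client):
    # any test requesting api_client is skipped automatically without a server
    assert api_client.endswith("/v1")

Because the skip happens during fixture setup, tests that depend on the client report as skipped rather than errored when only the library client is available.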