From 76d45bb5b162e423b7780f669272fde02f520740 Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Mon, 18 Aug 2025 15:14:05 -0400
Subject: [PATCH] chore(files tests): update files integration tests and fix inline::localfs

- update files=inline::localfs to raise ResourceNotFoundError instead of ValueError
- only skip tests when no files provider is available
- directly use openai_client and llama_stack_client where appropriate
- check for correct behavior with a non-existent file
- xfail the isolation test, since no implementation supports it

test plan -

```
$ uv run ./scripts/integration-tests.sh --stack-config server:ci-tests --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations PASSED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_isolation XFAIL [ 50%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes PASSED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access PASSED [100%]
==================================== 3 passed, 1 xfailed in 1.03s =====================================
```

previously -

```
$ uv run llama stack build --image-type venv --providers files=inline::localfs --run &
...
$ ./scripts/integration-tests.sh --stack-config http://localhost:8321 --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 12%]
tests/integration/files/test_files.py::test_files_authentication_isolation[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 37%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 50%]
tests/integration/files/test_files.py::test_openai_client_basic_operations[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 62%]
tests/integration/files/test_files.py::test_files_authentication_isolation[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 87%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [100%]
========================================================= 2 passed, 6 skipped in 1.31s ==========================================================
```
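for reference - a minimal sketch of the client-visible behavior the updated tests assert (illustrative only, not code from this patch; assumes a stack server running at http://localhost:8321 with a files provider configured, as in the commands above) -

```
# hypothetical example: delete a file, then confirm the server answers 404
from io import BytesIO

from openai import NotFoundError, OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="fake")

uploaded = client.files.create(
    file=("openai_test.txt", BytesIO(b"files test content")),
    purpose="assistants",
)
assert client.files.delete(uploaded.id).deleted is True

try:
    client.files.retrieve(uploaded.id)
except NotFoundError:
    # inline::localfs now raises ResourceNotFoundError, which reaches OpenAI
    # clients as NotFoundError (404); the old bare ValueError did not
    pass
```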
---
 .../providers/inline/files/localfs/files.py |  9 +-
 tests/integration/files/test_files.py       | 87 +++++++++----------
 tests/integration/fixtures/common.py        |  4 +-
 3 files changed, 50 insertions(+), 50 deletions(-)

diff --git a/llama_stack/providers/inline/files/localfs/files.py b/llama_stack/providers/inline/files/localfs/files.py
index 1e9dca3b5..a4393f6d3 100644
--- a/llama_stack/providers/inline/files/localfs/files.py
+++ b/llama_stack/providers/inline/files/localfs/files.py
@@ -11,6 +11,7 @@ from typing import Annotated
 
 from fastapi import File, Form, Response, UploadFile
 
+from llama_stack.apis.common.errors import ResourceNotFoundError
 from llama_stack.apis.common.responses import Order
 from llama_stack.apis.files import (
     Files,
@@ -162,7 +163,7 @@ class LocalfsFilesImpl(Files):
 
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         return OpenAIFileObject(
             id=row["id"],
@@ -180,7 +181,7 @@
 
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         # Delete physical file
         file_path = Path(row["file_path"])
@@ -203,12 +204,12 @@
         # Get file metadata
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         # Read file content
         file_path = Path(row["file_path"])
         if not file_path.exists():
-            raise ValueError(f"File content not found on disk: {file_path}")
+            raise ResourceNotFoundError(file_id, "files content", "files.list()")
 
         with open(file_path, "rb") as f:
             content = f.read()

diff --git a/tests/integration/files/test_files.py b/tests/integration/files/test_files.py
index b17c7db83..c47e47f0c 100644
--- a/tests/integration/files/test_files.py
+++ b/tests/integration/files/test_files.py
@@ -8,17 +8,22 @@ from io import BytesIO
 from unittest.mock import patch
 
 import pytest
-from openai import OpenAI
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-def test_openai_client_basic_operations(compat_client, client_with_models):
+# a fixture to skip all these tests if a files provider is not available
+@pytest.fixture(autouse=True)
+def skip_if_no_files_provider(llama_stack_client):
+    if not [provider for provider in llama_stack_client.providers.list() if provider.api == "files"]:
+        pytest.skip("No files providers found")
+
+
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    client = compat_client
+    from openai import NotFoundError
+
+    client = openai_client
 
     test_content = b"files test content"
 
@@ -31,6 +36,7 @@
         # Verify basic response structure
         assert uploaded_file.id.startswith("file-")
         assert hasattr(uploaded_file, "filename")
+        assert uploaded_file.filename == "openai_test.txt"
 
         # List files
         files_list = client.files.list()
@@ -43,37 +49,41 @@
 
         # Retrieve file content - OpenAI client returns httpx Response object
        content_response = client.files.content(uploaded_file.id)
-        # The response is an httpx Response object with .content attribute containing bytes
-        if isinstance(content_response, str):
-            # Llama Stack Client returns a str
-            # TODO: fix Llama Stack Client
-            content = bytes(content_response, "utf-8")
-        else:
-            content = content_response.content
-        assert content == test_content
+        assert content_response.content == test_content
 
         # Delete file
         delete_response = client.files.delete(uploaded_file.id)
         assert delete_response.deleted is True
 
-    except Exception as e:
-        # Cleanup in case of failure
-        try:
+        # Retrieve file should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.retrieve(uploaded_file.id)
+
+        # File should not be found in listing
+        files_list = client.files.list()
+        file_ids = [f.id for f in files_list.data]
+        assert uploaded_file.id not in file_ids
+
+        # Double delete should fail
+        with pytest.raises(NotFoundError, match="not found"):
             client.files.delete(uploaded_file.id)
-        except Exception:
-            pass
-        raise e
+
+    finally:
+        # Cleanup in case of failure
+        if "uploaded_file" in locals():  # maybe create fails
+            try:
+                client.files.delete(uploaded_file.id)
+            except NotFoundError:
+                pass  # ignore 404
 
 
+@pytest.mark.xfail(reason="Not all providers support user isolation")
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
     """Test that users can only access their own files."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
+    from llama_stack_client import NotFoundError
 
-    client = compat_client
+    client = llama_stack_client
 
     # Create two test users
     user1 = User("user1", {"roles": ["user"], "teams": ["team-a"]})
@@ -117,7 +127,7 @@
 
     # User 1 cannot retrieve user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.retrieve(user2_file.id)
 
     # User 1 can access their file content
@@ -131,7 +141,7 @@
 
     # User 1 cannot access user2's file content
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.content(user2_file.id)
 
     # User 1 can delete their own file
@@ -141,7 +151,7 @@
 
     # User 1 cannot delete user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.delete(user2_file.id)
 
     # User 2 can still access their file after user1's file is deleted
@@ -169,14 +179,9 @@
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_shared_attributes(mock_get_authenticated_user, llama_stack_client):
     """Test access control with users having identical attributes."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Create users with identical attributes (required for default policy)
     user_a = User("user-a", {"roles": ["user"], "teams": ["shared-team"]})
@@ -231,14 +236,8 @@
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_anonymous_access(mock_get_authenticated_user, compat_client, client_with_models):
-    """Test anonymous user behavior when no authentication is present."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+def test_files_authentication_anonymous_access(mock_get_authenticated_user, llama_stack_client):
+    client = llama_stack_client
 
     # Simulate anonymous user (no authentication)
     mock_get_authenticated_user.return_value = None

diff --git a/tests/integration/fixtures/common.py b/tests/integration/fixtures/common.py
index 0b7132d71..6c38e8941 100644
--- a/tests/integration/fixtures/common.py
+++ b/tests/integration/fixtures/common.py
@@ -263,8 +263,8 @@ def instantiate_llama_stack_client(session):
 
 
 @pytest.fixture(scope="session")
-def openai_client(client_with_models):
-    base_url = f"{client_with_models.base_url}/v1/openai/v1"
+def openai_client(llama_stack_client):
+    base_url = f"{llama_stack_client.base_url}/v1/openai/v1"
     return OpenAI(base_url=base_url, api_key="fake")