Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 04:04:14 +00:00)

chore(files tests): update files integration tests and fix inline::localfs (#3195)
- update files=inline::localfs to raise ResourceNotFoundError instead of ValueError
- only skip tests when no files provider is available
- directly use openai_client and llama_stack_client where appropriate
- check for correct behavior of non-existent file
- xfail the isolation test; no implementation supports it

test plan -

```
$ uv run ./scripts/integration-tests.sh --stack-config server:ci-tests --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations PASSED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_isolation XFAIL [ 50%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes PASSED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access PASSED [100%]
==================================== 3 passed, 1 xfailed in 1.03s =====================================
```

previously -

```
$ uv run llama stack build --image-type venv --providers files=inline::localfs --run &
...
$ ./scripts/integration-tests.sh --stack-config http://localhost:8321 --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 12%]
tests/integration/files/test_files.py::test_files_authentication_isolation[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 37%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 50%]
tests/integration/files/test_files.py::test_openai_client_basic_operations[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 62%]
tests/integration/files/test_files.py::test_files_authentication_isolation[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 87%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [100%]
========================================================= 2 passed, 6 skipped in 1.31s ==========================================================
```
Parent: 55e9959f62
Commit: c2c859a6b0
4 changed files with 92 additions and 87 deletions
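
Taken together, the first and fourth bullets above define the client-visible contract the updated tests assert: operations on a non-existent file id must surface as a 404-style `NotFoundError` rather than a generic server error. A minimal sketch of that contract, assuming `client` is an OpenAI SDK client already pointed at a running stack (the real tests get this from the `openai_client` fixture):

```python
import pytest
from openai import NotFoundError


def check_missing_file_contract(client):
    # With inline::localfs raising ResourceNotFoundError instead of ValueError,
    # both retrieve and delete on an unknown file id should raise NotFoundError.
    # "file-does-not-exist" is an illustrative placeholder id.
    with pytest.raises(NotFoundError, match="not found"):
        client.files.retrieve("file-does-not-exist")
    with pytest.raises(NotFoundError, match="not found"):
        client.files.delete("file-does-not-exist")
```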
tests/integration/files/test_files.py

```diff
@@ -8,20 +8,27 @@ from io import BytesIO
 from unittest.mock import patch
 
 import pytest
-from openai import OpenAI
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-def test_openai_client_basic_operations(compat_client, client_with_models):
+# a fixture to skip all these tests if a files provider is not available
+@pytest.fixture(autouse=True)
+def skip_if_no_files_provider(llama_stack_client):
+    if not [provider for provider in llama_stack_client.providers.list() if provider.api == "files"]:
+        pytest.skip("No files providers found")
+
+
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    client = compat_client
+    from openai import NotFoundError
+
+    client = openai_client
 
     test_content = b"files test content"
 
+    uploaded_file = None
+
     try:
         # Upload file using OpenAI client
         with BytesIO(test_content) as file_buffer:
```
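
The autouse fixture above implements the second bullet of the commit message: pytest runs it before every test in the module, so one provider check replaces the per-test `LlamaStackAsLibraryClient` guards that the rest of this diff deletes. An equivalent formulation of the check, shown only to make the mechanism explicit:

```python
import pytest


@pytest.fixture(autouse=True)
def skip_if_no_files_provider(llama_stack_client):
    # autouse=True: invoked before each test in the module, so a single
    # check here skips them all when no files provider is registered.
    if not any(provider.api == "files" for provider in llama_stack_client.providers.list()):
        pytest.skip("No files providers found")
```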
```diff
@@ -31,6 +38,7 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
         # Verify basic response structure
         assert uploaded_file.id.startswith("file-")
+        assert hasattr(uploaded_file, "filename")
         assert uploaded_file.filename == "openai_test.txt"
 
         # List files
         files_list = client.files.list()
@@ -43,37 +51,41 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
 
         # Retrieve file content - OpenAI client returns httpx Response object
         content_response = client.files.content(uploaded_file.id)
-        # The response is an httpx Response object with .content attribute containing bytes
-        if isinstance(content_response, str):
-            # Llama Stack Client returns a str
-            # TODO: fix Llama Stack Client
-            content = bytes(content_response, "utf-8")
-        else:
-            content = content_response.content
-        assert content == test_content
+        assert content_response.content == test_content
 
         # Delete file
         delete_response = client.files.delete(uploaded_file.id)
         assert delete_response.deleted is True
 
-    except Exception as e:
-        # Cleanup in case of failure
-        try:
+        # Retrieve file should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.retrieve(uploaded_file.id)
+
+        # File should not be found in listing
+        files_list = client.files.list()
+        file_ids = [f.id for f in files_list.data]
+        assert uploaded_file.id not in file_ids
+
+        # Double delete should fail
+        with pytest.raises(NotFoundError, match="not found"):
             client.files.delete(uploaded_file.id)
-        except Exception:
-            pass
-        raise e
+
+    finally:
+        # Cleanup in case of failure
+        if uploaded_file is not None:
+            try:
+                client.files.delete(uploaded_file.id)
+            except NotFoundError:
+                pass  # ignore 404
 
 
+@pytest.mark.xfail(message="User isolation broken for current providers, must be fixed.")
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
     """Test that users can only access their own files."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
+    from llama_stack_client import NotFoundError
 
-    client = compat_client
+    client = llama_stack_client
 
     # Create two test users
     user1 = User("user1", {"roles": ["user"], "teams": ["team-a"]})
```
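
Note the two different `NotFoundError` imports in the hunk above: the basic-operations test talks to the stack through the OpenAI SDK and expects `openai.NotFoundError`, while the isolation test uses the native client and expects `llama_stack_client.NotFoundError`. Each SDK raises its own exception type for an HTTP 404, so the expected error has to match the client in use; the shared `match="not found"` is what both tests check against the error message. Side by side:

```python
# The two exception families the updated tests rely on, one per SDK.
from openai import NotFoundError as OpenAINotFoundError  # raised by openai_client
from llama_stack_client import NotFoundError as LlamaStackNotFoundError  # raised by llama_stack_client
```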
```diff
@@ -117,7 +129,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot retrieve user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.retrieve(user2_file.id)
 
     # User 1 can access their file content
@@ -131,7 +143,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot access user2's file content
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.content(user2_file.id)
 
     # User 1 can delete their own file
@@ -141,7 +153,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot delete user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.delete(user2_file.id)
 
     # User 2 can still access their file after user1's file is deleted
```
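
These three one-line changes are the test-side half of the first commit-message bullet; the provider-side half is `inline::localfs` raising `ResourceNotFoundError` where it previously raised `ValueError`. A hedged sketch of the shape of that change (the class, method, and storage here are illustrative stand-ins, and the `ResourceNotFoundError` import path and constructor are assumptions, not the actual localfs code):

```python
from llama_stack.apis.common.errors import ResourceNotFoundError  # assumed import path


class LocalfsFilesSketch:
    """Illustrative stand-in for the localfs files provider."""

    def __init__(self, files_by_id: dict):
        self._files_by_id = files_by_id

    async def openai_retrieve_file(self, file_id: str):
        meta = self._files_by_id.get(file_id)
        if meta is None:
            # Previously: raise ValueError(f"File with id {file_id} not found").
            # A typed error lets the HTTP layer answer 404, which both SDKs
            # then surface to callers as NotFoundError.
            raise ResourceNotFoundError(f"File with id {file_id} not found")  # assumed signature
        return meta
```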
```diff
@@ -169,14 +181,9 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_shared_attributes(mock_get_authenticated_user, llama_stack_client):
     """Test access control with users having identical attributes."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Create users with identical attributes (required for default policy)
     user_a = User("user-a", {"roles": ["user"], "teams": ["shared-team"]})
```
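
The last two hunks tighten the same pattern all three authentication tests use: patch `get_authenticated_user` in the authorized sqlstore and flip its return value mid-test, so successive calls through the same client are attributed to different users. The skeleton, condensed from the tests above:

```python
from unittest.mock import patch

from llama_stack.core.datatypes import User


@patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
def test_pattern_skeleton(mock_get_authenticated_user, llama_stack_client):
    # Store operations are attributed to whatever the mock currently returns.
    mock_get_authenticated_user.return_value = User("user1", {"roles": ["user"], "teams": ["team-a"]})
    # ... act as user1 ...
    mock_get_authenticated_user.return_value = None  # anonymous: no authenticated user
    # ... act with no authentication ...
```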
```diff
@@ -231,14 +238,8 @@ def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_anonymous_access(mock_get_authenticated_user, compat_client, client_with_models):
-    """Test anonymous user behavior when no authentication is present."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+def test_files_authentication_anonymous_access(mock_get_authenticated_user, llama_stack_client):
+    client = llama_stack_client
 
     # Simulate anonymous user (no authentication)
     mock_get_authenticated_user.return_value = None
```