Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 21:59:49 +00:00)
chore(files tests): update files integration tests and fix inline::localfs
- update files=inline::localfs to raise ResourceNotFoundError instead of ValueError
- only skip tests when no files provider is available
- directly use openai_client and llama_stack_client where appropriate
- check for correct behavior of non-existent file
- xfail the isolation test, no implementation supports it

Test plan -

```
$ uv run ./scripts/integration-tests.sh --stack-config server:ci-tests --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations PASSED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_isolation XFAIL [ 50%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes PASSED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access PASSED [100%]
==================================== 3 passed, 1 xfailed in 1.03s =====================================
```

Previously -

```
$ uv run llama stack build --image-type venv --providers files=inline::localfs --run &
...
$ ./scripts/integration-tests.sh --stack-config http://localhost:8321 --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 12%]
tests/integration/files/test_files.py::test_files_authentication_isolation[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 37%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 50%]
tests/integration/files/test_files.py::test_openai_client_basic_operations[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 62%]
tests/integration/files/test_files.py::test_files_authentication_isolation[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 87%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [100%]
========================================================= 2 passed, 6 skipped in 1.31s ==========================================================
```
This commit is contained in:
parent 5e7c2250be
commit 76d45bb5b1

3 changed files with 50 additions and 50 deletions
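The user-visible effect of the error change: when a file id does not exist, the localfs provider now raises `ResourceNotFoundError`, which the updated tests expect to surface on the client as a 404 (`NotFoundError`) rather than a bare `ValueError`. A minimal sketch of that behavior, assuming a stack is already running at http://localhost:8321 as in the test plan above (the endpoint path mirrors the `openai_client` fixture changed in this commit):

```python
# Minimal sketch: looking up a missing file id should now surface as a 404
# (openai.NotFoundError) instead of a generic server error.
# Assumes a stack is listening at http://localhost:8321 (see test plan above).
from openai import NotFoundError, OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="fake")

try:
    client.files.retrieve("file-does-not-exist")
except NotFoundError as err:
    print("got expected 404:", err)
```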
Diff 1 of 3: the inline::localfs files provider (`LocalfsFilesImpl`):

@@ -11,6 +11,7 @@ from typing import Annotated
 
 from fastapi import File, Form, Response, UploadFile
 
+from llama_stack.apis.common.errors import ResourceNotFoundError
 from llama_stack.apis.common.responses import Order
 from llama_stack.apis.files import (
     Files,
@@ -162,7 +163,7 @@ class LocalfsFilesImpl(Files):
 
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         return OpenAIFileObject(
             id=row["id"],
@@ -180,7 +181,7 @@ class LocalfsFilesImpl(Files):
 
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         # Delete physical file
         file_path = Path(row["file_path"])
@@ -203,12 +204,12 @@ class LocalfsFilesImpl(Files):
         # Get file metadata
         row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
         if not row:
-            raise ValueError(f"File with id {file_id} not found")
+            raise ResourceNotFoundError(file_id, "files", "files.list()")
 
         # Read file content
         file_path = Path(row["file_path"])
         if not file_path.exists():
-            raise ValueError(f"File content not found on disk: {file_path}")
+            raise ResourceNotFoundError(file_id, "files content", "files.list()")
 
         with open(file_path, "rb") as f:
             content = f.read()
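The call sites above pass three arguments: `ResourceNotFoundError(file_id, "files", "files.list()")`. The real class lives in `llama_stack.apis.common.errors` and is not shown in this diff; the following is only an illustrative sketch of a class matching that call shape, assuming it formats a "not found" message (which is what the updated tests match on) and keeps `ValueError` as a base so existing `except ValueError` handlers still work:

```python
# Hypothetical sketch only -- not the actual llama_stack.apis.common.errors code.
# It matches the call shape used above: ResourceNotFoundError(resource_id, resource_name, hint).
class ResourceNotFoundError(ValueError):
    def __init__(self, resource_id: str, resource_name: str, hint: str) -> None:
        super().__init__(f"{resource_name} '{resource_id}' not found. Use '{hint}' to list available resources.")


# e.g. raise ResourceNotFoundError(file_id, "files", "files.list()")
```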
Diff 2 of 3: tests/integration/files/test_files.py:

@@ -8,17 +8,22 @@ from io import BytesIO
 from unittest.mock import patch
 
 import pytest
-from openai import OpenAI
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-def test_openai_client_basic_operations(compat_client, client_with_models):
+# a fixture to skip all these tests if a files provider is not available
+@pytest.fixture(autouse=True)
+def skip_if_no_files_provider(llama_stack_client):
+    if not [provider for provider in llama_stack_client.providers.list() if provider.api == "files"]:
+        pytest.skip("No files providers found")
+
+
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    client = compat_client
+    from openai import NotFoundError
+
+    client = openai_client
 
     test_content = b"files test content"
 
@@ -31,6 +36,7 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
         # Verify basic response structure
         assert uploaded_file.id.startswith("file-")
         assert hasattr(uploaded_file, "filename")
+        assert uploaded_file.filename == "openai_test.txt"
 
         # List files
         files_list = client.files.list()
@@ -43,37 +49,41 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
 
         # Retrieve file content - OpenAI client returns httpx Response object
        content_response = client.files.content(uploaded_file.id)
-        # The response is an httpx Response object with .content attribute containing bytes
-        assert content_response.content == test_content
+        if isinstance(content_response, str):
+            # Llama Stack Client returns a str
+            # TODO: fix Llama Stack Client
+            content = bytes(content_response, "utf-8")
+        else:
+            content = content_response.content
+        assert content == test_content
 
         # Delete file
         delete_response = client.files.delete(uploaded_file.id)
         assert delete_response.deleted is True
 
-    except Exception as e:
-        # Cleanup in case of failure
-        try:
+        # Retrieve file should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.retrieve(uploaded_file.id)
+
+        # File should not be found in listing
+        files_list = client.files.list()
+        file_ids = [f.id for f in files_list.data]
+        assert uploaded_file.id not in file_ids
+
+        # Double delete should fail
+        with pytest.raises(NotFoundError, match="not found"):
             client.files.delete(uploaded_file.id)
-        except Exception:
-            pass
-        raise e
+    finally:
+        # Cleanup in case of failure
+        if "uploaded_file" in locals():  # maybe create fails
+            try:
+                client.files.delete(uploaded_file.id)
+            except NotFoundError:
+                pass  # ignore 404
 
 
+@pytest.mark.xfail(message="Not all providers support user isolation")
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
     """Test that users can only access their own files."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    from llama_stack_client import NotFoundError
+
+    client = llama_stack_client
 
     # Create two test users
     user1 = User("user1", {"roles": ["user"], "teams": ["team-a"]})
@@ -117,7 +127,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot retrieve user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.retrieve(user2_file.id)
 
     # User 1 can access their file content
@@ -131,7 +141,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot access user2's file content
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.content(user2_file.id)
 
     # User 1 can delete their own file
@@ -141,7 +151,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
     # User 1 cannot delete user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.delete(user2_file.id)
 
     # User 2 can still access their file after user1's file is deleted
@@ -169,14 +179,9 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_shared_attributes(mock_get_authenticated_user, llama_stack_client):
     """Test access control with users having identical attributes."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Create users with identical attributes (required for default policy)
     user_a = User("user-a", {"roles": ["user"], "teams": ["shared-team"]})
@@ -231,14 +236,8 @@ def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_anonymous_access(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_anonymous_access(mock_get_authenticated_user, llama_stack_client):
     """Test anonymous user behavior when no authentication is present."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Simulate anonymous user (no authentication)
     mock_get_authenticated_user.return_value = None
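One detail worth calling out from the updated basic-operations test: `client.files.content()` returns different types depending on the client (the llama-stack client currently returns a `str`, per the TODO above, while the OpenAI client returns an httpx `Response` whose `.content` is bytes). A small standalone version of that normalization, with a helper name that is purely illustrative:

```python
# Illustrative helper (the name is not part of either library): normalize the
# return value of client.files.content() to bytes, mirroring the branch in the test.
def file_content_as_bytes(content_response) -> bytes:
    if isinstance(content_response, str):
        # llama-stack client currently returns a str (see TODO in the diff)
        return content_response.encode("utf-8")
    # OpenAI client returns an httpx Response; .content is the raw bytes
    return content_response.content


# usage: assert file_content_as_bytes(client.files.content(file_id)) == expected_bytes
```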
Diff 3 of 3: shared integration-test fixtures (`openai_client` fixture):

@@ -263,8 +263,8 @@ def instantiate_llama_stack_client(session):
 
 
 @pytest.fixture(scope="session")
-def openai_client(client_with_models):
-    base_url = f"{client_with_models.base_url}/v1/openai/v1"
+def openai_client(llama_stack_client):
+    base_url = f"{llama_stack_client.base_url}/v1/openai/v1"
     return OpenAI(base_url=base_url, api_key="fake")
 
 
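With the fixture now keyed off `llama_stack_client`, a files test can take `openai_client` directly and exercise the stack's OpenAI-compatible `/v1/openai/v1` surface. A short, hypothetical example of such a test (the test name and content are illustrative, not part of this commit):

```python
from io import BytesIO


def test_upload_and_delete_roundtrip(openai_client):
    # Upload via the OpenAI SDK pointed at the stack, then clean up.
    buf = BytesIO(b"fixture example")
    buf.name = "example.txt"  # the SDK uses the buffer name as the filename
    uploaded = openai_client.files.create(file=buf, purpose="assistants")
    assert uploaded.filename == "example.txt"
    assert openai_client.files.delete(uploaded.id).deleted is True
```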