Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-21 09:23:13 +00:00
chore(files tests): update files integration tests and fix inline::localfs (#3195)
- update files=inline::localfs to raise ResourceNotFoundError instead of ValueError
- only skip tests when no files provider is available
- directly use openai_client and llama_stack_client where appropriate
- check for correct behavior of non-existent file
- xfail the isolation test, no implementation supports it

Test plan:

```
$ uv run ./scripts/integration-tests.sh --stack-config server:ci-tests --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations PASSED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_isolation XFAIL [ 50%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes PASSED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access PASSED [100%]
==================================== 3 passed, 1 xfailed in 1.03s =====================================
```

Previously:

```
$ uv run llama stack build --image-type venv --providers files=inline::localfs --run &
...
$ ./scripts/integration-tests.sh --stack-config http://localhost:8321 --provider ollama --test-subdirs files
...
tests/integration/files/test_files.py::test_openai_client_basic_operations[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 12%]
tests/integration/files/test_files.py::test_files_authentication_isolation[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 25%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 37%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[openai_client-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 50%]
tests/integration/files/test_files.py::test_openai_client_basic_operations[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] PASSED [ 62%]
tests/integration/files/test_files.py::test_files_authentication_isolation[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 75%]
tests/integration/files/test_files.py::test_files_authentication_shared_attributes[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [ 87%]
tests/integration/files/test_files.py::test_files_authentication_anonymous_access[client_with_models-ollama/llama3.2:3b-instruct-fp16-None-sentence-transformers/all-MiniLM-L6-v2-None-384] SKIPPED [100%]
========================================================= 2 passed, 6 skipped in 1.31s ==========================================================
```
This commit is contained in: parent 55e9959f62, commit c2c859a6b0.

4 changed files with 92 additions and 87 deletions
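The central behavioral change is swapping bare `ValueError`s for the typed `ResourceNotFoundError` imported from `llama_stack.apis.common.errors`, which the server can map to an HTTP 404 (the updated integration tests expect the OpenAI client's `NotFoundError`). A minimal sketch of the pattern; the constructor arguments come from the diff below, but the exact message wording here is an assumption:

```python
# Sketch only: the real ResourceNotFoundError lives in
# llama_stack.apis.common.errors; this message format is an assumption.
class ResourceNotFoundError(ValueError):
    def __init__(self, resource_name: str, resource_type: str, client_list: str) -> None:
        super().__init__(
            f"{resource_type} '{resource_name}' not found. "
            f"Use '{client_list}' to list available resources."
        )


# The updated tests only assert on the "not found" substring:
try:
    raise ResourceNotFoundError("file-nonexistent", "File", "client.files.list()")
except ResourceNotFoundError as e:
    assert "not found" in str(e)
```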
Changes to the `inline::localfs` files provider implementation:

```diff
@@ -11,6 +11,7 @@ from typing import Annotated
 
 from fastapi import File, Form, Response, UploadFile
 
+from llama_stack.apis.common.errors import ResourceNotFoundError
 from llama_stack.apis.common.responses import Order
 from llama_stack.apis.files import (
     Files,
@@ -20,12 +21,15 @@ from llama_stack.apis.files import (
     OpenAIFilePurpose,
 )
 from llama_stack.core.datatypes import AccessRule
+from llama_stack.log import get_logger
 from llama_stack.providers.utils.sqlstore.api import ColumnDefinition, ColumnType
 from llama_stack.providers.utils.sqlstore.authorized_sqlstore import AuthorizedSqlStore
 from llama_stack.providers.utils.sqlstore.sqlstore import sqlstore_impl
 
 from .config import LocalfsFilesImplConfig
 
+logger = get_logger(name=__name__, category="files")
+
 
 class LocalfsFilesImpl(Files):
     def __init__(self, config: LocalfsFilesImplConfig, policy: list[AccessRule]) -> None:
@@ -65,6 +69,18 @@ class LocalfsFilesImpl(Files):
         """Get the filesystem path for a file ID."""
         return Path(self.config.storage_dir) / file_id
 
+    async def _lookup_file_id(self, file_id: str) -> tuple[OpenAIFileObject, Path]:
+        """Look up a OpenAIFileObject and filesystem path from its ID."""
+        if not self.sql_store:
+            raise RuntimeError("Files provider not initialized")
+
+        row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
+        if not row:
+            raise ResourceNotFoundError(file_id, "File", "client.files.list()")
+
+        file_path = Path(row.pop("file_path"))
+        return OpenAIFileObject(**row), file_path
+
     # OpenAI Files API Implementation
     async def openai_upload_file(
         self,
@@ -157,37 +173,19 @@ class LocalfsFilesImpl(Files):
 
     async def openai_retrieve_file(self, file_id: str) -> OpenAIFileObject:
         """Returns information about a specific file."""
-        if not self.sql_store:
-            raise RuntimeError("Files provider not initialized")
-
-        row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
-        if not row:
-            raise ValueError(f"File with id {file_id} not found")
+        file_obj, _ = await self._lookup_file_id(file_id)
 
-        return OpenAIFileObject(
-            id=row["id"],
-            filename=row["filename"],
-            purpose=OpenAIFilePurpose(row["purpose"]),
-            bytes=row["bytes"],
-            created_at=row["created_at"],
-            expires_at=row["expires_at"],
-        )
+        return file_obj
 
     async def openai_delete_file(self, file_id: str) -> OpenAIFileDeleteResponse:
         """Delete a file."""
-        if not self.sql_store:
-            raise RuntimeError("Files provider not initialized")
-
-        row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
-        if not row:
-            raise ValueError(f"File with id {file_id} not found")
-
         # Delete physical file
-        file_path = Path(row["file_path"])
+        _, file_path = await self._lookup_file_id(file_id)
         if file_path.exists():
             file_path.unlink()
 
         # Delete metadata from database
+        assert self.sql_store is not None, "Files provider not initialized"
         await self.sql_store.delete("openai_files", where={"id": file_id})
 
         return OpenAIFileDeleteResponse(
@@ -197,25 +195,17 @@ class LocalfsFilesImpl(Files):
 
     async def openai_retrieve_file_content(self, file_id: str) -> Response:
         """Returns the contents of the specified file."""
-        if not self.sql_store:
-            raise RuntimeError("Files provider not initialized")
-
-        # Get file metadata
-        row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
-        if not row:
-            raise ValueError(f"File with id {file_id} not found")
-
         # Read file content
-        file_path = Path(row["file_path"])
-        if not file_path.exists():
-            raise ValueError(f"File content not found on disk: {file_path}")
-
-        with open(file_path, "rb") as f:
-            content = f.read()
+        file_obj, file_path = await self._lookup_file_id(file_id)
+
+        if not file_path.exists():
+            logger.warning(f"File '{file_id}'s underlying '{file_path}' is missing, deleting metadata.")
+            await self.openai_delete_file(file_id)
+            raise ResourceNotFoundError(file_id, "File", "client.files.list()")
 
         # Return as binary response with appropriate content type
         return Response(
-            content=content,
+            content=file_path.read_bytes(),
             media_type="application/octet-stream",
-            headers={"Content-Disposition": f'attachment; filename="{row["filename"]}"'},
+            headers={"Content-Disposition": f'attachment; filename="{file_obj.filename}"'},
         )
```
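Worth noting in the new `_lookup_file_id` helper: `row.pop("file_path")` strips the storage-only column so the remaining row maps 1:1 onto `OpenAIFileObject(**row)`. A self-contained sketch of that pattern with stand-in types (`FakeFileObject`, `FAKE_DB`, and `lookup` are illustrative, not llama-stack APIs):

```python
# Minimal sketch of the lookup pattern, with a plain dict standing in for
# the provider's AuthorizedSqlStore. Nothing here is a real llama-stack API.
from dataclasses import dataclass
from pathlib import Path


@dataclass
class FakeFileObject:  # stand-in for OpenAIFileObject
    id: str
    filename: str


FAKE_DB = {"file-1": {"id": "file-1", "filename": "a.txt", "file_path": "/tmp/file-1"}}


def lookup(file_id: str) -> tuple[FakeFileObject, Path]:
    row = FAKE_DB.get(file_id)
    if not row:
        raise FileNotFoundError(f"File '{file_id}' not found")  # typed error, not bare ValueError
    row = dict(row)
    # pop() removes the storage-only column so the rest maps 1:1 onto the model
    file_path = Path(row.pop("file_path"))
    return FakeFileObject(**row), file_path


obj, path = lookup("file-1")
assert obj.filename == "a.txt" and path.name == "file-1"
```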
Changes to `tests/integration/files/test_files.py`:

```diff
@@ -8,20 +8,27 @@ from io import BytesIO
 from unittest.mock import patch
 
 import pytest
-from openai import OpenAI
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-def test_openai_client_basic_operations(compat_client, client_with_models):
+# a fixture to skip all these tests if a files provider is not available
+@pytest.fixture(autouse=True)
+def skip_if_no_files_provider(llama_stack_client):
+    if not [provider for provider in llama_stack_client.providers.list() if provider.api == "files"]:
+        pytest.skip("No files providers found")
+
+
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
+    from openai import NotFoundError
 
-    client = compat_client
+    client = openai_client
 
     test_content = b"files test content"
 
+    uploaded_file = None
+
     try:
         # Upload file using OpenAI client
         with BytesIO(test_content) as file_buffer:
@@ -31,6 +38,7 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
         # Verify basic response structure
         assert uploaded_file.id.startswith("file-")
         assert hasattr(uploaded_file, "filename")
+        assert uploaded_file.filename == "openai_test.txt"
 
         # List files
         files_list = client.files.list()
@@ -43,37 +51,41 @@
 
         # Retrieve file content - OpenAI client returns httpx Response object
         content_response = client.files.content(uploaded_file.id)
-        # The response is an httpx Response object with .content attribute containing bytes
-        if isinstance(content_response, str):
-            # Llama Stack Client returns a str
-            # TODO: fix Llama Stack Client
-            content = bytes(content_response, "utf-8")
-        else:
-            content = content_response.content
-        assert content == test_content
+        assert content_response.content == test_content
 
         # Delete file
         delete_response = client.files.delete(uploaded_file.id)
         assert delete_response.deleted is True
 
-    except Exception as e:
+        # Retrieve file should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.retrieve(uploaded_file.id)
+
+        # File should not be found in listing
+        files_list = client.files.list()
+        file_ids = [f.id for f in files_list.data]
+        assert uploaded_file.id not in file_ids
+
+        # Double delete should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.delete(uploaded_file.id)
+
+    finally:
         # Cleanup in case of failure
-        try:
-            client.files.delete(uploaded_file.id)
-        except Exception:
-            pass
-        raise e
+        if uploaded_file is not None:
+            try:
+                client.files.delete(uploaded_file.id)
+            except NotFoundError:
+                pass  # ignore 404
 
 
+@pytest.mark.xfail(message="User isolation broken for current providers, must be fixed.")
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
     """Test that users can only access their own files."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
+    from llama_stack_client import NotFoundError
 
-    client = compat_client
+    client = llama_stack_client
 
     # Create two test users
     user1 = User("user1", {"roles": ["user"], "teams": ["team-a"]})
@@ -117,7 +129,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot retrieve user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.retrieve(user2_file.id)
 
     # User 1 can access their file content
@@ -131,7 +143,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot access user2's file content
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.content(user2_file.id)
 
     # User 1 can delete their own file
@@ -141,7 +153,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot delete user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.delete(user2_file.id)
 
     # User 2 can still access their file after user1's file is deleted
@@ -169,14 +181,9 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_shared_attributes(mock_get_authenticated_user, llama_stack_client):
     """Test access control with users having identical attributes."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Create users with identical attributes (required for default policy)
     user_a = User("user-a", {"roles": ["user"], "teams": ["shared-team"]})
@@ -231,14 +238,8 @@ def test_files_authentication_shared_attributes(mock_get_authenticated_user, com
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_anonymous_access(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_anonymous_access(mock_get_authenticated_user, llama_stack_client):
     """Test anonymous user behavior when no authentication is present."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Simulate anonymous user (no authentication)
     mock_get_authenticated_user.return_value = None
```
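The `autouse=True` fixture is what lets the per-test skip boilerplate go away: pytest runs it before every test in the module, so a single capability check gates them all. A generic, runnable sketch of the same pattern (the `has_files_provider` probe is a made-up stand-in):

```python
import pytest


def has_files_provider() -> bool:
    """Made-up capability probe; always unavailable in this sketch."""
    return False


@pytest.fixture(autouse=True)
def skip_if_no_files_provider():
    # Runs before every test in this module: one check gates them all.
    if not has_files_provider():
        pytest.skip("No files providers found")


def test_anything():
    raise AssertionError("never runs; reported as SKIPPED instead")
```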
Changes to the integration test fixtures module (where `instantiate_llama_stack_client` and the `openai_client` fixture live):

```diff
@@ -263,8 +263,21 @@ def instantiate_llama_stack_client(session)
 
 
 @pytest.fixture(scope="session")
-def openai_client(client_with_models):
-    base_url = f"{client_with_models.base_url}/v1/openai/v1"
+def require_server(llama_stack_client):
+    """
+    Skip test if no server is running.
+
+    We use the llama_stack_client to tell if a server was started or not.
+
+    We use this with openai_client because it relies on a running server.
+    """
+    if isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+        pytest.skip("No server running")
+
+
+@pytest.fixture(scope="session")
+def openai_client(llama_stack_client, require_server):
+    base_url = f"{llama_stack_client.base_url}/v1/openai/v1"
     return OpenAI(base_url=base_url, api_key="fake")
 
 
```
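The fixture chaining here is the point: `openai_client` requests `require_server` purely for its side effect, so every test that asks for `openai_client` inherits the library-mode skip without repeating the `isinstance` check. A generic sketch of the same dependency trick (`server_is_running` and `api_client` are hypothetical names):

```python
import pytest


def server_is_running() -> bool:
    """Hypothetical probe; pretend no server is up in this sketch."""
    return False


@pytest.fixture(scope="session")
def require_server():
    # Requested only for its side effect: skip when no server is running.
    if not server_is_running():
        pytest.skip("No server running")


@pytest.fixture(scope="session")
def api_client(require_server):
    # Depending on require_server means every user of api_client inherits the skip.
    return object()  # stand-in for a real client


def test_uses_client(api_client):
    assert api_client is not None
```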
Changes to the unit tests for the files provider (`TestOpenAIFilesAPI`):

```diff
@@ -7,6 +7,7 @@
 
 import pytest
 
+from llama_stack.apis.common.errors import ResourceNotFoundError
 from llama_stack.apis.common.responses import Order
 from llama_stack.apis.files import OpenAIFilePurpose
 from llama_stack.core.access_control.access_control import default_policy
@@ -190,7 +191,7 @@ class TestOpenAIFilesAPI:
 
     async def test_retrieve_file_not_found(self, files_provider):
         """Test retrieving a non-existent file."""
-        with pytest.raises(ValueError, match="File with id file-nonexistent not found"):
+        with pytest.raises(ResourceNotFoundError, match="not found"):
             await files_provider.openai_retrieve_file("file-nonexistent")
 
     async def test_retrieve_file_content_success(self, files_provider, sample_text_file):
@@ -208,7 +209,7 @@ class TestOpenAIFilesAPI:
 
     async def test_retrieve_file_content_not_found(self, files_provider):
         """Test retrieving content of a non-existent file."""
-        with pytest.raises(ValueError, match="File with id file-nonexistent not found"):
+        with pytest.raises(ResourceNotFoundError, match="not found"):
             await files_provider.openai_retrieve_file_content("file-nonexistent")
 
     async def test_delete_file_success(self, files_provider, sample_text_file):
@@ -229,12 +230,12 @@ class TestOpenAIFilesAPI:
         assert delete_response.deleted is True
 
         # Verify file no longer exists
-        with pytest.raises(ValueError, match=f"File with id {uploaded_file.id} not found"):
+        with pytest.raises(ResourceNotFoundError, match="not found"):
             await files_provider.openai_retrieve_file(uploaded_file.id)
 
     async def test_delete_file_not_found(self, files_provider):
         """Test deleting a non-existent file."""
-        with pytest.raises(ValueError, match="File with id file-nonexistent not found"):
+        with pytest.raises(ResourceNotFoundError, match="not found"):
             await files_provider.openai_delete_file("file-nonexistent")
 
     async def test_file_persistence_across_operations(self, files_provider, sample_text_file):
```
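One detail behind the loosened assertions: `pytest.raises(match=...)` does a regex search against `str(exc)`, so matching the substring `not found` keeps the unit tests stable even if the exact error wording changes. A self-contained illustration (the stand-in exception class is hypothetical):

```python
import pytest


class ResourceNotFoundError(ValueError):
    """Stand-in for llama_stack.apis.common.errors.ResourceNotFoundError."""


def test_match_is_a_regex_search():
    # match= is re.search()'d against str(exc); a short substring is enough.
    with pytest.raises(ResourceNotFoundError, match="not found"):
        raise ResourceNotFoundError("File 'file-xyz' not found")
```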