diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 19d96cce2..ad58a4bf8 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -52,6 +52,20 @@ jobs:
           uv pip install -e .
           llama stack build --template ollama --image-type venv
 
+      - name: Setup minio when testing files
+        if: matrix.test-type == 'files'
+        run: |
+          mkdir -p ~/minio/data
+          docker run \
+            -d \
+            -p 9000:9000 \
+            -p 9001:9001 \
+            --name minio \
+            -v ~/minio/data:/data \
+            -e "MINIO_ROOT_USER=ROOTNAME" \
+            -e "MINIO_ROOT_PASSWORD=CHANGEME123" \
+            quay.io/minio/minio server /data --console-address ":9001"
+
       - name: Start Llama Stack server in background
         if: matrix.client-type == 'http'
         env:
diff --git a/llama_stack/providers/registry/files.py b/llama_stack/providers/registry/files.py
index 46d62d820..cc9587bc1 100644
--- a/llama_stack/providers/registry/files.py
+++ b/llama_stack/providers/registry/files.py
@@ -11,6 +11,7 @@ from llama_stack.providers.datatypes import (
     ProviderSpec,
     remote_provider_spec,
 )
+from llama_stack.providers.utils.kvstore import kvstore_dependencies
 
 
 def available_providers() -> list[ProviderSpec]:
@@ -19,7 +20,7 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.files,
             adapter=AdapterSpec(
                 adapter_type="s3",
-                pip_packages=["aioboto3"],
+                pip_packages=["aioboto3"] + kvstore_dependencies(),
                 module="llama_stack.providers.remote.files.object.s3",
                 config_class="llama_stack.providers.remote.files.object.s3.config.S3FilesImplConfig",
                 provider_data_validator="llama_stack.providers.remote.files.object.s3.S3ProviderDataValidator",
diff --git a/llama_stack/providers/remote/files/object/s3/__init__.py b/llama_stack/providers/remote/files/object/s3/__init__.py
index 13bd16230..11b7ee3f1 100644
--- a/llama_stack/providers/remote/files/object/s3/__init__.py
+++ b/llama_stack/providers/remote/files/object/s3/__init__.py
@@ -10,6 +10,9 @@ from .config import S3FilesImplConfig
 async def get_adapter_impl(config: S3FilesImplConfig, _deps):
     from .s3_files import S3FilesAdapter
 
-    impl = S3FilesAdapter(config)
+    impl = S3FilesAdapter(
+        config,
+        _deps,
+    )
     await impl.initialize()
     return impl
diff --git a/llama_stack/providers/remote/files/object/s3/persistence.py b/llama_stack/providers/remote/files/object/s3/persistence.py
index 7f27eece3..1d141d80c 100644
--- a/llama_stack/providers/remote/files/object/s3/persistence.py
+++ b/llama_stack/providers/remote/files/object/s3/persistence.py
@@ -21,7 +21,8 @@ class UploadSessionInfo(BaseModel):
 
     upload_id: str
     bucket: str
-    key: str
+    key: str  # Original key for file reading
+    s3_key: str  # S3 key for S3 operations
     mime_type: str
     size: int
     url: str
@@ -31,12 +32,12 @@ class S3FilesPersistence:
     def __init__(self, kvstore: KVStore):
         self._kvstore = kvstore
-        self._store = None
+        self._store: KVStore | None = None
 
     async def _get_store(self) -> KVStore:
         """Get the kvstore instance, initializing it if needed."""
         if self._store is None:
-            self._store = await anext(self._kvstore)
+            self._store = self._kvstore
         return self._store
 
     async def store_upload_session(
@@ -47,6 +48,7 @@ class S3FilesPersistence:
                 upload_id=session_info.id,
                 bucket=bucket,
                 key=key,
+                s3_key=key,
                 mime_type=mime_type,
                 size=size,
                 url=session_info.url,
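
A note on the persistence layer above: upload sessions are keyed by the `{bucket}/{key}` upload id that `create_upload_session` mints. A minimal sketch of the round-trip, assuming the SQLite kvstore wiring used by the unit tests further down; the bucket, key, and URL values are placeholders:

```python
import asyncio

from llama_stack.apis.files.files import FileUploadResponse
from llama_stack.providers.remote.files.object.s3.persistence import S3FilesPersistence
from llama_stack.providers.utils.kvstore import kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


async def main() -> None:
    # In-memory SQLite store, as in the unit test fixture below.
    store = await kvstore_impl(SqliteKVStoreConfig(path=":memory:"))
    await store.initialize()
    persistence = S3FilesPersistence(store)

    # The upload id doubles as "{bucket}/{key}" (see create_upload_session).
    session = FileUploadResponse(
        id="test-bucket/hello.txt", url="http://example.invalid/presigned", offset=0, size=5
    )
    await persistence.store_upload_session(
        session_info=session, bucket="test-bucket", key="hello.txt", mime_type="text/plain", size=5
    )

    restored = await persistence.get_upload_session("test-bucket/hello.txt")
    assert restored is not None and restored.s3_key == "hello.txt"

    # upload_content_to_session deletes the session once it finishes.
    await persistence.delete_upload_session("test-bucket/hello.txt")


asyncio.run(main())
```
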
diff --git a/llama_stack/providers/remote/files/object/s3/s3_files.py b/llama_stack/providers/remote/files/object/s3/s3_files.py
index 45dff31ed..27b3db5a3 100644
--- a/llama_stack/providers/remote/files/object/s3/s3_files.py
+++ b/llama_stack/providers/remote/files/object/s3/s3_files.py
@@ -14,19 +14,21 @@ from llama_stack.apis.files.files import (
     Files,
     FileUploadResponse,
 )
+from llama_stack.log import get_logger
+from llama_stack.providers.utils.kvstore import KVStore
 from llama_stack.providers.utils.pagination import paginate_records
 
-from .config import S3ImplConfig
+from .config import S3FilesImplConfig
+from .persistence import S3FilesPersistence
+
+logger = get_logger(name=__name__, category="files")
 
 
 class S3FilesAdapter(Files):
-    def __init__(self, config: S3ImplConfig):
+    def __init__(self, config: S3FilesImplConfig, kvstore: KVStore):
         self.config = config
-        self.session = aioboto3.Session(
-            aws_access_key_id=config.aws_access_key_id,
-            aws_secret_access_key=config.aws_secret_access_key,
-            region_name=config.region_name,
-        )
+        self.session = aioboto3.Session()
+        self.persistence = S3FilesPersistence(kvstore)
 
     async def initialize(self):
         # TODO: health check?
@@ -41,8 +43,16 @@ class S3FilesAdapter(Files):
     ) -> FileUploadResponse:
         """Create a presigned URL for uploading a file to S3."""
         try:
+            logger.debug(f"create_upload_session: bucket={bucket} key={key} mime_type={mime_type} size={size}")
+
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
                 url = await s3.generate_presigned_url(
@@ -52,15 +62,29 @@ class S3FilesAdapter(Files):
                         "Key": key,
                         "ContentType": mime_type,
                     },
-                    ExpiresIn=3600,  # URL expires in 1 hour
+                    ExpiresIn=3600,  # URL expires in 1 hour; TODO: should it be longer?
                 )
-                return FileUploadResponse(
+                logger.debug(f"Generated presigned URL: {url}")
+
+                response = FileUploadResponse(
                     id=f"{bucket}/{key}",
                     url=url,
                     offset=0,
                     size=size,
                 )
+
+                # Store the session info so upload_content_to_session can find it later
+                await self.persistence.store_upload_session(
+                    session_info=response,
+                    bucket=bucket,
+                    key=key,  # Store the original key for file reading
+                    mime_type=mime_type,
+                    size=size,
+                )
+
+                return response
         except ClientError as e:
+            logger.error(f"S3 ClientError in create_upload_session: {e}")
             raise Exception(f"Failed to create upload session: {str(e)}") from e
 
     async def upload_content_to_session(
@@ -68,31 +92,78 @@ class S3FilesAdapter(Files):
         upload_id: str,
     ) -> FileResponse | None:
         """Upload content to S3 using the upload session."""
-        bucket, key = upload_id.split("/", 1)
         try:
+            # Get the upload session info from persistence
+            session_info = await self.persistence.get_upload_session(upload_id)
+            if not session_info:
+                raise Exception(f"Upload session {upload_id} not found")
+
+            logger.debug(
+                f"upload_content_to_session: upload_id={upload_id} bucket={session_info.bucket} "
+                f"key={session_info.key} mime_type={session_info.mime_type} size={session_info.size}"
+            )
+
+            # Read the file content; the session key doubles as the local path
+            with open(session_info.key, "rb") as f:
+                content = f.read()
+            logger.debug(f"Read {len(content)} bytes")
+
+            # Use a single S3 client for all operations
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
-                response = await s3.head_object(Bucket=bucket, Key=key)
+                # Upload the content
+                await s3.put_object(
+                    Bucket=session_info.bucket,
+                    Key=session_info.key,
+                    Body=content,
+                    ContentType=session_info.mime_type,
+                )
+                logger.debug("Upload successful")
+
+                # Get the file info after upload
+                response = await s3.head_object(Bucket=session_info.bucket, Key=session_info.key)
+                logger.debug(
+                    f"File info retrieved: ContentType={response.get('ContentType')} "
+                    f"ContentLength={response['ContentLength']} LastModified={response['LastModified']}"
+                )
+
+                # Generate a presigned URL for reading
                 url = await s3.generate_presigned_url(
                     "get_object",
                     Params={
-                        "Bucket": bucket,
-                        "Key": key,
+                        "Bucket": session_info.bucket,
+                        "Key": session_info.key,
                     },
                     ExpiresIn=3600,
                 )
+
                 return FileResponse(
-                    bucket=bucket,
-                    key=key,
+                    bucket=session_info.bucket,
+                    key=session_info.key,  # Use the original key to match test expectations
                     mime_type=response.get("ContentType", "application/octet-stream"),
                     url=url,
                     bytes=response["ContentLength"],
                     created_at=int(response["LastModified"].timestamp()),
                 )
-        except ClientError:
-            return None
+        except ClientError as e:
+            logger.error(f"S3 ClientError in upload_content_to_session: {e}")
+            raise Exception(f"Failed to upload content: {str(e)}") from e
+        finally:
+            # Clean up the upload session
+            await self.persistence.delete_upload_session(upload_id)
 
     async def get_upload_session_info(
         self,
@@ -103,6 +174,9 @@ class S3FilesAdapter(Files):
         try:
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
                 response = await s3.head_object(Bucket=bucket, Key=key)
@@ -132,15 +206,18 @@ class S3FilesAdapter(Files):
         """List all available S3 buckets."""
         try:
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
                 response = await s3.list_buckets()
                 buckets = [BucketResponse(name=bucket["Name"]) for bucket in response["Buckets"]]
                 # Convert BucketResponse objects to dictionaries for pagination
                 bucket_dicts = [bucket.model_dump() for bucket in buckets]
                 return paginate_records(bucket_dicts, page, size)
         except ClientError as e:
             raise Exception(f"Failed to list buckets: {str(e)}") from e
@@ -152,37 +228,40 @@ class S3FilesAdapter(Files):
     ) -> PaginatedResponse:
         """List all files in an S3 bucket."""
         try:
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
                 response = await s3.list_objects_v2(Bucket=bucket)
                 files: list[FileResponse] = []
 
                 for obj in response.get("Contents", []):
                     url = await s3.generate_presigned_url(
                         "get_object",
                         Params={
                             "Bucket": bucket,
                             "Key": obj["Key"],
                         },
                         ExpiresIn=3600,
                     )
 
                     files.append(
                         FileResponse(
                             bucket=bucket,
                             key=obj["Key"],
                             mime_type="application/octet-stream",  # Default mime type
                             url=url,
                             bytes=obj["Size"],
                             created_at=int(obj["LastModified"].timestamp()),
                         )
                     )
 
                 # Convert FileResponse objects to dictionaries for pagination
                 file_dicts = [file.model_dump() for file in files]
                 return paginate_records(file_dicts, page, size)
         except ClientError as e:
             raise Exception(f"Failed to list files in bucket: {str(e)}") from e
@@ -195,6 +279,9 @@ class S3FilesAdapter(Files):
         try:
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
                 response = await s3.head_object(Bucket=bucket, Key=key)
@@ -227,9 +314,11 @@ class S3FilesAdapter(Files):
         try:
             async with self.session.client(
                 "s3",
+                aws_access_key_id=self.config.aws_access_key_id,
+                aws_secret_access_key=self.config.aws_secret_access_key,
+                region_name=self.config.region_name,
                 endpoint_url=self.config.endpoint_url,
             ) as s3:
-                # Delete the file
                 await s3.delete_object(Bucket=bucket, Key=key)
         except ClientError as e:
             raise Exception(f"Failed to delete file: {str(e)}") from e
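
The upload flow above is two-phase: `create_upload_session` persists the session alongside the presigned URL, and `upload_content_to_session` re-loads it and treats `session_info.key` as a local file path to read content from. A sketch of exercising that flow against the CI MinIO container; the region value, the local path, and the pre-existing `test-bucket` are assumptions:

```python
import asyncio

from llama_stack.providers.remote.files.object.s3.config import S3FilesImplConfig
from llama_stack.providers.remote.files.object.s3.s3_files import S3FilesAdapter
from llama_stack.providers.utils.kvstore import kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


async def main() -> None:
    # Credentials and endpoint match the MinIO container from the workflow;
    # the region is an assumption (MinIO accepts any).
    config = S3FilesImplConfig(
        aws_access_key_id="ROOTNAME",
        aws_secret_access_key="CHANGEME123",
        region_name="us-east-1",
        endpoint_url="http://localhost:9000",
    )
    kvstore = await kvstore_impl(SqliteKVStoreConfig(path=":memory:"))
    await kvstore.initialize()
    adapter = S3FilesAdapter(config, kvstore)
    await adapter.initialize()

    # The key doubles as the local path upload_content_to_session reads from.
    local_path = "/tmp/hello.txt"  # hypothetical
    with open(local_path, "wb") as f:
        f.write(b"hello")

    session = await adapter.create_upload_session(
        bucket="test-bucket",  # assumed to exist already
        key=local_path,
        mime_type="text/plain",
        size=5,
    )
    uploaded = await adapter.upload_content_to_session(session.id)
    if uploaded:
        print(uploaded.url)


asyncio.run(main())
```
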
diff --git a/pyproject.toml b/pyproject.toml
index ee180c4c9..bf5dc6dad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -69,6 +69,7 @@ unit = [
     "chardet",
     "qdrant-client",
    "opentelemetry-exporter-otlp-proto-http",
+    "aioboto3",
 ]
 # These are the core dependencies required for running integration tests. They are shared across all
 # providers. If a provider requires additional dependencies, please add them to your environment
diff --git a/tests/integration/files/conftest.py b/tests/integration/files/conftest.py
index 132c333f3..509668c28 100644
--- a/tests/integration/files/conftest.py
+++ b/tests/integration/files/conftest.py
@@ -1,4 +1,10 @@
-from typing import AsyncGenerator
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from collections.abc import AsyncGenerator
 
 import pytest
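
The conftest hunk above only touches the header and one import; the tests that follow call `await anext(s3_files)`, so they assume an async-generator fixture that yields an initialized adapter. A hypothetical reconstruction of that fixture (every config value here is a placeholder, not the repo's actual fixture):

```python
from collections.abc import AsyncGenerator

import pytest

from llama_stack.providers.remote.files.object.s3.config import S3FilesImplConfig
from llama_stack.providers.remote.files.object.s3.s3_files import S3FilesAdapter
from llama_stack.providers.utils.kvstore import kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


@pytest.fixture
async def s3_files() -> AsyncGenerator[S3FilesAdapter, None]:
    # Placeholder values mirroring the CI MinIO container.
    config = S3FilesImplConfig(
        aws_access_key_id="ROOTNAME",
        aws_secret_access_key="CHANGEME123",
        region_name="us-east-1",
        endpoint_url="http://localhost:9000",
    )
    kvstore = await kvstore_impl(SqliteKVStoreConfig(path=":memory:"))
    await kvstore.initialize()
    adapter = S3FilesAdapter(config, kvstore)
    await adapter.initialize()
    yield adapter
```
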
diff --git a/tests/integration/files/test_s3_integration.py b/tests/integration/files/test_s3_integration.py
new file mode 100644
index 000000000..2a5df1993
--- /dev/null
+++ b/tests/integration/files/test_s3_integration.py
@@ -0,0 +1,205 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+
+import aioboto3
+import aiohttp
+import botocore
+import pytest
+
+pytestmark = pytest.mark.integration
+
+
+@pytest.mark.asyncio
+async def test_file_upload_download_flow(s3_files, tmp_path):
+    """Test the complete file upload and download flow."""
+    # Get the adapter from the fixture
+    adapter = await anext(s3_files)
+
+    # Test data; the key doubles as the local path the adapter reads from
+    bucket = "test-bucket"
+    key = tmp_path / "test-file.txt"
+    content = b"Hello, this is a test file content!"
+    key.write_bytes(content)
+    mime_type = "text/plain"
+
+    # Create bucket and upload file
+    async with aioboto3.Session().client(
+        "s3",
+        endpoint_url=adapter.config.endpoint_url,
+        aws_access_key_id=adapter.config.aws_access_key_id,
+        aws_secret_access_key=adapter.config.aws_secret_access_key,
+        region_name=adapter.config.region_name,
+    ) as s3:
+        try:
+            await s3.create_bucket(Bucket=bucket)
+        except botocore.exceptions.ClientError as e:
+            if e.response["Error"]["Code"] != "BucketAlreadyOwnedByYou":
+                raise
+        except Exception as e:
+            print(f"Unexpected error creating bucket: {e}")
+            raise
+
+        # Create upload session
+        upload_response = await adapter.create_upload_session(
+            bucket=bucket, key=key.as_posix(), mime_type=mime_type, size=len(content)
+        )
+
+        # Upload content using the adapter
+        response = await adapter.upload_content_to_session(upload_response.id)
+        assert response is not None
+        assert response.bucket == bucket
+        assert response.key == key.as_posix()
+        assert response.bytes == len(content)
+
+        # Verify the file exists
+        file_info = await adapter.get_file(bucket, key.as_posix())
+        assert file_info.bucket == bucket
+        assert file_info.key == key.as_posix()
+        assert file_info.mime_type == mime_type
+        assert file_info.bytes == len(content)
+
+        # Download the file using the presigned URL
+        async with aiohttp.ClientSession() as session:
+            async with session.get(file_info.url) as http_response:
+                assert http_response.status == 200
+                downloaded_content = await http_response.read()
+                assert downloaded_content == content
+
+        # Clean up - delete the file
+        await adapter.delete_file(bucket, key.as_posix())
+
+        # Remove the test bucket
+        await s3.delete_bucket(Bucket=bucket)
+
+
+@pytest.mark.asyncio
+async def test_pagination(s3_files, tmp_path):
+    """Test pagination functionality."""
+    bucket = "pagination-test"
+    filenames = [f"file_{i}.txt" for i in range(15)]
+    content = b"test content"
+    mime_type = "text/plain"
+
+    # Get the adapter from the fixture
+    adapter = await anext(s3_files)
+
+    # Create bucket
+    async with adapter.session.client(
+        "s3",
+        aws_access_key_id=adapter.config.aws_access_key_id,
+        aws_secret_access_key=adapter.config.aws_secret_access_key,
+        region_name=adapter.config.region_name,
+        endpoint_url=adapter.config.endpoint_url,
+    ) as s3:
+        try:
+            await s3.create_bucket(Bucket=bucket)
+        except botocore.exceptions.ClientError as e:
+            if e.response["Error"]["Code"] != "BucketAlreadyOwnedByYou":
+                raise
+
+    # Upload files using the proper upload methods; the keys are the local
+    # paths because upload_content_to_session reads its content from disk
+    keys = []
+    for filename in filenames:
+        # Create temporary file
+        temp_file = tmp_path / filename
+        temp_file.write_bytes(content)
+        key = temp_file.as_posix()
+        keys.append(key)
+
+        # Create upload session
+        upload_response = await adapter.create_upload_session(
+            bucket=bucket, key=key, mime_type=mime_type, size=len(content)
+        )
+
+        # Upload content using the adapter
+        response = await adapter.upload_content_to_session(upload_response.id)
+        assert response is not None
+        assert response.bucket == bucket
+        assert response.key == key
+        assert response.bytes == len(content)
+
+    # Test first page
+    page1 = await adapter.list_files_in_bucket(bucket, page=1, size=5)
+    assert len(page1.items) == 5
+    assert page1.total == 15
+
+    # Test second page
+    page2 = await adapter.list_files_in_bucket(bucket, page=2, size=5)
+    assert len(page2.items) == 5
+    assert page2.total == 15
+
+    # Verify no overlap between pages
+    page1_keys = {item["key"] for item in page1.items}
+    page2_keys = {item["key"] for item in page2.items}
+    assert not page1_keys.intersection(page2_keys)
+
+    # Also test list_all_buckets
+    buckets = await adapter.list_all_buckets()
+    assert len(buckets.items) > 0
+    assert any(b["name"] == bucket for b in buckets.items)
+
+    # Clean up - delete all files and the bucket
+    async with adapter.session.client(
+        "s3",
+        aws_access_key_id=adapter.config.aws_access_key_id,
+        aws_secret_access_key=adapter.config.aws_secret_access_key,
+        region_name=adapter.config.region_name,
+        endpoint_url=adapter.config.endpoint_url,
+    ) as s3:
+        for key in keys:
+            await adapter.delete_file(bucket, key)
+        await s3.delete_bucket(Bucket=bucket)
+
+
+# @pytest.mark.asyncio
+# async def test_large_file_upload(s3_files):
+#     """Test uploading a large file."""
+#     bucket = "large-file-test"
+#     key = "large-file.bin"
+#     mime_type = "application/octet-stream"
+
+#     # Create a 5MB file
+#     content = os.urandom(5 * 1024 * 1024)
+
+#     # Create bucket
+#     async with s3_files.session.client("s3") as s3:
+#         await s3.create_bucket(Bucket=bucket)
+
+#     # Create upload session
+#     upload_response = await s3_files.create_upload_session(
+#         bucket=bucket, key=key, mime_type=mime_type, size=len(content)
+#     )
+
+#     # Upload content
+#     async with aiohttp.ClientSession() as session:
+#         async with session.put(upload_response.url, data=content) as response:
+#             assert response.status == 200
+
+#     # Verify file
+#     file_info = await s3_files.get_file(bucket, key)
+#     assert file_info.bytes == len(content)
+#     assert file_info.mime_type == mime_type
+
+
+# @pytest.mark.asyncio
+# async def test_error_handling(s3_files):
+#     """Test error handling for various scenarios."""
+#     bucket = "error-test"
+#     key = "non-existent.txt"
+
+#     # Test getting non-existent file
+#     with pytest.raises(Exception):
+#         await s3_files.get_file(bucket, key)
+
+#     # Test listing files in non-existent bucket
+#     with pytest.raises(Exception):
+#         await s3_files.list_files_in_bucket(bucket)
+
+#     # Test deleting non-existent file
+#     with pytest.raises(Exception):
+#         await s3_files.delete_file(bucket, key)
diff --git a/tests/unit/providers/files/test_remote_files_s3.py b/tests/unit/providers/files/test_remote_files_s3.py
index ef80d7269..520c4e992 100644
--- a/tests/unit/providers/files/test_remote_files_s3.py
+++ b/tests/unit/providers/files/test_remote_files_s3.py
@@ -1,7 +1,17 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from collections.abc import AsyncGenerator
+
 import pytest
 
 from llama_stack.providers.remote.files.object.s3.config import S3FilesImplConfig
 from llama_stack.providers.remote.files.object.s3.s3_files import S3FilesAdapter
+from llama_stack.providers.utils.kvstore import KVStore, kvstore_impl
+from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig
 
 
 @pytest.fixture
@@ -15,8 +25,22 @@ def s3_config():
 
 
 @pytest.fixture
-async def s3_files(s3_config):
-    adapter = S3FilesAdapter(s3_config)
+async def kvstore() -> AsyncGenerator[KVStore, None]:
+    """Create a SQLite KV store for testing."""
+    config = SqliteKVStoreConfig(
+        path=":memory:"  # Use in-memory SQLite for tests
+    )
+    store = await kvstore_impl(config)
+    await store.initialize()
+    yield store
+
+
+@pytest.fixture
+async def s3_files(s3_config, kvstore):
+    adapter = S3FilesAdapter(
+        s3_config,
+        kvstore,
+    )
     await adapter.initialize()
     return adapter
diff --git a/uv.lock b/uv.lock
index 048e6e202..26807058a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -16,6 +16,51 @@ resolution-markers = [
     "python_full_version == '3.12.*' and sys_platform == 'darwin'",
 ]
 
+[[package]]
+name = "aioboto3"
+version = "14.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "aiobotocore", extra = ["boto3"] },
+    { name = "aiofiles" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/79/b7/2f0d45cf31f77f8432102d7225d189e6e65cc7a16a32a8ac929eabd719a7/aioboto3-14.3.0.tar.gz", hash = "sha256:1d18f88bb56835c607b62bb6cb907754d717bedde3ddfff6935727cb48a80135", size = 322658, upload-time = "2025-05-07T15:23:59.262Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0e/b0/f2415f03af890693ba8cb669c67f30b9ffa8b2065ecf91cc92e6782b5aa2/aioboto3-14.3.0-py3-none-any.whl", hash = "sha256:aec5de94e9edc1ffbdd58eead38a37f00ddac59a519db749a910c20b7b81bca7", size = 35697, upload-time = "2025-05-07T15:23:57.539Z" },
+]
+
+[[package]]
+name = "aiobotocore"
+version = "2.22.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "aiohttp" },
+    { name = "aioitertools" },
+    { name = "botocore" },
+    { name = "jmespath" },
+    { name = "multidict" },
+    { name = "python-dateutil" },
+    { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9c/4c/113c4f5611103bba8e5252805fbee7944f5d9541addba9a96b091c0c4308/aiobotocore-2.22.0.tar.gz", hash = "sha256:11091477266b75c2b5d28421c1f2bc9a87d175d0b8619cb830805e7a113a170b", size = 110322, upload-time = "2025-05-01T16:45:45.484Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/00/8e/ffa5840cb7de19ada85bda1fae1ae22671a18992e9373f2e2df9db5389b5/aiobotocore-2.22.0-py3-none-any.whl", hash = "sha256:b4e6306f79df9d81daff1f9d63189a2dbee4b77ce3ab937304834e35eaaeeccf", size = 78930, upload-time = "2025-05-01T16:45:43.508Z" },
+]
+
+[package.optional-dependencies]
+boto3 = [
+    { name = "boto3" },
+]
+
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" },
+]
"sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.5.0" @@ -107,6 +152,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/54/ebb815bc0fe057d8e7a11c086c479e972e827082f39aeebc6019dd4f0862/aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2", size = 436452, upload-time = "2025-02-24T16:01:23.611Z" }, ] +[[package]] +name = "aioitertools" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369, upload-time = "2024-09-02T03:33:40.349Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345, upload-time = "2024-09-02T03:34:59.454Z" }, +] + [[package]] name = "aiosignal" version = "1.3.2" @@ -299,6 +353,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ed/4d/1392562369b1139e741b30d624f09fe7091d17dd5579fae5732f044b12bb/blobfile-3.0.0-py3-none-any.whl", hash = "sha256:48ecc3307e622804bd8fe13bf6f40e6463c4439eba7a1f9ad49fd78aa63cc658", size = 75413, upload-time = "2024-08-27T00:02:51.518Z" }, ] +[[package]] +name = "boto3" +version = "1.37.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/3f/135ec0771e6d0e1af2ad7023a15df6677d96112072838d948c9b5075efe1/boto3-1.37.3.tar.gz", hash = "sha256:21f3ce0ef111297e63a6eb998a25197b8c10982970c320d4c6e8db08be2157be", size = 111160, upload-time = "2025-02-27T20:28:15.588Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/8c/213511a505af2239a673de4de145d013379275c569185187922f93dbdf14/boto3-1.37.3-py3-none-any.whl", hash = "sha256:2063b40af99fd02f6228ff52397b552ff3353831edaf8d25cc04801827ab9794", size = 139344, upload-time = "2025-02-27T20:28:13.085Z" }, +] + +[[package]] +name = "botocore" +version = "1.37.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/fb/b243ab806d2e1e6b8a475b731cc59a1f1e4709eded4884b988a27bbc996b/botocore-1.37.3.tar.gz", hash = "sha256:fe8403eb55a88faf9b0f9da6615e5bee7be056d75e17af66c3c8f0a3b0648da4", size = 13574648, upload-time = "2025-02-27T20:27:59.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/54/772118f15b5990173aa5264946cc8c9ff70c8f02d72ee6d63167a985188c/botocore-1.37.3-py3-none-any.whl", hash = "sha256:d01bd3bf4c80e61fa88d636ad9f5c9f60a551d71549b481386c6b4efe0bb2b2e", size = 13342066, upload-time = "2025-02-27T20:27:53.137Z" }, +] + [[package]] name = "braintrust-core" version = "0.0.58" @@ -1255,6 +1337,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/61/c80ef80ed8a0a21158e289ef70dac01e351d929a1c30cb0f49be60772547/jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566", size = 
202374, upload-time = "2024-12-09T18:10:26.958Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "jsonschema" version = "4.23.0" @@ -1502,6 +1593,7 @@ ui = [ { name = "streamlit-option-menu" }, ] unit = [ + { name = "aioboto3" }, { name = "aiohttp" }, { name = "aiosqlite" }, { name = "chardet" }, @@ -1514,6 +1606,7 @@ unit = [ [package.metadata] requires-dist = [ + { name = "aioboto3", marker = "extra == 'unit'" }, { name = "aiohttp", marker = "extra == 'test'" }, { name = "aiohttp", marker = "extra == 'unit'" }, { name = "aiosqlite", marker = "extra == 'test'" }, @@ -3419,6 +3512,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/a8/d71f44b93e3aa86ae232af1f2126ca7b95c0f515ec135462b3e1f351441c/ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", size = 10177499, upload-time = "2025-02-10T12:59:42.989Z" }, ] +[[package]] +name = "s3transfer" +version = "0.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/1390172471d569e281fcfd29b92f2f73774e95972c965d14b6c802ff2352/s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a", size = 148042, upload-time = "2025-02-26T20:44:57.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/81/48c41b554a54d75d4407740abb60e3a102ae416284df04d1dbdcbe3dbf24/s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d", size = 84246, upload-time = "2025-02-26T20:44:55.509Z" }, +] + [[package]] name = "safetensors" version = "0.5.3"