Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-02 16:54:42 +00:00)

Merge 1afd33c21e into a1fbfb51e2 (commit 97266dbca2)

20 changed files with 1064 additions and 132 deletions
.github/workflows/integration-tests.yml (vendored; 16 lines changed)

```diff
@@ -24,7 +24,7 @@ jobs:
       matrix:
         # Listing tests manually since some of them currently fail
         # TODO: generate matrix list from tests/integration when fixed
-        test-type: [agents, inference, datasets, inspect, scoring, post_training, providers]
+        test-type: [agents, inference, datasets, inspect, scoring, post_training, providers, files]
         client-type: [library, http]
       fail-fast: false # we want to run all tests regardless of failure

@@ -52,6 +52,20 @@ jobs:
           uv pip install -e .
           llama stack build --template ollama --image-type venv

+      - name: Setup minio when testing files
+        if: matrix.test-type == 'files'
+        run: |
+          mkdir -p ~/minio/data
+          docker run \
+            -d \
+            -p 9000:9000 \
+            -p 9001:9001 \
+            --name minio \
+            -v ~/minio/data:/data \
+            -e "MINIO_ROOT_USER=ROOTNAME" \
+            -e "MINIO_ROOT_PASSWORD=CHANGEME123" \
+            quay.io/minio/minio server /data --console-address ":9001"
+
       - name: Start Llama Stack server in background
         if: matrix.client-type == 'http'
         env:
```
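When debugging the new `files` matrix entry locally, the MinIO container can be sanity-checked from Python before running the tests. This is a minimal sketch, assuming the container is up with the ports and credentials from the workflow step above; `aioboto3` is the same client library the provider itself uses, and the region value is arbitrary since MinIO accepts any region:

```python
import asyncio

import aioboto3


async def check_minio() -> None:
    # Endpoint and credentials match the docker run flags in the workflow step.
    async with aioboto3.Session().client(
        "s3",
        endpoint_url="http://localhost:9000",
        aws_access_key_id="ROOTNAME",
        aws_secret_access_key="CHANGEME123",
        region_name="us-east-1",  # arbitrary for MinIO
    ) as s3:
        response = await s3.list_buckets()
        print("MinIO reachable; buckets:", [b["Name"] for b in response["Buckets"]])


asyncio.run(check_minio())
```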
docs/_static/llama-stack-spec.html (vendored; 93 lines changed)

```diff
@@ -568,11 +568,11 @@
       "get": {
         "responses": {
           "200": {
-            "description": "OK",
+            "description": "PaginatedResponse with the list of buckets",
             "content": {
               "application/json": {
                 "schema": {
-                  "$ref": "#/components/schemas/ListBucketResponse"
+                  "$ref": "#/components/schemas/PaginatedResponse"
                 }
               }
             }
           }

@@ -596,11 +596,21 @@
       "description": "List all buckets.",
       "parameters": [
         {
-          "name": "bucket",
+          "name": "page",
           "in": "query",
-          "required": true,
+          "description": "The page number (1-based). If None, starts from first page.",
+          "required": false,
           "schema": {
-            "type": "string"
+            "type": "integer"
+          }
+        },
+        {
+          "name": "size",
+          "in": "query",
+          "description": "Number of items per page. If None or -1, returns all items.",
+          "required": false,
+          "schema": {
+            "type": "integer"
           }
         }
       ]

@@ -1850,7 +1860,7 @@
         "parameters": []
       }
     },
-    "/v1/files/session:{upload_id}": {
+    "/v1/files/session/{upload_id}": {
       "get": {
         "responses": {
           "200": {

@@ -2631,11 +2641,11 @@
       "get": {
         "responses": {
           "200": {
-            "description": "OK",
+            "description": "PaginatedResponse with the list of files",
             "content": {
               "application/json": {
                 "schema": {
-                  "$ref": "#/components/schemas/ListFileResponse"
+                  "$ref": "#/components/schemas/PaginatedResponse"
                 }
               }
             }
           }

@@ -2666,6 +2676,24 @@
           "schema": {
             "type": "string"
           }
+        },
+        {
+          "name": "page",
+          "in": "query",
+          "description": "The page number (1-based). If None, starts from first page.",
+          "required": false,
+          "schema": {
+            "type": "integer"
+          }
+        },
+        {
+          "name": "size",
+          "in": "query",
+          "description": "Number of items per page. If None or -1, returns all items.",
+          "required": false,
+          "schema": {
+            "type": "integer"
+          }
         }
       ]
     }

@@ -9255,37 +9283,6 @@
       ],
       "title": "Job"
     },
-    "BucketResponse": {
-      "type": "object",
-      "properties": {
-        "name": {
-          "type": "string"
-        }
-      },
-      "additionalProperties": false,
-      "required": [
-        "name"
-      ],
-      "title": "BucketResponse"
-    },
-    "ListBucketResponse": {
-      "type": "object",
-      "properties": {
-        "data": {
-          "type": "array",
-          "items": {
-            "$ref": "#/components/schemas/BucketResponse"
-          },
-          "description": "List of FileResponse entries"
-        }
-      },
-      "additionalProperties": false,
-      "required": [
-        "data"
-      ],
-      "title": "ListBucketResponse",
-      "description": "Response representing a list of file entries."
-    },
     "ListBenchmarksResponse": {
       "type": "object",
       "properties": {

@@ -9318,24 +9315,6 @@
       ],
       "title": "ListDatasetsResponse"
     },
-    "ListFileResponse": {
-      "type": "object",
-      "properties": {
-        "data": {
-          "type": "array",
-          "items": {
-            "$ref": "#/components/schemas/FileResponse"
-          },
-          "description": "List of FileResponse entries"
-        }
-      },
-      "additionalProperties": false,
-      "required": [
-        "data"
-      ],
-      "title": "ListFileResponse",
-      "description": "Response representing a list of file entries."
-    },
     "ListModelsResponse": {
       "type": "object",
       "properties": {
```
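The reworked endpoint can be exercised directly once a stack server is running. A minimal sketch with `requests`, where the base URL is an assumption (adjust to your deployment) and the response body is the `PaginatedResponse` JSON described by the schema above:

```python
import requests

BASE_URL = "http://localhost:8321"  # assumed server address; adjust to your deployment

# page and size are both optional; omitting them falls back to the documented
# defaults (first page / all items).
resp = requests.get(f"{BASE_URL}/v1/files", params={"page": 1, "size": 10})
resp.raise_for_status()
print(resp.json())  # a PaginatedResponse, per the schema above
```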
docs/_static/llama-stack-spec.yaml (vendored; 77 lines changed)

```diff
@@ -379,11 +379,12 @@ paths:
     get:
       responses:
         '200':
-          description: OK
+          description: >-
+            PaginatedResponse with the list of buckets
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/ListBucketResponse'
+                $ref: '#/components/schemas/PaginatedResponse'
         '400':
           $ref: '#/components/responses/BadRequest400'
         '429':

@@ -398,11 +399,20 @@ paths:
       - Files
       description: List all buckets.
       parameters:
-        - name: bucket
+        - name: page
           in: query
-          required: true
+          description: >-
+            The page number (1-based). If None, starts from first page.
+          required: false
           schema:
-            type: string
+            type: integer
+        - name: size
+          in: query
+          description: >-
+            Number of items per page. If None or -1, returns all items.
+          required: false
+          schema:
+            type: integer
     post:
       responses:
         '200':

@@ -1261,7 +1271,7 @@ paths:
       - PostTraining (Coming Soon)
       description: ''
       parameters: []
-  /v1/files/session:{upload_id}:
+  /v1/files/session/{upload_id}:
     get:
       responses:
         '200':

@@ -1816,11 +1826,11 @@ paths:
     get:
       responses:
         '200':
-          description: OK
+          description: PaginatedResponse with the list of files
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/ListFileResponse'
+                $ref: '#/components/schemas/PaginatedResponse'
         '400':
           $ref: '#/components/responses/BadRequest400'
         '429':

@@ -1841,6 +1851,20 @@ paths:
           required: true
           schema:
             type: string
+        - name: page
+          in: query
+          description: >-
+            The page number (1-based). If None, starts from first page.
+          required: false
+          schema:
+            type: integer
+        - name: size
+          in: query
+          description: >-
+            Number of items per page. If None or -1, returns all items.
+          required: false
+          schema:
+            type: integer
   /v1/models:
     get:
       responses:

@@ -6387,29 +6411,6 @@ components:
       - job_id
       - status
       title: Job
-    BucketResponse:
-      type: object
-      properties:
-        name:
-          type: string
-      additionalProperties: false
-      required:
-        - name
-      title: BucketResponse
-    ListBucketResponse:
-      type: object
-      properties:
-        data:
-          type: array
-          items:
-            $ref: '#/components/schemas/BucketResponse'
-          description: List of FileResponse entries
-      additionalProperties: false
-      required:
-        - data
-      title: ListBucketResponse
-      description: >-
-        Response representing a list of file entries.
     ListBenchmarksResponse:
       type: object
       properties:

@@ -6432,20 +6433,6 @@ components:
       required:
         - data
       title: ListDatasetsResponse
-    ListFileResponse:
-      type: object
-      properties:
-        data:
-          type: array
-          items:
-            $ref: '#/components/schemas/FileResponse'
-          description: List of FileResponse entries
-      additionalProperties: false
-      required:
-        - data
-      title: ListFileResponse
-      description: >-
-        Response representing a list of file entries.
     ListModelsResponse:
       type: object
       properties:
```
```diff
@@ -18,6 +18,7 @@ The `llamastack/distribution-ollama` distribution consists of the following prov
 | agents | `inline::meta-reference` |
 | datasetio | `remote::huggingface`, `inline::localfs` |
 | eval | `inline::meta-reference` |
+| files | `remote::s3` |
 | inference | `remote::ollama` |
 | safety | `inline::llama-guard` |
 | scoring | `inline::basic`, `inline::llm-as-judge`, `inline::braintrust` |

@@ -36,6 +37,12 @@ The following environment variables can be configured:
 - `OLLAMA_URL`: URL of the Ollama server (default: `http://127.0.0.1:11434`)
 - `INFERENCE_MODEL`: Inference model loaded into the Ollama server (default: `meta-llama/Llama-3.2-3B-Instruct`)
 - `SAFETY_MODEL`: Safety model loaded into the Ollama server (default: `meta-llama/Llama-Guard-3-1B`)
+- `AWS_ACCESS_KEY_ID`: AWS access key ID for S3 access (default: ``)
+- `AWS_SECRET_ACCESS_KEY`: AWS secret access key for S3 access (default: ``)
+- `AWS_REGION_NAME`: AWS region name for S3 access (default: ``)
+- `AWS_ENDPOINT_URL`: AWS endpoint URL for S3 access (for custom endpoints) (default: ``)
+- `AWS_BUCKET_NAME`: AWS bucket name for S3 access (default: ``)
+- `AWS_VERIFY_TLS`: Whether to verify TLS for S3 connections (default: `true`)

 ## Setting up Ollama server
```
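For illustration, here is one way these variables might be wired up from Python before launching the stack against a local MinIO endpoint; every value below is illustrative, not a default:

```python
import os

# Point the S3 files provider at a local MinIO instance (illustrative values).
os.environ.update(
    {
        "AWS_ACCESS_KEY_ID": "ROOTNAME",
        "AWS_SECRET_ACCESS_KEY": "CHANGEME123",
        "AWS_REGION_NAME": "us-east-1",
        "AWS_ENDPOINT_URL": "http://localhost:9000",  # custom endpoint, so the region is nominal
        "AWS_BUCKET_NAME": "my-llama-files",
        "AWS_VERIFY_TLS": "false",  # plain-HTTP local endpoint
    }
)
```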
```diff
@@ -8,6 +8,7 @@ from typing import Protocol, runtime_checkable

 from pydantic import BaseModel

+from llama_stack.apis.common.responses import PaginatedResponse
 from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
 from llama_stack.schema_utils import json_schema_type, webmethod

@@ -34,17 +35,6 @@ class BucketResponse(BaseModel):
     name: str


-@json_schema_type
-class ListBucketResponse(BaseModel):
-    """
-    Response representing a list of file entries.
-
-    :param data: List of FileResponse entries
-    """
-
-    data: list[BucketResponse]
-
-
 @json_schema_type
 class FileResponse(BaseModel):
     """

@@ -66,17 +56,6 @@ class FileResponse(BaseModel):
     created_at: int


-@json_schema_type
-class ListFileResponse(BaseModel):
-    """
-    Response representing a list of file entries.
-
-    :param data: List of FileResponse entries
-    """
-
-    data: list[FileResponse]
-
-
 @runtime_checkable
 @trace_protocol
 class Files(Protocol):

@@ -98,7 +77,7 @@ class Files(Protocol):
     """
     ...

-    @webmethod(route="/files/session:{upload_id}", method="POST", raw_bytes_request_body=True)
+    @webmethod(route="/files/session/{upload_id}", method="POST", raw_bytes_request_body=True)
     async def upload_content_to_session(
         self,
         upload_id: str,

@@ -111,7 +90,7 @@ class Files(Protocol):
     """
     ...

-    @webmethod(route="/files/session:{upload_id}", method="GET")
+    @webmethod(route="/files/session/{upload_id}", method="GET")
     async def get_upload_session_info(
         self,
         upload_id: str,

@@ -126,10 +105,15 @@ class Files(Protocol):
     @webmethod(route="/files", method="GET")
     async def list_all_buckets(
         self,
-        bucket: str,
-    ) -> ListBucketResponse:
+        page: int | None = None,
+        size: int | None = None,
+    ) -> PaginatedResponse:
         """
         List all buckets.

+        :param page: The page number (1-based). If None, starts from first page.
+        :param size: Number of items per page. If None or -1, returns all items.
+        :return: PaginatedResponse with the list of buckets
         """
         ...

@@ -137,11 +121,16 @@ class Files(Protocol):
     async def list_files_in_bucket(
         self,
         bucket: str,
-    ) -> ListFileResponse:
+        page: int | None = None,
+        size: int | None = None,
+    ) -> PaginatedResponse:
         """
         List all files in a bucket.

         :param bucket: Bucket name (valid chars: a-zA-Z0-9_-)
+        :param page: The page number (1-based). If None, starts from first page.
+        :param size: Number of items per page. If None or -1, returns all items.
+        :return: PaginatedResponse with the list of files
         """
         ...
```
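The page/size semantics in these docstrings (1-based pages; a `size` of `None` or `-1` returns everything) can be pinned down with a small helper. This sketch only mirrors what the docstrings promise and returns just the page slice rather than a full `PaginatedResponse`; the real implementation is `paginate_records` in `llama_stack.providers.utils.pagination`, used by the S3 adapter later in this commit:

```python
def paginate(records: list[dict], page: int | None = None, size: int | None = None) -> list[dict]:
    """Illustrative 1-based pagination matching the documented page/size semantics."""
    if size is None or size == -1:
        return records  # None or -1 means "return all items"
    current = page if page is not None else 1  # None means "start from the first page"
    start = (current - 1) * size
    return records[start : start + size]


rows = [{"key": f"file_{i}.txt"} for i in range(15)]
assert len(paginate(rows, page=1, size=5)) == 5
assert paginate(rows, page=3, size=5)[0]["key"] == "file_10.txt"
assert len(paginate(rows)) == 15  # no size given -> everything
```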
```diff
@@ -4,8 +4,26 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-from llama_stack.providers.datatypes import ProviderSpec
+from llama_stack.providers.datatypes import (
+    AdapterSpec,
+    Api,
+    ProviderSpec,
+    remote_provider_spec,
+)
+from llama_stack.providers.utils.kvstore import kvstore_dependencies


 def available_providers() -> list[ProviderSpec]:
-    return []
+    return [
+        remote_provider_spec(
+            api=Api.files,
+            adapter=AdapterSpec(
+                adapter_type="s3",
+                pip_packages=["aioboto3"] + kvstore_dependencies(),
+                module="llama_stack.providers.remote.files.object.s3",
+                config_class="llama_stack.providers.remote.files.object.s3.config.S3FilesImplConfig",
+                provider_data_validator="llama_stack.providers.remote.files.object.s3.S3ProviderDataValidator",
+            ),
+        ),
+    ]
```
llama_stack/providers/remote/files/object/s3/__init__.py (new file, 18 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .config import S3FilesImplConfig


async def get_adapter_impl(config: S3FilesImplConfig, _deps):
    from .s3_files import S3FilesAdapter

    impl = S3FilesAdapter(
        config,
        _deps,
    )
    await impl.initialize()
    return impl
```
llama_stack/providers/remote/files/object/s3/config.py (new file, 37 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from pydantic import BaseModel, Field

from llama_stack.providers.utils.kvstore import KVStoreConfig
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


class S3FilesImplConfig(BaseModel):
    """Configuration for S3 file storage provider."""

    aws_access_key_id: str = Field(description="AWS access key ID")
    aws_secret_access_key: str = Field(description="AWS secret access key")
    region_name: str | None = Field(default=None, description="AWS region name")
    endpoint_url: str | None = Field(default=None, description="Optional endpoint URL for S3 compatible services")
    bucket_name: str | None = Field(default=None, description="Default S3 bucket name")
    verify_tls: bool = Field(default=True, description="Verify TLS certificates")
    persistent_store: KVStoreConfig

    @classmethod
    def sample_run_config(cls, __distro_dir__: str) -> dict:
        return {
            "aws_access_key_id": "your-access-key-id",
            "aws_secret_access_key": "your-secret-access-key",
            "region_name": "us-west-2",
            "endpoint_url": None,
            "bucket_name": "your-bucket-name",
            "verify_tls": True,
            # Key must match the persistent_store field name above.
            "persistent_store": SqliteKVStoreConfig.sample_run_config(
                __distro_dir__=__distro_dir__,
                db_name="files_s3_store.db",
            ),
        }
```
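As a usage sketch, the config can be instantiated directly for an S3-compatible endpoint. The MinIO-style values here are illustrative, and the sqlite kvstore field name (`db_path`) is an assumption about the kvstore config class:

```python
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig

config = S3FilesImplConfig(
    aws_access_key_id="ROOTNAME",          # illustrative MinIO credentials
    aws_secret_access_key="CHANGEME123",
    region_name="us-east-1",
    endpoint_url="http://localhost:9000",  # S3-compatible endpoint
    bucket_name="my-llama-files",
    verify_tls=False,
    persistent_store=SqliteKVStoreConfig(db_path="/tmp/files_s3_store.db"),  # db_path assumed
)
```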
llama_stack/providers/remote/files/object/s3/persistence.py (new file, 78 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import json
import logging
from datetime import datetime, timezone

from pydantic import BaseModel

from llama_stack.apis.files.files import FileUploadResponse
from llama_stack.providers.utils.kvstore import KVStore

log = logging.getLogger(__name__)


class UploadSessionInfo(BaseModel):
    """Information about an upload session."""

    upload_id: str
    bucket: str
    key: str  # Original key for file reading
    s3_key: str  # S3 key for S3 operations
    mime_type: str
    size: int
    url: str
    created_at: datetime


class S3FilesPersistence:
    def __init__(self, kvstore: KVStore):
        self._kvstore = kvstore
        self._store: KVStore | None = None

    async def _get_store(self) -> KVStore:
        """Get the kvstore instance, initializing it if needed."""
        if self._store is None:
            self._store = self._kvstore
        return self._store

    async def store_upload_session(
        self, session_info: FileUploadResponse, bucket: str, key: str, mime_type: str, size: int
    ):
        """Store upload session information."""
        upload_info = UploadSessionInfo(
            upload_id=session_info.id,
            bucket=bucket,
            key=key,
            s3_key=key,
            mime_type=mime_type,
            size=size,
            url=session_info.url,
            created_at=datetime.now(timezone.utc),
        )

        store = await self._get_store()
        await store.set(
            key=f"upload_session:{session_info.id}",
            value=upload_info.model_dump_json(),
        )

    async def get_upload_session(self, upload_id: str) -> UploadSessionInfo | None:
        """Get upload session information."""
        store = await self._get_store()
        value = await store.get(
            key=f"upload_session:{upload_id}",
        )
        if not value:
            return None

        return UploadSessionInfo(**json.loads(value))

    async def delete_upload_session(self, upload_id: str) -> None:
        """Delete upload session information."""
        store = await self._get_store()
        await store.delete(key=f"upload_session:{upload_id}")
```
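A round trip through this persistence layer looks like the sketch below. It assumes an in-memory sqlite kvstore of the same shape the tests in this commit use (the `db_path` field name is an assumption) and a `FileUploadResponse` shaped like the one `create_upload_session` builds:

```python
import asyncio

from llama_stack.apis.files.files import FileUploadResponse
from llama_stack.providers.utils.kvstore import kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


async def main() -> None:
    store = await kvstore_impl(SqliteKVStoreConfig(db_path=":memory:"))  # field name assumed
    await store.initialize()
    persistence = S3FilesPersistence(store)

    # id/url mirror what create_upload_session produces: "{bucket}/{key}" plus a presigned URL.
    session = FileUploadResponse(id="my-bucket/report.txt", url="http://example.invalid/presigned", offset=0, size=11)
    await persistence.store_upload_session(session, bucket="my-bucket", key="report.txt", mime_type="text/plain", size=11)

    info = await persistence.get_upload_session("my-bucket/report.txt")
    assert info is not None and info.bucket == "my-bucket"

    await persistence.delete_upload_session("my-bucket/report.txt")
    assert await persistence.get_upload_session("my-bucket/report.txt") is None


asyncio.run(main())
```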
llama_stack/providers/remote/files/object/s3/s3_files.py (new file, 324 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import aioboto3
from botocore.exceptions import ClientError

from llama_stack.apis.common.responses import PaginatedResponse
from llama_stack.apis.files.files import (
    BucketResponse,
    FileResponse,
    Files,
    FileUploadResponse,
)
from llama_stack.log import get_logger
from llama_stack.providers.utils.kvstore import KVStore
from llama_stack.providers.utils.pagination import paginate_records

from .config import S3FilesImplConfig
from .persistence import S3FilesPersistence

logger = get_logger(name=__name__, category="files")


class S3FilesAdapter(Files):
    def __init__(self, config: S3FilesImplConfig, kvstore: KVStore):
        self.config = config
        self.session = aioboto3.Session()
        self.persistence = S3FilesPersistence(kvstore)

    async def initialize(self):
        # TODO: health check?
        pass

    async def create_upload_session(
        self,
        bucket: str,
        key: str,
        mime_type: str,
        size: int,
    ) -> FileUploadResponse:
        """Create a presigned URL for uploading a file to S3."""
        try:
            logger.debug(
                "create_upload_session",
                {"original_key": key, "s3_key": key, "bucket": bucket, "mime_type": mime_type, "size": size},
            )

            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                url = await s3.generate_presigned_url(
                    "put_object",
                    Params={
                        "Bucket": bucket,
                        "Key": key,
                        "ContentType": mime_type,
                    },
                    ExpiresIn=3600,  # URL expires in 1 hour - should it be longer?
                )
                logger.debug("Generated presigned URL", {"url": url})

            response = FileUploadResponse(
                id=f"{bucket}/{key}",
                url=url,
                offset=0,
                size=size,
            )

            # Store the session info
            await self.persistence.store_upload_session(
                session_info=response,
                bucket=bucket,
                key=key,  # Store the original key for file reading
                mime_type=mime_type,
                size=size,
            )

            return response
        except ClientError as e:
            logger.error("S3 ClientError in create_upload_session", {"error": str(e)})
            raise Exception(f"Failed to create upload session: {str(e)}") from e

    async def upload_content_to_session(
        self,
        upload_id: str,
    ) -> FileResponse | None:
        """Upload content to S3 using the upload session."""
        try:
            # Get the upload session info from persistence
            session_info = await self.persistence.get_upload_session(upload_id)
            if not session_info:
                raise Exception(f"Upload session {upload_id} not found")

            logger.debug(
                "upload_content_to_session",
                {
                    "upload_id": upload_id,
                    "bucket": session_info.bucket,
                    "key": session_info.key,
                    "mime_type": session_info.mime_type,
                    "size": session_info.size,
                },
            )

            # Read the file content
            with open(session_info.key, "rb") as f:
                content = f.read()
            logger.debug("Read content", {"length": len(content)})

            # Use a single S3 client for all operations
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                # Upload the content
                await s3.put_object(
                    Bucket=session_info.bucket, Key=session_info.key, Body=content, ContentType=session_info.mime_type
                )
                logger.debug("Upload successful")

                # Get the file info after upload
                response = await s3.head_object(Bucket=session_info.bucket, Key=session_info.key)
                logger.debug(
                    "File info retrieved",
                    {
                        "ContentType": response.get("ContentType"),
                        "ContentLength": response["ContentLength"],
                        "LastModified": response["LastModified"],
                    },
                )

                # Generate a presigned URL for reading
                url = await s3.generate_presigned_url(
                    "get_object",
                    Params={
                        "Bucket": session_info.bucket,
                        "Key": session_info.key,
                    },
                    ExpiresIn=3600,
                )

                return FileResponse(
                    bucket=session_info.bucket,
                    key=session_info.key,  # Use the original key to match test expectations
                    mime_type=response.get("ContentType", "application/octet-stream"),
                    url=url,
                    bytes=response["ContentLength"],
                    created_at=int(response["LastModified"].timestamp()),
                )
        except ClientError as e:
            logger.error("S3 ClientError in upload_content_to_session", {"error": str(e)})
            raise Exception(f"Failed to upload content: {str(e)}") from e
        finally:
            # Clean up the upload session
            await self.persistence.delete_upload_session(upload_id)

    async def get_upload_session_info(
        self,
        upload_id: str,
    ) -> FileUploadResponse:
        """Get information about an upload session."""
        bucket, key = upload_id.split("/", 1)
        try:
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                response = await s3.head_object(Bucket=bucket, Key=key)
                url = await s3.generate_presigned_url(
                    "put_object",
                    Params={
                        "Bucket": bucket,
                        "Key": key,
                        "ContentType": response.get("ContentType", "application/octet-stream"),
                    },
                    ExpiresIn=3600,
                )
                return FileUploadResponse(
                    id=upload_id,
                    url=url,
                    offset=0,
                    size=response["ContentLength"],
                )
        except ClientError as e:
            raise Exception(f"Failed to get upload session info: {str(e)}") from e

    async def list_all_buckets(
        self,
        page: int | None = None,
        size: int | None = None,
    ) -> PaginatedResponse:
        """List all available S3 buckets."""
        try:
            # The aioboto3 client is an async context manager; it must be entered
            # before calling operations on it.
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                response = await s3.list_buckets()
            buckets = [BucketResponse(name=bucket["Name"]) for bucket in response["Buckets"]]
            # Convert BucketResponse objects to dictionaries for pagination
            bucket_dicts = [bucket.model_dump() for bucket in buckets]
            return paginate_records(bucket_dicts, page, size)
        except ClientError as e:
            raise Exception(f"Failed to list buckets: {str(e)}") from e

    async def list_files_in_bucket(
        self,
        bucket: str,
        page: int | None = None,
        size: int | None = None,
    ) -> PaginatedResponse:
        """List all files in an S3 bucket."""
        try:
            # Use a single S3 client for both the listing and the presigned URLs
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                response = await s3.list_objects_v2(Bucket=bucket)
                files: list[FileResponse] = []

                for obj in response.get("Contents", []):
                    url = await s3.generate_presigned_url(
                        "get_object",
                        Params={
                            "Bucket": bucket,
                            "Key": obj["Key"],
                        },
                        ExpiresIn=3600,
                    )

                    files.append(
                        FileResponse(
                            bucket=bucket,
                            key=obj["Key"],
                            mime_type="application/octet-stream",  # Default mime type
                            url=url,
                            bytes=obj["Size"],
                            created_at=int(obj["LastModified"].timestamp()),
                        )
                    )

            # Convert FileResponse objects to dictionaries for pagination
            file_dicts = [file.model_dump() for file in files]
            return paginate_records(file_dicts, page, size)
        except ClientError as e:
            raise Exception(f"Failed to list files in bucket: {str(e)}") from e

    async def get_file(
        self,
        bucket: str,
        key: str,
    ) -> FileResponse:
        """Get information about a specific file in S3."""
        try:
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                response = await s3.head_object(Bucket=bucket, Key=key)
                url = await s3.generate_presigned_url(
                    "get_object",
                    Params={
                        "Bucket": bucket,
                        "Key": key,
                    },
                    ExpiresIn=3600,
                )

                return FileResponse(
                    bucket=bucket,
                    key=key,
                    mime_type=response.get("ContentType", "application/octet-stream"),
                    url=url,
                    bytes=response["ContentLength"],
                    created_at=int(response["LastModified"].timestamp()),
                )
        except ClientError as e:
            raise Exception(f"Failed to get file info: {str(e)}") from e

    async def delete_file(
        self,
        bucket: str,
        key: str,
    ) -> None:
        """Delete a file from S3."""
        try:
            async with self.session.client(
                "s3",
                aws_access_key_id=self.config.aws_access_key_id,
                aws_secret_access_key=self.config.aws_secret_access_key,
                region_name=self.config.region_name,
                endpoint_url=self.config.endpoint_url,
            ) as s3:
                await s3.delete_object(Bucket=bucket, Key=key)
        except ClientError as e:
            raise Exception(f"Failed to delete file: {str(e)}") from e
```
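Because `create_upload_session` hands back a presigned PUT URL, a client can also push bytes to S3 or MinIO directly over HTTP instead of going through `upload_content_to_session`; the commented-out `test_large_file_upload` integration test in this commit uses the same pattern. A minimal sketch with `aiohttp`, assuming `adapter` is an initialized `S3FilesAdapter`:

```python
import aiohttp


async def upload_via_presigned_url(adapter, bucket: str, key: str, content: bytes) -> None:
    # Ask the adapter for a presigned PUT URL (valid for an hour, per ExpiresIn above).
    upload = await adapter.create_upload_session(
        bucket=bucket, key=key, mime_type="application/octet-stream", size=len(content)
    )

    # PUT the raw bytes straight to the object store; no AWS credentials needed client-side.
    async with aiohttp.ClientSession() as session:
        async with session.put(upload.url, data=content) as response:
            response.raise_for_status()
```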
```diff
@@ -459,6 +459,7 @@
         "uvicorn"
     ],
     "ollama": [
+        "aioboto3",
         "aiohttp",
         "aiosqlite",
         "autoevals",
```
```diff
@@ -29,4 +29,6 @@ distribution_spec:
     - inline::rag-runtime
     - remote::model-context-protocol
     - remote::wolfram-alpha
+  files:
+  - remote::s3
   image_type: conda
```
```diff
@@ -35,6 +35,7 @@ def get_distribution_template() -> DistributionTemplate:
         "remote::model-context-protocol",
         "remote::wolfram-alpha",
     ],
+    "files": ["remote::s3"],
 }
 name = "ollama"
 inference_provider = Provider(

@@ -48,6 +49,20 @@ def get_distribution_template() -> DistributionTemplate:
     config=FaissVectorIOConfig.sample_run_config(f"~/.llama/distributions/{name}"),
 )

+# Add S3 provider configuration
+s3_provider = Provider(
+    provider_id="s3",
+    provider_type="remote::s3",
+    config={
+        "aws_access_key_id": "${env.AWS_ACCESS_KEY_ID:}",
+        "aws_secret_access_key": "${env.AWS_SECRET_ACCESS_KEY:}",
+        "region_name": "${env.AWS_REGION_NAME:}",
+        "endpoint_url": "${env.AWS_ENDPOINT_URL:}",
+        "bucket_name": "${env.AWS_BUCKET_NAME:}",
+        "verify_tls": "${env.AWS_VERIFY_TLS:true}",
+    },
+)
+
 inference_model = ModelInput(
     model_id="${env.INFERENCE_MODEL}",
     provider_id="ollama",

@@ -92,6 +107,7 @@ def get_distribution_template() -> DistributionTemplate:
     provider_overrides={
         "inference": [inference_provider],
         "vector_io": [vector_io_provider_faiss],
+        "files": [s3_provider],
     },
     default_models=[inference_model, embedding_model],
     default_tool_groups=default_tool_groups,

@@ -100,6 +116,7 @@ def get_distribution_template() -> DistributionTemplate:
     provider_overrides={
         "inference": [inference_provider],
         "vector_io": [vector_io_provider_faiss],
+        "files": [s3_provider],
         "safety": [
             Provider(
                 provider_id="llama-guard",

@@ -148,5 +165,30 @@ def get_distribution_template() -> DistributionTemplate:
             "meta-llama/Llama-Guard-3-1B",
             "Safety model loaded into the Ollama server",
         ),
+        # Add AWS S3 environment variables
+        "AWS_ACCESS_KEY_ID": (
+            "",
+            "AWS access key ID for S3 access",
+        ),
+        "AWS_SECRET_ACCESS_KEY": (
+            "",
+            "AWS secret access key for S3 access",
+        ),
+        "AWS_REGION_NAME": (
+            "",
+            "AWS region name for S3 access",
+        ),
+        "AWS_ENDPOINT_URL": (
+            "",
+            "AWS endpoint URL for S3 access (for custom endpoints)",
+        ),
+        "AWS_BUCKET_NAME": (
+            "",
+            "AWS bucket name for S3 access",
+        ),
+        "AWS_VERIFY_TLS": (
+            "true",
+            "Whether to verify TLS for S3 connections",
+        ),
     },
 )
```
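The `${env.VAR:default}` strings in this template are placeholders resolved against the environment at run time. A rough, purely illustrative sketch of that substitution semantics (the real resolver lives in the stack's config machinery and may handle missing variables differently):

```python
import os
import re

_ENV_PATTERN = re.compile(r"\$\{env\.([A-Z0-9_]+):?([^}]*)\}")


def resolve_env(value: str) -> str:
    """Replace ${env.VAR:default} placeholders with the env value or the default."""
    def _sub(match: re.Match) -> str:
        var, default = match.group(1), match.group(2)
        return os.environ.get(var, default)

    return _ENV_PATTERN.sub(_sub, value)


# With AWS_VERIFY_TLS unset, the default after the colon wins:
print(resolve_env("${env.AWS_VERIFY_TLS:true}"))  # -> "true"
```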
```diff
@@ -4,6 +4,7 @@ apis:
 - agents
 - datasetio
 - eval
+- files
 - inference
 - safety
 - scoring

@@ -101,6 +102,16 @@ providers:
     provider_type: remote::wolfram-alpha
     config:
       api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
+  files:
+  - provider_id: s3
+    provider_type: remote::s3
+    config:
+      aws_access_key_id: ${env.AWS_ACCESS_KEY_ID:}
+      aws_secret_access_key: ${env.AWS_SECRET_ACCESS_KEY:}
+      region_name: ${env.AWS_REGION_NAME:}
+      endpoint_url: ${env.AWS_ENDPOINT_URL:}
+      bucket_name: ${env.AWS_BUCKET_NAME:}
+      verify_tls: ${env.AWS_VERIFY_TLS:true}
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db
```
```diff
@@ -4,6 +4,7 @@ apis:
 - agents
 - datasetio
 - eval
+- files
 - inference
 - safety
 - scoring

@@ -99,6 +100,16 @@ providers:
     provider_type: remote::wolfram-alpha
     config:
       api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
+  files:
+  - provider_id: s3
+    provider_type: remote::s3
+    config:
+      aws_access_key_id: ${env.AWS_ACCESS_KEY_ID:}
+      aws_secret_access_key: ${env.AWS_SECRET_ACCESS_KEY:}
+      region_name: ${env.AWS_REGION_NAME:}
+      endpoint_url: ${env.AWS_ENDPOINT_URL:}
+      bucket_name: ${env.AWS_BUCKET_NAME:}
+      verify_tls: ${env.AWS_VERIFY_TLS:true}
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db
```
```diff
@@ -69,6 +69,7 @@ unit = [
     "chardet",
     "qdrant-client",
     "opentelemetry-exporter-otlp-proto-http",
+    "aioboto3",
 ]
 # These are the core dependencies required for running integration tests. They are shared across all
 # providers. If a provider requires additional dependencies, please add them to your environment
```
tests/integration/files/conftest.py (new file, 44 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from collections.abc import AsyncGenerator

import pytest

from llama_stack.providers.remote.files.object.s3.config import S3FilesImplConfig
from llama_stack.providers.remote.files.object.s3.s3_files import S3FilesAdapter
from llama_stack.providers.utils.kvstore import KVStore, kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


@pytest.fixture
def s3_config():
    """Create S3 configuration for MinIO."""
    return S3FilesImplConfig(
        aws_access_key_id="ROOTNAME",
        aws_secret_access_key="CHANGEME123",
        region_name="us-east-1",
        endpoint_url="http://localhost:9000",
    )


@pytest.fixture
async def kvstore() -> AsyncGenerator[KVStore, None]:
    """Create a SQLite KV store for testing."""
    config = SqliteKVStoreConfig(
        path=":memory:"  # Use in-memory SQLite for tests
    )
    store = await kvstore_impl(config)
    await store.initialize()
    yield store


@pytest.fixture
async def s3_files(s3_config, kvstore) -> AsyncGenerator[S3FilesAdapter, None]:
    """Create S3FilesAdapter instance for testing."""
    adapter = S3FilesAdapter(s3_config, kvstore)
    await adapter.initialize()
    yield adapter
```
tests/integration/files/test_s3_integration.py (new file, 205 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.


import aioboto3
import aiohttp
import botocore
import pytest

pytestmark = pytest.mark.integration


@pytest.mark.asyncio
async def test_file_upload_download_flow(s3_files, tmp_path):
    """Test complete file upload and download flow."""
    # Get the adapter from the fixture
    adapter = await anext(s3_files)

    # Test data
    bucket = "test-bucket"
    key = tmp_path / "test-file.txt"
    content = b"Hello, this is a test file content!"
    key.write_bytes(content)
    mime_type = "text/plain"

    # Create bucket and upload file
    async with aioboto3.Session().client(
        "s3",
        endpoint_url=adapter.config.endpoint_url,
        aws_access_key_id=adapter.config.aws_access_key_id,
        aws_secret_access_key=adapter.config.aws_secret_access_key,
        region_name=adapter.config.region_name,
    ) as s3:
        try:
            await s3.create_bucket(Bucket=bucket)
        except botocore.exceptions.ClientError as e:
            if e.response["Error"]["Code"] == "BucketAlreadyOwnedByYou":
                pass
            else:
                raise
        except Exception as e:
            print(f"Unexpected error creating bucket: {e}")
            raise

        # Create upload session
        upload_response = await adapter.create_upload_session(
            bucket=bucket, key=key.as_posix(), mime_type=mime_type, size=len(content)
        )

        # Upload content using the adapter
        response = await adapter.upload_content_to_session(upload_response.id)
        assert response is not None
        assert response.bucket == bucket
        assert response.key == str(key)
        assert response.bytes == len(content)

        # Verify file exists
        file_info = await adapter.get_file(bucket, key.as_posix())
        assert file_info.bucket == bucket
        assert file_info.key == key.as_posix()
        assert file_info.mime_type == mime_type
        assert file_info.bytes == len(content)

        # Download file using presigned URL
        async with aiohttp.ClientSession() as session:
            async with session.get(file_info.url) as response:
                assert response.status == 200
                downloaded_content = await response.read()
                assert downloaded_content == content

        # Clean up - delete the file
        await adapter.delete_file(bucket, key.as_posix())

        # Remove test bucket
        await s3.delete_bucket(Bucket=bucket)


@pytest.mark.asyncio
async def test_pagination(s3_files, tmp_path):
    """Test pagination functionality."""
    bucket = "pagination-test"
    files = [f"file_{i}.txt" for i in range(15)]
    content = b"test content"
    mime_type = "text/plain"

    # Get the adapter from the fixture
    adapter = await anext(s3_files)

    # Create bucket
    async with adapter.session.client(
        "s3",
        aws_access_key_id=adapter.config.aws_access_key_id,
        aws_secret_access_key=adapter.config.aws_secret_access_key,
        region_name=adapter.config.region_name,
        endpoint_url=adapter.config.endpoint_url,
    ) as s3:
        try:
            await s3.create_bucket(Bucket=bucket)
        except botocore.exceptions.ClientError as e:
            if e.response["Error"]["Code"] == "BucketAlreadyOwnedByYou":
                pass
            else:
                raise

    # Upload files using the proper upload methods
    for filename in files:
        # Create temporary file
        temp_file = tmp_path / filename
        temp_file.write_bytes(content)

        # Create upload session
        upload_response = await adapter.create_upload_session(
            bucket=bucket, key=filename, mime_type=mime_type, size=len(content)
        )

        # Upload content using the adapter
        response = await adapter.upload_content_to_session(upload_response.id)
        assert response is not None
        assert response.bucket == bucket
        assert response.key == filename
        assert response.bytes == len(content)

    # Test first page
    page1 = await adapter.list_files_in_bucket(bucket, page=1, size=5)
    assert len(page1.items) == 5
    assert page1.total == 15

    # Test second page
    page2 = await adapter.list_files_in_bucket(bucket, page=2, size=5)
    assert len(page2.items) == 5
    assert page2.total == 15

    # Verify no overlap between pages
    page1_keys = {item["key"] for item in page1.items}
    page2_keys = {item["key"] for item in page2.items}
    assert not page1_keys.intersection(page2_keys)

    # Also test list_all_buckets (avoid shadowing the bucket name in the loop)
    buckets = await adapter.list_all_buckets()
    assert len(buckets.items) > 0
    assert any(entry["name"] == bucket for entry in buckets.items)

    # Clean up - delete all files and the bucket
    async with adapter.session.client(
        "s3",
        aws_access_key_id=adapter.config.aws_access_key_id,
        aws_secret_access_key=adapter.config.aws_secret_access_key,
        region_name=adapter.config.region_name,
        endpoint_url=adapter.config.endpoint_url,
    ) as s3:
        for filename in files:
            await adapter.delete_file(bucket, filename)
        await s3.delete_bucket(Bucket=bucket)


# @pytest.mark.asyncio
# async def test_large_file_upload(s3_files):
#     """Test uploading a large file."""
#     bucket = "large-file-test"
#     key = "large-file.bin"
#     mime_type = "application/octet-stream"

#     # Create a 5MB file
#     content = os.urandom(5 * 1024 * 1024)

#     # Create bucket
#     async with s3_files.session.client("s3") as s3:
#         await s3.create_bucket(Bucket=bucket)

#     # Create upload session
#     upload_response = await s3_files.create_upload_session(
#         bucket=bucket, key=key, mime_type=mime_type, size=len(content)
#     )

#     # Upload content
#     async with aiohttp.ClientSession() as session:
#         async with session.put(upload_response.url, data=content) as response:
#             assert response.status == 200

#     # Verify file
#     file_info = await s3_files.get_file(bucket, key)
#     assert file_info.bytes == len(content)
#     assert file_info.mime_type == mime_type


# @pytest.mark.asyncio
# async def test_error_handling(s3_files):
#     """Test error handling for various scenarios."""
#     bucket = "error-test"
#     key = "non-existent.txt"

#     # Test getting non-existent file
#     with pytest.raises(Exception):
#         await s3_files.get_file(bucket, key)

#     # Test listing files in non-existent bucket
#     with pytest.raises(Exception):
#         await s3_files.list_files_in_bucket(bucket)

#     # Test deleting non-existent file
#     with pytest.raises(Exception):
#         await s3_files.delete_file(bucket, key)
```
tests/unit/providers/files/test_remote_files_s3.py (new file, 59 lines)

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from collections.abc import AsyncGenerator

import pytest

from llama_stack.providers.remote.files.object.s3.config import S3FilesImplConfig
from llama_stack.providers.remote.files.object.s3.s3_files import S3FilesAdapter
from llama_stack.providers.utils.kvstore import KVStore, kvstore_impl
from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig


@pytest.fixture
def s3_config():
    return S3FilesImplConfig(
        aws_access_key_id="test-key",
        aws_secret_access_key="test-secret",
        region_name="us-east-1",
        endpoint_url="http://localhost:9000",
    )


@pytest.fixture
async def kvstore() -> AsyncGenerator[KVStore, None]:
    """Create a SQLite KV store for testing."""
    config = SqliteKVStoreConfig(
        path=":memory:"  # Use in-memory SQLite for tests
    )
    store = await kvstore_impl(config)
    await store.initialize()
    yield store


@pytest.fixture
async def s3_files(s3_config, kvstore):
    adapter = S3FilesAdapter(
        s3_config,
        kvstore,
    )
    await adapter.initialize()
    return adapter


@pytest.mark.asyncio
async def test_create_upload_session(s3_files):
    bucket = "test-bucket"
    key = "test-file.txt"
    mime_type = "text/plain"
    size = 1024

    response = await s3_files.create_upload_session(bucket, key, mime_type, size)
    assert response.id == f"{bucket}/{key}"
    assert response.size == size
    assert response.offset == 0
    assert response.url is not None
```
105
uv.lock
generated
105
uv.lock
generated
|
@ -16,6 +16,51 @@ resolution-markers = [
    "python_full_version == '3.12.*' and sys_platform == 'darwin'",
]

[[package]]
name = "aioboto3"
version = "14.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "aiobotocore", extra = ["boto3"] },
    { name = "aiofiles" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/b7/2f0d45cf31f77f8432102d7225d189e6e65cc7a16a32a8ac929eabd719a7/aioboto3-14.3.0.tar.gz", hash = "sha256:1d18f88bb56835c607b62bb6cb907754d717bedde3ddfff6935727cb48a80135", size = 322658, upload-time = "2025-05-07T15:23:59.262Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0e/b0/f2415f03af890693ba8cb669c67f30b9ffa8b2065ecf91cc92e6782b5aa2/aioboto3-14.3.0-py3-none-any.whl", hash = "sha256:aec5de94e9edc1ffbdd58eead38a37f00ddac59a519db749a910c20b7b81bca7", size = 35697, upload-time = "2025-05-07T15:23:57.539Z" },
]

[[package]]
name = "aiobotocore"
version = "2.22.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "aiohttp" },
    { name = "aioitertools" },
    { name = "botocore" },
    { name = "jmespath" },
    { name = "multidict" },
    { name = "python-dateutil" },
    { name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9c/4c/113c4f5611103bba8e5252805fbee7944f5d9541addba9a96b091c0c4308/aiobotocore-2.22.0.tar.gz", hash = "sha256:11091477266b75c2b5d28421c1f2bc9a87d175d0b8619cb830805e7a113a170b", size = 110322, upload-time = "2025-05-01T16:45:45.484Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/00/8e/ffa5840cb7de19ada85bda1fae1ae22671a18992e9373f2e2df9db5389b5/aiobotocore-2.22.0-py3-none-any.whl", hash = "sha256:b4e6306f79df9d81daff1f9d63189a2dbee4b77ce3ab937304834e35eaaeeccf", size = 78930, upload-time = "2025-05-01T16:45:43.508Z" },
]

[package.optional-dependencies]
boto3 = [
    { name = "boto3" },
]

[[package]]
name = "aiofiles"
version = "24.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" },
]

[[package]]
name = "aiohappyeyeballs"
version = "2.5.0"
@ -107,6 +152,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/9c/54/ebb815bc0fe057d8e7a11c086c479e972e827082f39aeebc6019dd4f0862/aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2", size = 436452, upload-time = "2025-02-24T16:01:23.611Z" },
]

[[package]]
name = "aioitertools"
version = "0.12.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369, upload-time = "2024-09-02T03:33:40.349Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345, upload-time = "2024-09-02T03:34:59.454Z" },
]

[[package]]
name = "aiosignal"
version = "1.3.2"
@ -299,6 +353,34 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/ed/4d/1392562369b1139e741b30d624f09fe7091d17dd5579fae5732f044b12bb/blobfile-3.0.0-py3-none-any.whl", hash = "sha256:48ecc3307e622804bd8fe13bf6f40e6463c4439eba7a1f9ad49fd78aa63cc658", size = 75413, upload-time = "2024-08-27T00:02:51.518Z" },
]

[[package]]
name = "boto3"
version = "1.37.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "botocore" },
    { name = "jmespath" },
    { name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7e/3f/135ec0771e6d0e1af2ad7023a15df6677d96112072838d948c9b5075efe1/boto3-1.37.3.tar.gz", hash = "sha256:21f3ce0ef111297e63a6eb998a25197b8c10982970c320d4c6e8db08be2157be", size = 111160, upload-time = "2025-02-27T20:28:15.588Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/62/8c/213511a505af2239a673de4de145d013379275c569185187922f93dbdf14/boto3-1.37.3-py3-none-any.whl", hash = "sha256:2063b40af99fd02f6228ff52397b552ff3353831edaf8d25cc04801827ab9794", size = 139344, upload-time = "2025-02-27T20:28:13.085Z" },
]

[[package]]
name = "botocore"
version = "1.37.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "jmespath" },
    { name = "python-dateutil" },
    { name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/74/fb/b243ab806d2e1e6b8a475b731cc59a1f1e4709eded4884b988a27bbc996b/botocore-1.37.3.tar.gz", hash = "sha256:fe8403eb55a88faf9b0f9da6615e5bee7be056d75e17af66c3c8f0a3b0648da4", size = 13574648, upload-time = "2025-02-27T20:27:59.559Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/88/54/772118f15b5990173aa5264946cc8c9ff70c8f02d72ee6d63167a985188c/botocore-1.37.3-py3-none-any.whl", hash = "sha256:d01bd3bf4c80e61fa88d636ad9f5c9f60a551d71549b481386c6b4efe0bb2b2e", size = 13342066, upload-time = "2025-02-27T20:27:53.137Z" },
]

[[package]]
name = "braintrust-core"
version = "0.0.58"
@ -1255,6 +1337,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/91/61/c80ef80ed8a0a21158e289ef70dac01e351d929a1c30cb0f49be60772547/jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566", size = 202374, upload-time = "2024-12-09T18:10:26.958Z" },
]

[[package]]
name = "jmespath"
version = "1.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
]

[[package]]
name = "jsonschema"
version = "4.23.0"
@ -1502,6 +1593,7 @@ ui = [
    { name = "streamlit-option-menu" },
]
unit = [
    { name = "aioboto3" },
    { name = "aiohttp" },
    { name = "aiosqlite" },
    { name = "chardet" },
@ -1514,6 +1606,7 @@ unit = [

[package.metadata]
requires-dist = [
    { name = "aioboto3", marker = "extra == 'unit'" },
    { name = "aiohttp", marker = "extra == 'test'" },
    { name = "aiohttp", marker = "extra == 'unit'" },
    { name = "aiosqlite", marker = "extra == 'test'" },
@ -3419,6 +3512,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/e8/a8/d71f44b93e3aa86ae232af1f2126ca7b95c0f515ec135462b3e1f351441c/ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", size = 10177499, upload-time = "2025-02-10T12:59:42.989Z" },
]

[[package]]
name = "s3transfer"
version = "0.11.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "botocore" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/24/1390172471d569e281fcfd29b92f2f73774e95972c965d14b6c802ff2352/s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a", size = 148042, upload-time = "2025-02-26T20:44:57.459Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e4/81/48c41b554a54d75d4407740abb60e3a102ae416284df04d1dbdcbe3dbf24/s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d", size = 84246, upload-time = "2025-02-26T20:44:55.509Z" },
]

[[package]]
name = "safetensors"
version = "0.5.3"