feat: add s3 provider to files API

Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
Sébastien Han 2025-04-01 11:46:42 +02:00
parent e3ad17ec5e
commit 749cbcca31
No known key found for this signature in database
17 changed files with 614 additions and 132 deletions

View file

@@ -459,6 +459,7 @@
"uvicorn"
],
"ollama": [
"aioboto3",
"aiohttp",
"aiosqlite",
"autoevals",

View file

@@ -29,4 +29,6 @@ distribution_spec:
- inline::rag-runtime
- remote::model-context-protocol
- remote::wolfram-alpha
files:
- remote::s3
image_type: conda

View file

@@ -35,6 +35,7 @@ def get_distribution_template() -> DistributionTemplate:
"remote::model-context-protocol",
"remote::wolfram-alpha",
],
"files": ["remote::s3"],
}
name = "ollama"
inference_provider = Provider(
@@ -48,6 +49,20 @@ def get_distribution_template() -> DistributionTemplate:
config=FaissVectorIOConfig.sample_run_config(f"~/.llama/distributions/{name}"),
)
# Add S3 provider configuration
s3_provider = Provider(
provider_id="s3",
provider_type="remote::s3",
config={
"aws_access_key_id": "${env.AWS_ACCESS_KEY_ID:}",
"aws_secret_access_key": "${env.AWS_SECRET_ACCESS_KEY:}",
"region_name": "${env.AWS_REGION_NAME:}",
"endpoint_url": "${env.AWS_ENDPOINT_URL:}",
"bucket_name": "${env.AWS_BUCKET_NAME:}",
"verify_tls": "${env.AWS_VERIFY_TLS:true}",
},
)
inference_model = ModelInput(
model_id="${env.INFERENCE_MODEL}",
provider_id="ollama",
@@ -92,6 +107,7 @@ def get_distribution_template() -> DistributionTemplate:
provider_overrides={
"inference": [inference_provider],
"vector_io": [vector_io_provider_faiss],
"files": [s3_provider],
},
default_models=[inference_model, embedding_model],
default_tool_groups=default_tool_groups,
@@ -100,6 +116,7 @@ def get_distribution_template() -> DistributionTemplate:
provider_overrides={
"inference": [inference_provider],
"vector_io": [vector_io_provider_faiss],
"files": [s3_provider],
"safety": [
Provider(
provider_id="llama-guard",
@@ -148,5 +165,30 @@ def get_distribution_template() -> DistributionTemplate:
"meta-llama/Llama-Guard-3-1B",
"Safety model loaded into the Ollama server",
),
# Add AWS S3 environment variables
"AWS_ACCESS_KEY_ID": (
"",
"AWS access key ID for S3 access",
),
"AWS_SECRET_ACCESS_KEY": (
"",
"AWS secret access key for S3 access",
),
"AWS_REGION_NAME": (
"",
"AWS region name for S3 access",
),
"AWS_ENDPOINT_URL": (
"",
"AWS endpoint URL for S3 access (for custom endpoints)",
),
"AWS_BUCKET_NAME": (
"",
"AWS bucket name for S3 access",
),
"AWS_VERIFY_TLS": (
"true",
"Whether to verify TLS for S3 connections",
),
},
)

View file

@@ -4,6 +4,7 @@ apis:
- agents
- datasetio
- eval
- files
- inference
- safety
- scoring
@@ -101,6 +102,16 @@ providers:
provider_type: remote::wolfram-alpha
config:
api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
files:
- provider_id: s3
provider_type: remote::s3
config:
aws_access_key_id: ${env.AWS_ACCESS_KEY_ID:}
aws_secret_access_key: ${env.AWS_SECRET_ACCESS_KEY:}
region_name: ${env.AWS_REGION_NAME:}
endpoint_url: ${env.AWS_ENDPOINT_URL:}
bucket_name: ${env.AWS_BUCKET_NAME:}
verify_tls: ${env.AWS_VERIFY_TLS:true}
metadata_store:
type: sqlite
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db

View file

@@ -4,6 +4,7 @@ apis:
- agents
- datasetio
- eval
- files
- inference
- safety
- scoring
@@ -99,6 +100,16 @@ providers:
provider_type: remote::wolfram-alpha
config:
api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
files:
- provider_id: s3
provider_type: remote::s3
config:
aws_access_key_id: ${env.AWS_ACCESS_KEY_ID:}
aws_secret_access_key: ${env.AWS_SECRET_ACCESS_KEY:}
region_name: ${env.AWS_REGION_NAME:}
endpoint_url: ${env.AWS_ENDPOINT_URL:}
bucket_name: ${env.AWS_BUCKET_NAME:}
verify_tls: ${env.AWS_VERIFY_TLS:true}
metadata_store:
type: sqlite
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db