feat: re-work distro-codegen

Each *.py file in the various templates now has to use `Provider` objects rather than the stringified provider types in the `DistributionTemplate`. Adjust them accordingly, then regenerate all templates, docs, etc.

Signed-off-by: Charlie Doern <cdoern@redhat.com>

commit 776fabed9e (parent dcc6b1eee9)
Author: Charlie Doern <cdoern@redhat.com>
Date:   2025-07-06 20:06:27 -04:00

28 changed files with 809 additions and 328 deletions
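
In short, each template's providers mapping moves from bare provider-type strings to structured entries. A minimal before/after sketch (the Provider import path is assumed from the surrounding codebase; its provider_id/provider_type fields are visible in the diffs below):

# Minimal sketch of the shape change in a template's
# get_distribution_template(); the import path is assumed.
from llama_stack.distribution.datatypes import Provider

# before: stringified provider types
providers_before = {"inference": ["inline::meta-reference"]}

# after: structured Provider entries with an explicit provider_id
providers_after = {
    "inference": [
        Provider(
            provider_id="meta-reference",
            provider_type="inline::meta-reference",
        )
    ],
}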


@@ -3,32 +3,50 @@ distribution_spec:
   description: Use Meta Reference for running LLM inference
   providers:
     inference:
-    - inline::meta-reference
+    - provider_id: meta-reference
+      provider_type: inline::meta-reference
     vector_io:
-    - inline::faiss
-    - remote::chromadb
-    - remote::pgvector
+    - provider_id: faiss
+      provider_type: inline::faiss
+    - provider_id: chromadb
+      provider_type: remote::chromadb
+    - provider_id: pgvector
+      provider_type: remote::pgvector
     safety:
-    - inline::llama-guard
+    - provider_id: llama-guard
+      provider_type: inline::llama-guard
     agents:
-    - inline::meta-reference
+    - provider_id: meta-reference
+      provider_type: inline::meta-reference
     telemetry:
-    - inline::meta-reference
+    - provider_id: meta-reference
+      provider_type: inline::meta-reference
     eval:
-    - inline::meta-reference
+    - provider_id: meta-reference
+      provider_type: inline::meta-reference
     datasetio:
-    - remote::huggingface
-    - inline::localfs
+    - provider_id: huggingface
+      provider_type: remote::huggingface
+    - provider_id: localfs
+      provider_type: inline::localfs
     scoring:
-    - inline::basic
-    - inline::llm-as-judge
-    - inline::braintrust
+    - provider_id: basic
+      provider_type: inline::basic
+    - provider_id: llm-as-judge
+      provider_type: inline::llm-as-judge
+    - provider_id: braintrust
+      provider_type: inline::braintrust
     tool_runtime:
-    - remote::brave-search
-    - remote::tavily-search
-    - inline::rag-runtime
-    - remote::model-context-protocol
+    - provider_id: brave-search
+      provider_type: remote::brave-search
+    - provider_id: tavily-search
+      provider_type: remote::tavily-search
+    - provider_id: rag-runtime
+      provider_type: inline::rag-runtime
+    - provider_id: model-context-protocol
+      provider_type: remote::model-context-protocol
 image_type: conda
 image_name: meta-reference-gpu
 additional_pip_packages:
 - aiosqlite
+- sqlalchemy[asyncio]
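
The regenerated build.yaml above now carries provider_id/provider_type pairs. One plausible way the codegen projects Provider entries down to that two-key form, as a hypothetical sketch (not the actual implementation):

from typing import Any

# Hypothetical: keep only the two fields distribution_spec needs,
# dropping runtime-only fields such as config.
def distribution_spec_providers(
    providers: dict[str, list[Any]],
) -> dict[str, list[dict[str, str]]]:
    return {
        api: [
            {"provider_id": p.provider_id, "provider_type": p.provider_type}
            for p in plist
        ]
        for api, plist in providers.items()
    }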


@@ -25,19 +25,91 @@ from llama_stack.templates.template import DistributionTemplate, RunConfigSettings
 
 def get_distribution_template() -> DistributionTemplate:
     providers = {
-        "inference": ["inline::meta-reference"],
-        "vector_io": ["inline::faiss", "remote::chromadb", "remote::pgvector"],
-        "safety": ["inline::llama-guard"],
-        "agents": ["inline::meta-reference"],
-        "telemetry": ["inline::meta-reference"],
-        "eval": ["inline::meta-reference"],
-        "datasetio": ["remote::huggingface", "inline::localfs"],
-        "scoring": ["inline::basic", "inline::llm-as-judge", "inline::braintrust"],
+        "inference": [
+            Provider(
+                provider_id="meta-reference",
+                provider_type="inline::meta-reference",
+            )
+        ],
+        "vector_io": [
+            Provider(
+                provider_id="faiss",
+                provider_type="inline::faiss",
+            ),
+            Provider(
+                provider_id="chromadb",
+                provider_type="remote::chromadb",
+            ),
+            Provider(
+                provider_id="pgvector",
+                provider_type="remote::pgvector",
+            ),
+        ],
+        "safety": [
+            Provider(
+                provider_id="llama-guard",
+                provider_type="inline::llama-guard",
+            )
+        ],
+        "agents": [
+            Provider(
+                provider_id="meta-reference",
+                provider_type="inline::meta-reference",
+            )
+        ],
+        "telemetry": [
+            Provider(
+                provider_id="meta-reference",
+                provider_type="inline::meta-reference",
+            )
+        ],
+        "eval": [
+            Provider(
+                provider_id="meta-reference",
+                provider_type="inline::meta-reference",
+            )
+        ],
+        "datasetio": [
+            Provider(
+                provider_id="huggingface",
+                provider_type="remote::huggingface",
+            ),
+            Provider(
+                provider_id="localfs",
+                provider_type="inline::localfs",
+            ),
+        ],
+        "scoring": [
+            Provider(
+                provider_id="basic",
+                provider_type="inline::basic",
+            ),
+            Provider(
+                provider_id="llm-as-judge",
+                provider_type="inline::llm-as-judge",
+            ),
+            Provider(
+                provider_id="braintrust",
+                provider_type="inline::braintrust",
+            ),
+        ],
         "tool_runtime": [
-            "remote::brave-search",
-            "remote::tavily-search",
-            "inline::rag-runtime",
-            "remote::model-context-protocol",
+            Provider(
+                provider_id="brave-search",
+                provider_type="remote::brave-search",
+            ),
+            Provider(
+                provider_id="tavily-search",
+                provider_type="remote::tavily-search",
+            ),
+            Provider(
+                provider_id="rag-runtime",
+                provider_type="inline::rag-runtime",
+            ),
+            Provider(
+                provider_id="model-context-protocol",
+                provider_type="remote::model-context-protocol",
+            ),
+        ],
     }
     name = "meta-reference-gpu"
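
The remaining template files need the same mechanical edit. A hypothetical migration helper, assuming the convention visible above that provider_id is the provider type minus its inline::/remote:: prefix:

from llama_stack.distribution.datatypes import Provider  # path assumed

# Hypothetical: derive provider_id by stripping the namespace prefix
# from the old stringified provider type.
def to_provider(provider_type: str) -> Provider:
    return Provider(
        provider_id=provider_type.split("::", 1)[1],
        provider_type=provider_type,
    )

# e.g. an old-style dict from a not-yet-migrated template
legacy = {"tool_runtime": ["remote::brave-search", "inline::rag-runtime"]}
providers = {api: [to_provider(t) for t in types] for api, types in legacy.items()}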


@@ -24,7 +24,6 @@ providers:
       max_seq_len: ${env.MAX_SEQ_LEN:=4096}
   - provider_id: sentence-transformers
     provider_type: inline::sentence-transformers
-    config: {}
   - provider_id: meta-reference-safety
     provider_type: inline::meta-reference
     config:
@@ -88,10 +87,8 @@ providers:
   scoring:
   - provider_id: basic
     provider_type: inline::basic
-    config: {}
   - provider_id: llm-as-judge
     provider_type: inline::llm-as-judge
-    config: {}
   - provider_id: braintrust
     provider_type: inline::braintrust
     config:
@@ -109,10 +106,8 @@ providers:
       max_results: 3
   - provider_id: rag-runtime
     provider_type: inline::rag-runtime
-    config: {}
   - provider_id: model-context-protocol
     provider_type: remote::model-context-protocol
-    config: {}
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/meta-reference-gpu}/registry.db


@@ -24,7 +24,6 @@ providers:
       max_seq_len: ${env.MAX_SEQ_LEN:=4096}
   - provider_id: sentence-transformers
     provider_type: inline::sentence-transformers
-    config: {}
   vector_io:
   - provider_id: faiss
     provider_type: inline::faiss
@@ -78,10 +77,8 @@ providers:
   scoring:
   - provider_id: basic
     provider_type: inline::basic
-    config: {}
   - provider_id: llm-as-judge
     provider_type: inline::llm-as-judge
-    config: {}
   - provider_id: braintrust
     provider_type: inline::braintrust
     config:
@@ -99,10 +96,8 @@ providers:
       max_results: 3
   - provider_id: rag-runtime
     provider_type: inline::rag-runtime
-    config: {}
   - provider_id: model-context-protocol
     provider_type: remote::model-context-protocol
-    config: {}
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/meta-reference-gpu}/registry.db
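
Every run.yaml deletion above removes an empty config: {} mapping, so the regenerated output apparently skips configs that have no keys. A hypothetical sketch of that serialization rule:

from typing import Any

# Hypothetical: emit a provider's config only when non-empty, so
# regenerated run.yaml files no longer carry `config: {}` lines.
def provider_yaml_entry(p: Any) -> dict[str, Any]:
    entry = {"provider_id": p.provider_id, "provider_type": p.provider_type}
    if getattr(p, "config", None):
        entry["config"] = p.config
    return entry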