version: 2
distribution_spec:
  description: TrustyAI distribution of Llama Stack
  providers:
    inference:
    - provider_type: remote::vllm
    - provider_type: inline::sentence-transformers
    vector_io:
    - provider_type: inline::milvus
    safety:
    - provider_type: remote::trustyai_fms
    agents:
    - provider_type: inline::meta-reference
    eval:
    - provider_type: remote::trustyai_lmeval
    datasetio:
    - provider_type: remote::huggingface
    - provider_type: inline::localfs
    scoring:
    - provider_type: inline::basic
    - provider_type: inline::llm-as-judge
    - provider_type: inline::braintrust
    telemetry:
    - provider_type: inline::meta-reference
    tool_runtime:
    - provider_type: remote::brave-search
    - provider_type: remote::tavily-search
    - provider_type: inline::rag-runtime
    - provider_type: remote::model-context-protocol
  container_image: registry.redhat.io/ubi9/python-312:latest
additional_pip_packages:
- aiosqlite
- sqlalchemy[asyncio]
image_type: container
image_name: llama-stack-trustyai
external_providers_dir: trustyai-distribution/providers.d
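# Note on external providers (illustrative comment, not part of the build spec):
# the remote::trustyai_fms and remote::trustyai_lmeval providers are not bundled
# with Llama Stack itself, which is why external_providers_dir above points at
# trustyai-distribution/providers.d. Each external provider needs a small spec
# file in that directory telling the stack which Python module and config class
# implement it. A rough sketch of such a spec follows; the file path, adapter
# type, module, config class, and pip package name are assumptions, and the
# exact spec schema depends on the Llama Stack version in use:
#
#   # trustyai-distribution/providers.d/remote/safety/trustyai_fms.yaml
#   adapter:
#     adapter_type: trustyai_fms
#     pip_packages:
#     - llama-stack-provider-trustyai-fms   # assumed package name
#     config_class: llama_stack_provider_trustyai_fms.config.FMSSafetyProviderConfig
#     module: llama_stack_provider_trustyai_fms
#   api_dependencies: []
#   optional_api_dependencies: []
#
# With the provider specs in place, the container image can be built from this
# file with the Llama Stack CLI, e.g. `llama stack build --config build.yaml`,
# pointing --config at wherever this file is saved.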