llama-stack-mirror/llama_stack/distributions/watsonx/build.yaml
2025-10-21 11:16:48 -07:00

33 lines
955 B
YAML

# Build configuration for the watsonx distribution of Llama Stack.
# NOTE(review): indentation was lost in the extracted copy; nesting below is
# reconstructed (providers under distribution_spec; image_type and
# additional_pip_packages at top level per the build.yaml schema) — verify
# against the upstream file.
version: 2
distribution_spec:
  description: Use watsonx for running LLM inference
  # One list of provider implementations per Llama Stack API.
  providers:
    inference:
      - provider_type: remote::watsonx
      - provider_type: inline::sentence-transformers
    vector_io:
      - provider_type: inline::faiss
    safety:
      - provider_type: inline::llama-guard
    agents:
      - provider_type: inline::meta-reference
    eval:
      - provider_type: inline::meta-reference
    datasetio:
      - provider_type: remote::huggingface
      - provider_type: inline::localfs
    scoring:
      - provider_type: inline::basic
      - provider_type: inline::llm-as-judge
      - provider_type: inline::braintrust
    tool_runtime:
      - provider_type: remote::brave-search
      - provider_type: remote::tavily-search
      - provider_type: inline::rag-runtime
      - provider_type: remote::model-context-protocol
    files:
      - provider_type: inline::localfs
# How the distribution image is built (virtualenv rather than a container).
image_type: venv
# Extra pip packages installed into the built environment.
additional_pip_packages:
  - aiosqlite
  - sqlalchemy[asyncio]