# llama_stack/templates/ollama/build.yaml
name: ollama
distribution_spec:
  description: Use ollama for running LLM inference
  providers:
    inference: remote::ollama
    memory:
      - meta-reference
      - remote::chromadb
      - remote::pgvector
    safety: inline::llama-guard
    agents: meta-reference
    telemetry: meta-reference
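# Provider IDs follow llama-stack's "<source>::<name>" convention: "remote::"
# providers call out to an external service (here, a running Ollama server),
# while "inline::" and meta-reference providers run in-process.
#
# A template like this is typically consumed with the llama CLI, e.g.
# (exact flags may differ across llama-stack versions):
#   llama stack build --template ollama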