llama-stack-mirror/llama_stack/distribution/templates/build_configs/local-cpu-docker-build.yaml
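# Build config for the `local-cpu` distribution. As the provider list below
# shows, inference is delegated to remote providers (Ollama, TGI, Together,
# Fireworks), so the image itself needs no local GPU; safety, agents, and
# telemetry use the local meta-reference providers, and memory can be backed
# by remote ChromaDB or the meta-reference store. `docker_image: null`
# presumably leaves the base image at its default, and `image_type: docker`
# builds the distribution as a Docker image.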
name: local-cpu
distribution_spec:
  description: remote inference + local safety/agents/memory
  docker_image: null
  providers:
    inference:
      - remote::ollama
      - remote::tgi
      - remote::together
      - remote::fireworks
    safety: meta-reference
    agents: meta-reference
    memory:
      - remote::chromadb
      - meta-reference
    telemetry: meta-reference
image_type: docker
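
# Usage sketch (an assumption, not part of the original file): build configs
# like this one were consumed by the llama-stack CLI of this era, roughly:
#   llama stack build --config local-cpu-docker-build.yaml
# The exact flag name has changed across versions, so check the repository's
# README for the invocation that matches this checkout.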