llama-stack-mirror/llama_stack/distribution/templates/docker/llamastack-local-cpu/build.yaml

name: local-cpu
distribution_spec:
  description: remote inference + local safety/agents/memory
  docker_image: null
  providers:
    inference:
      - remote::ollama
      - remote::tgi
      - remote::together
      - remote::fireworks
    safety: meta-reference
    agents: meta-reference
    memory: meta-reference
    telemetry: meta-reference
image_type: docker
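
A template like this is consumed by the llama stack CLI rather than run directly. A minimal sketch of the build invocation, assuming the CLI from this era of the repository and its --config flag (flag names have varied across versions):

    llama stack build --config llama_stack/distribution/templates/docker/llamastack-local-cpu/build.yaml

Because image_type is docker, the build is expected to produce a container image for this distribution (presumably named llamastack-local-cpu) bundling the local meta-reference providers; the remote inference providers listed (ollama, tgi, together, fireworks) still need their own endpoints or API keys supplied at run time.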