llama-stack-mirror/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml

name: local-ollama-conda-example
distribution_spec:
  description: Like local, but use ollama for running LLM inference
  providers:
    inference: remote::ollama
    memory: meta-reference-faiss
    safety: meta-reference
    agentic_system: meta-reference
    telemetry: console
image_type: conda
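
For reference, a minimal sketch of loading and inspecting this build config with PyYAML; the loader library and the local file path used below are assumptions for illustration, not part of the original file:

```python
# Minimal sketch: parse the build config and list which provider backs each API.
# Assumes PyYAML is installed and the file is available at the path shown above.
import yaml

CONFIG_PATH = (
    "llama_toolchain/configs/distributions/conda/"
    "local-ollama-conda-example-build.yaml"
)

with open(CONFIG_PATH) as f:
    config = yaml.safe_load(f)

print(f"Build name: {config['name']} (image_type: {config['image_type']})")
for api, provider in config["distribution_spec"]["providers"].items():
    print(f"  {api}: {provider}")
```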