llama-stack/llama_stack/templates/bedrock/build.yaml

version: '2'
name: bedrock
distribution_spec:
  description: Use AWS Bedrock for running LLM inference and safety
  docker_image: null
  providers:
    inference:
    - remote::bedrock
    memory:
    - inline::faiss
    - remote::chromadb
    - remote::pgvector
    safety:
    - remote::bedrock
    agents:
    - inline::meta-reference
    telemetry:
    - inline::meta-reference
image_type: conda
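
As a rough sketch of how a template spec like this is consumed (assuming the standard llama-stack CLI workflow), the file is selected by name when building a distribution, for example: llama stack build --template bedrock --image-type conda. The image_type field above sets conda as the default environment target, and the providers listed under distribution_spec determine which inference, memory, safety, agents, and telemetry backends the resulting stack is wired to.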