version: '2'
name: bedrock
distribution_spec:
  description: Use AWS Bedrock for running LLM inference and safety
  docker_image: null
  providers:
    inference:
    - remote::bedrock
    memory:
    - inline::faiss
    - remote::chromadb
    - remote::pgvector
    safety:
    - remote::bedrock
    agents:
    - inline::meta-reference
    telemetry:
    - inline::meta-reference
image_type: conda