version: '2'
built_at: '2024-10-08T17:42:07.505267'
image_name: local-cpu
docker_image: local-cpu
conda_env: null
apis:
- agents
- inference
- models
- memory
- safety
- shields
- memory_banks
providers:
  inference:
  - provider_id: remote::ollama
    provider_type: remote::ollama
    config:
      host: localhost
      port: 6000
  safety:
  - provider_id: meta-reference
    provider_type: meta-reference
    config:
      llama_guard_shield: null
      prompt_guard_shield: null
  memory:
  - provider_id: meta-reference
    provider_type: meta-reference
    config: {}
  agents:
  - provider_id: meta-reference
    provider_type: meta-reference
    config:
      persistence_store:
        namespace: null
        type: sqlite
        db_path: ~/.llama/runtime/kvstore.db
  telemetry:
  - provider_id: meta-reference
    provider_type: meta-reference
    config: {}
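# Usage note (a sketch, not part of the generated config; assumes the standard
# `llama` CLI and a locally running Ollama daemon):
#   1. Make sure Ollama is serving on the host/port configured under
#      providers.inference above, e.g. start it with `ollama serve`
#      (adjust OLLAMA_HOST if it should listen on localhost:6000).
#   2. Launch the stack against this file, e.g. `llama stack run <path-to-this-file>`.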