name: local-fireworks-conda-example
distribution_spec:
  distribution_type: local-plus-fireworks-inference
  description: Use Fireworks.ai for running LLM inference
  docker_image: null
  providers:
    inference: remote::fireworks
    memory: meta-reference-faiss
    safety: meta-reference
    agentic_system: meta-reference
    telemetry: console
image_type: conda