Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-22 12:37:53 +00:00)
11 lines · 299 B · YAML
name: fireworks
distribution_spec:
  description: Use Fireworks.ai for running LLM inference
  providers:
    inference: remote::fireworks
    memory:
    - inline::faiss
    - remote::weaviate
    safety: inline::llama-guard
    agents: inline::meta-reference
    telemetry: inline::meta-reference
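
A build template like this only declares which provider type backs each Llama Stack API (inference, memory, safety, agents, telemetry); endpoint details and credentials are supplied in the run-time configuration produced when the distribution is built (for example via the llama stack build CLI). Below is a rough sketch of how the remote::fireworks inference provider might be configured at run time; the provider_id/provider_type/config layout follows common llama-stack run configs, and the url and api_key values are illustrative assumptions rather than content from this repository.

# Illustrative run-config sketch for the fireworks distribution (not part of this file).
# Field names follow the usual llama-stack run-config layout; the exact schema varies by release.
image_name: fireworks
apis:
- inference
providers:
  inference:
  - provider_id: fireworks
    provider_type: remote::fireworks
    config:
      url: https://api.fireworks.ai/inference/v1  # assumed endpoint
      api_key: ${env.FIREWORKS_API_KEY}           # assumed env-based credential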