Docs for meta-reference-gpu

Ashwin Bharambe 2024-11-18 13:58:12 -08:00
parent 38563d7c00
commit dd732f037f
9 changed files with 374 additions and 101 deletions

@@ -1,68 +1,56 @@
 version: '2'
-image_name: local
+image_name: meta-reference-gpu
 docker_image: null
-conda_env: local
+conda_env: null
 apis:
-- shields
 - agents
-- models
-- memory
-- memory_banks
 - inference
+- memory
 - safety
+- telemetry
 providers:
   inference:
-  - provider_id: inference0
+  - provider_id: meta-reference-inference
     provider_type: inline::meta-reference
     config:
-      model: Llama3.2-3B-Instruct
-      quantization: null
-      torch_seed: null
+      model: ${env.INFERENCE_MODEL}
       max_seq_len: 4096
-      max_batch_size: 1
-  - provider_id: inference1
-    provider_type: inline::meta-reference
-    config:
-      model: Llama-Guard-3-1B
-      quantization: null
-      torch_seed: null
-      max_seq_len: 2048
-      max_batch_size: 1
-  safety:
-  - provider_id: meta0
-    provider_type: inline::llama-guard
-    config:
-      model: Llama-Guard-3-1B
-      excluded_categories: []
-  - provider_id: meta1
-    provider_type: inline::prompt-guard
-    config:
-      model: Prompt-Guard-86M
-  # Uncomment to use prompt guard
-  # prompt_guard_shield:
-  #   model: Prompt-Guard-86M
+      checkpoint_dir: ${env.INFERENCE_CHECKPOINT_DIR:null}
   memory:
-  - provider_id: meta0
-    provider_type: inline::meta-reference
+  - provider_id: faiss
+    provider_type: inline::faiss
+    config:
+      kvstore:
+        type: sqlite
+        namespace: null
+        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/meta-reference-gpu}/faiss_store.db
+  safety:
+  - provider_id: llama-guard
+    provider_type: inline::llama-guard
     config: {}
-  # Uncomment to use pgvector
-  # - provider_id: pgvector
-  #   provider_type: remote::pgvector
-  #   config:
-  #     host: 127.0.0.1
-  #     port: 5432
-  #     db: postgres
-  #     user: postgres
-  #     password: mysecretpassword
   agents:
-  - provider_id: meta0
+  - provider_id: meta-reference
     provider_type: inline::meta-reference
     config:
       persistence_store:
-        namespace: null
         type: sqlite
-        db_path: ~/.llama/runtime/agents_store.db
+        namespace: null
+        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/meta-reference-gpu}/agents_store.db
   telemetry:
-  - provider_id: meta0
+  - provider_id: meta-reference
     provider_type: inline::meta-reference
     config: {}
+metadata_store:
+  namespace: null
+  type: sqlite
+  db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/meta-reference-gpu}/registry.db
+models:
+- metadata: {}
+  model_id: ${env.INFERENCE_MODEL}
+  provider_id: meta-reference-inference
+  provider_model_id: null
+shields: []
+memory_banks: []
+datasets: []
+scoring_fns: []
+eval_tasks: []
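
Note on the ${env.VAR} placeholders introduced above: values of the form ${env.VAR} are substituted from the environment when the stack is started with llama stack run, and the ${env.VAR:default} form appears to fall back to the literal after the colon when the variable is unset. As a minimal, illustrative sketch only (the model id is reused from the previous version of this file, not prescribed by this commit), the inference provider block would resolve roughly to:

providers:
  inference:
  - provider_id: meta-reference-inference
    provider_type: inline::meta-reference
    config:
      # example: INFERENCE_MODEL=Llama3.2-3B-Instruct exported in the environment
      model: Llama3.2-3B-Instruct
      max_seq_len: 4096
      # INFERENCE_CHECKPOINT_DIR unset, so the default after the colon (null) applies
      checkpoint_dir: null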