commit c830235936
parent cbb423a32f
Author: Xi Yan
Date:   2024-10-18 17:28:26 -07:00

4 changed files with 17 additions and 4 deletions

@@ -1,4 +1,4 @@
-name: meta-reference-gpu
+name: distribution-meta-reference-gpu
 distribution_spec:
   description: Use code from `llama_stack` itself to serve all llama stack APIs
   providers:

@@ -1,6 +1,6 @@
-name: local-ollama
+name: distribution-ollama
 distribution_spec:
-  description: Like local, but use ollama for running LLM inference
+  description: Use ollama for running LLM inference
   providers:
     inference: remote::ollama
     memory:

@@ -0,0 +1,13 @@
+name: distribution-tgi
+distribution_spec:
+  description: Use TGI for running LLM inference
+  providers:
+    inference: remote::tgi
+    memory:
+      - meta-reference
+      - remote::chromadb
+      - remote::pgvector
+    safety: meta-reference
+    agents: meta-reference
+    telemetry: meta-reference
+image_type: conda
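
A quick way to sanity-check a build spec like the new distribution-tgi file is to parse the YAML and confirm the fields shown above are present. The sketch below is illustrative only, not llama_stack's own loader or validation; the script name, the file path argument, and the required-provider set are assumptions drawn from the keys visible in this diff.

# sketch: check a distribution build spec (assumed key set, not llama_stack's validator)
import sys
import yaml  # PyYAML

REQUIRED_PROVIDERS = {"inference", "memory", "safety", "agents", "telemetry"}

def check_build_spec(path: str) -> None:
    with open(path) as f:
        spec = yaml.safe_load(f)
    # top-level fields shown in the diff above
    if "name" not in spec or "image_type" not in spec:
        sys.exit("missing name or image_type")
    providers = spec["distribution_spec"]["providers"]
    missing = REQUIRED_PROVIDERS - providers.keys()
    if missing:
        sys.exit(f"missing providers: {sorted(missing)}")
    print(f"{spec['name']}: OK ({spec['image_type']} image)")

if __name__ == "__main__":
    check_build_spec(sys.argv[1])  # e.g. the distribution-tgi build yaml added here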

@@ -13,7 +13,7 @@ from functools import lru_cache
 from pathlib import Path
 
 TEMPLATES_PATH = (
-    Path(os.path.relpath(__file__)).parent.parent.parent / "distribution" / "templates"
+    Path(os.path.relpath(__file__)).parent.parent.parent.parent / "distributions"
 )
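
The changed line swaps the package-relative "distribution/templates" directory for a repo-level "distributions" directory, which is why one more .parent is needed. A minimal sketch of how the chain resolves, assuming (hypothetically, since the diff does not show the file's path) that the module sits three directories below the repository root:

# sketch: how the .parent chain maps a module path to the templates directory
# assumes a layout like <repo>/llama_stack/cli/stack/build.py (hypothetical)
import os
from pathlib import Path

module = Path(os.path.relpath("llama_stack/cli/stack/build.py"))

old_path = module.parent.parent.parent / "distribution" / "templates"
new_path = module.parent.parent.parent.parent / "distributions"

print(old_path)  # llama_stack/distribution/templates (inside the package)
print(new_path)  # distributions (top-level directory next to llama_stack/)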