fix docker

This commit is contained in:
Xi Yan 2024-09-18 14:13:57 -07:00
parent d3d66ba40b
commit 2ba8047013
8 changed files with 13 additions and 25 deletions

View file

@@ -1,4 +1,4 @@
name: local-conda
name: local
distribution_spec:
description: Use code from `llama_stack` itself to serve all llama stack APIs
providers:

View file

@@ -1,10 +0,0 @@
name: local-docker
distribution_spec:
description: Use code from `llama_stack` itself to serve all llama stack APIs
providers:
inference: meta-reference
memory: meta-reference
safety: meta-reference
agents: meta-reference
telemetry: meta-reference
image_type: docker

View file

@@ -1,4 +1,4 @@
name: local-fireworks-conda
name: local-fireworks
distribution_spec:
description: Use Fireworks.ai for running LLM inference
providers:

View file

@@ -1,4 +1,4 @@
name: local-ollama-conda
name: local-ollama
distribution_spec:
description: Like local, but use ollama for running LLM inference
providers:

View file

@@ -1,4 +1,4 @@
name: local-tgi-conda
name: local-tgi
distribution_spec:
description: Use TGI (local or with Hugging Face Inference Endpoints for running LLM inference. When using HF Inference Endpoints, you must provide the name of the endpoint).
providers:

View file

@@ -1,4 +1,4 @@
name: local-tgi-conda
name: local-together
distribution_spec:
description: Use Together.ai for running LLM inference
providers: