Mirror of https://github.com/meta-llama/llama-stack.git
Distributions updates (slight updates to ollama, add inline-vllm and remote-vllm) (#408)
* remote vllm distro
* add inline-vllm details, fix things
* Write some docs
Parent: ba82021d4b
Commit: 4986e46188

19 changed files with 365 additions and 46 deletions
llama_stack/templates/remote-vllm/build.yaml (new file)

@@ -0,0 +1,12 @@
name: remote-vllm
distribution_spec:
  description: Use (an external) vLLM server for running LLM inference
  providers:
    inference: remote::vllm
    memory:
    - meta-reference
    - remote::chromadb
    - remote::pgvector
    safety: meta-reference
    agents: meta-reference
    telemetry: meta-reference
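For context, the build.yaml above only declares which providers the distribution is built with; at run time the remote::vllm provider still needs the address of the external vLLM server. The sketch below shows roughly how that might look in a run configuration. The field names (provider_id, provider_type, url) and the localhost URL follow the general shape of llama-stack run configs but are assumptions for illustration, not part of this commit.

# Hypothetical run-config snippet (not part of this commit): pointing the
# remote::vllm inference provider at an externally running vLLM server.
# Field names and the URL are illustrative assumptions.
providers:
  inference:
  - provider_id: vllm            # hypothetical provider id, chosen for illustration
    provider_type: remote::vllm  # matches the inference provider declared in build.yaml
    config:
      # Assumed config key: address of the external vLLM server that this
      # distribution delegates LLM inference to.
      url: http://localhost:8000/v1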