Mirror of https://github.com/meta-llama/llama-stack.git (synced 2026-01-05 03:12:16 +00:00)
debugged impl errors for building container and running data prep
Signed-off-by: James Kunstle <jkunstle@redhat.com>
Parent: 06465441f2
Commit: 68000499f7
6 changed files with 66 additions and 19 deletions
hf-ilab-build.yaml (new file, +25)
@@ -0,0 +1,25 @@
version: "2"
distribution_spec:
  description: Use (an external) Ollama server for running LLM inference
  providers:
    inference:
    - remote::ollama
    vector_io:
    - inline::faiss
    safety:
    - inline::llama-guard
    telemetry:
    - inline::meta-reference
    agents:
    - inline::meta-reference
    eval:
    - inline::meta-reference
    datasetio:
    - inline::localfs
    scoring:
    - inline::llm-as-judge
    tool_runtime:
    - remote::brave-search
    post_training:
    - inline::huggingface-ilab
image_type: venv
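
For context, a build spec like this is normally handed to the stack CLI, which assembles the listed providers into a distribution image. The commands below are a minimal sketch assuming the standard llama-stack build workflow; the --config flag and the follow-up run step are illustrative, not part of this commit, and the run-config path is hypothetical:

    # Build the distribution described by hf-ilab-build.yaml (image_type: venv)
    llama stack build --config hf-ilab-build.yaml

    # Then start the stack from the run config produced by the build step
    # (path shown here is illustrative)
    llama stack run ./run.yaml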