Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-12 16:16:09 +00:00
prototype: use pyproject and uv to build distribution
Goals:

* remove the need for a custom tool to install a collection of Python packages, AKA `llama stack build`
* use the power of `uv`, which was designed to manage dependencies
* `llama stack build` can "probably" go away and be replaced with `uv`

Howto: with the pyproject, you can install an Ollama distribution in a virtual env like so:

```
uv venv --python 3.10 ollama-distro
source ollama-distro/bin/activate
uv sync --extra ollama
llama stack run llama_stack/templates/ollama/run.yaml
```

Caveats:

* external providers: we could still use a build file, or add the known external providers to the pyproject?
* growth of the uv.lock?

We also generate a requirements.txt for convenience, since some users are more familiar with that format than with pyproject.

Signed-off-by: Sébastien Han <seb@redhat.com>
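For illustration, a minimal sketch of what the optional-dependency stanza behind `uv sync --extra ollama` could look like; the extra name matches the template above, but the package list here is an assumption, not the commit's actual contents:

```toml
# Hypothetical pyproject.toml excerpt: one extra per distribution,
# so `uv sync --extra ollama` pulls in that distro's providers.
[project.optional-dependencies]
ollama = [
    "ollama",     # assumed: client library for the Ollama provider
    "aiosqlite",  # assumed: local kv/metadata store
    "chromadb",   # assumed: default vector store for this template
]
```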
This commit is contained in:

Parent: 6832e8a658
Commit: b6ebbe1bc0

13 changed files with 5579 additions and 679 deletions
```diff
@@ -40,7 +40,7 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.inference,
             provider_type="inline::vllm",
             pip_packages=[
-                "vllm",
+                "vllm; sys_platform == 'linux'",
             ],
             module="llama_stack.providers.inline.inference.vllm",
             config_class="llama_stack.providers.inline.inference.vllm.VLLMConfig",
```
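The change above swaps a bare requirement for one carrying a PEP 508 environment marker, so `vllm` is only resolved on Linux. A quick way to see how such a marker behaves, using the `packaging` library with the exact spec string from the diff:

```python
from packaging.requirements import Requirement

# The requirement string taken verbatim from the diff above.
req = Requirement("vllm; sys_platform == 'linux'")

print(req.name)    # vllm
print(req.marker)  # sys_platform == "linux"

# evaluate() checks the marker against the current interpreter's
# environment: True on Linux, False on macOS or Windows.
print(req.marker.evaluate())
```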
```diff
@@ -49,8 +49,9 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.inference,
             provider_type="inline::sentence-transformers",
             pip_packages=[
-                "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
-                "sentence-transformers --no-deps",
+                "torch",
+                "torchvision",
+                "sentence-transformers",
             ],
             module="llama_stack.providers.inline.inference.sentence_transformers",
             config_class="llama_stack.providers.inline.inference.sentence_transformers.config.SentenceTransformersInferenceConfig",
```
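Dropping the inline `--index-url` and `--no-deps` flags turns each entry into a valid PEP 508 spec that a pyproject can carry. If the CPU-only PyTorch wheel index is still wanted, uv can express that declaratively; a sketch assuming uv's named-index support (the index name `pytorch-cpu` is arbitrary, and this snippet is not part of the commit):

```toml
# Hypothetical pyproject.toml excerpt: pin torch/torchvision to the
# CPU wheel index instead of passing --index-url on the command line.
[[tool.uv.index]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true  # only consulted for packages that opt in below

[tool.uv.sources]
torch = { index = "pytorch-cpu" }
torchvision = { index = "pytorch-cpu" }
```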
llama_stack/templates/Containerfile (new file, +15)
```diff
@@ -0,0 +1,15 @@
+# Usage:
+# podman build --build-arg TEMPLATE={TEMPLATE_NAME} -f llama_stack/templates/Containerfile -t TEMPLATE_NAME .
+FROM registry.access.redhat.com/ubi9
+WORKDIR /app
+ARG TEMPLATE
+
+RUN dnf -y update \
+    && dnf install -y python3.11 python3.11-pip python3.11-wheel python3.11-setuptools python3.11-devel gcc make \
+    && ln -s /bin/pip3.11 /bin/pip \
+    && ln -s /bin/python3.11 /bin/python \
+    && dnf clean all
+RUN mkdir -p /.llama/providers.d /.cache
+RUN pip install --no-cache llama-stack[${TEMPLATE}]
+RUN chmod -R g+rw /app /.llama /.cache
+ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--config", "/app/llama-stack/templates/${TEMPLATE}/run.yaml"]
```
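Putting the Containerfile together with the pyproject extras, a build-and-run sketch for the ollama template; the build command comes from the Containerfile's own usage comment, while the image tag is arbitrary and the published port assumes the server's default of 8321:

```sh
# Build an image whose `pip install` resolves llama-stack[ollama].
podman build --build-arg TEMPLATE=ollama \
    -f llama_stack/templates/Containerfile -t llama-stack-ollama .

# Run the server; 8321 is assumed to be the default listen port.
podman run --rm -p 8321:8321 llama-stack-ollama
```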