Ensure llama stack build --config <> --image-type <> works (#879)

Fix the issues brought up in
https://github.com/meta-llama/llama-stack/issues/870

Test all combinations of image type (conda, container) and build source
(template, config).
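The matrix being exercised is roughly the following; the template name and config path are placeholders, and `--template`/`--config` are assumed to be the two mutually exclusive ways of selecting a distribution for `llama stack build`:

```bash
# conda + template
llama stack build --template fireworks --image-type conda
# conda + config
llama stack build --config /path/to/my-build.yaml --image-type conda
# container + template
llama stack build --template fireworks --image-type container
# container + config
llama stack build --config /path/to/my-build.yaml --image-type container
```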
Ashwin Bharambe, 2025-01-25 11:13:36 -08:00
commit 891bf704eb (parent 7de46e40f9)
5 changed files with 66 additions and 37 deletions


@@ -10,7 +10,7 @@ import sys
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional
from typing import Dict, List
from pydantic import BaseModel
from termcolor import cprint
@@ -107,9 +107,9 @@ def build_image(
build_config: BuildConfig,
build_file_path: Path,
image_name: str,
template_name: Optional[str] = None,
template_or_config: str,
):
container_image = (
container_base = (
build_config.distribution_spec.container_image or "python:3.10-slim"
)
@@ -119,16 +119,14 @@ def build_image(
normal_deps += SERVER_DEPENDENCIES
if build_config.image_type == ImageType.container.value:
if not template_name:
raise ValueError("template_name is required for container builds")
script = str(
importlib.resources.files("llama_stack") / "distribution/build_container.sh"
)
args = [
script,
template_name,
container_image,
template_or_config,
image_name,
container_base,
str(build_file_path),
str(BUILDS_BASE_DIR / ImageType.container.value),
" ".join(normal_deps),


@@ -12,22 +12,22 @@ TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
PYPI_VERSION=${PYPI_VERSION:-}
BUILD_PLATFORM=${BUILD_PLATFORM:-}
if [ "$#" -lt 5 ]; then
if [ "$#" -lt 6 ]; then
# This only works for templates
echo "Usage: $0 <template_name> <container_base> <pip_dependencies> <host_build_dir> [<special_pip_deps>]" >&2
echo "Example: $0 fireworks python:3.9-slim 'fastapi uvicorn' /path/to/build/dir" >&2
echo "Usage: $0 <template_or_config> <image_name> <container_base> <build_file_path> <host_build_dir> <pip_dependencies> [<special_pip_deps>]" >&2
exit 1
fi
special_pip_deps="$6"
set -euo pipefail
template_name="$1"
container_base=$2
build_file_path=$3
host_build_dir=$4
pip_dependencies=$5
template_or_config="$1"
image_name="$2"
container_base="$3"
build_file_path="$4"
host_build_dir="$5"
pip_dependencies="$6"
special_pip_deps="$7"
# Define color codes
RED='\033[0;31m'
@@ -147,14 +147,16 @@ RUN pip install --no-cache $models_mount
EOF
fi
add_to_container << EOF
# This would be good in production but for debugging flexibility lets not add it right now
# We need a more solid production ready entrypoint.sh anyway
#
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "$template_name"]
# if template_or_config ends with .yaml, it is not a template and we should not use the --template flag
if [[ "$template_or_config" != *.yaml ]]; then
add_to_container << EOF
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "$template_or_config"]
EOF
else
add_to_container << EOF
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server"]
EOF
fi
printf "Containerfile created successfully in $TEMP_DIR/Containerfile\n\n"
cat $TEMP_DIR/Containerfile
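The .yaml suffix check is the whole dispatch between the two ENTRYPOINT forms above: a bare template name gets baked in via --template, while a config path leaves the ENTRYPOINT generic so the configuration can be supplied when the container is started. A minimal standalone illustration of how the test treats the two kinds of first argument (values are placeholders):

```bash
# Same suffix test as above, shown in isolation.
for value in fireworks /path/to/my-build.yaml; do
  if [[ "$value" != *.yaml ]]; then
    echo "$value -> ENTRYPOINT [... \"--template\", \"$value\"]"
  else
    echo "$value -> plain ENTRYPOINT; config is provided at run time"
  fi
done
```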
@@ -174,7 +176,9 @@ if command -v selinuxenabled &>/dev/null && selinuxenabled; then
fi
# Set version tag based on PyPI version
if [ -n "$TEST_PYPI_VERSION" ]; then
if [ -n "$PYPI_VERSION" ]; then
version_tag="$PYPI_VERSION"
elif [ -n "$TEST_PYPI_VERSION" ]; then
version_tag="test-$TEST_PYPI_VERSION"
elif [[ -n "$LLAMA_STACK_DIR" || -n "$LLAMA_MODELS_DIR" ]]; then
version_tag="dev"
@@ -184,8 +188,7 @@ else
fi
# Add version tag to image name
build_name="distribution-$template_name"
image_tag="$build_name:$version_tag"
image_tag="$image_name:$version_tag"
# Detect platform architecture
ARCH=$(uname -m)
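Two things change here: the version tag now prefers an explicitly set PYPI_VERSION, and the tag is applied to the caller-supplied image name rather than a hard-coded distribution-<template> prefix. A hedged example of pinning a container build; the image name my-stack and the version are placeholders, and this assumes the environment variable is inherited by the build script:

```bash
# Pin the build to a published version; the resulting image should be
# tagged <image_name>:<PYPI_VERSION>, e.g. my-stack:0.1.0.
PYPI_VERSION=0.1.0 llama stack build --config /path/to/my-build.yaml --image-type container

# Inspect the result (podman or docker, whichever container binary is in use).
podman images | grep my-stack
```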


@@ -30,8 +30,8 @@ if [ $# -lt 3 ]; then
exit 1
fi
build_name="$1"
container_image="localhost/distribution-$build_name"
image_name="$1"
container_image="localhost/$image_name"
shift
yaml_config="$1"
@@ -76,13 +76,15 @@ if [ -n "$LLAMA_CHECKPOINT_DIR" ]; then
CONTAINER_OPTS="$CONTAINER_OPTS --gpus=all"
fi
version_tag="latest"
if [ -n "$PYPI_VERSION" ]; then
version_tag="$PYPI_VERSION"
elif [ -n "$LLAMA_STACK_DIR" ]; then
version_tag="dev"
elif [ -n "$TEST_PYPI_VERSION" ]; then
version_tag="test-$TEST_PYPI_VERSION"
else
URL="https://pypi.org/pypi/llama-stack/json"
version_tag=$(curl -s $URL | jq -r '.info.version')
fi
$CONTAINER_BINARY run $CONTAINER_OPTS -it \
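On the run side, the container is now resolved as localhost/<image_name> with a version tag chosen by a similar precedence (PYPI_VERSION, then a local checkout mapping to dev, then TEST_PYPI_VERSION, else the latest version published on PyPI). For the lookup to succeed, the run-time environment has to resolve to the same tag the build produced. A minimal sketch with placeholder names and version:

```bash
# Keep the pin consistent across build and run so that
# localhost/my-stack:0.1.0 is the tag both sides agree on.
export PYPI_VERSION=0.1.0
llama stack build --config /path/to/my-build.yaml --image-type container
# ...then start the container through the run script in this commit
# with PYPI_VERSION still exported.
```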