Ensure llama stack build --config <> --image-type <> works (#879)

Fix the issues brought up in
https://github.com/meta-llama/llama-stack/issues/870

Test all combinations of image type (conda, container) and build source (template, config).
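
For reference, the four combinations exercised here look roughly like the following; the fireworks template name, config path, and image name are illustrative placeholders rather than part of this change:

    # template-based builds
    llama stack build --template fireworks --image-type conda
    llama stack build --template fireworks --image-type container

    # config-based builds; a container built from a config file now needs an explicit image name
    llama stack build --config ./my-build.yaml --image-type conda
    llama stack build --config ./my-build.yaml --image-type container --image-name my-stack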
commit 891bf704eb (parent 7de46e40f9)
Author: Ashwin Bharambe
Date: 2025-01-25 11:13:36 -08:00 (committed by GitHub)
5 changed files with 66 additions and 37 deletions


@@ -115,6 +115,8 @@ def run_stack_build_command(
f"Using conda environment {image_name}",
color="green",
)
else:
image_name = f"llamastack-{name}"
cprint(
textwrap.dedent(
@@ -171,11 +173,22 @@ def run_stack_build_command(
)
return
_run_stack_build_command_from_build_config(build_config, image_name=image_name)
if build_config.image_type == ImageType.container.value and not args.image_name:
cprint(
"Please specify --image-name when building a container from a config file",
color="red",
)
return
_run_stack_build_command_from_build_config(
build_config, image_name=image_name, config_path=args.config
)
def _generate_run_config(
build_config: BuildConfig, build_dir: Path, image_name: str
build_config: BuildConfig,
build_dir: Path,
image_name: str,
) -> None:
"""
Generate a run.yaml template file for user to edit from a build.yaml file
@@ -227,8 +240,9 @@ def _generate_run_config(
to_write = json.loads(run_config.model_dump_json())
f.write(yaml.dump(to_write, sort_keys=False))
# this path is only invoked when no template is provided
cprint(
f"You can now edit {run_config_file} and run `llama stack run {image_name}`",
f"You can now run your stack with `llama stack run {run_config_file}`",
color="green",
)
@@ -237,6 +251,7 @@ def _run_stack_build_command_from_build_config(
build_config: BuildConfig,
image_name: Optional[str] = None,
template_name: Optional[str] = None,
config_path: Optional[str] = None,
) -> None:
if build_config.image_type == ImageType.container.value:
if template_name:
@@ -263,7 +278,10 @@ def _run_stack_build_command_from_build_config(
f.write(yaml.dump(to_write, sort_keys=False))
return_code = build_image(
build_config, build_file_path, image_name, template_name=template_name
build_config,
build_file_path,
image_name,
template_or_config=template_name or config_path,
)
if return_code != 0:
return
@@ -277,7 +295,7 @@ def _run_stack_build_command_from_build_config(
with importlib.resources.as_file(template_path) as path:
run_config_file = build_dir / f"{template_name}-run.yaml"
shutil.copy(path, run_config_file)
# Find all ${env.VARIABLE} patterns
cprint("Build Successful!", color="green")
else:
_generate_run_config(build_config, build_dir, image_name)

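The updated message above reflects that a config-based build now points you at the generated run.yaml rather than at an image name; roughly (the path below is a placeholder):

    # old guidance: llama stack run <image_name>
    # new guidance: run the generated config file directly
    llama stack run /path/to/generated/run.yaml
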

@@ -78,12 +78,15 @@ class StackRun(Subcommand):
config_file = Path(args.config)
has_yaml_suffix = args.config.endswith(".yaml")
template_name = None
if not config_file.exists() and not has_yaml_suffix:
# check if this is a template
config_file = (
Path(REPO_ROOT) / "llama_stack" / "templates" / args.config / "run.yaml"
)
if config_file.exists():
template_name = args.config
if not config_file.exists() and not has_yaml_suffix:
# check if it's a build config saved to conda dir
@@ -120,7 +123,12 @@ class StackRun(Subcommand):
importlib.resources.files("llama_stack")
/ "distribution/start_container.sh"
)
run_args = [script, config.container_image]
image_name = (
f"distribution-{template_name}"
if template_name
else config.container_image
)
run_args = [script, image_name]
else:
current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
image_name = args.image_name or current_conda_env

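Roughly, the two resolution paths the run command supports after this change (the template name and the config path are placeholders):

    # a bare template name resolves to llama_stack/templates/fireworks/run.yaml and,
    # when running a container, starts the image distribution-fireworks
    llama stack run fireworks

    # an explicit run.yaml path behaves as before
    llama stack run /path/to/run.yaml
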

@@ -10,7 +10,7 @@ import sys
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional
from typing import Dict, List
from pydantic import BaseModel
from termcolor import cprint
@@ -107,9 +107,9 @@ def build_image(
build_config: BuildConfig,
build_file_path: Path,
image_name: str,
template_name: Optional[str] = None,
template_or_config: str,
):
container_image = (
container_base = (
build_config.distribution_spec.container_image or "python:3.10-slim"
)
@@ -119,16 +119,14 @@ def build_image(
normal_deps += SERVER_DEPENDENCIES
if build_config.image_type == ImageType.container.value:
if not template_name:
raise ValueError("template_name is required for container builds")
script = str(
importlib.resources.files("llama_stack") / "distribution/build_container.sh"
)
args = [
script,
template_name,
container_image,
template_or_config,
image_name,
container_base,
str(build_file_path),
str(BUILDS_BASE_DIR / ImageType.container.value),
" ".join(normal_deps),

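With the arguments reshuffled as above, build_container.sh ends up being invoked along these lines; the image name, paths, and dependency list are illustrative guesses, not values taken from this commit:

    # <template_or_config> <image_name> <container_base> <build_file_path> <host_build_dir> <pip_dependencies>
    distribution/build_container.sh fireworks my-stack python:3.10-slim \
        /path/to/build.yaml /path/to/builds/container 'fastapi uvicorn'
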

@@ -12,22 +12,22 @@ TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
PYPI_VERSION=${PYPI_VERSION:-}
BUILD_PLATFORM=${BUILD_PLATFORM:-}
if [ "$#" -lt 5 ]; then
if [ "$#" -lt 6 ]; then
# This only works for templates
echo "Usage: $0 <template_name> <container_base> <pip_dependencies> <host_build_dir> [<special_pip_deps>]" >&2
echo "Example: $0 fireworks python:3.9-slim 'fastapi uvicorn' /path/to/build/dir" >&2
echo "Usage: $0 <template_or_config> <image_name> <container_base> <build_file_path> <host_build_dir> <pip_dependencies> [<special_pip_deps>]" >&2
exit 1
fi
special_pip_deps="$6"
set -euo pipefail
template_name="$1"
container_base=$2
build_file_path=$3
host_build_dir=$4
pip_dependencies=$5
template_or_config="$1"
image_name="$2"
container_base="$3"
build_file_path="$4"
host_build_dir="$5"
pip_dependencies="$6"
special_pip_deps="$7"
# Define color codes
RED='\033[0;31m'
@@ -147,14 +147,16 @@ RUN pip install --no-cache $models_mount
EOF
fi
add_to_container << EOF
# This would be good in production but for debugging flexibility lets not add it right now
# We need a more solid production ready entrypoint.sh anyway
#
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "$template_name"]
# if template_or_config ends with .yaml, it is not a template and we should not use the --template flag
if [[ "$template_or_config" != *.yaml ]]; then
add_to_container << EOF
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "$template_or_config"]
EOF
else
add_to_container << EOF
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server"]
EOF
fi
printf "Containerfile created successfully in $TEMP_DIR/Containerfile\n\n"
cat $TEMP_DIR/Containerfile
@@ -174,7 +176,9 @@ if command -v selinuxenabled &>/dev/null && selinuxenabled; then
fi
# Set version tag based on PyPI version
if [ -n "$TEST_PYPI_VERSION" ]; then
if [ -n "$PYPI_VERSION" ]; then
version_tag="$PYPI_VERSION"
elif [ -n "$TEST_PYPI_VERSION" ]; then
version_tag="test-$TEST_PYPI_VERSION"
elif [[ -n "$LLAMA_STACK_DIR" || -n "$LLAMA_MODELS_DIR" ]]; then
version_tag="dev"
@@ -184,8 +188,7 @@ else
fi
# Add version tag to image name
build_name="distribution-$template_name"
image_tag="$build_name:$version_tag"
image_tag="$image_name:$version_tag"
# Detect platform architecture
ARCH=$(uname -m)

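Two practical effects of the script changes above: the --template flag is only baked into the container entrypoint when a template name (not a .yaml path) was passed, and PYPI_VERSION now takes precedence when the image tag is chosen. A hedged example of pinning the tag; the version number is a placeholder:

    # pin the image tag to a released version instead of a test/dev tag
    PYPI_VERSION=0.1.0 llama stack build --template fireworks --image-type container
    # the resulting image is tagged <image_name>:0.1.0
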

@@ -30,8 +30,8 @@ if [ $# -lt 3 ]; then
exit 1
fi
build_name="$1"
container_image="localhost/distribution-$build_name"
image_name="$1"
container_image="localhost/$image_name"
shift
yaml_config="$1"
@@ -76,13 +76,15 @@ if [ -n "$LLAMA_CHECKPOINT_DIR" ]; then
CONTAINER_OPTS="$CONTAINER_OPTS --gpus=all"
fi
version_tag="latest"
if [ -n "$PYPI_VERSION" ]; then
version_tag="$PYPI_VERSION"
elif [ -n "$LLAMA_STACK_DIR" ]; then
version_tag="dev"
elif [ -n "$TEST_PYPI_VERSION" ]; then
version_tag="test-$TEST_PYPI_VERSION"
else
URL="https://pypi.org/pypi/llama-stack/json"
version_tag=$(curl -s $URL | jq -r '.info.version')
fi
$CONTAINER_BINARY run $CONTAINER_OPTS -it \
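
start_container.sh now receives the image name verbatim instead of prepending distribution- itself, and it too prefers PYPI_VERSION when picking the tag. A rough invocation with placeholder arguments, assuming the third positional argument is still the port:

    distribution/start_container.sh distribution-fireworks /path/to/run.yaml 5000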