diff --git a/llama_stack/cli/stack/_build.py b/llama_stack/cli/stack/_build.py
index 97d8900df..96382d428 100644
--- a/llama_stack/cli/stack/_build.py
+++ b/llama_stack/cli/stack/_build.py
@@ -9,6 +9,7 @@ import importlib.resources
 import json
 import os
 import shutil
+import sys
 import textwrap
 from functools import lru_cache
 from pathlib import Path
@@ -79,7 +80,7 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
             f"Could not find template {args.template}. Please run `llama stack build --list-templates` to check out the available templates",
             color="red",
         )
-        return
+        sys.exit(1)
     build_config = available_templates[args.template]
     if args.image_type:
         build_config.image_type = args.image_type
@@ -88,7 +89,7 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
                 f"Please specify a image-type (container | conda | venv) for {args.template}",
                 color="red",
             )
-            return
+            sys.exit(1)
     elif not args.config and not args.template:
         name = prompt(
             "> Enter a name for your Llama Stack (e.g. my-local-stack): ",
@@ -169,14 +170,14 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
                     f"Could not parse config file {args.config}: {e}",
                     color="red",
                 )
-                return
+                sys.exit(1)

     if build_config.image_type == ImageType.container.value and not args.image_name:
         cprint(
             "Please specify --image-name when building a container from a config file",
             color="red",
         )
-        return
+        sys.exit(1)

     if args.print_deps_only:
         print(f"# Dependencies for {args.template or args.config or image_name}")
@@ -195,18 +196,18 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
             template_name=args.template,
         )

-    except Exception as exc:
+    except (Exception, RuntimeError) as exc:
         cprint(
             f"Error building stack: {exc}",
             color="red",
         )
-        return
+        sys.exit(1)
     if run_config is None:
         cprint(
             "Run config path is empty",
             color="red",
         )
-        return
+        sys.exit(1)

     if args.run:
         run_config = Path(run_config)
@@ -312,7 +313,7 @@ def _run_stack_build_command_from_build_config(
         template_or_config=template_name or config_path,
     )
     if return_code != 0:
-        return
+        raise RuntimeError(f"Failed to build image {image_name}")

     if template_name:
         # copy run.yaml from template to build_dir instead of generating it again
diff --git a/llama_stack/distribution/build_container.sh b/llama_stack/distribution/build_container.sh
index 5f595af2c..08941a538 100755
--- a/llama_stack/distribution/build_container.sh
+++ b/llama_stack/distribution/build_container.sh
@@ -34,7 +34,7 @@ container_base="$3"
 build_file_path="$4"
 host_build_dir="$5"
 pip_dependencies="$6"
-special_pip_deps="$7"
+special_pip_deps="${7:-}"


 # Define color codes
diff --git a/llama_stack/distribution/build_venv.sh b/llama_stack/distribution/build_venv.sh
index f973fe955..52c5c7051 100755
--- a/llama_stack/distribution/build_venv.sh
+++ b/llama_stack/distribution/build_venv.sh
@@ -25,7 +25,7 @@ if [ -n "$LLAMA_MODELS_DIR" ]; then
   echo "Using llama-models-dir=$LLAMA_MODELS_DIR"
 fi

-if [ "$#" -lt 3 ]; then
+if [ "$#" -lt 2 ]; then
   echo "Usage: $0 <env_name> <pip_dependencies> [<special_pip_deps>]" >&2
   echo "Example: $0 mybuild ./my-stack-build.yaml 'numpy pandas scipy'" >&2
   exit 1
@@ -74,8 +74,8 @@ run() {
   local env_name="$1"
   local pip_dependencies="$2"
   local special_pip_deps="$3"
-
-  if [ -n "$UV_SYSTEM_PYTHON" ] || [ "$env_name" == "__system__" ]; then
+
+  if [ -n "$UV_SYSTEM_PYTHON" ] || [ "$env_name" == "__system__" ]; then
     echo "Installing dependencies in system Python environment"
     # if env == __system__, ensure we set UV_SYSTEM_PYTHON
     export UV_SYSTEM_PYTHON=1