From c4987bc349bf9319bbe17ac7a201121cf4b34312 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Han?= Date: Mon, 24 Feb 2025 19:18:52 +0100 Subject: [PATCH] fix: avoid failure when no special pip deps and better exit (#1228) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # What does this PR do? When building providers in a virtual environment or containers, special pip dependencies may not always be provided (e.g., for Ollama). The check should only fail if the required number of arguments is missing. Currently, two arguments are mandatory: 1. Environment name 2. Pip dependencies Additionally, return statements were replaced with sys.exit(1) in error conditions to ensure immediate termination on critical failures. Error handling in the stack build process was also improved to guarantee the program exits with status 1 when facing configuration issues or build failures. Signed-off-by: Sébastien Han [//]: # (If resolving an issue, uncomment and update the line below) [//]: # (Closes #[issue-number]) ## Test Plan This command shouldn't fail: ``` llama stack build --template ollama --image-type venv ``` [//]: # (## Documentation) Signed-off-by: Sébastien Han --- llama_stack/cli/stack/_build.py | 17 +++++++++-------- llama_stack/distribution/build_container.sh | 2 +- llama_stack/distribution/build_venv.sh | 6 +++--- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/llama_stack/cli/stack/_build.py b/llama_stack/cli/stack/_build.py index 97d8900df..96382d428 100644 --- a/llama_stack/cli/stack/_build.py +++ b/llama_stack/cli/stack/_build.py @@ -9,6 +9,7 @@ import importlib.resources import json import os import shutil +import sys import textwrap from functools import lru_cache from pathlib import Path @@ -79,7 +80,7 @@ def run_stack_build_command(args: argparse.Namespace) -> None: f"Could not find template {args.template}. 
Please run `llama stack build --list-templates` to check out the available templates", color="red", ) - return + sys.exit(1) build_config = available_templates[args.template] if args.image_type: build_config.image_type = args.image_type @@ -88,7 +89,7 @@ def run_stack_build_command(args: argparse.Namespace) -> None: f"Please specify a image-type (container | conda | venv) for {args.template}", color="red", ) - return + sys.exit(1) elif not args.config and not args.template: name = prompt( "> Enter a name for your Llama Stack (e.g. my-local-stack): ", @@ -169,14 +170,14 @@ def run_stack_build_command(args: argparse.Namespace) -> None: f"Could not parse config file {args.config}: {e}", color="red", ) - return + sys.exit(1) if build_config.image_type == ImageType.container.value and not args.image_name: cprint( "Please specify --image-name when building a container from a config file", color="red", ) - return + sys.exit(1) if args.print_deps_only: print(f"# Dependencies for {args.template or args.config or image_name}") @@ -195,18 +196,18 @@ def run_stack_build_command(args: argparse.Namespace) -> None: template_name=args.template, ) - except Exception as exc: + except (Exception, RuntimeError) as exc: cprint( f"Error building stack: {exc}", color="red", ) - return + sys.exit(1) if run_config is None: cprint( "Run config path is empty", color="red", ) - return + sys.exit(1) if args.run: run_config = Path(run_config) @@ -312,7 +313,7 @@ def _run_stack_build_command_from_build_config( template_or_config=template_name or config_path, ) if return_code != 0: - return + raise RuntimeError(f"Failed to build image {image_name}") if template_name: # copy run.yaml from template to build_dir instead of generating it again diff --git a/llama_stack/distribution/build_container.sh b/llama_stack/distribution/build_container.sh index 5f595af2c..08941a538 100755 --- a/llama_stack/distribution/build_container.sh +++ b/llama_stack/distribution/build_container.sh @@ -34,7 +34,7 @@ 
container_base="$3" build_file_path="$4" host_build_dir="$5" pip_dependencies="$6" -special_pip_deps="$7" +special_pip_deps="${7:-}" # Define color codes diff --git a/llama_stack/distribution/build_venv.sh b/llama_stack/distribution/build_venv.sh index f973fe955..52c5c7051 100755 --- a/llama_stack/distribution/build_venv.sh +++ b/llama_stack/distribution/build_venv.sh @@ -25,7 +25,7 @@ if [ -n "$LLAMA_MODELS_DIR" ]; then echo "Using llama-models-dir=$LLAMA_MODELS_DIR" fi -if [ "$#" -lt 3 ]; then +if [ "$#" -lt 2 ]; then echo "Usage: $0 <env_name> <pip_dependencies> [<special_pip_deps>]" >&2 echo "Example: $0 mybuild ./my-stack-build.yaml 'numpy pandas scipy'" >&2 exit 1 @@ -74,8 +74,8 @@ run() { local env_name="$1" local pip_dependencies="$2" local special_pip_deps="$3" - - if [ -n "$UV_SYSTEM_PYTHON" ] || [ "$env_name" == "__system__" ]; then + + if [ -n "$UV_SYSTEM_PYTHON" ] || [ "$env_name" == "__system__" ]; then echo "Installing dependencies in system Python environment" # if env == __system__, ensure we set UV_SYSTEM_PYTHON export UV_SYSTEM_PYTHON=1