diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index 729bd3ff1..ca4c0d8ce 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -56,9 +56,8 @@ class StackBuild(Subcommand):
             "--image-name",
             type=str,
             help=textwrap.dedent(
-                """[for image-type=conda] Name of the conda environment to use for the build. If
-not specified, currently active Conda environment will be used. If no Conda
-environment is active, you must specify a name.
+                """[for image-type=conda|venv] Name of the conda or virtual environment to use for
+the build. If not specified, the currently active Conda environment will be used if found.
                 """
             ),
             default=None,
diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index 9422c8457..511817de8 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -126,7 +126,6 @@ def build_image(
         args = [
             script,
             str(image_name),
-            str(build_file_path),
             " ".join(normal_deps),
         ]
 
diff --git a/llama_stack/distribution/build_venv.sh b/llama_stack/distribution/build_venv.sh
index 3cb290bb7..0b0bffcfd 100755
--- a/llama_stack/distribution/build_venv.sh
+++ b/llama_stack/distribution/build_venv.sh
@@ -24,23 +24,21 @@ if [ -n "$LLAMA_MODELS_DIR" ]; then
 fi
 
 if [ "$#" -lt 3 ]; then
-  echo "Usage: $0 <build_name> <build_file_path> <pip_dependencies> [<special_pip_deps>]" >&2
+  echo "Usage: $0 <build_name> <pip_dependencies> [<special_pip_deps>]" >&2
   echo "Example: $0 mybuild ./my-stack-build.yaml 'numpy pandas scipy'" >&2
   exit 1
 fi
 
-special_pip_deps="$4"
+special_pip_deps="$3"
 
 set -euo pipefail
 
 build_name="$1"
 env_name="llamastack-$build_name"
-build_file_path="$2"
-pip_dependencies="$3"
+pip_dependencies="$2"
 
 # Define color codes
 RED='\033[0;31m'
-GREEN='\033[0;32m'
 NC='\033[0m' # No Color
 
 # this is set if we actually create a new conda in which case we need to clean up
@@ -49,34 +47,63 @@ ENVNAME=""
 SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
 source "$SCRIPT_DIR/common.sh"
 
+# pre-run checks to make sure we can proceed with the installation
+pre_run_checks() {
+  local env_name="$1"
+
+  if ! is_command_available uv; then
+    echo "uv is not installed, trying to install it."
+    if ! is_command_available pip; then
+      echo "pip is not installed, cannot automatically install 'uv'."
+      echo "Follow this link to install it:"
+      echo "https://docs.astral.sh/uv/getting-started/installation/"
+      exit 1
+    else
+      pip install uv
+    fi
+  fi
+
+  # checking if an environment with the same name already exists
+  if [ -d "$env_name" ]; then
+    echo "Environment '$env_name' already exists, re-using it."
+  fi
+}
+
 run() {
   local env_name="$1"
   local pip_dependencies="$2"
   local special_pip_deps="$3"
 
-  pip install uv
+  echo "Using virtual environment $env_name"
+  uv venv "$env_name"
+  # shellcheck source=/dev/null
+  source "$env_name/bin/activate"
 
   if [ -n "$TEST_PYPI_VERSION" ]; then
     # these packages are damaged in test-pypi, so install them first
     uv pip install fastapi libcst
+    # shellcheck disable=SC2086
+    # we are building a command line so word splitting is expected
     uv pip install --extra-index-url https://test.pypi.org/simple/ \
-      llama-models==$TEST_PYPI_VERSION llama-stack==$TEST_PYPI_VERSION \
+      llama-models=="$TEST_PYPI_VERSION" llama-stack=="$TEST_PYPI_VERSION" \
       $pip_dependencies
     if [ -n "$special_pip_deps" ]; then
       IFS='#' read -ra parts <<<"$special_pip_deps"
       for part in "${parts[@]}"; do
         echo "$part"
+        # shellcheck disable=SC2086
+        # we are building a command line so word splitting is expected
         uv pip install $part
       done
     fi
   else
-    # Re-installing llama-stack in the new conda environment
+    # Re-installing llama-stack in the new virtual environment
     if [ -n "$LLAMA_STACK_DIR" ]; then
       if [ ! -d "$LLAMA_STACK_DIR" ]; then
-        printf "${RED}Warning: LLAMA_STACK_DIR is set but directory does not exist: $LLAMA_STACK_DIR${NC}\n" >&2
+        printf "${RED}Warning: LLAMA_STACK_DIR is set but directory does not exist: %s${NC}\n" "$LLAMA_STACK_DIR" >&2
         exit 1
       fi
-      printf "Installing from LLAMA_STACK_DIR: $LLAMA_STACK_DIR\n"
+      printf "Installing from LLAMA_STACK_DIR: %s\n" "$LLAMA_STACK_DIR"
       uv pip install --no-cache-dir -e "$LLAMA_STACK_DIR"
     else
       uv pip install --no-cache-dir llama-stack
@@ -84,26 +111,31 @@ run() {
 
     if [ -n "$LLAMA_MODELS_DIR" ]; then
       if [ ! -d "$LLAMA_MODELS_DIR" ]; then
-        printf "${RED}Warning: LLAMA_MODELS_DIR is set but directory does not exist: $LLAMA_MODELS_DIR${NC}\n" >&2
+        printf "${RED}Warning: LLAMA_MODELS_DIR is set but directory does not exist: %s${NC}\n" "$LLAMA_MODELS_DIR" >&2
         exit 1
       fi
-      printf "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR\n"
+      printf "Installing from LLAMA_MODELS_DIR: %s\n" "$LLAMA_MODELS_DIR"
       uv pip uninstall llama-models
       uv pip install --no-cache-dir -e "$LLAMA_MODELS_DIR"
     fi
 
     # Install pip dependencies
     printf "Installing pip dependencies\n"
+    # shellcheck disable=SC2086
+    # we are building a command line so word splitting is expected
     uv pip install $pip_dependencies
     if [ -n "$special_pip_deps" ]; then
       IFS='#' read -ra parts <<<"$special_pip_deps"
       for part in "${parts[@]}"; do
         echo "$part"
+        # shellcheck disable=SC2086
+        # we are building a command line so word splitting is expected
         uv pip install $part
       done
     fi
   fi
 }
 
+pre_run_checks "$env_name"
 run "$env_name" "$pip_dependencies" "$special_pip_deps"
diff --git a/llama_stack/distribution/common.sh b/llama_stack/distribution/common.sh
index 963eb395b..171023389 100755
--- a/llama_stack/distribution/common.sh
+++ b/llama_stack/distribution/common.sh
@@ -38,3 +38,8 @@ setup_cleanup_handlers() {
 
   conda deactivate
 }
+
+# check if a command is present
+is_command_available() {
+  command -v "$1" &>/dev/null
+}