diff --git a/llama_stack/cli/stack/_build.py b/llama_stack/cli/stack/_build.py
index 666c2e6dd..97d8900df 100644
--- a/llama_stack/cli/stack/_build.py
+++ b/llama_stack/cli/stack/_build.py
@@ -23,10 +23,10 @@ from termcolor import cprint
 from llama_stack.cli.table import print_table
 from llama_stack.distribution.build import (
     SERVER_DEPENDENCIES,
-    ImageType,
     build_image,
     get_provider_dependencies,
 )
+from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
 from llama_stack.distribution.datatypes import (
     BuildConfig,
     DistributionSpec,
@@ -37,7 +37,8 @@ from llama_stack.distribution.distribution import get_provider_registry
 from llama_stack.distribution.resolver import InvalidProviderError
 from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR
 from llama_stack.distribution.utils.dynamic import instantiate_class_type
-from llama_stack.distribution.utils.exec import in_notebook
+from llama_stack.distribution.utils.exec import formulate_run_args, in_notebook, run_with_pty
+from llama_stack.distribution.utils.image_types import ImageType
 from llama_stack.providers.datatypes import Api
 
 TEMPLATES_PATH = Path(__file__).parent.parent.parent / "templates"
@@ -186,19 +187,41 @@ def run_stack_build_command(args: argparse.Namespace) -> None:
             print(f"uv pip install {special_dep}")
         return
 
-    _run_stack_build_command_from_build_config(
-        build_config,
-        image_name=image_name,
-        config_path=args.config,
-        template_name=args.template,
-    )
+    try:
+        run_config = _run_stack_build_command_from_build_config(
+            build_config,
+            image_name=image_name,
+            config_path=args.config,
+            template_name=args.template,
+        )
+
+    except Exception as exc:
+        cprint(
+            f"Error building stack: {exc}",
+            color="red",
+        )
+        return
+    if run_config is None:
+        cprint(
+            "Run config path is empty",
+            color="red",
+        )
+        return
+
+    if args.run:
+        run_config = Path(run_config)
+        config_dict = yaml.safe_load(run_config.read_text())
+        config = parse_and_maybe_upgrade_config(config_dict)
+        run_args = formulate_run_args(args.image_type, args.image_name, config, args.template)
+        run_args.extend([run_config, str(os.getenv("LLAMA_STACK_PORT", 8321))])
+        run_with_pty(run_args)
 
 
 def _generate_run_config(
     build_config: BuildConfig,
     build_dir: Path,
     image_name: str,
-) -> None:
+) -> str:
     """
     Generate a run.yaml template file for user to edit from a build.yaml file
     """
@@ -248,6 +271,7 @@ def _generate_run_config(
         f"You can now run your stack with `llama stack run {run_config_file}`",
         color="green",
     )
+    return run_config_file
 
 
 def _run_stack_build_command_from_build_config(
@@ -255,7 +279,7 @@ def _run_stack_build_command_from_build_config(
     image_name: Optional[str] = None,
     template_name: Optional[str] = None,
     config_path: Optional[str] = None,
-) -> None:
+) -> str:
     if build_config.image_type == ImageType.container.value:
         if template_name:
             image_name = f"distribution-{template_name}"
@@ -298,8 +322,9 @@ def _run_stack_build_command_from_build_config(
             shutil.copy(path, run_config_file)
 
         cprint("Build Successful!", color="green")
+        return template_path
     else:
-        _generate_run_config(build_config, build_dir, image_name)
+        return _generate_run_config(build_config, build_dir, image_name)
 
 
 def _run_template_list_cmd() -> None:
diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index 7b17a960a..ceee725e6 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -68,6 +68,13 @@ the build. If not specified, currently active Conda environment will be used if
             help="Print the dependencies for the stack only, without building the stack",
         )
 
+        self.parser.add_argument(
+            "--run",
+            action="store_true",
+            default=False,
+            help="Run the stack after building using the same image type, name, and other applicable arguments",
+        )
+
     def _run_stack_build_command(self, args: argparse.Namespace) -> None:
         # always keep implementation completely silo-ed away from CLI so CLI
         # can be fast to load and reduces dependencies
diff --git a/llama_stack/cli/stack/run.py b/llama_stack/cli/stack/run.py
index 0c9c74518..627ee829a 100644
--- a/llama_stack/cli/stack/run.py
+++ b/llama_stack/cli/stack/run.py
@@ -74,10 +74,6 @@ class StackRun(Subcommand):
         )
 
     def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
-        import importlib.resources
-        import json
-        import subprocess
-
         import yaml
         from termcolor import cprint
 
@@ -87,7 +83,7 @@ class StackRun(Subcommand):
             BUILDS_BASE_DIR,
             DISTRIBS_BASE_DIR,
         )
-        from llama_stack.distribution.utils.exec import run_with_pty
+        from llama_stack.distribution.utils.exec import formulate_run_args, run_with_pty
 
         if not args.config:
             self.parser.error("Must specify a config file to run")
@@ -125,70 +121,7 @@ class StackRun(Subcommand):
         config_dict = yaml.safe_load(config_file.read_text())
         config = parse_and_maybe_upgrade_config(config_dict)
 
-        if args.image_type == ImageType.container.value or config.container_image:
-            script = importlib.resources.files("llama_stack") / "distribution/start_container.sh"
-            image_name = f"distribution-{template_name}" if template_name else config.container_image
-            run_args = [script, image_name]
-        elif args.image_type == ImageType.conda.value:
-            current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
-            image_name = args.image_name or current_conda_env
-            if not image_name:
-                cprint(
-                    "No current conda environment detected, please specify a conda environment name with --image-name",
-                    color="red",
-                )
-                return
-
-            def get_conda_prefix(env_name):
-                # Conda "base" environment does not end with "base" in the
-                # prefix, so should be handled separately.
-                if env_name == "base":
-                    return os.environ.get("CONDA_PREFIX")
-                # Get conda environments info
-                conda_env_info = json.loads(subprocess.check_output(["conda", "info", "--envs", "--json"]).decode())
-                envs = conda_env_info["envs"]
-                for envpath in envs:
-                    if envpath.endswith(env_name):
-                        return envpath
-                return None
-
-            print(f"Using conda environment: {image_name}")
-            conda_prefix = get_conda_prefix(image_name)
-            if not conda_prefix:
-                cprint(
-                    f"Conda environment {image_name} does not exist.",
-                    color="red",
-                )
-                return
-
-            build_file = Path(conda_prefix) / "llamastack-build.yaml"
-            if not build_file.exists():
-                cprint(
-                    f"Build file {build_file} does not exist.\n\nPlease run `llama stack build` or specify the correct conda environment name with --image-name",
-                    color="red",
-                )
-                return
-
-            script = importlib.resources.files("llama_stack") / "distribution/start_conda_env.sh"
-            run_args = [
-                script,
-                image_name,
-            ]
-        else:
-            # else must be venv since that is the only valid option left.
-            current_venv = os.environ.get("VIRTUAL_ENV")
-            venv = args.image_name or current_venv
-            if not venv:
-                cprint(
-                    "No current virtual environment detected, please specify a virtual environment name with --image-name",
-                    color="red",
-                )
-                return
-            script = importlib.resources.files("llama_stack") / "distribution/start_venv.sh"
-            run_args = [
-                script,
-                venv,
-            ]
+        run_args = formulate_run_args(args.image_type, args.image_name, config, template_name)
 
         run_args.extend([str(config_file), str(args.port)])
         if args.disable_ipv6:
diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index 511817de8..2b43b8128 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -7,7 +7,6 @@
 import importlib.resources
 import logging
 import sys
-from enum import Enum
 from pathlib import Path
 from typing import Dict, List
 
@@ -18,6 +17,7 @@ from llama_stack.distribution.datatypes import BuildConfig, Provider
 from llama_stack.distribution.distribution import get_provider_registry
 from llama_stack.distribution.utils.config_dirs import BUILDS_BASE_DIR
 from llama_stack.distribution.utils.exec import run_command, run_with_pty
+from llama_stack.distribution.utils.image_types import ImageType
 from llama_stack.providers.datatypes import Api
 
 log = logging.getLogger(__name__)
@@ -33,12 +33,6 @@ SERVER_DEPENDENCIES = [
 ]
 
 
-class ImageType(Enum):
-    container = "container"
-    conda = "conda"
-    venv = "venv"
-
-
 class ApiInput(BaseModel):
     api: Api
     provider: str
diff --git a/llama_stack/distribution/utils/exec.py b/llama_stack/distribution/utils/exec.py
index e13e59aad..00afdadbe 100644
--- a/llama_stack/distribution/utils/exec.py
+++ b/llama_stack/distribution/utils/exec.py
@@ -12,8 +12,78 @@ import signal
 import subprocess
 import sys
 
+from termcolor import cprint
+
 log = logging.getLogger(__name__)
 
+import importlib
+import json
+from pathlib import Path
+
+from llama_stack.distribution.utils.image_types import ImageType
+
+
+def formulate_run_args(image_type, image_name, config, template_name) -> list:
+    if image_type == ImageType.container.value or config.container_image:
+        script = importlib.resources.files("llama_stack") / "distribution/start_container.sh"
+        image_name = f"distribution-{template_name}" if template_name else config.container_image
+        run_args = [script, image_name]
+    elif image_type == ImageType.conda.value:
+        current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
+        image_name = image_name or current_conda_env
+        if not image_name:
+            cprint(
+                "No current conda environment detected, please specify a conda environment name with --image-name",
+                color="red",
+            )
+            return
+
+        def get_conda_prefix(env_name):
+            # Conda "base" environment does not end with "base" in the
+            # prefix, so should be handled separately.
+            if env_name == "base":
+                return os.environ.get("CONDA_PREFIX")
+            # Get conda environments info
+            conda_env_info = json.loads(subprocess.check_output(["conda", "info", "--envs", "--json"]).decode())
+            envs = conda_env_info["envs"]
+            for envpath in envs:
+                if envpath.endswith(env_name):
+                    return envpath
+            return None
+
+        print(f"Using conda environment: {image_name}")
+        conda_prefix = get_conda_prefix(image_name)
+        if not conda_prefix:
+            cprint(
+                f"Conda environment {image_name} does not exist.",
+                color="red",
+            )
+            return
+
+        build_file = Path(conda_prefix) / "llamastack-build.yaml"
+        if not build_file.exists():
+            cprint(
+                f"Build file {build_file} does not exist.\n\nPlease run `llama stack build` or specify the correct conda environment name with --image-name",
+                color="red",
+            )
+            return
+
+        script = importlib.resources.files("llama_stack") / "distribution/start_conda_env.sh"
+        run_args = [
+            script,
+            image_name,
+        ]
+    else:
+        # else must be venv since that is the only valid option left.
+        current_venv = os.environ.get("VIRTUAL_ENV")
+        venv = image_name or current_venv
+        script = importlib.resources.files("llama_stack") / "distribution/start_venv.sh"
+        run_args = [
+            script,
+            venv,
+        ]
+    return run_args
+
 
 def run_with_pty(command):
     if sys.platform.startswith("win"):
diff --git a/llama_stack/distribution/utils/image_types.py b/llama_stack/distribution/utils/image_types.py
new file mode 100644
index 000000000..1a43b092f
--- /dev/null
+++ b/llama_stack/distribution/utils/image_types.py
@@ -0,0 +1,13 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from enum import Enum
+
+
+class ImageType(Enum):
+    container = "container"
+    conda = "conda"
+    venv = "venv"
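For reference, a minimal sketch of the flow the new `--run` flag enables, pulled together in one place. The helper name `build_then_run` and passing an `argparse.Namespace` plus a run-config path are illustrative assumptions; the calls themselves mirror the tail of `run_stack_build_command` in the diff above.

import os
from pathlib import Path

import yaml

from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
from llama_stack.distribution.utils.exec import formulate_run_args, run_with_pty


def build_then_run(args, run_config_path: str) -> None:
    # Illustrative only: mirrors what `llama stack build --run` does after a successful build.
    run_config = Path(run_config_path)
    config = parse_and_maybe_upgrade_config(yaml.safe_load(run_config.read_text()))
    # Reuse the same image type/name used for the build; the template name selects the container image when relevant.
    run_args = formulate_run_args(args.image_type, args.image_name, config, args.template)
    # The server port defaults to 8321 unless LLAMA_STACK_PORT overrides it.
    run_args.extend([str(run_config), str(os.getenv("LLAMA_STACK_PORT", 8321))])
    run_with_pty(run_args)

Moving `ImageType` into its own `llama_stack/distribution/utils/image_types.py` module appears to be what allows `exec.py` to use the enum without importing `build.py`, which itself imports from `exec.py`, so the shared helper avoids a circular import.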