diff --git a/README.md b/README.md
index 01abb0b3e..936876708 100644
--- a/README.md
+++ b/README.md
@@ -82,7 +82,7 @@ $CONDA_PREFIX/bin/pip install -e .
 
 ## The Llama CLI
 
-The `llama` CLI makes it easy to work with the Llama Stack set of tools, including installing and running Distributions, downloading models, studying model prompt formats, etc. Please see the [CLI reference](docs/cli_reference.md) for details. Please see the [Getting Started](docs/getting_started.md) guide for running a Llama Stack server.
+The `llama` CLI makes it easy to work with the Llama Stack set of tools, including installing and running Distributions, downloading models, studying model prompt formats, etc. Please see the [CLI reference](docs/cli_reference.md) for details. Please see the [Getting Started](docs/getting_started.md) guide for running a Llama Stack server.
 
 ## Llama Stack Client SDK
 
diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index 528aa290a..31cf991be 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -74,8 +74,8 @@ class StackBuild(Subcommand):
         self.parser.add_argument(
             "--image-type",
             type=str,
-            help="Image Type to use for the build. This can be either conda or docker. If not specified, will use conda by default",
-            default="conda",
+            help="Image Type to use for the build. This can be either conda or docker. If not specified, will use the image type from the template config.",
+            choices=["conda", "docker"],
         )
 
     def _run_stack_build_command_from_build_config(
@@ -183,7 +183,8 @@ class StackBuild(Subcommand):
             with open(build_path, "r") as f:
                 build_config = BuildConfig(**yaml.safe_load(f))
             build_config.name = args.name
-            build_config.image_type = args.image_type
+            if args.image_type:
+                build_config.image_type = args.image_type
             self._run_stack_build_command_from_build_config(build_config)
             return
 