diff --git a/docs/source/distributions/building_distro.md b/docs/source/distributions/building_distro.md
index 3c375a950..ebf31ec3a 100644
--- a/docs/source/distributions/building_distro.md
+++ b/docs/source/distributions/building_distro.md
@@ -37,7 +37,7 @@ llama stack build
 > Enter a name for your Llama Stack (e.g. my-local-stack): my-stack
 > Enter the image type you want your Llama Stack to be built as (docker or conda): conda
 
-> Enter the target platform you want your Llama Stack to be built for: linux/arm64
+> Enter the target platform you want your Llama Stack to be built for:
 
 Llama Stack is composed of several APIs working together. Let's select
 the provider types (implementations) you want to use for these APIs.
diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index d723d30db..243a1828a 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -80,6 +80,7 @@ class StackBuild(Subcommand):
         self.parser.add_argument(
             "--platform",
             type=str,
+            default=None,
             help="Platform to use for the build. Required when using docker as image type, defaults to host if no platform is specified",
         )
 
@@ -139,12 +140,7 @@ class StackBuild(Subcommand):
             )
 
             platform = prompt(
-                "> Enter the target platform you want your Llama Stack to be built for: ",
-                validator=Validator.from_callable(
-                    lambda x: len(x) > 0,
-                    error_message="Platform cannot be empty, please enter a platform",
-                ),
-                default="linux/arm64",
+                "> Enter the target platform you want your Llama Stack to be built for: "
             )
 
             cprint(
@@ -189,11 +185,10 @@ class StackBuild(Subcommand):
             )
 
         build_config = BuildConfig(
-            name=name,
-            image_type=image_type,
-            distribution_spec=distribution_spec,
-            platform=platform,
+            name=name, image_type=image_type, distribution_spec=distribution_spec
         )
+        if platform.strip():
+            build_config.platform = platform
         self._run_stack_build_command_from_build_config(build_config)
         return
 
diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py
index bdda0349f..550b77b08 100644
--- a/llama_stack/distribution/build.py
+++ b/llama_stack/distribution/build.py
@@ -121,6 +121,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             str(BUILDS_BASE_DIR / ImageType.docker.value),
             " ".join(normal_deps),
         ]
+        if build_config.platform is not None:
+            args.append(build_config.platform)
     elif build_config.image_type == ImageType.conda.value:
         script = pkg_resources.resource_filename(
             "llama_stack", "distribution/build_conda_env.sh"
diff --git a/llama_stack/distribution/datatypes.py b/llama_stack/distribution/datatypes.py
index e77aa14bf..1b9007492 100644
--- a/llama_stack/distribution/datatypes.py
+++ b/llama_stack/distribution/datatypes.py
@@ -25,7 +25,6 @@ from llama_stack.providers.utils.kvstore.config import KVStoreConfig
 LLAMA_STACK_BUILD_CONFIG_VERSION = "2"
 LLAMA_STACK_RUN_CONFIG_VERSION = "2"
 
-LLAMA_STACK_DEFAULT_PLATFORM = "linux/arm64"
 
 RoutingKey = Union[str, List[str]]
 
@@ -168,7 +167,7 @@ class BuildConfig(BaseModel):
     image_type: str = Field(
         default="conda",
         description="Type of package to build (conda | docker | venv)",
     )
-    platform: str = Field(
-        default=LLAMA_STACK_DEFAULT_PLATFORM,
-        description="The platform for docker image, defaults to linux/arm64",
+    platform: Optional[str] = Field(
+        default=None,
+        description="The platform for docker image",
     )
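
For context, a minimal runnable sketch of the behaviour this patch introduces, assuming pydantic and using a trimmed-down stand-in for BuildConfig plus a hypothetical docker_build_args helper (neither mirrors the full codebase): platform stays None unless a non-empty value is supplied, and the docker build arguments gain one trailing entry only when it is set.

from typing import List, Optional

from pydantic import BaseModel, Field


class BuildConfig(BaseModel):
    # Trimmed-down stand-in for llama_stack.distribution.datatypes.BuildConfig;
    # only the fields relevant to the platform change are kept.
    name: str
    image_type: str = Field(default="conda")
    platform: Optional[str] = Field(default=None)


def docker_build_args(build_config: BuildConfig, base_args: List[str]) -> List[str]:
    # Mirrors the build_image() change: the platform is appended to the
    # docker build script arguments only when one was actually provided.
    args = list(base_args)
    if build_config.platform is not None:
        args.append(build_config.platform)
    return args


# An empty (or whitespace-only) prompt answer leaves platform unset.
config = BuildConfig(name="my-stack", image_type="docker")
answer = "   "
if answer.strip():
    config.platform = answer
assert docker_build_args(config, ["build_image.sh", "my-stack"]) == ["build_image.sh", "my-stack"]

# An explicit platform adds exactly one trailing argument.
config.platform = "linux/amd64"
assert docker_build_args(config, ["build_image.sh", "my-stack"]) == [
    "build_image.sh",
    "my-stack",
    "linux/amd64",
]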