From 0f10de04ba46a19d2792582ad3cb530253891e7e Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Mon, 30 Sep 2024 13:49:26 -0700
Subject: [PATCH] address comments, update output msg

---
 llama_stack/cli/stack/build.py                       |  5 +++++
 llama_stack/distribution/build_container.sh          |  2 +-
 llama_stack/distribution/configure_container.sh      |  4 +++-
 llama_stack/distribution/server/server.py            |  2 +-
 .../templates/docker/llamastack-local-gpu/build.yaml | 11 +++++++++++
 5 files changed, 21 insertions(+), 3 deletions(-)
 create mode 100644 llama_stack/distribution/templates/docker/llamastack-local-gpu/build.yaml

diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index fc2341f5f..324107269 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -118,6 +118,11 @@ class StackBuild(Subcommand):
                 f"You can now run `llama stack configure {build_config.name}`",
                 color="green",
             )
+        else:
+            cprint(
+                f"You can now run `docker run -it -p 5000:5000 llamastack-{build_config.name}` or `llama stack configure llamastack-{build_config.name}`",
+                color="green",
+            )
 
     def _run_template_list_cmd(self, args: argparse.Namespace) -> None:
         import json
diff --git a/llama_stack/distribution/build_container.sh b/llama_stack/distribution/build_container.sh
index bfd22fe9f..cf5e700e5 100755
--- a/llama_stack/distribution/build_container.sh
+++ b/llama_stack/distribution/build_container.sh
@@ -128,4 +128,4 @@ set -x
 $DOCKER_BINARY build $DOCKER_OPTS -t $image_name -f "$TEMP_DIR/Dockerfile" "$REPO_DIR" $mounts
 set +x
 
-echo "Success! You can run it with: $DOCKER_BINARY run -p 8000:8000 $image_name"
+echo "Success! You can run it with: $DOCKER_BINARY $DOCKER_OPTS run -p 5000:5000 $image_name"
diff --git a/llama_stack/distribution/configure_container.sh b/llama_stack/distribution/configure_container.sh
index 0b4aabfa6..9264e21bc 100755
--- a/llama_stack/distribution/configure_container.sh
+++ b/llama_stack/distribution/configure_container.sh
@@ -37,4 +37,6 @@ $DOCKER_BINARY run $DOCKER_OPTS -it \
   $docker_image \
   "stack" \
   "configure" \
-  "./llamastack-build.yaml"
+  "./llamastack-build.yaml" \
+  "--output-dir" \
+  "$container_build_dir"
diff --git a/llama_stack/distribution/server/server.py b/llama_stack/distribution/server/server.py
index 5686cb97e..28301264c 100644
--- a/llama_stack/distribution/server/server.py
+++ b/llama_stack/distribution/server/server.py
@@ -409,7 +409,7 @@ async def resolve_impls_with_routing(run_config: StackRunConfig) -> Dict[Api, An
 
 
 def main(
-    yaml_config: str = "./llamastack-run.yaml",
+    yaml_config: str = "llamastack-run.yaml",
     port: int = 5000,
     disable_ipv6: bool = False,
 ):
diff --git a/llama_stack/distribution/templates/docker/llamastack-local-gpu/build.yaml b/llama_stack/distribution/templates/docker/llamastack-local-gpu/build.yaml
new file mode 100644
index 000000000..11d1ac01c
--- /dev/null
+++ b/llama_stack/distribution/templates/docker/llamastack-local-gpu/build.yaml
@@ -0,0 +1,11 @@
+name: local-gpu
+distribution_spec:
+  description: local meta reference
+  docker_image: null
+  providers:
+    inference: meta-reference
+    safety: meta-reference
+    agents: meta-reference
+    memory: meta-reference
+    telemetry: meta-reference
+image_type: docker
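
For reference, a minimal usage sketch of the two commands the updated docker-build message now points to. The image name llamastack-local-gpu is taken from the new build.yaml template and stands in for llamastack-{build_config.name}; the commands themselves mirror the strings added in this patch.

# Run the built image directly; port 5000 matches the server's default port
docker run -it -p 5000:5000 llamastack-local-gpu

# Or configure the distribution first; inside the container, configure_container.sh
# now also passes --output-dir "$container_build_dir" for the generated run config
llama stack configure llamastack-local-gpu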