From 3b807912d258bb7370d5a9fca56cd50cb70d76d3 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Thu, 26 Sep 2024 23:11:15 -0700
Subject: [PATCH] remove configure outside for docker

---
 llama_stack/cli/stack/configure.py          | 40 +++---------
 .../distribution/configure_container.sh     | 34 ----------
 llama_stack/distribution/server/server.py   |  5 +-
 llama_stack/distribution/start_container.sh | 62 -------------------
 4 files changed, 12 insertions(+), 129 deletions(-)
 delete mode 100755 llama_stack/distribution/configure_container.sh
 delete mode 100755 llama_stack/distribution/start_container.sh

diff --git a/llama_stack/cli/stack/configure.py b/llama_stack/cli/stack/configure.py
index 135962d4d..76e2c5ca0 100644
--- a/llama_stack/cli/stack/configure.py
+++ b/llama_stack/cli/stack/configure.py
@@ -77,37 +77,15 @@ class StackConfigure(Subcommand):
             self._configure_llama_distribution(build_config, args.output_dir)
             return
-
-        # if we get here, we need to try to find the docker image
-        cprint(
-            f"Could not find {build_config_file}. Trying docker image name instead...",
-            color="green",
-        )
-        docker_image = args.config
-        builds_dir = BUILDS_BASE_DIR / ImageType.docker.value
-        if args.output_dir:
-            builds_dir = Path(output_dir)
-        os.makedirs(builds_dir, exist_ok=True)
-
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/configure_container.sh"
-        )
-        script_args = [script, docker_image, str(builds_dir)]
-
-        return_code = run_with_pty(script_args)
-
-        # we have regenerated the build config file with script, now check if it exists
-        if return_code != 0:
-            self.parser.error(
-                f"Failed to configure container {docker_image} with return code {return_code}. Please run `llama stack build first`. "
-            )
-            return
-
-        build_name = docker_image.removeprefix("llamastack-")
-        saved_file = str(builds_dir / f"{build_name}-run.yaml")
-        cprint(
-            f"YAML configuration has been written to {saved_file}. You can now run `llama stack run {saved_file}`",
-            color="green",
+
+        # if we get here, we need to prompt user to try configure inside docker image
+        self.parser.error(
+            f"""
+            Could not find {build_config_file}. Did you download a docker image?
+            Try running `docker run -it --entrypoint "/bin/bash" <docker_image_name>`
+            `llama stack configure llamastack-build.yaml --output-dir ./`
+            to set a new run configuration file.
+            """,
         )
         return
diff --git a/llama_stack/distribution/configure_container.sh b/llama_stack/distribution/configure_container.sh
deleted file mode 100755
index 56e45db7e..000000000
--- a/llama_stack/distribution/configure_container.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-DOCKER_BINARY=${DOCKER_BINARY:-docker}
-DOCKER_OPTS=${DOCKER_OPTS:-}
-
-set -euo pipefail
-
-error_handler() {
-  echo "Error occurred in script at line: ${1}" >&2
-  exit 1
-}
-
-trap 'error_handler ${LINENO}' ERR
-
-if [ $# -lt 2 ]; then
-  echo "Usage: $0 <docker_image> <host_build_dir>"
-  exit 1
-fi
-
-docker_image="$1"
-host_build_dir="$2"
-container_build_dir="/app/builds"
-
-set -x
-$DOCKER_BINARY run $DOCKER_OPTS -it \
-  -v $host_build_dir:$container_build_dir \
-  $docker_image \
-  llama stack configure ./llamastack-build.yaml --output-dir $container_build_dir
diff --git a/llama_stack/distribution/server/server.py b/llama_stack/distribution/server/server.py
index 7a3e6276c..d665226cf 100644
--- a/llama_stack/distribution/server/server.py
+++ b/llama_stack/distribution/server/server.py
@@ -410,8 +410,9 @@ async def resolve_impls_with_routing(run_config: StackRunConfig) -> Dict[Api, An
     return impls, specs
 
 
-def main(yaml_config: str, port: int = 5000, disable_ipv6: bool = False):
-    with open(yaml_config, "r") as fp:
+def main(default_yaml: str, port: int = 5000, disable_ipv6: bool = False, config: Optional[str] = None):
+    config_file = config or default_yaml
+    with open(config_file, "r") as fp:
         config = StackRunConfig(**yaml.safe_load(fp))
 
     app = FastAPI()
diff --git a/llama_stack/distribution/start_container.sh b/llama_stack/distribution/start_container.sh
deleted file mode 100755
index ee581cac4..000000000
--- a/llama_stack/distribution/start_container.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-DOCKER_BINARY=${DOCKER_BINARY:-docker}
-DOCKER_OPTS=${DOCKER_OPTS:-}
-LLAMA_CHECKPOINT_DIR=${LLAMA_CHECKPOINT_DIR:-}
-
-set -euo pipefail
-
-RED='\033[0;31m'
-NC='\033[0m' # No Color
-
-error_handler() {
-  echo "Error occurred in script at line: ${1}" >&2
-  exit 1
-}
-
-trap 'error_handler ${LINENO}' ERR
-
-if [ $# -lt 3 ]; then
-  echo "Usage: $0 <build_name> <yaml_config> <port>"
-  exit 1
-fi
-
-build_name="$1"
-docker_image="llamastack-$build_name"
-shift
-
-yaml_config="$1"
-shift
-
-port="$1"
-shift
-
-set -x
-
-if [ -n "$LLAMA_CHECKPOINT_DIR" ]; then
-  $DOCKER_BINARY run $DOCKER_OPTS -it \
-    -p $port:$port \
-    -v "$yaml_config:/app/config.yaml" \
-    -v "$LLAMA_CHECKPOINT_DIR:/root/.llama" \
-    --gpus=all \
-    $docker_image \
-    python -m llama_stack.distribution.server.server \
-    --yaml_config /app/config.yaml \
-    --port $port "$@"
-fi
-
-if [ -z "$LLAMA_CHECKPOINT_DIR" ]; then
-  $DOCKER_BINARY run $DOCKER_OPTS -it \
-    -p $port:$port \
-    -v "$yaml_config:/app/config.yaml" \
-    $docker_image \
-    python -m llama_stack.distribution.server.server \
-    --yaml_config /app/config.yaml \
-    --port $port "$@"
-fi
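
For reference, the configure-inside-the-container flow that the new error message points to could look roughly like the sketch below. The image name (llamastack-local-gpu), the /app/builds mount point (borrowed from the deleted configure_container.sh), and the resulting run-config filename are illustrative assumptions, not details fixed by this patch:

  # Open a shell in the distribution image, mounting the host's current directory
  # so the generated run config is preserved outside the container.
  docker run -it --entrypoint "/bin/bash" -v "$(pwd):/app/builds" llamastack-local-gpu

  # Inside the container: regenerate the run configuration from the bundled build file.
  llama stack configure llamastack-build.yaml --output-dir /app/builds

  # Then start the stack with the generated config (filename is an assumed example).
  llama stack run ./local-gpu-run.yaml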