remove configure outside for docker

Xi Yan 2024-09-26 23:11:15 -07:00
parent 0ad0a15810
commit 3b807912d2
4 changed files with 12 additions and 129 deletions

llama_stack/cli/stack/configure.py

@@ -78,36 +78,14 @@ class StackConfigure(Subcommand):
             self._configure_llama_distribution(build_config, args.output_dir)
             return
 
-        # if we get here, we need to try to find the docker image
-        cprint(
-            f"Could not find {build_config_file}. Trying docker image name instead...",
-            color="green",
-        )
-        docker_image = args.config
-        builds_dir = BUILDS_BASE_DIR / ImageType.docker.value
-        if args.output_dir:
-            builds_dir = Path(output_dir)
-        os.makedirs(builds_dir, exist_ok=True)
-
-        script = pkg_resources.resource_filename(
-            "llama_stack", "distribution/configure_container.sh"
-        )
-        script_args = [script, docker_image, str(builds_dir)]
-
-        return_code = run_with_pty(script_args)
-
-        # we have regenerated the build config file with script, now check if it exists
-        if return_code != 0:
-            self.parser.error(
-                f"Failed to configure container {docker_image} with return code {return_code}. Please run `llama stack build first`. "
-            )
-            return
-
-        build_name = docker_image.removeprefix("llamastack-")
-        saved_file = str(builds_dir / f"{build_name}-run.yaml")
-        cprint(
-            f"YAML configuration has been written to {saved_file}. You can now run `llama stack run {saved_file}`",
-            color="green",
-        )
+        # if we get here, we need to prompt user to try configure inside docker image
+        self.parser.error(
+            f"""
+            Could not find {build_config_file}. Did you download a docker image?
+            Try running `docker run -it --entrypoint "/bin/bash" <image_name>`
+            `llama stack configure llamastack-build.yaml --output-dir ./`
+            to set a new run configuration file.
+            """,
+        )
         return
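
With the container-configure path removed from the CLI, the flow the new error message points to is roughly the following. The image name below is only an example; use whatever image `llama stack build` produced:

  # start a shell inside the distribution image (image name is illustrative)
  docker run -it --entrypoint "/bin/bash" llamastack-local-gpu
  # then, inside the container, regenerate the run configuration
  llama stack configure llamastack-build.yaml --output-dir ./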

llama_stack/distribution/configure_container.sh (deleted)

@@ -1,34 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-DOCKER_BINARY=${DOCKER_BINARY:-docker}
-DOCKER_OPTS=${DOCKER_OPTS:-}
-
-set -euo pipefail
-
-error_handler() {
-  echo "Error occurred in script at line: ${1}" >&2
-  exit 1
-}
-
-trap 'error_handler ${LINENO}' ERR
-
-if [ $# -lt 2 ]; then
-  echo "Usage: $0 <container name> <build file path>"
-  exit 1
-fi
-
-docker_image="$1"
-host_build_dir="$2"
-container_build_dir="/app/builds"
-
-set -x
-$DOCKER_BINARY run $DOCKER_OPTS -it \
-  -v $host_build_dir:$container_build_dir \
-  $docker_image \
-  llama stack configure ./llamastack-build.yaml --output-dir $container_build_dir

llama_stack/distribution/server/server.py

@@ -410,8 +410,9 @@ async def resolve_impls_with_routing(run_config: StackRunConfig) -> Dict[Api, Any]:
     return impls, specs
 
 
-def main(yaml_config: str, port: int = 5000, disable_ipv6: bool = False):
-    with open(yaml_config, "r") as fp:
+def main(default_yaml: str, port: int = 5000, disable_ipv6: bool = False, config: Optional[str] = None):
+    config_file = config or default_yaml
+    with open(config_file, "r") as fp:
         config = StackRunConfig(**yaml.safe_load(fp))
 
     app = FastAPI()
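
The server's main() now takes the build-time YAML as default_yaml plus an optional config override, and whichever path ends up in config_file is the one loaded. A sketch of the resulting invocation, assuming main() is exposed through the module's command-line entry point and that both YAML paths below are placeholders:

  python -m llama_stack.distribution.server.server \
    --default_yaml /app/llamastack-build-run.yaml \
    --config ./my-run.yaml \
    --port 5000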

(deleted shell script that launched the server docker container)

@@ -1,62 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-DOCKER_BINARY=${DOCKER_BINARY:-docker}
-DOCKER_OPTS=${DOCKER_OPTS:-}
-LLAMA_CHECKPOINT_DIR=${LLAMA_CHECKPOINT_DIR:-}
-
-set -euo pipefail
-
-RED='\033[0;31m'
-NC='\033[0m' # No Color
-
-error_handler() {
-  echo "Error occurred in script at line: ${1}" >&2
-  exit 1
-}
-
-trap 'error_handler ${LINENO}' ERR
-
-if [ $# -lt 3 ]; then
-  echo "Usage: $0 <build_name> <yaml_config> <port> <other_args...>"
-  exit 1
-fi
-
-build_name="$1"
-docker_image="llamastack-$build_name"
-shift
-
-yaml_config="$1"
-shift
-
-port="$1"
-shift
-
-set -x
-
-if [ -n "$LLAMA_CHECKPOINT_DIR" ]; then
-  $DOCKER_BINARY run $DOCKER_OPTS -it \
-    -p $port:$port \
-    -v "$yaml_config:/app/config.yaml" \
-    -v "$LLAMA_CHECKPOINT_DIR:/root/.llama" \
-    --gpus=all \
-    $docker_image \
-    python -m llama_stack.distribution.server.server \
-    --yaml_config /app/config.yaml \
-    --port $port "$@"
-fi
-
-if [ -z "$LLAMA_CHECKPOINT_DIR" ]; then
-  $DOCKER_BINARY run $DOCKER_OPTS -it \
-    -p $port:$port \
-    -v "$yaml_config:/app/config.yaml" \
-    $docker_image \
-    python -m llama_stack.distribution.server.server \
-    --yaml_config /app/config.yaml \
-    --port $port "$@"
-fi
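
For reference, the docker invocation this removed script performed (in the branch without a checkpoint directory mounted) boils down to the following; the port, image name, and YAML path are illustrative:

  docker run -it \
    -p 5000:5000 \
    -v ./my-run.yaml:/app/config.yaml \
    llamastack-local-gpu \
    python -m llama_stack.distribution.server.server \
    --yaml_config /app/config.yaml \
    --port 5000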