Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
bake run.yaml inside docker, simplify run

commit 6cd3e4183f (parent cb36be320f)
3 changed files with 19 additions and 21 deletions
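In short: the docker build path no longer needs a separate `llama stack configure` step after building. build_container.sh now runs the configure step itself, copies the generated run.yaml into the image, and sets a default CMD that starts the server from that baked-in file, so a built image can be run directly. A rough sketch of the intended flow after this change (arguments abbreviated; the image name is hypothetical and depends on the build config):

    # build the docker distribution; the build script now generates the run config
    # under tmp/configs and ADDs it into the image as ./llamastack-run.yaml
    llama stack build <build-args>

    # start the server straight from the image; the new default CMD reads the
    # baked-in ./llamastack-run.yaml
    docker run -p 8000:8000 llamastack-<name>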
@@ -113,15 +113,11 @@ class StackBuild(Subcommand):
         if return_code != 0:
             return

-        configure_name = (
-            build_config.name
-            if build_config.image_type == "conda"
-            else (f"llamastack-{build_config.name}")
-        )
-        cprint(
-            f"You can now run `llama stack configure {configure_name}`",
-            color="green",
-        )
+        if build_config.image_type == ImageType.conda.value:
+            cprint(
+                f"You can now run `llama stack configure {build_config.name}`",
+                color="green",
+            )

     def _run_template_list_cmd(self, args: argparse.Namespace) -> None:
         import json
@@ -112,12 +112,6 @@ class StackConfigure(Subcommand):
             )
             return

-            build_name = docker_image.removeprefix("llamastack-")
-            saved_file = str(builds_dir / f"{build_name}-run.yaml")
-            cprint(
-                f"YAML configuration has been written to {saved_file}. You can now run `llama stack run {saved_file}`",
-                color="green",
-            )
             return

     def _configure_llama_distribution(
@@ -173,7 +167,8 @@ class StackConfigure(Subcommand):
             color="blue",
         )

-        cprint(
-            f"You can now run `llama stack run {image_name} --port PORT`",
-            color="green",
-        )
+        if build_config.image_type == "conda":
+            cprint(
+                f"You can now run `llama stack run {image_name} --port PORT`",
+                color="green",
+            )
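The three Python hunks above only touch CLI messaging: the follow-up hints ("You can now run `llama stack configure ...`" after a build, "You can now run `llama stack run ...`" after configure) are now printed only for conda image types, since for docker images the configure step and the run config are handled inside build_container.sh (see the shell hunks below). Roughly, the conda flow keeps its two explicit steps (exact arguments abbreviated):

    # conda image type: hints still guide the user through the remaining steps
    llama stack build <build-args>      # prints: You can now run `llama stack configure <name>`
    llama stack configure <name>        # prints: You can now run `llama stack run <name> --port PORT`

    # docker image type: no hints are printed; the image is self-contained (see above)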
@@ -29,9 +29,12 @@ SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
 REPO_DIR=$(dirname $(dirname "$SCRIPT_DIR"))
 DOCKER_BINARY=${DOCKER_BINARY:-docker}
 DOCKER_OPTS=${DOCKER_OPTS:-}
+REPO_CONFIGS_DIR="$REPO_DIR/tmp/configs"

 TEMP_DIR=$(mktemp -d)

+llama stack configure $build_file_path --output-dir $REPO_CONFIGS_DIR
+
 add_to_docker() {
   local input
   output_file="$TEMP_DIR/Dockerfile"
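Generating the run config into $REPO_DIR/tmp/configs matters because `docker build` is invoked later with "$REPO_DIR" as the build context, and a Dockerfile ADD can only reference files inside that context. A minimal sketch of the precondition this creates for the ADD lines further down (names hypothetical; in the script, build_file_path and build_name come from the caller):

    llama stack configure "$build_file_path" --output-dir "$REPO_DIR/tmp/configs"
    # tmp/configs is now expected to contain <build_name>-run.yaml, which a later
    # ADD copies into the image as ./llamastack-run.yaml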
@@ -99,11 +102,12 @@ add_to_docker <<EOF
 # This would be good in production but for debugging flexibility lets not add it right now
 # We need a more solid production ready entrypoint.sh anyway
 #
-# ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server"]
+CMD ["python", "-m", "llama_stack.distribution.server.server", "./llamastack-run.yaml"]

 EOF

 add_to_docker "ADD tmp/configs/$(basename "$build_file_path") ./llamastack-build.yaml"
+add_to_docker "ADD tmp/configs/$build_name-run.yaml ./llamastack-run.yaml"

 printf "Dockerfile created successfully in $TEMP_DIR/Dockerfile"
 cat $TEMP_DIR/Dockerfile
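The surrounding comment keeps ENTRYPOINT out for now, presumably because a CMD is easy to override at `docker run` time (useful for debugging), while an ENTRYPOINT would pin the container to the server process. With the CMD line in the heredoc plus the two add_to_docker calls, the tail of the generated $TEMP_DIR/Dockerfile should come out roughly like this (build name hypothetical; having the ADD lines after CMD is fine, since CMD placement is position-independent):

    CMD ["python", "-m", "llama_stack.distribution.server.server", "./llamastack-run.yaml"]
    ADD tmp/configs/my-distro-build.yaml ./llamastack-build.yaml
    ADD tmp/configs/my-distro-run.yaml ./llamastack-run.yaml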
@@ -124,7 +128,10 @@ set -x
 $DOCKER_BINARY build $DOCKER_OPTS -t $image_name -f "$TEMP_DIR/Dockerfile" "$REPO_DIR" $mounts
 set +x

-echo "You can run it with: podman run -p 8000:8000 $image_name"
+echo "Success! You can run it with: $DOCKER_BINARY run -p 8000:8000 $image_name"

 echo "Checking image builds..."
 $DOCKER_BINARY run $DOCKER_OPTS -it $image_name cat llamastack-build.yaml
+
+echo "Checking image run..."
+$DOCKER_BINARY run $DOCKER_OPTS -it $image_name cat llamastack-run.yaml
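The two new `docker run ... cat` calls are quick smoke checks that both YAML files actually landed where the baked-in CMD expects them (the image's working directory). The same check works by hand against any built image, e.g. (image name hypothetical):

    docker run -it llamastack-my-distro cat llamastack-run.yaml   # should print the generated run config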