mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 18:00:36 +00:00
build(container): misc improvements (#1291)
# What does this PR do?
See individual commit messages.
[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])
## Test Plan
Apply this diff:
```
diff --git a/llama_stack/templates/ollama/build.yaml b/llama_stack/templates/ollama/build.yaml
index da33b8d5..4a702f6f 100644
--- a/llama_stack/templates/ollama/build.yaml
+++ b/llama_stack/templates/ollama/build.yaml
@@ -28,5 +28,5 @@ distribution_spec:
- remote::tavily-search
- inline::code-interpreter
- inline::rag-runtime
- - remote::model-context-protocol
+ container_image: "registry.access.redhat.com/ubi9"
image_type: conda
```
Then run:
```
CONTAINER_BINARY=podman llama stack build --template ollama --image-type container --image-name registry.access.redhat.com/ubi9
Containerfile created successfully in /var/folders/mq/rnm5w_7s2d3fxmtkx02knvhm0000gn/T/tmp.I7E5V6zbVI/Containerfile
FROM registry.access.redhat.com/ubi9
WORKDIR /app
RUN dnf -y update && dnf install -y iputils net-tools wget vim-minimal python3.11 python3.11-pip python3.11-wheel python3.11-setuptools && ln -s /bin/pip3.11 /bin/pip && ln -s /bin/python3.11 /bin/python && dnf clean all
ENV UV_SYSTEM_PYTHON=1
RUN pip install uv
RUN uv pip install --no-cache ollama nltk opentelemetry-sdk aiosqlite matplotlib datasets sqlite-vec scipy chromadb-client psycopg2-binary numpy scikit-learn openai redis pandas tqdm blobfile sentencepiece aiohttp requests pillow pymongo transformers autoevals opentelemetry-exporter-otlp-proto-http pypdf chardet aiosqlite fastapi fire httpx uvicorn
RUN uv pip install --no-cache llama-stack
RUN pip uninstall -y uv
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "ollama"]
# Allows running as non-root user
RUN mkdir -p /.llama /.cache
RUN chmod -R g+rw /app /.llama /.cache
PWD: /Users/leseb/Documents/AI/llama-stack
Containerfile: /var/folders/mq/rnm5w_7s2d3fxmtkx02knvhm0000gn/T/tmp.I7E5V6zbVI/Containerfile
+ podman build --platform linux/arm64 -t distribution-ollama:0.1.4 -f /var/folders/mq/rnm5w_7s2d3fxmtkx02knvhm0000gn/T/tmp.I7E5V6zbVI/Containerfile . --progress=plain
STEP 1/11: FROM registry.access.redhat.com/ubi9
STEP 2/11: WORKDIR /app
--> Using cache d73dafd4caddd75bc29242a9031258fea759dc571c5bb53a64b5e6d86b3b1335
--> d73dafd4cadd
STEP 3/11: RUN dnf -y update && dnf install -y iputils net-tools wget vim-minimal python3.11 python3.11-pip python3.11-wheel python3.11-setuptools && ln -s /bin/pip3.11 /bin/pip && ln -s /bin/python3.11 /bin/python && dnf clean all
--> Using cache b74ad682db149771612a3ea1e4796e0760ab8a4e07c26ad672b46a86d38178c2
--> b74ad682db14
STEP 4/11: ENV UV_SYSTEM_PYTHON=1
--> Using cache 0812a05e6576506aa2fe646cbf239d0cb504cac30a50cb5cf4dc88e49039466d
--> 0812a05e6576
STEP 5/11: RUN pip install uv
--> Using cache a0ce1705f87e52f70f6eb34e66f67b68ebc7c1a073f4d2a664b189cfa89a4e88
--> a0ce1705f87e
STEP 6/11: RUN uv pip install --no-cache ollama nltk opentelemetry-sdk aiosqlite matplotlib datasets sqlite-vec scipy chromadb-client psycopg2-binary numpy scikit-learn openai redis pandas tqdm blobfile sentencepiece aiohttp requests pillow pymongo transformers autoevals opentelemetry-exporter-otlp-proto-http pypdf chardet aiosqlite fastapi fire httpx uvicorn
Using Python 3.11.9 environment at: /usr
Resolved 107 packages in 1.78s
Downloading kiwisolver (1.4MiB)
Downloading aiohttp (1.6MiB)
Downloading grpcio (5.4MiB)
Downloading nltk (1.4MiB)
Downloading transformers (9.5MiB)
Downloading pydantic-core (1.7MiB)
Downloading lxml (4.6MiB)
Downloading psycopg2-binary (2.7MiB)
Downloading scipy (33.8MiB)
Downloading scikit-learn (12.0MiB)
Downloading tokenizers (2.8MiB)
Downloading fonttools (4.6MiB)
Downloading pymongo (1.3MiB)
Downloading rapidfuzz (1.4MiB)
Downloading sentencepiece (1.2MiB)
Downloading pyarrow (38.7MiB)
Downloading matplotlib (8.1MiB)
Downloading pycryptodomex (2.1MiB)
Downloading pillow (4.2MiB)
Downloading pandas (14.9MiB)
Downloading numpy (13.6MiB)
Building fire==0.7.0
Downloaded sentencepiece
Downloaded kiwisolver
Downloaded pymongo
Downloaded rapidfuzz
Downloaded nltk
Downloaded aiohttp
Built fire==0.7.0
Downloaded pydantic-core
Downloaded pycryptodomex
Downloaded psycopg2-binary
Downloaded tokenizers
Downloaded pillow
Downloaded lxml
Downloaded fonttools
Downloaded grpcio
Downloaded matplotlib
Downloaded transformers
Downloaded scikit-learn
Downloaded numpy
Downloaded pandas
Downloaded scipy
Downloaded pyarrow
Prepared 107 packages in 3.03s
Installed 107 packages in 62ms
+ aiohappyeyeballs==2.4.6
+ aiohttp==3.11.13
+ aiosignal==1.3.2
+ aiosqlite==0.21.0
+ annotated-types==0.7.0
+ anyio==4.8.0
+ attrs==25.1.0
+ autoevals==0.0.120
+ backoff==2.2.1
+ blobfile==3.0.0
+ braintrust-core==0.0.58
+ certifi==2025.1.31
+ chardet==5.2.0
+ charset-normalizer==3.4.1
+ chevron==0.14.0
+ chromadb-client==0.6.3
+ click==8.1.8
+ contourpy==1.3.1
+ cycler==0.12.1
+ datasets==3.3.2
+ deprecated==1.2.18
+ dill==0.3.8
+ distro==1.9.0
+ dnspython==2.7.0
+ fastapi==0.115.8
+ filelock==3.17.0
+ fire==0.7.0
+ fonttools==4.56.0
+ frozenlist==1.5.0
+ fsspec==2024.12.0
+ googleapis-common-protos==1.68.0
+ grpcio==1.70.0
+ h11==0.14.0
+ httpcore==1.0.7
+ httpx==0.28.1
+ huggingface-hub==0.29.1
+ idna==3.10
+ importlib-metadata==8.5.0
+ jiter==0.8.2
+ joblib==1.4.2
+ jsonschema==4.23.0
+ jsonschema-specifications==2024.10.1
+ kiwisolver==1.4.8
+ levenshtein==0.26.1
+ lxml==5.3.1
+ matplotlib==3.10.0
+ monotonic==1.6
+ multidict==6.1.0
+ multiprocess==0.70.16
+ nltk==3.9.1
+ numpy==1.26.4
+ ollama==0.4.7
+ openai==1.64.0
+ opentelemetry-api==1.30.0
+ opentelemetry-exporter-otlp-proto-common==1.30.0
+ opentelemetry-exporter-otlp-proto-grpc==1.30.0
+ opentelemetry-exporter-otlp-proto-http==1.30.0
+ opentelemetry-proto==1.30.0
+ opentelemetry-sdk==1.30.0
+ opentelemetry-semantic-conventions==0.51b0
+ orjson==3.10.15
+ overrides==7.7.0
+ packaging==24.2
+ pandas==2.2.3
+ pillow==11.1.0
+ posthog==3.16.0
+ propcache==0.3.0
+ protobuf==5.29.3
+ psycopg2-binary==2.9.10
+ pyarrow==19.0.1
+ pycryptodomex==3.21.0
+ pydantic==2.10.6
+ pydantic-core==2.27.2
+ pymongo==4.11.1
+ pyparsing==3.2.1
+ pypdf==5.3.0
+ python-dateutil==2.9.0.post0
+ pytz==2025.1
+ pyyaml==6.0.2
+ rapidfuzz==3.12.1
+ redis==5.2.1
+ referencing==0.36.2
+ regex==2024.11.6
+ requests==2.32.3
+ rpds-py==0.23.1
+ safetensors==0.5.3
+ scikit-learn==1.6.1
+ scipy==1.15.2
+ sentencepiece==0.2.0
+ six==1.17.0
+ sniffio==1.3.1
+ sqlite-vec==0.1.6
+ starlette==0.45.3
+ tenacity==9.0.0
+ termcolor==2.5.0
+ threadpoolctl==3.5.0
+ tokenizers==0.21.0
+ tqdm==4.67.1
+ transformers==4.49.0
+ typing-extensions==4.12.2
+ tzdata==2025.1
+ urllib3==2.3.0
+ uvicorn==0.34.0
+ wrapt==1.17.2
+ xxhash==3.5.0
+ yarl==1.18.3
+ zipp==3.21.0
--> 5b5b823605a1
STEP 7/11: RUN uv pip install --no-cache llama-stack
Using Python 3.11.9 environment at: /usr
Resolved 55 packages in 1.08s
Downloading setuptools (1.2MiB)
Downloading pygments (1.2MiB)
Downloading llama-models (1.5MiB)
Downloading tiktoken (1.1MiB)
Downloaded tiktoken
Downloaded llama-models
Downloaded pygments
Downloaded setuptools
Prepared 15 packages in 402ms
Installed 15 packages in 15ms
+ jinja2==3.1.5
+ llama-models==0.1.4
+ llama-stack==0.1.4
+ llama-stack-client==0.1.4
+ markdown-it-py==3.0.0
+ markupsafe==3.0.2
+ mdurl==0.1.2
+ prompt-toolkit==3.0.50
+ pyaml==25.1.0
+ pygments==2.19.1
+ python-dotenv==1.0.1
+ rich==13.9.4
+ setuptools==75.8.2
+ tiktoken==0.9.0
+ wcwidth==0.2.13
--> 38a037443807
STEP 8/11: RUN pip uninstall -y uv
Found existing installation: uv 0.6.3
Uninstalling uv-0.6.3:
Successfully uninstalled uv-0.6.3
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
--> 54f749dc5ece
STEP 9/11: ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--template", "ollama"]
--> 481c138b1982
STEP 10/11: RUN mkdir -p /.llama /.cache
--> 0fc174f014a8
STEP 11/11: RUN chmod -R g+rw /app /.llama /.cache
COMMIT distribution-ollama:0.1.4
--> d41b4ab4b136
Successfully tagged localhost/distribution-ollama:0.1.4
d41b4ab4b1363bfbaf6239e6f313bcb37873ef4b5f2fd816a4ee55acf2ac54d3
+ set +x
Success!
Build Successful!
```
UBI9 container successfully builds.
Run the container:
```
podman run d41b4ab4b1363bfbaf6239e6f313bcb37873ef4b5f2fd816a4ee55acf2ac54d3 --env INFERENCE_MODEL=meta-llama/Llama-3.2-3B-Instruct
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:213: Resolved 30 providers
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inner-inference => ollama
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: models => __routing_table__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inference => __autorouted__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inner-vector_io => sqlite-vec
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inner-safety => llama-guard
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: shields => __routing_table__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: safety => __autorouted__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: vector_dbs => __routing_table__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: vector_io => __autorouted__
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inner-tool_runtime => brave-search
INFO 2025-02-27 13:08:03,666 llama_stack.distribution.resolver:215: inner-tool_runtime => tavily-search
```
[//]: # (## Documentation)
---------
Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
parent
18ab1985da
commit
c91548fe07
2 changed files with 40 additions and 28 deletions
|
|
@@ -15,7 +15,6 @@ from termcolor import cprint
|
||||||
|
|
||||||
from llama_stack.distribution.datatypes import BuildConfig, Provider
|
from llama_stack.distribution.datatypes import BuildConfig, Provider
|
||||||
from llama_stack.distribution.distribution import get_provider_registry
|
from llama_stack.distribution.distribution import get_provider_registry
|
||||||
from llama_stack.distribution.utils.config_dirs import BUILDS_BASE_DIR
|
|
||||||
from llama_stack.distribution.utils.exec import run_command, run_with_pty
|
from llama_stack.distribution.utils.exec import run_command, run_with_pty
|
||||||
from llama_stack.distribution.utils.image_types import ImageType
|
from llama_stack.distribution.utils.image_types import ImageType
|
||||||
from llama_stack.providers.datatypes import Api
|
from llama_stack.providers.datatypes import Api
|
||||||
|
|
@@ -103,8 +102,6 @@ def build_image(
|
||||||
template_or_config,
|
template_or_config,
|
||||||
image_name,
|
image_name,
|
||||||
container_base,
|
container_base,
|
||||||
str(build_file_path),
|
|
||||||
str(BUILDS_BASE_DIR / ImageType.container.value),
|
|
||||||
" ".join(normal_deps),
|
" ".join(normal_deps),
|
||||||
]
|
]
|
||||||
elif build_config.image_type == ImageType.conda.value:
|
elif build_config.image_type == ImageType.conda.value:
|
||||||
|
|
|
||||||
|
|
@@ -1,4 +1,4 @@
|
||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
|
|
@@ -20,26 +20,27 @@ UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500}
|
||||||
# mounting is not supported by docker buildx, so we use COPY instead
|
# mounting is not supported by docker buildx, so we use COPY instead
|
||||||
USE_COPY_NOT_MOUNT=${USE_COPY_NOT_MOUNT:-}
|
USE_COPY_NOT_MOUNT=${USE_COPY_NOT_MOUNT:-}
|
||||||
|
|
||||||
if [ "$#" -lt 6 ]; then
|
if [ "$#" -lt 4 ]; then
|
||||||
# This only works for templates
|
# This only works for templates
|
||||||
echo "Usage: $0 <template_or_config> <image_name> <container_base> <build_file_path> <host_build_dir> <pip_dependencies> [<special_pip_deps>]" >&2
|
echo "Usage: $0 <template_or_config> <image_name> <container_base> <pip_dependencies> [<special_pip_deps>]" >&2
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
template_or_config="$1"
|
template_or_config="$1"
|
||||||
image_name="$2"
|
shift
|
||||||
container_base="$3"
|
image_name="$1"
|
||||||
build_file_path="$4"
|
shift
|
||||||
host_build_dir="$5"
|
container_base="$1"
|
||||||
pip_dependencies="$6"
|
shift
|
||||||
special_pip_deps="${7:-}"
|
pip_dependencies="$1"
|
||||||
|
shift
|
||||||
|
special_pip_deps="${1:-}"
|
||||||
|
|
||||||
|
|
||||||
# Define color codes
|
# Define color codes
|
||||||
RED='\033[0;31m'
|
RED='\033[0;31m'
|
||||||
GREEN='\033[0;32m'
|
|
||||||
NC='\033[0m' # No Color
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
CONTAINER_BINARY=${CONTAINER_BINARY:-docker}
|
CONTAINER_BINARY=${CONTAINER_BINARY:-docker}
|
||||||
|
|
@@ -48,7 +49,6 @@ CONTAINER_OPTS=${CONTAINER_OPTS:-}
|
||||||
TEMP_DIR=$(mktemp -d)
|
TEMP_DIR=$(mktemp -d)
|
||||||
|
|
||||||
add_to_container() {
|
add_to_container() {
|
||||||
local input
|
|
||||||
output_file="$TEMP_DIR/Containerfile"
|
output_file="$TEMP_DIR/Containerfile"
|
||||||
if [ -t 0 ]; then
|
if [ -t 0 ]; then
|
||||||
printf '%s\n' "$1" >>"$output_file"
|
printf '%s\n' "$1" >>"$output_file"
|
||||||
|
|
@@ -64,9 +64,9 @@ if [[ $container_base == *"registry.access.redhat.com/ubi9"* ]]; then
|
||||||
FROM $container_base
|
FROM $container_base
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
RUN microdnf -y update && microdnf install -y iputils net-tools wget \
|
RUN dnf -y update && dnf install -y iputils net-tools wget \
|
||||||
vim-minimal python3.11 python3.11-pip python3.11-wheel \
|
vim-minimal python3.11 python3.11-pip python3.11-wheel \
|
||||||
python3.11-setuptools && ln -s /bin/pip3.11 /bin/pip && ln -s /bin/python3.11 /bin/python && microdnf clean all
|
python3.11-setuptools && ln -s /bin/pip3.11 /bin/pip && ln -s /bin/python3.11 /bin/python && dnf clean all
|
||||||
|
|
||||||
ENV UV_SYSTEM_PYTHON=1
|
ENV UV_SYSTEM_PYTHON=1
|
||||||
RUN pip install uv
|
RUN pip install uv
|
||||||
|
|
@@ -165,6 +165,11 @@ EOF
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# remove uv after installation
|
||||||
|
add_to_container << EOF
|
||||||
|
RUN pip uninstall -y uv
|
||||||
|
EOF
|
||||||
|
|
||||||
# if template_or_config ends with .yaml, it is not a template and we should not use the --template flag
|
# if template_or_config ends with .yaml, it is not a template and we should not use the --template flag
|
||||||
if [[ "$template_or_config" != *.yaml ]]; then
|
if [[ "$template_or_config" != *.yaml ]]; then
|
||||||
add_to_container << EOF
|
add_to_container << EOF
|
||||||
|
|
@@ -185,26 +190,31 @@ RUN mkdir -p /.llama /.cache
|
||||||
RUN chmod -R g+rw /app /.llama /.cache
|
RUN chmod -R g+rw /app /.llama /.cache
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
printf "Containerfile created successfully in $TEMP_DIR/Containerfile\n\n"
|
printf "Containerfile created successfully in %s/Containerfile\n\n" "$TEMP_DIR"
|
||||||
cat $TEMP_DIR/Containerfile
|
cat "$TEMP_DIR"/Containerfile
|
||||||
printf "\n"
|
printf "\n"
|
||||||
|
|
||||||
mounts=""
|
# Start building the CLI arguments
|
||||||
|
CLI_ARGS=()
|
||||||
|
|
||||||
|
# Read CONTAINER_OPTS and put it in an array
|
||||||
|
read -ra CLI_ARGS <<< "$CONTAINER_OPTS"
|
||||||
|
|
||||||
if [ "$USE_COPY_NOT_MOUNT" != "true" ]; then
|
if [ "$USE_COPY_NOT_MOUNT" != "true" ]; then
|
||||||
if [ -n "$LLAMA_STACK_DIR" ]; then
|
if [ -n "$LLAMA_STACK_DIR" ]; then
|
||||||
mounts="$mounts -v $(readlink -f $LLAMA_STACK_DIR):$stack_mount"
|
CLI_ARGS+=("-v" "$(readlink -f "$LLAMA_STACK_DIR"):$stack_mount")
|
||||||
fi
|
fi
|
||||||
if [ -n "$LLAMA_MODELS_DIR" ]; then
|
if [ -n "$LLAMA_MODELS_DIR" ]; then
|
||||||
mounts="$mounts -v $(readlink -f $LLAMA_MODELS_DIR):$models_mount"
|
CLI_ARGS+=("-v" "$(readlink -f "$LLAMA_MODELS_DIR"):$models_mount")
|
||||||
fi
|
fi
|
||||||
if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
|
if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
|
||||||
mounts="$mounts -v $(readlink -f $LLAMA_STACK_CLIENT_DIR):$client_mount"
|
CLI_ARGS+=("-v" "$(readlink -f "$LLAMA_STACK_CLIENT_DIR"):$client_mount")
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if command -v selinuxenabled &>/dev/null && selinuxenabled; then
|
if command -v selinuxenabled &>/dev/null && selinuxenabled; then
|
||||||
# Disable SELinux labels -- we don't want to relabel the llama-stack source dir
|
# Disable SELinux labels -- we don't want to relabel the llama-stack source dir
|
||||||
CONTAINER_OPTS="$CONTAINER_OPTS --security-opt label=disable"
|
CLI_ARGS+=("--security-opt" "label=disable")
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Set version tag based on PyPI version
|
# Set version tag based on PyPI version
|
||||||
|
|
@@ -225,11 +235,11 @@ image_tag="$image_name:$version_tag"
|
||||||
# Detect platform architecture
|
# Detect platform architecture
|
||||||
ARCH=$(uname -m)
|
ARCH=$(uname -m)
|
||||||
if [ -n "$BUILD_PLATFORM" ]; then
|
if [ -n "$BUILD_PLATFORM" ]; then
|
||||||
PLATFORM="--platform $BUILD_PLATFORM"
|
CLI_ARGS+=("--platform $BUILD_PLATFORM")
|
||||||
elif [ "$ARCH" = "arm64" ] || [ "$ARCH" = "aarch64" ]; then
|
elif [ "$ARCH" = "arm64" ] || [ "$ARCH" = "aarch64" ]; then
|
||||||
PLATFORM="--platform linux/arm64"
|
CLI_ARGS+=("--platform" "linux/arm64")
|
||||||
elif [ "$ARCH" = "x86_64" ]; then
|
elif [ "$ARCH" = "x86_64" ]; then
|
||||||
PLATFORM="--platform linux/amd64"
|
CLI_ARGS+=("--platform" "linux/amd64")
|
||||||
else
|
else
|
||||||
echo "Unsupported architecture: $ARCH"
|
echo "Unsupported architecture: $ARCH"
|
||||||
exit 1
|
exit 1
|
||||||
|
|
@@ -238,8 +248,13 @@ fi
|
||||||
echo "PWD: $(pwd)"
|
echo "PWD: $(pwd)"
|
||||||
echo "Containerfile: $TEMP_DIR/Containerfile"
|
echo "Containerfile: $TEMP_DIR/Containerfile"
|
||||||
set -x
|
set -x
|
||||||
$CONTAINER_BINARY build $CONTAINER_OPTS $PLATFORM -t $image_tag \
|
|
||||||
-f "$TEMP_DIR/Containerfile" "." $mounts --progress=plain
|
$CONTAINER_BINARY build \
|
||||||
|
"${CLI_ARGS[@]}" \
|
||||||
|
-t "$image_tag" \
|
||||||
|
-f "$TEMP_DIR/Containerfile" \
|
||||||
|
"." \
|
||||||
|
--progress=plain
|
||||||
|
|
||||||
# clean up tmp/configs
|
# clean up tmp/configs
|
||||||
set +x
|
set +x
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue