Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-11 20:40:40 +00:00)

chore: improve build script

Signed-off-by: reidliu <reid201711@gmail.com>

Parent: 6033e6893e
Commit: 7a2536c138

5 changed files with 43 additions and 76 deletions
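The hunks below are shown without their file headers; from their contents they appear to cover the conda, container, and venv build scripts, the common.sh they all source, and a run script. The gist of the change: shared defaults (LLAMA_STACK_DIR, LLAMA_STACK_CLIENT_DIR, TEST_PYPI_VERSION, UV_HTTP_TIMEOUT, CONTAINER_BINARY, CONTAINER_OPTS, PYPI_VERSION) plus the color codes are consolidated into common.sh, "set -euo pipefail" moves to the top of each script, and per-script variables are assigned only after the argument-count check. A minimal sketch of the resulting prologue, assuming a build script that sits next to common.sh (the usage string mirrors the venv script; the shebang and script layout are illustrative, not taken verbatim from this diff):

    #!/usr/bin/env bash
    # Sketch of the post-change prologue: fail fast, validate args, assign the
    # positional parameters, then pull the shared defaults from common.sh.
    set -euo pipefail

    if [ "$#" -lt 2 ]; then
      echo "Usage: $0 <env_name> <pip_dependencies> [<special_pip_deps>]" >&2
      exit 1
    fi

    env_name="$1"
    pip_dependencies="$2"
    special_pip_deps="${3:-}"

    # common.sh now owns LLAMA_STACK_DIR, UV_HTTP_TIMEOUT, color codes, etc.
    SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
    source "$SCRIPT_DIR/common.sh"

    if [ -n "$LLAMA_STACK_DIR" ]; then
      echo "Using llama-stack-dir=$LLAMA_STACK_DIR"
    fi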
@@ -6,19 +6,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
-LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-}
-TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
-# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out
-# Reference: https://github.com/astral-sh/uv/pull/1694
-UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500}
-
-if [ -n "$LLAMA_STACK_DIR" ]; then
-  echo "Using llama-stack-dir=$LLAMA_STACK_DIR"
-fi
-if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
-  echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR"
-fi
-
+set -euo pipefail
+
 if [ "$#" -lt 3 ]; then
   echo "Usage: $0 <distribution_type> <conda_env_name> <build_file_path> <pip_dependencies> [<special_pip_deps>]" >&2

@@ -26,18 +14,10 @@ if [ "$#" -lt 3 ]; then
   exit 1
 fi

-special_pip_deps="$4"
-
-set -euo pipefail
-
 env_name="$1"
 build_file_path="$2"
 pip_dependencies="$3"
+special_pip_deps="$4"

-# Define color codes
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-NC='\033[0m' # No Color
-
 # this is set if we actually create a new conda in which case we need to clean up
 ENVNAME=""

@@ -45,6 +25,13 @@ ENVNAME=""
 SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
 source "$SCRIPT_DIR/common.sh"

+if [ -n "$LLAMA_STACK_DIR" ]; then
+  echo "Using llama-stack-dir=$LLAMA_STACK_DIR"
+fi
+if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
+  echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR"
+fi
+
 ensure_conda_env_python310() {
   local env_name="$1"
   local pip_dependencies="$2"
@@ -6,18 +6,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
-LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-}
-TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
-PYPI_VERSION=${PYPI_VERSION:-}
-BUILD_PLATFORM=${BUILD_PLATFORM:-}
-
-# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out
-# Reference: https://github.com/astral-sh/uv/pull/1694
-UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500}
-
-# mounting is not supported by docker buildx, so we use COPY instead
-USE_COPY_NOT_MOUNT=${USE_COPY_NOT_MOUNT:-}
-
+set -euo pipefail
+
 if [ "$#" -lt 4 ]; then
   # This only works for templates

@@ -25,7 +14,10 @@ if [ "$#" -lt 4 ]; then
   exit 1
 fi

-set -euo pipefail
+BUILD_PLATFORM=${BUILD_PLATFORM:-}
+
+# mounting is not supported by docker buildx, so we use COPY instead
+USE_COPY_NOT_MOUNT=${USE_COPY_NOT_MOUNT:-}

 template_or_config="$1"
 shift

@@ -37,14 +29,6 @@ pip_dependencies="$1"
 shift
 special_pip_deps="${1:-}"
-

-# Define color codes
-RED='\033[0;31m'
-NC='\033[0m' # No Color
-
-CONTAINER_BINARY=${CONTAINER_BINARY:-docker}
-CONTAINER_OPTS=${CONTAINER_OPTS:-}
-
 TEMP_DIR=$(mktemp -d)

 SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
@@ -9,21 +9,7 @@
 # TODO: combine this with build_conda_env.sh since it is almost identical
 # the only difference is that we don't do any conda-specific setup

-LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
-LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-}
-TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
-# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out
-# Reference: https://github.com/astral-sh/uv/pull/1694
-UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500}
-UV_SYSTEM_PYTHON=${UV_SYSTEM_PYTHON:-}
-VIRTUAL_ENV=${VIRTUAL_ENV:-}
-
-if [ -n "$LLAMA_STACK_DIR" ]; then
-  echo "Using llama-stack-dir=$LLAMA_STACK_DIR"
-fi
-if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
-  echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR"
-fi
-
+set -euo pipefail
+
 if [ "$#" -lt 2 ]; then
   echo "Usage: $0 <env_name> <pip_dependencies> [<special_pip_deps>]" >&2

@@ -31,16 +17,12 @@ if [ "$#" -lt 2 ]; then
   exit 1
 fi

-special_pip_deps="$3"
-
-set -euo pipefail
+UV_SYSTEM_PYTHON=${UV_SYSTEM_PYTHON:-}
+VIRTUAL_ENV=${VIRTUAL_ENV:-}

 env_name="$1"
 pip_dependencies="$2"
+special_pip_deps="$3"

-# Define color codes
-RED='\033[0;31m'
-NC='\033[0m' # No Color
-
 # this is set if we actually create a new conda in which case we need to clean up
 ENVNAME=""

@@ -48,6 +30,13 @@ ENVNAME=""
 SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
 source "$SCRIPT_DIR/common.sh"

+if [ -n "$LLAMA_STACK_DIR" ]; then
+  echo "Using llama-stack-dir=$LLAMA_STACK_DIR"
+fi
+if [ -n "$LLAMA_STACK_CLIENT_DIR" ]; then
+  echo "Using llama-stack-client-dir=$LLAMA_STACK_CLIENT_DIR"
+fi
+
 # pre-run checks to make sure we can proceed with the installation
 pre_run_checks() {
   local env_name="$1"
@@ -4,6 +4,22 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

+# Common variables
+LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
+LLAMA_STACK_CLIENT_DIR=${LLAMA_STACK_CLIENT_DIR:-}
+TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
+# This timeout (in seconds) is necessary when installing PyTorch via uv since it's likely to time out
+# Reference: https://github.com/astral-sh/uv/pull/1694
+UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-500}
+CONTAINER_BINARY=${CONTAINER_BINARY:-docker}
+CONTAINER_OPTS=${CONTAINER_OPTS:-}
+PYPI_VERSION=${PYPI_VERSION:-}
+
+# Define color codes
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
 cleanup() {
   envname="$1"
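Because every entry consolidated into common.sh uses the ${VAR:-default} form, any shared default can still be overridden per invocation from the caller's environment. A hedged example of that usage; the script path, directory, and dependency list are illustrative and not taken from this diff:

    # Override shared defaults for one run; the sourced common.sh keeps the
    # inherited values because of its ${VAR:-default} expansions.
    LLAMA_STACK_DIR=~/src/llama-stack UV_HTTP_TIMEOUT=900 \
      ./build_venv.sh my-env "fastapi uvicorn"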
@@ -6,19 +6,8 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-
-CONTAINER_BINARY=${CONTAINER_BINARY:-docker}
-CONTAINER_OPTS=${CONTAINER_OPTS:-}
-LLAMA_CHECKPOINT_DIR=${LLAMA_CHECKPOINT_DIR:-}
-LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
-TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
-PYPI_VERSION=${PYPI_VERSION:-}
-
 set -euo pipefail

-RED='\033[0;31m'
-NC='\033[0m' # No Color
-
 error_handler() {
   echo "Error occurred in script at line: ${1}" >&2
   exit 1

@@ -31,6 +20,8 @@ if [ $# -lt 3 ]; then
   exit 1
 fi

+LLAMA_CHECKPOINT_DIR=${LLAMA_CHECKPOINT_DIR:-}
+
 env_type="$1"
 shift
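One note on the last hunk: error_handler takes a line number, which in scripts like these is normally supplied by an ERR trap. The trap itself is outside the changed lines, so the following is only an assumed sketch of how it is wired:

    # Assumed wiring (not part of this diff): on any failing command, report the
    # offending line number through error_handler and exit non-zero.
    trap 'error_handler ${LINENO}' ERR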