#!/bin/bash

LLAMA_MODELS_DIR=${LLAMA_MODELS_DIR:-}
LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}

if [ "$#" -lt 4 ]; then
  echo "Usage: $0 <build_name> <docker_base> <build_file_path> <pip_dependencies> [<special_pip_deps>]" >&2
  echo "Example: $0 my-fastapi-app python:3.9-slim 'fastapi uvicorn'" >&2
  exit 1
fi

special_pip_deps="$5"

set -euo pipefail

build_name="$1"
image_name="llamastack-$build_name"
docker_base=$2
build_file_path=$3
pip_dependencies=$4

# Define color codes
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
REPO_DIR=$(dirname "$(dirname "$SCRIPT_DIR")")
DOCKER_BINARY=${DOCKER_BINARY:-docker}
DOCKER_OPTS=${DOCKER_OPTS:-}

TEMP_DIR=$(mktemp -d)

# Append a single line (argument) or a heredoc (stdin) to the generated Dockerfile.
add_to_docker() {
  local output_file="$TEMP_DIR/Dockerfile"
  if [ -t 0 ]; then
    printf '%s\n' "$1" >>"$output_file"
  else
    # If stdin is not a terminal, read from it (heredoc)
    cat >>"$output_file"
  fi
}

# Dockerfile preamble: base image and working directory.
# (WORKDIR is an assumption, chosen to match the mount points defined below.)
add_to_docker <<EOF
FROM $docker_base
WORKDIR /app

EOF

# In-container mount points for locally checked-out sources.
stack_mount="/app/llama-stack-source"
models_mount="/app/llama-models-source"

if [ -n "$LLAMA_STACK_DIR" ]; then
  if [ ! -d "$LLAMA_STACK_DIR" ]; then
    echo "${RED}Warning: LLAMA_STACK_DIR is set but directory does not exist: $LLAMA_STACK_DIR${NC}" >&2
    exit 1
  fi

  # Install in editable format. We will mount the source code into the container
  # so that changes will be reflected in the container without having to do a
  # rebuild. This is just for development convenience.
  add_to_docker "RUN pip install -e $stack_mount"
else
  add_to_docker "RUN pip install llama-stack"
fi

if [ -n "$LLAMA_MODELS_DIR" ]; then
  if [ ! -d "$LLAMA_MODELS_DIR" ]; then
    echo "${RED}Warning: LLAMA_MODELS_DIR is set but directory does not exist: $LLAMA_MODELS_DIR${NC}" >&2
    exit 1
  fi

  # Assumed to mirror the editable llama-stack install above.
  add_to_docker <<EOF
RUN pip install -e $models_mount

EOF
fi
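# The remainder of the script below is a minimal sketch, not the original
# implementation: it assumes the requested pip dependencies are appended to the
# generated Dockerfile, and that $TEMP_DIR/Dockerfile is then built into
# $image_name with $DOCKER_BINARY, using $REPO_DIR as the build context. The
# original handling of $build_file_path, TEST_PYPI_VERSION, and run-time source
# mounts is not reconstructed here.
if [ -n "$pip_dependencies" ]; then
  add_to_docker "RUN pip install $pip_dependencies"
fi
if [ -n "$special_pip_deps" ]; then
  add_to_docker "RUN pip install $special_pip_deps"
fi

set -x
$DOCKER_BINARY build $DOCKER_OPTS -t "$image_name" -f "$TEMP_DIR/Dockerfile" "$REPO_DIR"
set +x

echo -e "${GREEN}Succeeded! Image tagged as $image_name${NC}"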