mirror of https://github.com/meta-llama/llama-stack.git
added platform for docker image
commit 0c75f2cf31 (parent c4d5d6bb91)

4 changed files with 24 additions and 8 deletions
@@ -77,6 +77,11 @@ class StackBuild(Subcommand):
             help="Image Type to use for the build. This can be either conda or docker. If not specified, will use the image type from the template config.",
             choices=["conda", "docker"],
         )
+        self.parser.add_argument(
+            "--platform",
+            type=str,
+            help="Platform to use for the build. Required when using docker as image type, defaults to host if no platform is specified",
+        )

     def _get_build_config_from_name(self, args: argparse.Namespace) -> Optional[Path]:
         if os.getenv("CONDA_PREFIX", ""):
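The new flag is optional and declared without a default, so parsing a command line that omits it leaves args.platform as None; the guarded assignments in the hunks below rely on exactly that. A minimal standalone sketch (not part of this commit) illustrating the behaviour:

    # Standalone sketch: mirrors the new --platform flag to show that omitting it
    # leaves args.platform as None, which is why downstream code guards with
    # `if args.platform:` before overriding the BuildConfig default.
    import argparse

    parser = argparse.ArgumentParser(prog="llama stack build")
    parser.add_argument("--image-type", choices=["conda", "docker"])
    parser.add_argument("--platform", type=str)

    args = parser.parse_args(["--image-type", "docker"])
    print(args.platform)  # None -> the BuildConfig default (see the datatypes change below) is kept

    args = parser.parse_args(["--image-type", "docker", "--platform", "linux/amd64"])
    print(args.platform)  # "linux/amd64" -> overrides build_config.platform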
@@ -205,6 +210,8 @@ class StackBuild(Subcommand):
                 build_config.name = args.name
                 if args.image_type:
                     build_config.image_type = args.image_type
+                if args.platform:  # Add platform to build_config if provided
+                    build_config.platform = args.platform
                 self._run_stack_build_command_from_build_config(build_config)

             return
@@ -287,6 +294,8 @@ class StackBuild(Subcommand):
         build_config = BuildConfig(
             name=name, image_type=image_type, distribution_spec=distribution_spec
         )
+        if args.platform:  # Add platform to build_config if provided
+            build_config.platform = args.platform
         self._run_stack_build_command_from_build_config(build_config)
         return

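Both call sites only touch build_config.platform when the flag was actually passed, so a plain build keeps the model's default while an explicit flag overrides it. A hypothetical invocation (assuming the subcommand's pre-existing --config option; the path is a placeholder) would be: llama stack build --config ./my-build.yaml --image-type docker --platform linux/amd64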
@@ -35,7 +35,6 @@ class ImageType(Enum):
     docker = "docker"
     conda = "conda"

-
 class Dependencies(BaseModel):
     pip_packages: List[str]
     docker_image: Optional[str] = None
@@ -91,10 +90,12 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
         script = pkg_resources.resource_filename(
             "llama_stack", "distribution/build_container.sh"
         )
+
         args = [
             script,
             build_config.name,
             package_deps.docker_image,
+            build_config.platform,
             str(build_file_path),
             str(BUILDS_BASE_DIR / ImageType.docker.value),
             " ".join(deps),
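For orientation, this is roughly the argv the docker branch now hands to build_container.sh; the entries line up with the script's positional parameters $1-$6 further down. All values here are hypothetical:

    # Illustrative only: hypothetical values showing how the args list above maps
    # onto build_container.sh's positional parameters.
    args = [
        "/usr/lib/python3.10/site-packages/llama_stack/distribution/build_container.sh",
        "my-build",                          # $1  build_name
        "python:3.10-slim",                  # $2  docker_base (package_deps.docker_image)
        "linux/arm64",                       # $3  platform (new in this commit)
        "/tmp/my-build-build.yaml",          # $4  build_file_path
        "/home/user/.llama/builds/docker",   # $5  host_build_dir
        "fastapi uvicorn",                   # $6  pip_dependencies
    ]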
@@ -107,8 +108,9 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
             script,
             build_config.name,
             str(build_file_path),
-            " ".join(deps),
+            " ".join(deps)
         ]
+
         if special_deps:
             args.append("#".join(special_deps))

@@ -5,7 +5,7 @@ LLAMA_STACK_DIR=${LLAMA_STACK_DIR:-}
 TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}

 if [ "$#" -lt 4 ]; then
-  echo "Usage: $0 <build_name> <docker_base> <pip_dependencies> [<special_pip_deps>]" >&2
+  echo "Usage: $0 <build_name> <docker_base> <platform> <pip_dependencies> [<special_pip_deps>]" >&2
   echo "Example: $0 my-fastapi-app python:3.9-slim 'fastapi uvicorn' " >&2
   exit 1
 fi
@@ -17,9 +17,10 @@ set -euo pipefail
 build_name="$1"
 image_name="llamastack-$build_name"
 docker_base=$2
-build_file_path=$3
-host_build_dir=$4
-pip_dependencies=$5
+platform=$3
+build_file_path=$4
+host_build_dir=$5
+pip_dependencies=$6

 # Define color codes
 RED='\033[0;31m'
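Read together with the updated usage message above, a direct invocation of the script now takes six positional arguments, for example (all values hypothetical):

    build_container.sh my-build python:3.10-slim linux/arm64 ./my-build-build.yaml ~/.llama/builds/docker 'fastapi uvicorn'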
@@ -133,7 +134,7 @@ if command -v selinuxenabled &> /dev/null && selinuxenabled; then
 fi

 set -x
-$DOCKER_BINARY build $DOCKER_OPTS -t $image_name -f "$TEMP_DIR/Dockerfile" "$REPO_DIR" $mounts
+$DOCKER_BINARY build --platform "$platform" $DOCKER_OPTS -t $image_name -f "$TEMP_DIR/Dockerfile" "$REPO_DIR" $mounts

 # clean up tmp/configs
 rm -rf $REPO_CONFIGS_DIR
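With the variables above, the expanded command ends up looking roughly like docker build --platform linux/arm64 -t llamastack-my-build -f <tempdir>/Dockerfile <repo-dir> (hypothetical values; image_name comes from the llamastack-$build_name assignment earlier). Note that docker's --platform flag only selects the target os/arch; building for a platform other than the host's additionally requires a builder with emulation available (e.g. BuildKit with QEMU/binfmt).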
@@ -21,7 +21,7 @@ from llama_stack.apis.safety import Safety

 LLAMA_STACK_BUILD_CONFIG_VERSION = "2"
 LLAMA_STACK_RUN_CONFIG_VERSION = "2"
-
+LLAMA_STACK_DEFAULT_PLATFORM = "linux/arm64"

 RoutingKey = Union[str, List[str]]

@@ -130,6 +130,10 @@ can be instantiated multiple times (with different configs) if necessary.
 class BuildConfig(BaseModel):
     version: str = LLAMA_STACK_BUILD_CONFIG_VERSION
     name: str
+    platform: str = Field(
+        default=LLAMA_STACK_DEFAULT_PLATFORM,
+        description="The platform to build for (cpu | gpu)",
+    )
     distribution_spec: DistributionSpec = Field(
         description="The distribution spec to build including API providers. "
     )
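Tying the pieces together: the pydantic default supplies linux/arm64 whenever the CLI flag is omitted, and an explicit --platform overrides it. Two things worth noting from the diff itself: the field's description says "(cpu | gpu)" even though both the default and docker's --platform expect an os/arch string, and the CLI help's "defaults to host" is not implemented anywhere, since the fallback is always linux/arm64. A self-contained sketch of the default/override flow (not from the commit; the host_default_platform helper is hypothetical):

    # Self-contained sketch of the platform default/override flow; not part of the commit.
    import platform as host_platform
    from typing import Optional

    from pydantic import BaseModel, Field

    LLAMA_STACK_DEFAULT_PLATFORM = "linux/arm64"


    class BuildConfig(BaseModel):
        name: str
        platform: str = Field(default=LLAMA_STACK_DEFAULT_PLATFORM)


    def apply_cli_platform(cfg: BuildConfig, cli_platform: Optional[str]) -> BuildConfig:
        # Mirrors the `if args.platform:` guard in the CLI hunks above.
        if cli_platform:
            cfg.platform = cli_platform
        return cfg


    print(apply_cli_platform(BuildConfig(name="demo"), None).platform)           # linux/arm64
    print(apply_cli_platform(BuildConfig(name="demo"), "linux/amd64").platform)  # linux/amd64


    # Hypothetical helper showing what "defaults to host" could look like.
    def host_default_platform() -> str:
        arch = host_platform.machine().lower()
        return "linux/arm64" if arch in ("arm64", "aarch64") else "linux/amd64"


    print(host_default_platform())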