Simplify and generalize llama api build yay

Ashwin Bharambe 2024-08-30 14:51:40 -07:00
parent 297d51b183
commit f8517e4688
9 changed files with 103 additions and 151 deletions

View file

@@ -11,15 +11,15 @@ from llama_toolchain.cli.subcommand import Subcommand
from llama_toolchain.core.datatypes import * # noqa: F403
def parse_dependencies(
dependencies: str, parser: argparse.ArgumentParser
def parse_api_provider_tuples(
tuples: str, parser: argparse.ArgumentParser
) -> Dict[str, ProviderSpec]:
from llama_toolchain.core.distribution import api_providers
all_providers = api_providers()
deps = {}
for dep in dependencies.split(","):
for dep in tuples.split(","):
dep = dep.strip()
if not dep:
continue
@@ -48,29 +48,13 @@ class ApiBuild(Subcommand):
self.parser.set_defaults(func=self._run_api_build_command)
def _add_arguments(self):
from llama_toolchain.core.distribution import stack_apis
from llama_toolchain.core.package import (
BuildType,
)
allowed_args = [a.name for a in stack_apis()]
self.parser.add_argument(
"api",
choices=allowed_args,
help="Stack API (one of: {})".format(", ".join(allowed_args)),
)
self.parser.add_argument(
"--provider",
type=str,
help="The provider to package into the container",
required=True,
)
self.parser.add_argument(
"--dependencies",
type=str,
help="Comma separated list of (downstream_api=provider) dependencies needed for the API",
required=False,
"api_providers",
help="Comma separated list of (api=provider) tuples",
)
self.parser.add_argument(
"--name",
@@ -92,14 +76,23 @@ class ApiBuild(Subcommand):
build_package,
)
api_input = ApiInput(
api=Api(args.api),
provider=args.provider,
dependencies=parse_dependencies(args.dependencies or "", self.parser),
parsed = parse_api_provider_tuples(args.api_providers, self.parser)
api_inputs = []
for api, provider_spec in parsed.items():
for dep in provider_spec.api_dependencies:
if dep not in parsed:
self.parser.error(f"API {api} needs dependency {dep} provided also")
return
api_inputs.append(
ApiInput(
api=api,
provider=provider_spec.provider_id,
)
)
build_package(
[api_input],
api_inputs,
build_type=BuildType(args.type),
name=args.name,
)
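For context, the new positional api_providers argument takes a comma-separated list of api=provider pairs in place of the old --provider/--dependencies flags. A minimal standalone sketch of how such a string splits into pairs; the API and provider names below are purely illustrative, and the real lookup against api_providers() is elided by the hunk above:

# Illustrative only: splitting the "api=provider" tuple string that
# `llama api build` now accepts as its positional argument.
tuples = "inference=meta-reference,safety=meta-reference"  # example values
pairs = {}
for item in tuples.split(","):
    item = item.strip()
    if not item:
        continue
    api, _, provider_id = item.partition("=")
    pairs[api] = provider_id
print(pairs)  # {'inference': 'meta-reference', 'safety': 'meta-reference'}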

View file

@@ -31,36 +31,16 @@ class ApiConfigure(Subcommand):
self.parser.set_defaults(func=self._run_api_configure_cmd)
def _add_arguments(self):
from llama_toolchain.core.distribution import stack_apis
from llama_toolchain.core.package import BuildType
allowed_args = [a.name for a in stack_apis()]
self.parser.add_argument(
"api",
choices=allowed_args,
help="Stack API (one of: {})".format(", ".join(allowed_args)),
)
self.parser.add_argument(
"--build-name",
type=str,
help="(Fully qualified) name of the API build to configure. Alternatively, specify the --provider and --name options.",
required=False,
)
self.parser.add_argument(
"--provider",
type=str,
help="The provider chosen for the API",
required=False,
help="Name of the build",
required=True,
)
self.parser.add_argument(
"--name",
type=str,
help="Name of the build target (image, conda env)",
required=False,
)
self.parser.add_argument(
"--type",
"--build-type",
type=str,
default="conda_env",
choices=[v.value for v in BuildType],
@@ -69,15 +49,11 @@ class ApiConfigure(Subcommand):
def _run_api_configure_cmd(self, args: argparse.Namespace) -> None:
from llama_toolchain.core.package import BuildType
if args.build_name:
build_type = BuildType(args.build_type)
name = args.build_name
if name.endswith(".yaml"):
name = name.replace(".yaml", "")
else:
build_type = BuildType(args.type)
name = f"{build_type.descriptor()}-{args.provider}-{args.name}"
config_file = BUILDS_BASE_DIR / args.api / f"{name}.yaml"
config_file = (
BUILDS_BASE_DIR / "adhoc" / build_type.descriptor() / f"{name}.yaml"
)
if not config_file.exists():
self.parser.error(
f"Could not find {config_file}. Please run `llama api build` first"

View file

@@ -13,6 +13,7 @@ import yaml
from llama_toolchain.cli.subcommand import Subcommand
from llama_toolchain.core.datatypes import * # noqa: F403
from llama_toolchain.common.config_dirs import BUILDS_BASE_DIR
class ApiStart(Subcommand):
@@ -28,10 +29,18 @@ class ApiStart(Subcommand):
self.parser.set_defaults(func=self._run_api_start_cmd)
def _add_arguments(self):
from llama_toolchain.core.package import BuildType
self.parser.add_argument(
"yaml_config",
"build_name",
type=str,
help="Yaml config containing the API build configuration",
help="Name of the API build you want to start",
)
self.parser.add_argument(
"--build-type",
type=str,
default="conda_env",
choices=[v.value for v in BuildType],
)
self.parser.add_argument(
"--port",
@@ -48,8 +57,16 @@ class ApiStart(Subcommand):
def _run_api_start_cmd(self, args: argparse.Namespace) -> None:
from llama_toolchain.common.exec import run_with_pty
from llama_toolchain.core.package import BuildType
config_file = Path(args.yaml_config)
if args.build_name.endswith(".yaml"):
path = args.build_name
else:
build_type = BuildType(args.build_type)
build_dir = BUILDS_BASE_DIR / "adhoc" / build_type.descriptor()
path = build_dir / f"{args.build_name}.yaml"
config_file = Path(path)
if not config_file.exists():
self.parser.error(
f"Could not find {config_file}. Please run `llama api build` first"

View file

@@ -68,7 +68,6 @@ class StackBuild(Subcommand):
ApiInput(
api=api,
provider=provider_id,
dependencies={},
)
)

View file

@@ -20,13 +20,14 @@ fi
set -euo pipefail
if [ "$#" -ne 3 ]; then
echo "Usage: $0 <api_or_stack> <environment_name> <pip_dependencies>" >&2
echo "Example: $0 [api|stack] conda-env 'numpy pandas scipy'" >&2
echo "Usage: $0 <distribution_id> <build_name> <pip_dependencies>" >&2
echo "Example: $0 <distribution_id> mybuild 'numpy pandas scipy'" >&2
exit 1
fi
api_or_stack="$1"
env_name="$2"
distribution_id="$1"
build_name="$2"
env_name="llamastack-$build_name"
pip_dependencies="$3"
# Define color codes
@@ -53,19 +54,19 @@ ensure_conda_env_python310() {
# Check if the environment exists
if conda env list | grep -q "^${env_name} "; then
printf "Conda environment '${env_name}' exists. Checking Python version..."
printf "Conda environment '${env_name}' exists. Checking Python version...\n"
# Check Python version in the environment
current_version=$(conda run -n "${env_name}" python --version 2>&1 | cut -d' ' -f2 | cut -d'.' -f1,2)
if [ "$current_version" = "$python_version" ]; then
printf "Environment '${env_name}' already has Python ${python_version}. No action needed."
printf "Environment '${env_name}' already has Python ${python_version}. No action needed.\n"
else
printf "Updating environment '${env_name}' to Python ${python_version}..."
printf "Updating environment '${env_name}' to Python ${python_version}...\n"
conda install -n "${env_name}" python="${python_version}" -y
fi
else
printf "Conda environment '${env_name}' does not exist. Creating with Python ${python_version}..."
printf "Conda environment '${env_name}' does not exist. Creating with Python ${python_version}...\n"
conda create -n "${env_name}" python="${python_version}" -y
ENVNAME="${env_name}"
@@ -83,11 +84,11 @@ ensure_conda_env_python310() {
# Re-installing llama-toolchain in the new conda environment
if [ -n "$LLAMA_TOOLCHAIN_DIR" ]; then
if [ ! -d "$LLAMA_TOOLCHAIN_DIR" ]; then
printf "${RED}Warning: LLAMA_TOOLCHAIN_DIR is set but directory does not exist: $LLAMA_TOOLCHAIN_DIR${NC}" >&2
printf "${RED}Warning: LLAMA_TOOLCHAIN_DIR is set but directory does not exist: $LLAMA_TOOLCHAIN_DIR${NC}\n" >&2
exit 1
fi
printf "Installing from LLAMA_TOOLCHAIN_DIR: $LLAMA_TOOLCHAIN_DIR"
printf "Installing from LLAMA_TOOLCHAIN_DIR: $LLAMA_TOOLCHAIN_DIR\n"
pip install --no-cache-dir -e "$LLAMA_TOOLCHAIN_DIR"
else
pip install --no-cache-dir llama-toolchain
@@ -95,18 +96,18 @@ ensure_conda_env_python310() {
if [ -n "$LLAMA_MODELS_DIR" ]; then
if [ ! -d "$LLAMA_MODELS_DIR" ]; then
printf "${RED}Warning: LLAMA_MODELS_DIR is set but directory does not exist: $LLAMA_MODELS_DIR${NC}" >&2
printf "${RED}Warning: LLAMA_MODELS_DIR is set but directory does not exist: $LLAMA_MODELS_DIR${NC}\n" >&2
exit 1
fi
printf "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR"
printf "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR\n"
pip uninstall -y llama-models
pip install --no-cache-dir -e "$LLAMA_MODELS_DIR"
fi
# Install pip dependencies
if [ -n "$pip_dependencies" ]; then
printf "Installing pip dependencies: $pip_dependencies"
printf "Installing pip dependencies: $pip_dependencies\n"
pip install $pip_dependencies
fi
fi
@@ -114,14 +115,14 @@ ensure_conda_env_python310() {
ensure_conda_env_python310 "$env_name" "$pip_dependencies"
printf "${GREEN}Successfully setup conda environment. Configuring build...${NC}"
printf "${GREEN}Successfully setup conda environment. Configuring build...${NC}\n"
if [ "$api_or_stack" = "stack" ]; then
subcommand="stack"
if [ "$distribution_id" = "adhoc" ]; then
subcommand="api"
target=""
else
subcommand="api"
target="$api_or_stack"
subcommand="stack"
target="$distribution_id"
fi
$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --build-name "$env_name"
$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --build-name "$build_name" --build-type conda_env
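The script now finishes by invoking the CLI to configure the freshly built environment, choosing `api` for ad-hoc builds and `stack` for named distributions. A Python rendering of that dispatch, mirroring the shell logic above (the subcommand, flags, and module path come from the diff; the example values and use of sys.executable are assumptions):

# Python rendering of the configure dispatch at the end of the conda build script.
import sys

distribution_id = "adhoc"   # "adhoc" marks a single-API build; example value
build_name = "mybuild"      # example value

if distribution_id == "adhoc":
    subcommand, target = "api", []
else:
    subcommand, target = "stack", [distribution_id]

cmd = [
    sys.executable, "-m", "llama_toolchain.cli.llama",
    subcommand, "configure", *target,
    "--build-name", build_name,
    "--build-type", "conda_env",
]
print(" ".join(cmd))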

View file

@@ -4,15 +4,15 @@ LLAMA_MODELS_DIR=${LLAMA_MODELS_DIR:-}
LLAMA_TOOLCHAIN_DIR=${LLAMA_TOOLCHAIN_DIR:-}
TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
if [ "$#" -ne 4 ]; then
echo "Usage: $0 [api|stack] <image_name> <docker_base> <pip_dependencies>
echo "Example: $0 agentic_system my-fastapi-app python:3.9-slim 'fastapi uvicorn'
echo "Usage: $0 <distribution_id> <build_name> <docker_base> <pip_dependencies>
echo "Example: $0 distribution_id my-fastapi-app python:3.9-slim 'fastapi uvicorn'
exit 1
fi
api_or_stack=$1
image_name=$2
distribution_id=$1
build_name="$2"
image_name="llamastack-$build_name"
docker_base=$3
pip_dependencies=$4
@@ -39,7 +39,6 @@ add_to_docker() {
fi
}
add_to_docker <<EOF
FROM $docker_base
WORKDIR /app
@@ -110,12 +109,12 @@ set +x
printf "${GREEN}Succesfully setup Podman image. Configuring build...${NC}"
echo "You can run it with: podman run -p 8000:8000 $image_name"
if [ "$api_or_stack" = "stack" ]; then
subcommand="stack"
if [ "$distribution_id" = "adhoc" ]; then
subcommand="api"
target=""
else
subcommand="api"
target="$api_or_stack"
subcommand="stack"
target="$distribution_id"
fi
$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --build-name "$image_name"
$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --build-name "$build_name" --build-type container

View file

@@ -8,7 +8,7 @@ import json
import os
from datetime import datetime
from enum import Enum
from typing import Dict, List, Optional
from typing import List, Optional
import pkg_resources
import yaml
@@ -17,11 +17,11 @@ from pydantic import BaseModel
from termcolor import cprint
from llama_toolchain.common.config_dirs import BUILDS_BASE_DIR
from llama_toolchain.core.datatypes import * # noqa: F403
from llama_toolchain.common.exec import run_with_pty
from llama_toolchain.common.serialize import EnumEncoder
from llama_toolchain.core.distribution import api_providers
from llama_toolchain.core.datatypes import * # noqa: F403
from llama_toolchain.core.distribution import api_providers, SERVER_DEPENDENCIES
class BuildType(Enum):
@@ -29,7 +29,7 @@ class BuildType(Enum):
conda_env = "conda_env"
def descriptor(self) -> str:
return "image" if self == self.container else "env"
return "docker" if self == self.container else "conda"
class Dependencies(BaseModel):
@@ -37,29 +37,9 @@ class Dependencies(BaseModel):
docker_image: Optional[str] = None
def get_dependencies(
provider: ProviderSpec, dependencies: Dict[str, ProviderSpec]
) -> Dependencies:
from llama_toolchain.core.distribution import SERVER_DEPENDENCIES
pip_packages = provider.pip_packages
for dep in dependencies.values():
if dep.docker_image:
raise ValueError(
"You can only have the root provider specify a docker image"
)
pip_packages.extend(dep.pip_packages)
return Dependencies(
docker_image=provider.docker_image,
pip_packages=pip_packages + SERVER_DEPENDENCIES,
)
class ApiInput(BaseModel):
api: Api
provider: str
dependencies: Dict[str, ProviderSpec]
def build_package(
@@ -69,31 +49,22 @@ def build_package(
distribution_id: Optional[str] = None,
docker_image: Optional[str] = None,
):
is_stack = len(api_inputs) > 1
if is_stack:
if not distribution_id:
raise ValueError(
"You must specify a distribution name when building the Llama Stack"
)
distribution_id = "adhoc"
api1 = api_inputs[0]
provider = distribution_id if is_stack else api1.provider
api_or_stack = "stack" if is_stack else api1.api.value
build_dir = BUILDS_BASE_DIR / api_or_stack
build_dir = BUILDS_BASE_DIR / distribution_id / build_type.descriptor()
os.makedirs(build_dir, exist_ok=True)
package_name = f"{build_type.descriptor()}-{provider}-{name}"
package_name = package_name.replace("::", "-")
package_name = name.replace("::", "-")
package_file = build_dir / f"{package_name}.yaml"
all_providers = api_providers()
package_deps = Dependencies(
docker_image=docker_image or "python:3.10-slim",
pip_packages=[],
pip_packages=SERVER_DEPENDENCIES,
)
stub_config = {}
for api_input in api_inputs:
api = api_input.api
@@ -103,18 +74,12 @@ def build_package(
f"Provider `{api_input.provider}` is not available for API `{api}`"
)
deps = get_dependencies(
providers_for_api[api_input.provider],
api_input.dependencies,
)
if deps.docker_image:
provider = providers_for_api[api_input.provider]
package_deps.pip_packages.extend(provider.pip_packages)
if provider.docker_image:
raise ValueError("A stack's dependencies cannot have a docker image")
package_deps.pip_packages.extend(deps.pip_packages)
stub_config[api.value] = {"provider_id": api_input.provider}
for dep_api, dep_spec in api_input.dependencies.items():
if dep_api not in stub_config:
stub_config[dep_api] = {"provider_id": dep_spec.provider_id}
if package_file.exists():
cprint(
@@ -154,7 +119,7 @@
)
args = [
script,
api_or_stack,
distribution_id,
package_name,
package_deps.docker_image,
" ".join(package_deps.pip_packages),
@@ -165,7 +130,7 @@
)
args = [
script,
api_or_stack,
distribution_id,
package_name,
" ".join(package_deps.pip_packages),
]
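In build_package, per-provider pip packages are now folded directly into a single dependency list seeded with SERVER_DEPENDENCIES, and a stub run config gets one provider_id entry per API. A minimal sketch of that aggregation; the SERVER_DEPENDENCIES contents, provider ids, and pip packages below are illustrative, not taken from the real registry:

# Minimal sketch of the aggregation loop in build_package after this commit.
SERVER_DEPENDENCIES = ["fastapi", "uvicorn"]  # assumed contents

providers_by_api = {
    "inference": {"meta-reference": ["torch", "transformers"]},
    "safety": {"meta-reference": ["codeshield"]},
}
api_inputs = [("inference", "meta-reference"), ("safety", "meta-reference")]

pip_packages = list(SERVER_DEPENDENCIES)
stub_config = {}
for api, provider_id in api_inputs:
    pip_packages.extend(providers_by_api[api][provider_id])
    stub_config[api] = {"provider_id": provider_id}

print(pip_packages)
print(stub_config)  # one {"provider_id": ...} stub per API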

View file

@@ -19,11 +19,12 @@ error_handler() {
trap 'error_handler ${LINENO}' ERR
if [ $# -lt 3 ]; then
echo "Usage: $0 <env_name> <yaml_config> <port> <script_args...>"
echo "Usage: $0 <build_name> <yaml_config> <port> <script_args...>"
exit 1
fi
env_name="$1"
build_name="$1"
env_name="llamastack-$build_name"
shift
yaml_config="$1"

View file

@@ -19,11 +19,12 @@ error_handler() {
trap 'error_handler ${LINENO}' ERR
if [ $# -lt 3 ]; then
echo "Usage: $0 <docker_image> <yaml_config> <port> <other_args...>"
echo "Usage: $0 <build_name> <yaml_config> <port> <other_args...>"
exit 1
fi
docker_image="$1"
build_name="$1"
docker_image="llamastack-$build_name"
shift
yaml_config="$1"
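Both start scripts now derive their runtime target from the build name with the same prefix; in Python terms (the prefix comes from the diff, the script roles are inferred from context, and the build name is an example value):

# Naming convention shared by the two start scripts in this commit.
build_name = "mybuild"
conda_env = f"llamastack-{build_name}"      # env_name in the conda start script
docker_image = f"llamastack-{build_name}"   # docker_image in the container start script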