Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-29 07:14:20 +00:00

Add `api build` subcommand -- WIP

parent f5620c09ad
commit 3a337c5f1c
7 changed files with 203 additions and 0 deletions

llama_toolchain/cli/api/__init__.py (new file, +7 lines)

@@ -0,0 +1,7 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .api import ApiParser  # noqa

llama_toolchain/cli/api/api.py (new file, +26 lines)

@@ -0,0 +1,26 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse

from llama_toolchain.cli.subcommand import Subcommand

from .build import ApiBuild


class ApiParser(Subcommand):
    def __init__(self, subparsers: argparse._SubParsersAction):
        super().__init__()
        self.parser = subparsers.add_parser(
            "api",
            prog="llama api",
            description="Operate on llama stack API providers",
        )

        subparsers = self.parser.add_subparsers(title="api_subcommands")

        # Add sub-commands
        ApiBuild.create(subparsers)

llama_toolchain/cli/api/build.py (new file, +117 lines)

@@ -0,0 +1,117 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import os

import pkg_resources
import yaml

from llama_toolchain.cli.subcommand import Subcommand
from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR

from termcolor import cprint


class ApiBuild(Subcommand):

    def __init__(self, subparsers: argparse._SubParsersAction):
        super().__init__()
        self.parser = subparsers.add_parser(
            "build",
            prog="llama api build",
            description="Build a Llama stack API provider container",
            formatter_class=argparse.RawTextHelpFormatter,
        )
        self._add_arguments()
        self.parser.set_defaults(func=self._run_api_build_command)

    def _add_arguments(self):
        from llama_toolchain.distribution.distribution import stack_apis

        allowed_args = [a.name for a in stack_apis()]
        self.parser.add_argument(
            "api",
            choices=allowed_args,
            help="Stack API (one of: {})".format(", ".join(allowed_args)),
        )

        self.parser.add_argument(
            "--provider",
            type=str,
            help="The provider to package into the container",
            required=True,
        )
        self.parser.add_argument(
            "--container-name",
            type=str,
            help="Name of the container (including tag if needed)",
            required=True,
        )
        self.parser.add_argument(
            "--dependencies",
            type=str,
            help="Comma separated list of (downstream_api=provider) dependencies needed for the API",
            required=False,
        )

    def _run_api_build_command(self, args: argparse.Namespace) -> None:
        # NOTE: still WIP -- this body mirrors the distribution install flow and
        # references args.spec / args.name / args.conda_env rather than the
        # `api`, `--provider`, and `--container-name` arguments defined above.
        from llama_toolchain.common.exec import run_with_pty
        from llama_toolchain.distribution.datatypes import DistributionConfig
        from llama_toolchain.distribution.distribution import distribution_dependencies
        from llama_toolchain.distribution.registry import resolve_distribution_spec

        os.makedirs(DISTRIBS_BASE_DIR, exist_ok=True)
        script = pkg_resources.resource_filename(
            "llama_toolchain",
            "distribution/build_api.sh",
        )

        dist = resolve_distribution_spec(args.spec)
        if dist is None:
            self.parser.error(f"Could not find distribution {args.spec}")
            return

        distrib_dir = DISTRIBS_BASE_DIR / args.name
        os.makedirs(distrib_dir, exist_ok=True)

        deps = distribution_dependencies(dist)
        if not args.conda_env:
            print(f"Using {args.name} as the Conda environment for this distribution")

        conda_env = args.conda_env or args.name

        config_file = distrib_dir / "config.yaml"
        if config_file.exists():
            c = DistributionConfig(**yaml.safe_load(config_file.read_text()))
            if c.spec != dist.spec_id:
                self.parser.error(
                    f"already installed distribution with `spec={c.spec}` does not match provided spec `{args.spec}`"
                )
                return
            if c.conda_env != conda_env:
                self.parser.error(
                    f"already installed distribution has `conda_env={c.conda_env}` different from provided conda env `{conda_env}`"
                )
                return
        else:
            with open(config_file, "w") as f:
                c = DistributionConfig(
                    spec=dist.spec_id,
                    name=args.name,
                    conda_env=conda_env,
                )
                f.write(yaml.dump(c.dict(), sort_keys=False))

        return_code = run_with_pty([script, conda_env, args.name, " ".join(deps)])

        assert return_code == 0, cprint(
            f"Failed to install distribution {dist.spec_id}", color="red"
        )
        cprint(
            f"Distribution `{args.name}` (with spec {dist.spec_id}) has been installed successfully!",
            color="green",
        )
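
Given the arguments registered in `_add_arguments`, the finished subcommand would presumably be invoked roughly as sketched below; the provider name, container tag, and dependency value are illustrative placeholders, not values taken from this commit:

# Hypothetical invocation once the WIP handler consumes these arguments
llama api build inference \
    --provider meta-reference \
    --container-name llama-inference-api:dev \
    --dependencies "safety=meta-reference"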

@@ -6,6 +6,7 @@

import argparse

from .api import ApiParser
from .distribution import DistributionParser
from .download import Download
from .model import ModelParser

@@ -30,6 +31,7 @@ class LlamaCLIParser:
        Download.create(subparsers)
        ModelParser.create(subparsers)
        DistributionParser.create(subparsers)
        ApiParser.create(subparsers)

        # Import sub-commands from agentic_system if they exist
        try:

llama_toolchain/distribution/build_image.sh (new file, +40 lines)

@@ -0,0 +1,40 @@
#!/bin/bash

if [ "$#" -ne 4 ]; then
  echo "Usage: $0 <image_name> <base_image> <pip_dependencies> <entrypoint_command>"
  echo "Example: $0 my-fastapi-app python:3.9-slim 'fastapi uvicorn' 'python3 -m llama_toolchain.distribution.server --port 8000'"
  exit 1
fi

IMAGE_NAME=$1
BASE_IMAGE=$2
PIP_DEPENDENCIES=$3
ENTRYPOINT_COMMAND=$4

set -euo pipefail

SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
SOURCE_DIR=$(dirname $(dirname "$SCRIPT_DIR"))

TEMP_DIR=$(mktemp -d)
echo "Created temporary directory: $TEMP_DIR"

cat <<EOF >"$TEMP_DIR/Dockerfile"
FROM $BASE_IMAGE
WORKDIR /app
COPY llama_toolchain /app

RUN pip install --no-cache-dir $PIP_DEPENDENCIES

EXPOSE 8000
CMD $ENTRYPOINT_COMMAND
EOF

echo "Dockerfile created successfully in $TEMP_DIR/Dockerfile"

podman build -t $IMAGE_NAME -f "$TEMP_DIR/Dockerfile" "$SOURCE_DIR"

echo "Podman image '$IMAGE_NAME' built successfully."
echo "You can run it with: podman run -p 8000:8000 $IMAGE_NAME"

rm -rf "$TEMP_DIR"
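
The script writes a throwaway Dockerfile into a temporary directory, builds it with podman against the repository root, and then removes the temp directory. A minimal example run, reusing the placeholder values from the script's own usage message (the image name is an assumption, not something this commit defines):

# Illustrative run; the image name is a placeholder.
./build_image.sh my-fastapi-app python:3.9-slim \
  'fastapi uvicorn' \
  'python3 -m llama_toolchain.distribution.server --port 8000'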

@@ -43,6 +43,13 @@ class InlineProviderSpec(ProviderSpec):
        default_factory=list,
        description="The pip dependencies needed for this implementation",
    )
    docker_image: Optional[str] = Field(
        default=None,
        description="""
The docker image to use for this implementation. If one is provided, pip_packages will be ignored.
If a provider depends on other providers, the dependencies MUST NOT specify a docker image.
""",
    )
    module: str = Field(
        ...,
        description="""

@@ -42,6 +42,10 @@ def distribution_dependencies(distribution: DistributionSpec) -> List[str]:
    ] + SERVER_DEPENDENCIES


def stack_apis() -> List[Api]:
    return [Api.inference, Api.safety, Api.agentic_system, Api.memory]


def api_endpoints() -> Dict[Api, List[ApiEndpoint]]:
    apis = {}