mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-29 03:14:19 +00:00
Make llama stack build not create a new conda by default (#788)
## What does this PR do? So far `llama stack build` has always created a separate conda environment for packaging the dependencies of a distribution. The main reason to do so is isolation -- distributions are composed of providers which can have a variety of potentially conflicting dependencies. That said, this has created significant annoyance for new users since it is not at all transparent. The fact that `llama stack run` is actually running the code in some other conda is very surprising. This PR tries to make things better. - Both `llama stack build` and `llama stack run` now accept an `--image-name` argument which represents the (conda, docker, virtualenv) image you want to operate upon. - For the default (conda) mode, the script checks if a current conda environment exists. If one exists, it uses it. - If `--image-name` is provided, that option is used. In this case, an environment is created if needed. - There is no automatic `llamastack-` prefixing of the environment names done anymore. ## Test Plan Start in a conda environment, run `llama stack build --template fireworks`; verify that it successfully built into the current environment and stored the build file at `$CONDA_PREFIX/llamastack-build.yaml`. Run `llama stack run fireworks` which started correctly in the current environment. Ran the same build command outside of conda. It failed asking for `--image-name`. Ran it with `llama stack build --template fireworks --image-name foo`. This successfully created a conda environment called `foo` and installed deps. Ran `llama stack run fireworks` outside conda which failed. Activated a different conda, ran again, it failed saying it did not find the `llamastack-build.yaml` file. Then used `--image-name foo` option and it ran successfully.
This commit is contained in:
parent
59eeaf7f81
commit
cee3816609
8 changed files with 400 additions and 338 deletions
307
llama_stack/cli/stack/_build.py
Normal file
307
llama_stack/cli/stack/_build.py
Normal file
|
@ -0,0 +1,307 @@
|
||||||
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# This source code is licensed under the terms described in the LICENSE file in
|
||||||
|
# the root directory of this source tree.
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import importlib.resources
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import textwrap
|
||||||
|
from functools import lru_cache
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from prompt_toolkit import prompt
|
||||||
|
from prompt_toolkit.completion import WordCompleter
|
||||||
|
from prompt_toolkit.validation import Validator
|
||||||
|
from termcolor import cprint
|
||||||
|
|
||||||
|
from llama_stack.cli.table import print_table
|
||||||
|
|
||||||
|
from llama_stack.distribution.build import build_image, ImageType
|
||||||
|
from llama_stack.distribution.datatypes import (
|
||||||
|
BuildConfig,
|
||||||
|
DistributionSpec,
|
||||||
|
Provider,
|
||||||
|
StackRunConfig,
|
||||||
|
)
|
||||||
|
from llama_stack.distribution.distribution import get_provider_registry
|
||||||
|
from llama_stack.distribution.resolver import InvalidProviderError
|
||||||
|
from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR
|
||||||
|
from llama_stack.distribution.utils.dynamic import instantiate_class_type
|
||||||
|
from llama_stack.providers.datatypes import Api
|
||||||
|
|
||||||
|
|
||||||
|
TEMPLATES_PATH = Path(__file__).parent.parent.parent / "templates"
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache()
def available_templates_specs() -> Dict[str, BuildConfig]:
    """Discover the distribution templates shipped with llama_stack.

    Scans TEMPLATES_PATH recursively for `*build.yaml` files and parses each
    into a BuildConfig. The template name is taken from the parent directory
    of the build file. Results are cached for the process lifetime.

    Returns:
        Dict mapping template name -> parsed BuildConfig.
    """
    # NOTE: yaml is already imported at module level; no local import needed.
    template_specs = {}
    for p in TEMPLATES_PATH.rglob("*build.yaml"):
        template_name = p.parent.name
        with open(p, "r") as f:
            build_config = BuildConfig(**yaml.safe_load(f))
        template_specs[template_name] = build_config
    return template_specs
|
||||||
|
|
||||||
|
|
||||||
|
def run_stack_build_command(
    parser: argparse.ArgumentParser, args: argparse.Namespace
) -> None:
    """Entry point for `llama stack build`.

    Three mutually exclusive paths:
      1. --list-templates: print the available templates and return.
      2. --template NAME: build from a shipped template (requires --image-type).
      3. Neither --template nor --config: interactive wizard that prompts for
         a name, image type, and per-API providers.
      Otherwise, --config FILE: parse the given build.yaml.

    The image name defaults to the currently active conda environment when
    --image-name is not given.
    """
    if args.list_templates:
        return _run_template_list_cmd()

    # Prefer an explicit --image-name; otherwise fall back to the active conda env.
    current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
    image_name = args.image_name or current_conda_env

    if args.template:
        available_templates = available_templates_specs()
        if args.template not in available_templates:
            cprint(
                f"Could not find template {args.template}. Please run `llama stack build --list-templates` to check out the available templates",
                color="red",
            )
            return
        build_config = available_templates[args.template]
        if args.image_type:
            build_config.image_type = args.image_type
        else:
            cprint(
                f"Please specify a image-type (docker | conda | venv) for {args.template}",
                color="red",
            )
            return
        _run_stack_build_command_from_build_config(
            build_config,
            image_name=image_name,
            template_name=args.template,
        )
        return

    if not args.config and not args.template:
        # Interactive wizard: ask for a stack name first.
        name = prompt(
            "> Enter a name for your Llama Stack (e.g. my-local-stack): ",
            validator=Validator.from_callable(
                lambda x: len(x) > 0,
                error_message="Name cannot be empty, please enter a name",
            ),
        )

        image_type = prompt(
            "> Enter the image type you want your Llama Stack to be built as (docker or conda or venv): ",
            validator=Validator.from_callable(
                lambda x: x in ["docker", "conda", "venv"],
                error_message="Invalid image type, please enter conda or docker or venv",
            ),
            default="conda",
        )

        if image_type == "conda":
            if not image_name:
                cprint(
                    f"No current conda environment detected or specified, will create a new conda environment with the name `llamastack-{name}`",
                    color="yellow",
                )
                image_name = f"llamastack-{name}"
            else:
                cprint(
                    f"Using conda environment {image_name}",
                    color="green",
                )

        cprint(
            textwrap.dedent(
                """
                Llama Stack is composed of several APIs working together. Let's select
                the provider types (implementations) you want to use for these APIs.
                """,
            ),
            color="green",
        )

        print("Tip: use <TAB> to see options for the providers.\n")

        # One provider choice per API; "remote"/"remote::sample" are internal
        # placeholders and are hidden from the picker.
        providers = dict()
        for api, providers_for_api in get_provider_registry().items():
            available_providers = [
                x
                for x in providers_for_api.keys()
                if x not in ("remote", "remote::sample")
            ]
            api_provider = prompt(
                "> Enter provider for API {}: ".format(api.value),
                completer=WordCompleter(available_providers),
                complete_while_typing=True,
                validator=Validator.from_callable(
                    lambda x: x in available_providers,
                    error_message="Invalid provider, use <TAB> to see options",
                ),
            )
            providers[api.value] = api_provider

        description = prompt(
            "\n > (Optional) Enter a short description for your Llama Stack: ",
            default="",
        )

        distribution_spec = DistributionSpec(
            providers=providers,
            description=description,
        )

        build_config = BuildConfig(
            image_type=image_type, distribution_spec=distribution_spec
        )
    else:
        with open(args.config, "r") as f:
            try:
                build_config = BuildConfig(**yaml.safe_load(f))
            except Exception as e:
                cprint(
                    f"Could not parse config file {args.config}: {e}",
                    color="red",
                )
                return

    _run_stack_build_command_from_build_config(build_config, image_name=image_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_run_config(
    build_config: BuildConfig, build_dir: Path, image_name: str
) -> None:
    """
    Generate a run.yaml template file for user to edit from a build.yaml file.

    Writes `<build_dir>/<image_name>-run.yaml` containing a StackRunConfig with
    one Provider entry per provider type listed in the build config, using each
    provider's `sample_run_config` when available (empty config otherwise).

    Raises:
        InvalidProviderError: if any selected provider is deprecated.
    """
    apis = list(build_config.distribution_spec.providers.keys())
    run_config = StackRunConfig(
        docker_image=(
            image_name if build_config.image_type == ImageType.docker.value else None
        ),
        image_name=image_name,
        apis=apis,
        providers={},
    )
    # build providers dict
    provider_registry = get_provider_registry()
    for api in apis:
        run_config.providers[api] = []
        provider_types = build_config.distribution_spec.providers[api]
        if isinstance(provider_types, str):
            provider_types = [provider_types]

        for i, provider_type in enumerate(provider_types):
            # e.g. "remote::fireworks" -> "fireworks"
            pid = provider_type.split("::")[-1]

            p = provider_registry[Api(api)][provider_type]
            if p.deprecation_error:
                raise InvalidProviderError(p.deprecation_error)

            # Reuse the spec already looked up above instead of hitting the
            # registry a second time.
            config_type = instantiate_class_type(p.config_class)
            if hasattr(config_type, "sample_run_config"):
                config = config_type.sample_run_config(
                    __distro_dir__=f"distributions/{image_name}"
                )
            else:
                config = {}

            p_spec = Provider(
                # suffix with the index only when disambiguation is needed
                provider_id=f"{pid}-{i}" if len(provider_types) > 1 else pid,
                provider_type=provider_type,
                config=config,
            )
            run_config.providers[api].append(p_spec)

    run_config_file = build_dir / f"{image_name}-run.yaml"

    with open(run_config_file, "w") as f:
        # round-trip through JSON so pydantic types serialize to plain values
        to_write = json.loads(run_config.model_dump_json())
        f.write(yaml.dump(to_write, sort_keys=False))

    cprint(
        f"You can now edit {run_config_file} and run `llama stack run {image_name}`",
        color="green",
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _run_stack_build_command_from_build_config(
    build_config: BuildConfig,
    image_name: Optional[str] = None,
    template_name: Optional[str] = None,
) -> None:
    """Persist the build config, build the image, and emit a run config.

    For docker builds from a template, the image name is derived as
    `distribution-<template>`. For conda/venv builds an image name is
    required. The build.yaml is saved under DISTRIBS_BASE_DIR so the same
    distribution can be rebuilt later; the run.yaml is either copied from
    the template or generated from the build config.

    Raises:
        ValueError: if no image name can be determined for the build.
    """
    if build_config.image_type == ImageType.docker.value:
        if template_name:
            image_name = f"distribution-{template_name}"
        else:
            if not image_name:
                raise ValueError(
                    "Please specify an image name when building a docker image without a template"
                )
    elif build_config.image_type == ImageType.conda.value:
        if not image_name:
            raise ValueError("Please specify an image name when building a conda image")
    # BUGFIX: venv builds previously fell through with image_name=None and
    # crashed later on `DISTRIBS_BASE_DIR / image_name` (or produced a literal
    # "None" environment name). Validate it up front, mirroring the conda case.
    elif build_config.image_type == ImageType.venv.value:
        if not template_name and not image_name:
            raise ValueError("Please specify an image name when building a venv image")

    if template_name:
        build_dir = DISTRIBS_BASE_DIR / template_name
        build_file_path = build_dir / f"{template_name}-build.yaml"
    else:
        build_dir = DISTRIBS_BASE_DIR / image_name
        build_file_path = build_dir / f"{image_name}-build.yaml"

    # save build.yaml spec so the same distribution can be built again
    os.makedirs(build_dir, exist_ok=True)
    with open(build_file_path, "w") as f:
        # round-trip through JSON so pydantic types serialize to plain values
        to_write = json.loads(build_config.model_dump_json())
        f.write(yaml.dump(to_write, sort_keys=False))

    return_code = build_image(
        build_config, build_file_path, image_name, template_name=template_name
    )
    if return_code != 0:
        return

    if template_name:
        # copy run.yaml from template to build_dir instead of generating it again
        template_path = (
            importlib.resources.files("llama_stack")
            / f"templates/{template_name}/run.yaml"
        )
        with importlib.resources.as_file(template_path) as path:
            run_config_file = build_dir / f"{template_name}-run.yaml"
            shutil.copy(path, run_config_file)
        # Find all ${env.VARIABLE} patterns
        cprint("Build Successful!", color="green")
    else:
        _generate_run_config(build_config, build_dir, image_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _run_template_list_cmd() -> None:
    """Print a table of the available distribution templates."""
    # eventually, this should query a registry at llama.meta.com/llamastack/distributions
    headers = [
        "Template Name",
        # "Providers",
        "Description",
    ]

    # One row per discovered template: name plus its short description.
    rows = [
        [
            template_name,
            # json.dumps(spec.distribution_spec.providers, indent=2),
            spec.distribution_spec.description,
        ]
        for template_name, spec in available_templates_specs().items()
    ]

    print_table(
        rows,
        headers,
        separate_rows=True,
    )
|
|
@ -4,38 +4,9 @@
|
||||||
# This source code is licensed under the terms described in the LICENSE file in
|
# This source code is licensed under the terms described in the LICENSE file in
|
||||||
# the root directory of this source tree.
|
# the root directory of this source tree.
|
||||||
import argparse
|
import argparse
|
||||||
import importlib.resources
|
import textwrap
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
from functools import lru_cache
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from llama_stack.cli.subcommand import Subcommand
|
from llama_stack.cli.subcommand import Subcommand
|
||||||
from llama_stack.distribution.datatypes import (
|
|
||||||
BuildConfig,
|
|
||||||
DistributionSpec,
|
|
||||||
Provider,
|
|
||||||
StackRunConfig,
|
|
||||||
)
|
|
||||||
from llama_stack.distribution.distribution import get_provider_registry
|
|
||||||
from llama_stack.distribution.resolver import InvalidProviderError
|
|
||||||
from llama_stack.distribution.utils.dynamic import instantiate_class_type
|
|
||||||
from llama_stack.providers.datatypes import Api
|
|
||||||
|
|
||||||
TEMPLATES_PATH = Path(__file__).parent.parent.parent / "templates"
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache()
|
|
||||||
def available_templates_specs() -> List[BuildConfig]:
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
template_specs = []
|
|
||||||
for p in TEMPLATES_PATH.rglob("*build.yaml"):
|
|
||||||
with open(p, "r") as f:
|
|
||||||
build_config = BuildConfig(**yaml.safe_load(f))
|
|
||||||
template_specs.append(build_config)
|
|
||||||
return template_specs
|
|
||||||
|
|
||||||
|
|
||||||
class StackBuild(Subcommand):
|
class StackBuild(Subcommand):
|
||||||
|
@ -81,250 +52,21 @@ class StackBuild(Subcommand):
|
||||||
default="conda",
|
default="conda",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self.parser.add_argument(
|
||||||
|
"--image-name",
|
||||||
|
type=str,
|
||||||
|
help=textwrap.dedent(
|
||||||
|
"""[for image-type=conda] Name of the conda environment to use for the build. If
|
||||||
|
not specified, currently active Conda environment will be used. If no Conda
|
||||||
|
environment is active, you must specify a name.
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
def _run_stack_build_command(self, args: argparse.Namespace) -> None:
|
def _run_stack_build_command(self, args: argparse.Namespace) -> None:
|
||||||
import textwrap
|
# always keep implementation completely silo-ed away from CLI so CLI
|
||||||
|
# can be fast to load and reduces dependencies
|
||||||
|
from ._build import run_stack_build_command
|
||||||
|
|
||||||
import yaml
|
return run_stack_build_command(self.parser, args)
|
||||||
from prompt_toolkit import prompt
|
|
||||||
from prompt_toolkit.completion import WordCompleter
|
|
||||||
from prompt_toolkit.validation import Validator
|
|
||||||
from termcolor import cprint
|
|
||||||
|
|
||||||
from llama_stack.distribution.distribution import get_provider_registry
|
|
||||||
|
|
||||||
if args.list_templates:
|
|
||||||
self._run_template_list_cmd(args)
|
|
||||||
return
|
|
||||||
|
|
||||||
if args.template:
|
|
||||||
available_templates = available_templates_specs()
|
|
||||||
for build_config in available_templates:
|
|
||||||
if build_config.name == args.template:
|
|
||||||
if args.image_type:
|
|
||||||
build_config.image_type = args.image_type
|
|
||||||
else:
|
|
||||||
self.parser.error(
|
|
||||||
f"Please specify a image-type (docker | conda | venv) for {args.template}"
|
|
||||||
)
|
|
||||||
self._run_stack_build_command_from_build_config(
|
|
||||||
build_config,
|
|
||||||
template_name=args.template,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
self.parser.error(
|
|
||||||
f"Could not find template {args.template}. Please run `llama stack build --list-templates` to check out the available templates"
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
if not args.config and not args.template:
|
|
||||||
name = prompt(
|
|
||||||
"> Enter a name for your Llama Stack (e.g. my-local-stack): ",
|
|
||||||
validator=Validator.from_callable(
|
|
||||||
lambda x: len(x) > 0,
|
|
||||||
error_message="Name cannot be empty, please enter a name",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
image_type = prompt(
|
|
||||||
"> Enter the image type you want your Llama Stack to be built as (docker or conda or venv): ",
|
|
||||||
validator=Validator.from_callable(
|
|
||||||
lambda x: x in ["docker", "conda", "venv"],
|
|
||||||
error_message="Invalid image type, please enter conda or docker or venv",
|
|
||||||
),
|
|
||||||
default="conda",
|
|
||||||
)
|
|
||||||
|
|
||||||
cprint(
|
|
||||||
textwrap.dedent(
|
|
||||||
"""
|
|
||||||
Llama Stack is composed of several APIs working together. Let's select
|
|
||||||
the provider types (implementations) you want to use for these APIs.
|
|
||||||
""",
|
|
||||||
),
|
|
||||||
color="green",
|
|
||||||
)
|
|
||||||
|
|
||||||
print("Tip: use <TAB> to see options for the providers.\n")
|
|
||||||
|
|
||||||
providers = dict()
|
|
||||||
for api, providers_for_api in get_provider_registry().items():
|
|
||||||
available_providers = [
|
|
||||||
x
|
|
||||||
for x in providers_for_api.keys()
|
|
||||||
if x not in ("remote", "remote::sample")
|
|
||||||
]
|
|
||||||
api_provider = prompt(
|
|
||||||
"> Enter provider for API {}: ".format(api.value),
|
|
||||||
completer=WordCompleter(available_providers),
|
|
||||||
complete_while_typing=True,
|
|
||||||
validator=Validator.from_callable(
|
|
||||||
lambda x: x in available_providers,
|
|
||||||
error_message="Invalid provider, use <TAB> to see options",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
providers[api.value] = api_provider
|
|
||||||
|
|
||||||
description = prompt(
|
|
||||||
"\n > (Optional) Enter a short description for your Llama Stack: ",
|
|
||||||
default="",
|
|
||||||
)
|
|
||||||
|
|
||||||
distribution_spec = DistributionSpec(
|
|
||||||
providers=providers,
|
|
||||||
description=description,
|
|
||||||
)
|
|
||||||
|
|
||||||
build_config = BuildConfig(
|
|
||||||
name=name, image_type=image_type, distribution_spec=distribution_spec
|
|
||||||
)
|
|
||||||
self._run_stack_build_command_from_build_config(build_config)
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(args.config, "r") as f:
|
|
||||||
try:
|
|
||||||
build_config = BuildConfig(**yaml.safe_load(f))
|
|
||||||
except Exception as e:
|
|
||||||
self.parser.error(f"Could not parse config file {args.config}: {e}")
|
|
||||||
return
|
|
||||||
self._run_stack_build_command_from_build_config(build_config)
|
|
||||||
|
|
||||||
def _generate_run_config(self, build_config: BuildConfig, build_dir: Path) -> None:
|
|
||||||
"""
|
|
||||||
Generate a run.yaml template file for user to edit from a build.yaml file
|
|
||||||
"""
|
|
||||||
import json
|
|
||||||
|
|
||||||
import yaml
|
|
||||||
from termcolor import cprint
|
|
||||||
|
|
||||||
from llama_stack.distribution.build import ImageType
|
|
||||||
|
|
||||||
apis = list(build_config.distribution_spec.providers.keys())
|
|
||||||
run_config = StackRunConfig(
|
|
||||||
docker_image=(
|
|
||||||
build_config.name
|
|
||||||
if build_config.image_type == ImageType.docker.value
|
|
||||||
else None
|
|
||||||
),
|
|
||||||
image_name=build_config.name,
|
|
||||||
conda_env=(
|
|
||||||
build_config.name
|
|
||||||
if build_config.image_type == ImageType.conda.value
|
|
||||||
else None
|
|
||||||
),
|
|
||||||
apis=apis,
|
|
||||||
providers={},
|
|
||||||
)
|
|
||||||
# build providers dict
|
|
||||||
provider_registry = get_provider_registry()
|
|
||||||
for api in apis:
|
|
||||||
run_config.providers[api] = []
|
|
||||||
provider_types = build_config.distribution_spec.providers[api]
|
|
||||||
if isinstance(provider_types, str):
|
|
||||||
provider_types = [provider_types]
|
|
||||||
|
|
||||||
for i, provider_type in enumerate(provider_types):
|
|
||||||
pid = provider_type.split("::")[-1]
|
|
||||||
|
|
||||||
p = provider_registry[Api(api)][provider_type]
|
|
||||||
if p.deprecation_error:
|
|
||||||
raise InvalidProviderError(p.deprecation_error)
|
|
||||||
|
|
||||||
config_type = instantiate_class_type(
|
|
||||||
provider_registry[Api(api)][provider_type].config_class
|
|
||||||
)
|
|
||||||
if hasattr(config_type, "sample_run_config"):
|
|
||||||
config = config_type.sample_run_config(
|
|
||||||
__distro_dir__=f"distributions/{build_config.name}"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
config = {}
|
|
||||||
|
|
||||||
p_spec = Provider(
|
|
||||||
provider_id=f"{pid}-{i}" if len(provider_types) > 1 else pid,
|
|
||||||
provider_type=provider_type,
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
run_config.providers[api].append(p_spec)
|
|
||||||
|
|
||||||
os.makedirs(build_dir, exist_ok=True)
|
|
||||||
run_config_file = build_dir / f"{build_config.name}-run.yaml"
|
|
||||||
|
|
||||||
with open(run_config_file, "w") as f:
|
|
||||||
to_write = json.loads(run_config.model_dump_json())
|
|
||||||
f.write(yaml.dump(to_write, sort_keys=False))
|
|
||||||
|
|
||||||
cprint(
|
|
||||||
f"You can now edit {run_config_file} and run `llama stack run {run_config_file}`",
|
|
||||||
color="green",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _run_stack_build_command_from_build_config(
|
|
||||||
self,
|
|
||||||
build_config: BuildConfig,
|
|
||||||
template_name: Optional[str] = None,
|
|
||||||
) -> None:
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
|
|
||||||
import yaml
|
|
||||||
from termcolor import cprint
|
|
||||||
|
|
||||||
from llama_stack.distribution.build import build_image
|
|
||||||
from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR
|
|
||||||
|
|
||||||
# save build.yaml spec for building same distribution again
|
|
||||||
build_dir = DISTRIBS_BASE_DIR / f"llamastack-{build_config.name}"
|
|
||||||
os.makedirs(build_dir, exist_ok=True)
|
|
||||||
build_file_path = build_dir / f"{build_config.name}-build.yaml"
|
|
||||||
|
|
||||||
with open(build_file_path, "w") as f:
|
|
||||||
to_write = json.loads(build_config.model_dump_json())
|
|
||||||
f.write(yaml.dump(to_write, sort_keys=False))
|
|
||||||
|
|
||||||
return_code = build_image(build_config, build_file_path)
|
|
||||||
if return_code != 0:
|
|
||||||
return
|
|
||||||
|
|
||||||
if template_name:
|
|
||||||
# copy run.yaml from template to build_dir instead of generating it again
|
|
||||||
template_path = (
|
|
||||||
importlib.resources.files("llama_stack")
|
|
||||||
/ f"templates/{template_name}/run.yaml"
|
|
||||||
)
|
|
||||||
with importlib.resources.as_file(template_path) as path:
|
|
||||||
run_config_file = build_dir / f"{build_config.name}-run.yaml"
|
|
||||||
shutil.copy(path, run_config_file)
|
|
||||||
# Find all ${env.VARIABLE} patterns
|
|
||||||
cprint("Build Successful!", color="green")
|
|
||||||
else:
|
|
||||||
self._generate_run_config(build_config, build_dir)
|
|
||||||
|
|
||||||
def _run_template_list_cmd(self, args: argparse.Namespace) -> None:
|
|
||||||
import json
|
|
||||||
|
|
||||||
from llama_stack.cli.table import print_table
|
|
||||||
|
|
||||||
# eventually, this should query a registry at llama.meta.com/llamastack/distributions
|
|
||||||
headers = [
|
|
||||||
"Template Name",
|
|
||||||
"Providers",
|
|
||||||
"Description",
|
|
||||||
]
|
|
||||||
|
|
||||||
rows = []
|
|
||||||
for spec in available_templates_specs():
|
|
||||||
rows.append(
|
|
||||||
[
|
|
||||||
spec.name,
|
|
||||||
json.dumps(spec.distribution_spec.providers, indent=2),
|
|
||||||
spec.distribution_spec.description,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
print_table(
|
|
||||||
rows,
|
|
||||||
headers,
|
|
||||||
separate_rows=True,
|
|
||||||
)
|
|
||||||
|
|
|
@ -37,6 +37,11 @@ class StackRun(Subcommand):
|
||||||
help="Port to run the server on. Defaults to 5000",
|
help="Port to run the server on. Defaults to 5000",
|
||||||
default=int(os.getenv("LLAMA_STACK_PORT", 5000)),
|
default=int(os.getenv("LLAMA_STACK_PORT", 5000)),
|
||||||
)
|
)
|
||||||
|
self.parser.add_argument(
|
||||||
|
"--image-name",
|
||||||
|
type=str,
|
||||||
|
help="Name of the image to run. Defaults to the current conda environment",
|
||||||
|
)
|
||||||
self.parser.add_argument(
|
self.parser.add_argument(
|
||||||
"--disable-ipv6",
|
"--disable-ipv6",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
|
@ -53,8 +58,11 @@ class StackRun(Subcommand):
|
||||||
|
|
||||||
def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
|
def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
|
||||||
import importlib.resources
|
import importlib.resources
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
from termcolor import cprint
|
||||||
|
|
||||||
from llama_stack.distribution.build import ImageType
|
from llama_stack.distribution.build import ImageType
|
||||||
from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
|
from llama_stack.distribution.configure import parse_and_maybe_upgrade_config
|
||||||
|
@ -99,11 +107,11 @@ class StackRun(Subcommand):
|
||||||
|
|
||||||
if not config_file.exists():
|
if not config_file.exists():
|
||||||
self.parser.error(
|
self.parser.error(
|
||||||
f"File {str(config_file)} does not exist. Please run `llama stack build` to generate (and optionally edit) a run.yaml file"
|
f"File {str(config_file)} does not exist.\n\nPlease run `llama stack build` to generate (and optionally edit) a run.yaml file"
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
print(f"Using config file: {config_file}")
|
print(f"Using run configuration: {config_file}")
|
||||||
config_dict = yaml.safe_load(config_file.read_text())
|
config_dict = yaml.safe_load(config_file.read_text())
|
||||||
config = parse_and_maybe_upgrade_config(config_dict)
|
config = parse_and_maybe_upgrade_config(config_dict)
|
||||||
|
|
||||||
|
@ -114,13 +122,52 @@ class StackRun(Subcommand):
|
||||||
)
|
)
|
||||||
run_args = [script, config.docker_image]
|
run_args = [script, config.docker_image]
|
||||||
else:
|
else:
|
||||||
|
current_conda_env = os.environ.get("CONDA_DEFAULT_ENV")
|
||||||
|
image_name = args.image_name or current_conda_env
|
||||||
|
if not image_name:
|
||||||
|
cprint(
|
||||||
|
"No current conda environment detected, please specify a conda environment name with --image-name",
|
||||||
|
color="red",
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_conda_prefix(env_name):
|
||||||
|
# Get conda environments info
|
||||||
|
conda_env_info = json.loads(
|
||||||
|
subprocess.check_output(
|
||||||
|
["conda", "info", "--envs", "--json"]
|
||||||
|
).decode()
|
||||||
|
)
|
||||||
|
envs = conda_env_info["envs"]
|
||||||
|
for envpath in envs:
|
||||||
|
if envpath.endswith(env_name):
|
||||||
|
return envpath
|
||||||
|
return None
|
||||||
|
|
||||||
|
print(f"Using conda environment: {image_name}")
|
||||||
|
conda_prefix = get_conda_prefix(image_name)
|
||||||
|
if not conda_prefix:
|
||||||
|
cprint(
|
||||||
|
f"Conda environment {image_name} does not exist.",
|
||||||
|
color="red",
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
build_file = Path(conda_prefix) / "llamastack-build.yaml"
|
||||||
|
if not build_file.exists():
|
||||||
|
cprint(
|
||||||
|
f"Build file {build_file} does not exist.\n\nPlease run `llama stack build` or specify the correct conda environment name with --image-name",
|
||||||
|
color="red",
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
script = (
|
script = (
|
||||||
importlib.resources.files("llama_stack")
|
importlib.resources.files("llama_stack")
|
||||||
/ "distribution/start_conda_env.sh"
|
/ "distribution/start_conda_env.sh"
|
||||||
)
|
)
|
||||||
run_args = [
|
run_args = [
|
||||||
script,
|
script,
|
||||||
config.conda_env,
|
image_name,
|
||||||
]
|
]
|
||||||
|
|
||||||
run_args.extend([str(config_file), str(args.port)])
|
run_args.extend([str(config_file), str(args.port)])
|
||||||
|
@ -129,13 +176,17 @@ class StackRun(Subcommand):
|
||||||
|
|
||||||
for env_var in args.env:
|
for env_var in args.env:
|
||||||
if "=" not in env_var:
|
if "=" not in env_var:
|
||||||
self.parser.error(
|
cprint(
|
||||||
f"Environment variable '{env_var}' must be in KEY=VALUE format"
|
f"Environment variable '{env_var}' must be in KEY=VALUE format",
|
||||||
|
color="red",
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
key, value = env_var.split("=", 1) # split on first = only
|
key, value = env_var.split("=", 1) # split on first = only
|
||||||
if not key:
|
if not key:
|
||||||
self.parser.error(f"Environment variable '{env_var}' has empty key")
|
cprint(
|
||||||
|
f"Environment variable '{env_var}' has empty key",
|
||||||
|
color="red",
|
||||||
|
)
|
||||||
return
|
return
|
||||||
run_args.extend(["--env", f"{key}={value}"])
|
run_args.extend(["--env", f"{key}={value}"])
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ import sys
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Dict, List
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from termcolor import cprint
|
from termcolor import cprint
|
||||||
|
@ -106,6 +106,8 @@ def print_pip_install_help(providers: Dict[str, List[Provider]]):
|
||||||
def build_image(
|
def build_image(
|
||||||
build_config: BuildConfig,
|
build_config: BuildConfig,
|
||||||
build_file_path: Path,
|
build_file_path: Path,
|
||||||
|
image_name: str,
|
||||||
|
template_name: Optional[str] = None,
|
||||||
):
|
):
|
||||||
docker_image = build_config.distribution_spec.docker_image or "python:3.10-slim"
|
docker_image = build_config.distribution_spec.docker_image or "python:3.10-slim"
|
||||||
|
|
||||||
|
@ -115,32 +117,34 @@ def build_image(
|
||||||
normal_deps += SERVER_DEPENDENCIES
|
normal_deps += SERVER_DEPENDENCIES
|
||||||
|
|
||||||
if build_config.image_type == ImageType.docker.value:
|
if build_config.image_type == ImageType.docker.value:
|
||||||
script = (
|
script = str(
|
||||||
importlib.resources.files("llama_stack") / "distribution/build_container.sh"
|
importlib.resources.files("llama_stack") / "distribution/build_container.sh"
|
||||||
)
|
)
|
||||||
args = [
|
args = [
|
||||||
script,
|
script,
|
||||||
build_config.name,
|
image_name,
|
||||||
docker_image,
|
docker_image,
|
||||||
str(build_file_path),
|
str(build_file_path),
|
||||||
str(BUILDS_BASE_DIR / ImageType.docker.value),
|
str(BUILDS_BASE_DIR / ImageType.docker.value),
|
||||||
" ".join(normal_deps),
|
" ".join(normal_deps),
|
||||||
]
|
]
|
||||||
elif build_config.image_type == ImageType.conda.value:
|
elif build_config.image_type == ImageType.conda.value:
|
||||||
script = (
|
script = str(
|
||||||
importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
|
importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
|
||||||
)
|
)
|
||||||
args = [
|
args = [
|
||||||
script,
|
script,
|
||||||
build_config.name,
|
str(image_name),
|
||||||
str(build_file_path),
|
str(build_file_path),
|
||||||
" ".join(normal_deps),
|
" ".join(normal_deps),
|
||||||
]
|
]
|
||||||
elif build_config.image_type == ImageType.venv.value:
|
elif build_config.image_type == ImageType.venv.value:
|
||||||
script = importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
|
script = str(
|
||||||
|
importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
|
||||||
|
)
|
||||||
args = [
|
args = [
|
||||||
script,
|
script,
|
||||||
build_config.name,
|
str(image_name),
|
||||||
str(build_file_path),
|
str(build_file_path),
|
||||||
" ".join(normal_deps),
|
" ".join(normal_deps),
|
||||||
]
|
]
|
||||||
|
@ -156,7 +160,7 @@ def build_image(
|
||||||
|
|
||||||
if return_code != 0:
|
if return_code != 0:
|
||||||
log.error(
|
log.error(
|
||||||
f"Failed to build target {build_config.name} with return code {return_code}",
|
f"Failed to build target {image_name} with return code {return_code}",
|
||||||
)
|
)
|
||||||
|
|
||||||
return return_code
|
return return_code
|
||||||
|
|
|
@ -18,8 +18,8 @@ if [ -n "$LLAMA_MODELS_DIR" ]; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$#" -lt 3 ]; then
|
if [ "$#" -lt 3 ]; then
|
||||||
echo "Usage: $0 <distribution_type> <build_name> <build_file_path> <pip_dependencies> [<special_pip_deps>]" >&2
|
echo "Usage: $0 <distribution_type> <conda_env_name> <build_file_path> <pip_dependencies> [<special_pip_deps>]" >&2
|
||||||
echo "Example: $0 <distribution_type> mybuild ./my-stack-build.yaml 'numpy pandas scipy'" >&2
|
echo "Example: $0 <distribution_type> my-conda-env ./my-stack-build.yaml 'numpy pandas scipy'" >&2
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
@ -27,8 +27,7 @@ special_pip_deps="$4"
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
build_name="$1"
|
env_name="$1"
|
||||||
env_name="llamastack-$build_name"
|
|
||||||
build_file_path="$2"
|
build_file_path="$2"
|
||||||
pip_dependencies="$3"
|
pip_dependencies="$3"
|
||||||
|
|
||||||
|
@ -137,8 +136,8 @@ ensure_conda_env_python310() {
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
mv $build_file_path $CONDA_PREFIX/
|
mv $build_file_path $CONDA_PREFIX/llamastack-build.yaml
|
||||||
echo "Build spec configuration saved at $CONDA_PREFIX/$build_name-build.yaml"
|
echo "Build spec configuration saved at $CONDA_PREFIX/llamastack-build.yaml"
|
||||||
}
|
}
|
||||||
|
|
||||||
ensure_conda_env_python310 "$env_name" "$pip_dependencies" "$special_pip_deps"
|
ensure_conda_env_python310 "$env_name" "$pip_dependencies" "$special_pip_deps"
|
||||||
|
|
|
@ -131,10 +131,6 @@ this could be just a hash
|
||||||
default=None,
|
default=None,
|
||||||
description="Reference to the docker image if this package refers to a container",
|
description="Reference to the docker image if this package refers to a container",
|
||||||
)
|
)
|
||||||
conda_env: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Reference to the conda environment if this package refers to a conda environment",
|
|
||||||
)
|
|
||||||
apis: List[str] = Field(
|
apis: List[str] = Field(
|
||||||
default_factory=list,
|
default_factory=list,
|
||||||
description="""
|
description="""
|
||||||
|
@ -166,7 +162,7 @@ a default SQLite store will be used.""",
|
||||||
|
|
||||||
class BuildConfig(BaseModel):
|
class BuildConfig(BaseModel):
|
||||||
version: str = LLAMA_STACK_BUILD_CONFIG_VERSION
|
version: str = LLAMA_STACK_BUILD_CONFIG_VERSION
|
||||||
name: str
|
|
||||||
distribution_spec: DistributionSpec = Field(
|
distribution_spec: DistributionSpec = Field(
|
||||||
description="The distribution spec to build including API providers. "
|
description="The distribution spec to build including API providers. "
|
||||||
)
|
)
|
||||||
|
|
|
@ -23,8 +23,7 @@ if [ $# -lt 3 ]; then
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
build_name="$1"
|
env_name="$1"
|
||||||
env_name="llamastack-$build_name"
|
|
||||||
shift
|
shift
|
||||||
|
|
||||||
yaml_config="$1"
|
yaml_config="$1"
|
||||||
|
|
|
@ -1,36 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# This source code is licensed under the terms described in the LICENSE file in
|
|
||||||
# the root directory of this source tree.
|
|
||||||
|
|
||||||
if [[ $# -ne 1 ]]; then
|
|
||||||
echo "Error: Please provide the name of CONDA environment you wish to create"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
ENV_NAME=$1
|
|
||||||
|
|
||||||
set -eu
|
|
||||||
eval "$(conda shell.bash hook)"
|
|
||||||
|
|
||||||
echo "Will build env (or overwrite) named '$ENV_NAME'"
|
|
||||||
|
|
||||||
set -x
|
|
||||||
|
|
||||||
run_build() {
|
|
||||||
# Set up the conda environment
|
|
||||||
yes | conda remove --name $ENV_NAME --all
|
|
||||||
yes | conda create -n $ENV_NAME python=3.10
|
|
||||||
conda activate $ENV_NAME
|
|
||||||
|
|
||||||
# PT nightly
|
|
||||||
pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cu121
|
|
||||||
|
|
||||||
# install dependencies for `llama-agentic-system`
|
|
||||||
pip install -r fp8_requirements.txt
|
|
||||||
}
|
|
||||||
|
|
||||||
run_build
|
|
Loading…
Add table
Add a link
Reference in a new issue