## What does this PR do?

So far `llama stack build` has always created a separate conda environment for packaging the dependencies of a distribution. The main reason to do so is isolation: distributions are composed of providers, which can have a variety of potentially conflicting dependencies. That said, this has been a significant annoyance for new users since it is not at all transparent. The fact that `llama stack run` actually runs the code in some other conda environment is very surprising.

This PR tries to make things better.

- Both `llama stack build` and `llama stack run` now accept an `--image-name` argument which represents the (conda, docker, virtualenv) image you want to operate upon.
- For the default (conda) mode, the script checks whether a conda environment is currently active. If one is, it uses it (see the sketch after the Test Plan below).
- If `--image-name` is provided, that option is used. In this case, an environment is created if needed.
- There is no automatic `llamastack-` prefixing of the environment names done anymore.

## Test Plan

Start in a conda environment, run `llama stack build --template fireworks`; verify that it successfully built into the current environment and stored the build file at `$CONDA_PREFIX/llamastack-build.yaml`. Run `llama stack run fireworks`, which started correctly in the current environment.

Ran the same build command outside of conda. It failed, asking for `--image-name`. Ran it with `llama stack build --template fireworks --image-name foo`. This successfully created a conda environment called `foo` and installed the deps. Ran `llama stack run fireworks` outside conda, which failed. Activated a different conda environment and ran again; it failed saying it did not find the `llamastack-build.yaml` file. Then used the `--image-name foo` option and it ran successfully.
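For illustration, here is a minimal sketch of the conda-environment resolution rule described above. The function name `resolve_image_name` and the use of the `CONDA_DEFAULT_ENV` environment variable are assumptions made for this sketch, not taken from the actual implementation:

```python
import os


def resolve_image_name(image_name: str | None) -> str:
    """Sketch only: resolve which conda environment a build/run should target."""
    if image_name is not None:
        # An explicit --image-name always wins; the environment is created if needed.
        return image_name

    # Assumption: the active conda environment is detected via CONDA_DEFAULT_ENV.
    active_env = os.environ.get("CONDA_DEFAULT_ENV")
    if active_env:
        # Default (conda) mode: reuse whatever environment is currently active.
        # Note that no automatic "llamastack-" prefix is applied anymore.
        return active_env

    raise RuntimeError("No conda environment is active; please pass --image-name.")
```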
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import textwrap

from llama_stack.cli.subcommand import Subcommand


class StackBuild(Subcommand):
    def __init__(self, subparsers: argparse._SubParsersAction):
        super().__init__()
        self.parser = subparsers.add_parser(
            "build",
            prog="llama stack build",
            description="Build a Llama stack container",
            formatter_class=argparse.RawTextHelpFormatter,
        )
        self._add_arguments()
        self.parser.set_defaults(func=self._run_stack_build_command)

    def _add_arguments(self):
        self.parser.add_argument(
            "--config",
            type=str,
            default=None,
            help="Path to a config file to use for the build. You can find example configs in llama_stack/distribution/**/build.yaml. If this argument is not provided, you will be prompted to enter information interactively",
        )

        self.parser.add_argument(
            "--template",
            type=str,
            default=None,
            help="Name of the example template config to use for build. You may use `llama stack build --list-templates` to check out the available templates",
        )

        self.parser.add_argument(
            "--list-templates",
            type=bool,
            default=False,
            action=argparse.BooleanOptionalAction,
            help="Show the available templates for building a Llama Stack distribution",
        )

        self.parser.add_argument(
            "--image-type",
            type=str,
            help="Image Type to use for the build. This can be one of conda, docker or venv. If not specified, will use the image type from the template config.",
            choices=["conda", "docker", "venv"],
            default="conda",
        )

        self.parser.add_argument(
            "--image-name",
            type=str,
            help=textwrap.dedent(
                """[for image-type=conda] Name of the conda environment to use for the build. If
                not specified, the currently active conda environment will be used. If no conda
                environment is active, you must specify a name.
                """
            ),
            default=None,
        )

    def _run_stack_build_command(self, args: argparse.Namespace) -> None:
        # Always keep the implementation completely siloed away from the CLI so the
        # CLI stays fast to load and has minimal dependencies.
        from ._build import run_stack_build_command

        return run_stack_build_command(self.parser, args)
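For reference, a minimal, hypothetical example of how this subcommand could be wired into an argparse-based entry point. The import path and the `prog` string are illustrative assumptions; the real `llama` CLI wiring lives elsewhere in the repository:

```python
import argparse

from llama_stack.cli.stack.build import StackBuild  # assumed import path

# Build a top-level parser and register the subcommand on it.
parser = argparse.ArgumentParser(prog="llama stack")
subparsers = parser.add_subparsers(title="subcommands")
StackBuild(subparsers)  # attaches "build", its arguments, and the default func

# Parsing dispatches to StackBuild._run_stack_build_command via args.func.
args = parser.parse_args(["build", "--template", "fireworks", "--image-name", "foo"])
args.func(args)
```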