# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
from pathlib import Path

import pkg_resources
import yaml

from llama_toolchain.cli.subcommand import Subcommand
from llama_toolchain.core.datatypes import *  # noqa: F403
from llama_toolchain.common.config_dirs import BUILDS_BASE_DIR


class ApiStart(Subcommand):
    """`llama api start` subcommand: launch the server for a built API provider."""

    def __init__(self, subparsers: argparse._SubParsersAction):
        super().__init__()
        self.parser = subparsers.add_parser(
            "start",
            prog="llama api start",
            description="""Start the server for a Llama API provider.

You should have already built and configured the provider.""",
            formatter_class=argparse.RawTextHelpFormatter,
        )
        self._add_arguments()
        self.parser.set_defaults(func=self._run_api_start_cmd)

    def _add_arguments(self):
        from llama_toolchain.core.package import BuildType

        self.parser.add_argument(
            "build_name",
            type=str,
            help="Name of the API build you want to start",
        )
        self.parser.add_argument(
            "--build-type",
            type=str,
            default="conda_env",
            choices=[v.value for v in BuildType],
        )
        self.parser.add_argument(
            "--port",
            type=int,
            help="Port to run the server on. Defaults to 5000",
            default=5000,
        )
        self.parser.add_argument(
            "--disable-ipv6",
            action="store_true",
            help="Disable IPv6 support",
            default=False,
        )

    def _run_api_start_cmd(self, args: argparse.Namespace) -> None:
        from llama_toolchain.common.exec import run_with_pty
        from llama_toolchain.core.package import BuildType

        # Resolve the package config: either an explicit YAML path, or a named
        # build under the ad-hoc builds directory for the chosen build type.
        if args.build_name.endswith(".yaml"):
            path = args.build_name
        else:
            build_type = BuildType(args.build_type)
            build_dir = BUILDS_BASE_DIR / "adhoc" / build_type.descriptor()
            path = build_dir / f"{args.build_name}.yaml"

        config_file = Path(path)
        if not config_file.exists():
            self.parser.error(
                f"Could not find {config_file}. Please run `llama api build` first"
            )
            return

        with open(config_file, "r") as f:
            config = PackageConfig(**yaml.safe_load(f))

        # Pick the launcher script based on how the provider was packaged:
        # a Docker container image or a conda environment.
        if config.docker_image:
            script = pkg_resources.resource_filename(
                "llama_toolchain",
                "core/start_container.sh",
            )
            run_args = [script, config.docker_image]
        else:
            script = pkg_resources.resource_filename(
                "llama_toolchain",
                "core/start_conda_env.sh",
            )
            run_args = [
                script,
                config.conda_env,
            ]

        # Both launcher scripts take the config file and port, plus an optional
        # flag to disable IPv6.
        run_args.extend([str(config_file), str(args.port)])
        if args.disable_ipv6:
            run_args.append("--disable-ipv6")

        run_with_pty(run_args)
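
# Example invocation (a sketch based only on the arguments defined above; the
# build name "my-local-build" is hypothetical and would come from a prior
# `llama api build` run):
#
#   llama api start my-local-build --build-type conda_env --port 5000
#
# Passing a path ending in ".yaml" as `build_name` points the command directly
# at a package config file instead of a named build.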