From aebec57ed73e8750a3b5620403d195383d529cea Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Tue, 10 Sep 2024 22:05:40 -0700
Subject: [PATCH] move import to inline

---
 llama_toolchain/cli/stack/build.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/llama_toolchain/cli/stack/build.py b/llama_toolchain/cli/stack/build.py
index 5fbaf4a1f..b8ce1bf39 100644
--- a/llama_toolchain/cli/stack/build.py
+++ b/llama_toolchain/cli/stack/build.py
@@ -8,13 +8,8 @@ import argparse
 
 from llama_toolchain.cli.subcommand import Subcommand
 from llama_toolchain.core.datatypes import *  # noqa: F403
-import json
-import os
 
 import yaml
 
-from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR
-from llama_toolchain.common.serialize import EnumEncoder
-from termcolor import cprint
 
 def parse_api_provider_tuples(
@@ -93,8 +88,14 @@ class StackBuild(Subcommand):
     def _run_stack_build_command_from_build_config(
         self, build_config: BuildConfig
     ) -> None:
+        import json
+        import os
+
+        from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR
+        from llama_toolchain.common.serialize import EnumEncoder
         from llama_toolchain.core.distribution_registry import resolve_distribution_spec
         from llama_toolchain.core.package import ApiInput, build_package, BuildType
+        from termcolor import cprint
 
         api_inputs = []
         if build_config.distribution == "adhoc":
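
For context, the patch above applies the deferred-import pattern: the heavier dependencies are imported inside the function that uses them rather than at module scope, so importing build.py for CLI subcommand registration stays cheap and the import cost is only paid when the build command actually runs. A minimal sketch of that pattern, assuming a hypothetical do_build function that is not part of llama_toolchain:

    import argparse

    def do_build(config: dict) -> None:
        # Deferred imports: paid only when the command runs, not when this
        # module is imported for --help or subcommand registration.
        import json
        import os

        out_path = os.path.join(os.getcwd(), "build.json")
        with open(out_path, "w") as f:
            json.dump(config, f)

    if __name__ == "__main__":
        parser = argparse.ArgumentParser()
        parser.add_argument("--name", default="example")
        args = parser.parse_args()
        do_build({"name": args.name})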