From 676b07e91ed9b7d681324688d9c0bd2fed32d848 Mon Sep 17 00:00:00 2001 From: Xi Yan Date: Tue, 5 Nov 2024 17:13:35 -0800 Subject: [PATCH] clean up build --- llama_stack/cli/stack/build.py | 17 +++-------------- llama_stack/distribution/build_container.sh | 2 -- .../providers/adapters/inference/tgi/config.py | 11 +++++++---- 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py index cbd4f3857..da3253de2 100644 --- a/llama_stack/cli/stack/build.py +++ b/llama_stack/cli/stack/build.py @@ -234,7 +234,7 @@ class StackBuild(Subcommand): run_config_file = build_dir / f"{build_config.name}-run.yaml" with open(run_config_file, "w") as f: - to_write = json.loads(json.dumps(run_config.model_dump(), cls=EnumEncoder)) + to_write = json.loads(run_config.model_dump_json()) f.write(yaml.dump(to_write, sort_keys=False)) cprint( @@ -252,25 +252,14 @@ class StackBuild(Subcommand): from llama_stack.distribution.build import build_image, ImageType from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR - from llama_stack.distribution.utils.serialize import EnumEncoder # save build.yaml spec for building same distribution again - if build_config.image_type == ImageType.docker.value: - # docker needs build file to be in the llama-stack repo dir to be able to copy over to the image - llama_stack_path = Path( - os.path.abspath(__file__) - ).parent.parent.parent.parent - build_dir = llama_stack_path / "tmp/configs/" - else: - build_dir = DISTRIBS_BASE_DIR / f"llamastack-{build_config.name}" - + build_dir = DISTRIBS_BASE_DIR / f"llamastack-{build_config.name}" os.makedirs(build_dir, exist_ok=True) build_file_path = build_dir / f"{build_config.name}-build.yaml" with open(build_file_path, "w") as f: - to_write = json.loads( - json.dumps(build_config.model_dump(), cls=EnumEncoder) - ) + to_write = json.loads(build_config.model_dump_json()) f.write(yaml.dump(to_write, sort_keys=False)) return_code = 
build_image(build_config, build_file_path) diff --git a/llama_stack/distribution/build_container.sh b/llama_stack/distribution/build_container.sh index ae2b17d9e..2d61feb66 100755 --- a/llama_stack/distribution/build_container.sh +++ b/llama_stack/distribution/build_container.sh @@ -36,7 +36,6 @@ SCRIPT_DIR=$(dirname "$(readlink -f "$0")") REPO_DIR=$(dirname $(dirname "$SCRIPT_DIR")) DOCKER_BINARY=${DOCKER_BINARY:-docker} DOCKER_OPTS=${DOCKER_OPTS:-} -REPO_CONFIGS_DIR="$REPO_DIR/tmp/configs" TEMP_DIR=$(mktemp -d) @@ -138,7 +137,6 @@ set -x $DOCKER_BINARY build $DOCKER_OPTS -t $image_name -f "$TEMP_DIR/Dockerfile" "$REPO_DIR" $mounts # clean up tmp/configs -rm -rf $REPO_CONFIGS_DIR set +x echo "Success!" diff --git a/llama_stack/providers/adapters/inference/tgi/config.py b/llama_stack/providers/adapters/inference/tgi/config.py index 8245c2c17..358c282f2 100644 --- a/llama_stack/providers/adapters/inference/tgi/config.py +++ b/llama_stack/providers/adapters/inference/tgi/config.py @@ -12,10 +12,13 @@ from pydantic import BaseModel, Field @json_schema_type class TGIImplConfig(BaseModel): - url: str = Field( - description="The URL for the TGI endpoint (e.g. 'http://localhost:8080')", - default="http://localhost:8080", - ) + host: str = "localhost" + port: int = 8080 + + @property + def url(self) -> str: + return f"http://{self.host}:{self.port}" + api_token: Optional[str] = Field( default=None, description="A bearer token if your TGI endpoint is protected.",