Fixes to the llama stack configure script + inference adapters

Ashwin Bharambe 2024-09-03 23:22:21 -07:00
parent 4869f2b983
commit 1380d78c19
11 changed files with 124 additions and 37 deletions

@@ -117,12 +117,4 @@ ensure_conda_env_python310 "$env_name" "$pip_dependencies"
 printf "${GREEN}Successfully setup conda environment. Configuring build...${NC}\n"
-if [ "$distribution_id" = "adhoc" ]; then
-    subcommand="api"
-    target=""
-else
-    subcommand="stack"
-    target="$distribution_id"
-fi
-$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --name "$build_name" --type conda_env
+$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $distribution_id --name "$build_name" --type conda_env
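
After this change, adhoc and named distributions both go through the single stack configure path. A minimal sketch of the resulting invocation, with hypothetical values for the variables the script sets earlier:

# Sketch only -- distribution_id and build_name are hypothetical here;
# the build script derives the real values from its arguments.
distribution_id="local"          # hypothetical distribution id
build_name="my-local-build"      # hypothetical build name
$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure \
    "$distribution_id" --name "$build_name" --type conda_env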

@@ -109,12 +109,4 @@ set +x
 printf "${GREEN}Successfully setup Podman image. Configuring build...${NC}"
 echo "You can run it with: podman run -p 8000:8000 $image_name"
-if [ "$distribution_id" = "adhoc" ]; then
-    subcommand="api"
-    target=""
-else
-    subcommand="stack"
-    target="$distribution_id"
-fi
-$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama $subcommand configure $target --name "$build_name" --type container
+$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $distribution_id --name "$build_name" --type container
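
Once configuration finishes, the image can be started the way the script's hint suggests; a hedged sketch with a hypothetical image name:

# Hypothetical image name; the build script echoes the real one it produced.
image_name="llamastack-build"
podman run -p 8000:8000 "$image_name"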