Add config file based CLI (#60)

* config file for build

* fix build command

* configure script with config

* fix configure script to work with config file

* update build.sh

* update readme

* distribution_type -> distribution

* fix run-config/config-file to config

* move import to inline

* only consume config as argument

* update configure to only consume config

* update readme

* update readme
Xi Yan 2024-09-11 11:39:46 -07:00 committed by GitHub
parent 58def874a9
commit 89300df5dc
8 changed files with 155 additions and 111 deletions
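
Taken together, these commits replace the positional/flag-driven configure step with a single config-file argument. A minimal before/after sketch of the CLI call, based on the diff below (the config path is a placeholder):

    # before: configure driven by distribution type plus flags
    python3 -m llama_toolchain.cli.llama stack configure <distribution_type> --name <build_name> --type container
    # after (#60): configure consumes only a config file
    python3 -m llama_toolchain.cli.llama stack configure <path/to/config>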

@@ -4,7 +4,7 @@ LLAMA_MODELS_DIR=${LLAMA_MODELS_DIR:-}
 LLAMA_TOOLCHAIN_DIR=${LLAMA_TOOLCHAIN_DIR:-}
 TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
-if [ "$#" -ne 4 ]; then
+if [ "$#" -ne 5 ]; then
 echo "Usage: $0 <distribution_type> <build_name> <docker_base> <pip_dependencies>
 echo "Example: $0 distribution_type my-fastapi-app python:3.9-slim 'fastapi uvicorn'
 exit 1
@@ -14,7 +14,8 @@ distribution_type=$1
 build_name="$2"
 image_name="llamastack-$build_name"
 docker_base=$3
-pip_dependencies=$4
+config_file=$4
+pip_dependencies=$5
 # Define color codes
 RED='\033[0;31m'
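
With config_file added as the fourth positional argument, the script now expects five arguments instead of four. A hypothetical invocation (the script path and the config path are placeholders; the other values follow the script's own usage example):

    # hypothetical invocation; <build-script> and ./my-build-config.yaml are illustrative, not taken from this diff
    <build-script> <distribution_type> my-fastapi-app python:3.9-slim ./my-build-config.yaml 'fastapi uvicorn'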
@@ -110,4 +111,4 @@ set +x
 printf "${GREEN}Succesfully setup Podman image. Configuring build...${NC}"
 echo "You can run it with: podman run -p 8000:8000 $image_name"
-$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $distribution_type --name "$build_name" --type container
+$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $config_file
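
Since config_file holds the path the caller passed as the fourth argument, the configure step at the end of the build now reads the same file that drove the build. Assuming a config at ./my-build-config.yaml (a placeholder path), the final line expands to:

    # with config_file=./my-build-config.yaml (hypothetical path), the final command becomes:
    $CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure ./my-build-config.yaml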