Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-07 04:45:44 +00:00)
update configure to only consume config
parent: 987e1cafc4
commit: 6aa44805c2
4 changed files with 13 additions and 36 deletions
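
In practical terms, the configure step no longer assembles its settings from individual CLI flags; it now reads everything from a single config file. Both command forms below are taken directly from the diff that follows; the config-file path is whatever the build script receives as its new fourth argument:

    # before: configuration assembled from CLI flags
    $CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure --distribution $distribution_type --name "$build_name" --package-type container

    # after: configuration read entirely from a config file
    $CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $config_file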
@@ -4,7 +4,7 @@ LLAMA_MODELS_DIR=${LLAMA_MODELS_DIR:-}
 LLAMA_TOOLCHAIN_DIR=${LLAMA_TOOLCHAIN_DIR:-}
 TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
 
-if [ "$#" -ne 4 ]; then
+if [ "$#" -ne 5 ]; then
   echo "Usage: $0 <distribution_type> <build_name> <docker_base> <pip_dependencies>
   echo "Example: $0 distribution_type my-fastapi-app python:3.9-slim 'fastapi uvicorn'
   exit 1
@@ -14,7 +14,8 @@ distribution_type=$1
 build_name="$2"
 image_name="llamastack-$build_name"
 docker_base=$3
-pip_dependencies=$4
+config_file=$4
+pip_dependencies=$5
 
 # Define color codes
 RED='\033[0;31m'
@@ -110,4 +111,4 @@ set +x
 printf "${GREEN}Succesfully setup Podman image. Configuring build...${NC}"
 echo "You can run it with: podman run -p 8000:8000 $image_name"
 
-$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure --distribution $distribution_type --name "$build_name" --package-type container
+$CONDA_PREFIX/bin/python3 -m llama_toolchain.cli.llama stack configure $config_file
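For reference, a minimal sketch of how the updated script might be invoked after this change. The script path and every argument value below are illustrative assumptions, not taken from the commit; only the argument positions come from the diff above, where the config file becomes the fourth positional argument and the pip dependencies shift to the fifth:

    # $1 distribution_type, $2 build_name, $3 docker_base,
    # $4 config_file (new), $5 pip_dependencies (shifted from $4)
    # All values here are hypothetical examples.
    ./build_container.sh remote my-build python:3.10-slim ./my-build-config.yaml 'fastapi uvicorn'

With arguments like these, the script would produce an image named llamastack-my-build and finish by running the simplified command llama stack configure ./my-build-config.yaml using the python from $CONDA_PREFIX.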