#!/bin/bash
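
# Deploys the Llama Stack demo to the current kubectl context by rendering the
# *-k8s.yaml.template files (vLLM inference, Postgres, Chroma, and the Llama
# Stack server) with envsubst and applying them.
# Requires kubectl configured for the target cluster and envsubst on PATH.
# All exported variables below are defaults and may be overridden in the
# calling environment.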
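
# Postgres credentials (substituted into the templates below).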
export POSTGRES_USER=${POSTGRES_USER:-llamastack}
export POSTGRES_DB=${POSTGRES_DB:-llamastack}
export POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-llamastack}
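
# Inference and safety models served by the vLLM deployments.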
export INFERENCE_MODEL=${INFERENCE_MODEL:-meta-llama/Llama-3.2-3B-Instruct}
export SAFETY_MODEL=${SAFETY_MODEL:-meta-llama/Llama-Guard-3-1B}
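
# Fail on errors, unset variables, and pipeline failures; echo each command.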
set -euo pipefail
set -x
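
# Render each Kubernetes template with the exported variables and apply it:
# the vLLM server(s), Postgres, and the Chroma vector store.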
envsubst < ./vllm-k8s.yaml.template | kubectl apply -f -
envsubst < ./postgres-k8s.yaml.template | kubectl apply -f -
envsubst < ./chroma-k8s.yaml.template | kubectl apply -f -
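
# Generate a ConfigMap manifest from the stack run config (client-side dry run,
# so nothing is applied yet) and write it to stack-configmap.yaml.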
kubectl create configmap llama-stack-config --from-file=stack_run_config.yaml \
  --dry-run=client -o yaml > stack-configmap.yaml
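
# Apply the generated ConfigMap.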
kubectl apply -f stack-configmap.yaml
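
# Finally, deploy the Llama Stack server itself.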
envsubst < ./stack-k8s.yaml.template | kubectl apply -f -