# Mirror of https://github.com/meta-llama/llama-stack.git
# Synced 2025-08-01 16:24:44 +00:00
# (48 lines, 1.4 KiB, Text)
---
# Deployment for the llama-stack playground UI.
# Runs a stock python:3.12-slim image that clones the llama-stack repo
# (branch k8s_demo) at startup and serves the Streamlit app on port 8322,
# talking to the backend service at llama-stack-service:8321.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: llama-stack-ui
  labels:
    app.kubernetes.io/name: llama-stack
    app.kubernetes.io/component: ui
spec:
  replicas: 1
  selector:
    matchLabels:
      app.kubernetes.io/name: llama-stack
      app.kubernetes.io/component: ui
  template:
    metadata:
      labels:
        app.kubernetes.io/name: llama-stack
        app.kubernetes.io/component: ui
    spec:
      containers:
        - name: llama-stack-ui
          image: python:3.12-slim
          env:
            # Both variables point at the same backend service; the UI code
            # may read either name — presumably one is legacy. TODO confirm
            # which one the current UI actually consumes.
            - name: LLAMA_STACK_BACKEND_URL
              value: "http://llama-stack-service:8321"
            - name: LLAMA_STACK_ENDPOINT
              value: "http://llama-stack-service:8321"
          workingDir: /app
          command: ["/bin/sh"]
          args:
            - "-c"
            # Bootstrap script: install tooling, fetch the repo, run the UI.
            # Kept as a literal block scalar so newlines are preserved.
            - |
              # Abort on any failure so the pod restarts instead of silently
              # serving the wrong branch (e.g. if the checkout fails).
              set -e

              # Install pip and git
              /usr/local/bin/python -m pip install --upgrade pip
              apt-get update && apt-get install -y git

              # Clone the repository and switch to the demo branch
              git clone https://github.com/meta-llama/llama-stack.git /app
              git checkout k8s_demo

              # Navigate to the playground directory
              cd /app/llama_stack/distribution/ui

              # Install requirements
              pip install -r requirements.txt

              # Run the Streamlit app
              streamlit run app.py --server.port=8322 --server.address=0.0.0.0
          ports:
            - containerPort: 8322