second try

Kai Wu 2025-07-30 14:51:43 -07:00
parent 31a15332c4
commit 1cb9d3bca2
11 changed files with 237 additions and 64 deletions
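To review the full change set outside the web view, the parent and commit hashes above are enough to reproduce the same diff locally; a minimal sketch, assuming the repository is already cloned and both objects have been fetched:

    git show --stat 1cb9d3bca2      # summary of the 11 changed files
    git diff 31a15332c4 1cb9d3bca2  # full diff against the parent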

@@ -28,27 +28,34 @@ spec:
       initContainers:
       - name: wait-for-vllm-server
         image: busybox:1.28
-        command: ['sh', '-c', 'until nc -z vllm-server.default.svc.cluster.local 8000; do echo waiting for vllm-server on port 8000; sleep 2; done;']
-      - name: wait-for-vllm-server-safety
+        command: ['sh', '-c', 'until nc -z vllm-server.default.svc.cluster.local 8001; do echo waiting for vllm-server on port 8001; sleep 2; done;']
+      - name: wait-for-llm-nim-code
         image: busybox:1.28
-        command: ['sh', '-c', 'until nc -z vllm-server-safety.default.svc.cluster.local 8001; do echo waiting for vllm-server-safety on port 8001; sleep 2; done;']
+        command: ['sh', '-c', 'until nc -z llm-nim-code.default.svc.cluster.local 8000; do echo waiting for llm-nim-code on port 8000; sleep 2; done;']
       containers:
       - name: llama-stack
         image: llamastack/distribution-starter:latest
         imagePullPolicy: Always # since we have specified latest instead of a version
+        resources:
+          requests:
+            memory: "512Mi"
+            cpu: "500m"
+            ephemeral-storage: "2Gi"
+          limits:
+            memory: "1Gi"
+            cpu: "1000m"
+            ephemeral-storage: "5Gi"
         env:
         - name: ENABLE_CHROMADB
           value: "true"
         - name: CHROMADB_URL
           value: http://chromadb.default.svc.cluster.local:6000
         - name: VLLM_URL
-          value: http://vllm-server.default.svc.cluster.local:8000/v1
+          value: http://vllm-server.default.svc.cluster.local:8001/v1
         - name: VLLM_MAX_TOKENS
           value: "3072"
-        - name: NVIDIA_BASE_URL
-          value: http://llama-nano-nim.default.svc.cluster.local:8000/v1
         - name: VLLM_SAFETY_URL
-          value: http://vllm-server-safety.default.svc.cluster.local:8001/v1
+          value: http://llm-nim-code.default.svc.cluster.local:8000/v1
         - name: POSTGRES_HOST
           value: postgres-server.default.svc.cluster.local
         - name: POSTGRES_PORT
@@ -57,8 +64,8 @@ spec:
           value: "false"
         - name: INFERENCE_MODEL
           value: "${INFERENCE_MODEL}"
-        - name: SAFETY_MODEL
-          value: "${SAFETY_MODEL}"
+        - name: CODE_MODEL
+          value: "${CODE_MODEL}"
         - name: TAVILY_SEARCH_API_KEY
           value: "${TAVILY_SEARCH_API_KEY}"
         command: ["python", "-m", "llama_stack.distribution.server.server", "--config", "/etc/config/stack_run_config.yaml", "--port", "8321"]