#!/usr/bin/env bash
export LLAMA_STACK_PORT=8321

# VLLM_API_TOKEN is supplied through the .env file loaded via --env-file below
# KEYCLOAK_CLIENT_SECRET is supplied through the same .env file
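# A minimal .env sketch (assumed layout, not part of this repo; adjust to your deployment):
#   VLLM_API_TOKEN=<token for the vLLM backend>
#   KEYCLOAK_CLIENT_SECRET=<client secret for Keycloak auth>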
docker run -it \
  -p "$LLAMA_STACK_PORT:$LLAMA_STACK_PORT" \
  -v "$(pwd)/data:/root/.llama" \
  --mount type=bind,source="$(pwd)"/llama_stack/templates/kvant/run.yaml,target=/root/.llama/config.yaml,readonly \
  --entrypoint python \
  --env-file ./.env \
  distribution-kvant:dev \
  -m llama_stack.distribution.server.server --config /root/.llama/config.yaml \
  --port "$LLAMA_STACK_PORT"
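# Optional sanity check from another shell once the container is up (a sketch:
# assumes the server exposes the standard llama-stack health route; adjust the
# path if your build differs):
#   curl -s "http://localhost:${LLAMA_STACK_PORT:-8321}/v1/health"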