add scripts
Some checks failed
Build and Push container / build (push) Failing after 1m4s
Build and Push playground container / build-playground (push) Successful in 1m4s

This commit is contained in:
Angel Nunez Mencias 2025-06-01 12:43:43 +02:00
parent 4603206065
commit 7bba685dee
Signed by: angel.nunez
SSH key fingerprint: SHA256:z1nFAg1v1AfbhEHrgBetByUJUwziv2R2f4VyN75opcg
5 changed files with 38 additions and 3 deletions

1
.gitignore vendored

@@ -24,3 +24,4 @@ venv/
pytest-report.xml
.coverage
.python-version
data

6
kvant_build_local.sh Executable file

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
export USE_COPY_NOT_MOUNT=true
export LLAMA_STACK_DIR=.
uvx --from . llama stack build --template kvant --image-type container --image-name kvant
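
A minimal usage sketch for the build script, assuming the resulting image is the distribution-kvant:dev tag referenced by kvant_start_local.sh below; the exact tag produced by `llama stack build` may differ.

# Build the kvant distribution container, then confirm the image exists locally.
./kvant_build_local.sh
docker images | grep distribution-kvant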

25
kvant_start_local.sh Executable file

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
export INFERENCE_MODEL="inference-llama4-maverick"
export EMBEDDING_MODEL="inference-bge-m3"
export EMBEDDING_DIMENSION="1024"
export LLAMA_STACK_PORT=8321
export OPENAI_BASE_URL=https://maas.ai-2.kvant.cloud/v1
export OPENAI_API_KEY=sk-ZqAWqBKFXjb6y3tVej2AaA
export VLLM_MAX_TOKENS=125000
docker run -it \
-p $LLAMA_STACK_PORT:$LLAMA_STACK_PORT \
-v $(pwd)/data:/root/.llama \
--mount type=bind,source="$(pwd)"/llama_stack/templates/kvant/run.yaml,target=/root/.llama/config.yaml,readonly \
--entrypoint python \
distribution-kvant:dev \
-m llama_stack.distribution.server.server --config /root/.llama/config.yaml \
--port $LLAMA_STACK_PORT \
--env VLLM_URL=$OPENAI_BASE_URL \
--env VLLM_API_TOKEN=$OPENAI_API_KEY \
--env PASSTHROUGH_URL=$OPENAI_BASE_URL \
--env PASSTHROUGH_API_KEY=$OPENAI_API_KEY \
--env INFERENCE_MODEL=$INFERENCE_MODEL \
--env EMBEDDING_MODEL=$EMBEDDING_MODEL \
--env EMBEDDING_DIMENSION=$EMBEDDING_DIMENSION \
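
A hedged sketch for checking that the stack server is reachable once the container is up; the /v1/models route is assumed from the OpenAI-compatible API surface and may differ per distribution.

# List the models served on the configured port (8321 here).
curl -s http://localhost:8321/v1/models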

6
llama_stack/templates/kvant/run.yaml

@@ -30,9 +30,6 @@ providers:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/kvant}/faiss_store.db
responses_store:
type: sqlite
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/bedrock}/responses_store.db
safety:
- provider_id: llama-guard
provider_type: inline::llama-guard
@@ -46,6 +43,9 @@ providers:
type: sqlite
namespace: null
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/kvant}/agents_store.db
responses_store:
type: sqlite
db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/kvant}/responses_store.db
telemetry:
- provider_id: meta-reference
provider_type: inline::meta-reference
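
This hunk pair moves the responses_store block out of the vector_io (faiss) provider and into the agents provider, and points its db_path at the kvant distribution directory instead of the leftover bedrock path. A quick check, assuming the default SQLITE_STORE_DIR:

# After the server has handled a request, the responses store should be created under kvant.
ls "${SQLITE_STORE_DIR:-$HOME/.llama/distributions/kvant}/responses_store.db"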

3
playground_start_local.sh Executable file

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
uv run --with ".[ui]" streamlit run llama_stack/distribution/ui/app.py
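
A minimal usage sketch for the playground script; it assumes the stack server from kvant_start_local.sh is already running, and that Streamlit serves on its default port 8501.

# Start the playground UI, then open it in a browser.
./playground_start_local.sh
# http://localhost:8501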