From a77b554bcf2307076afa2148552dd5200526e1e4 Mon Sep 17 00:00:00 2001 From: Angel Nunez Mencias Date: Mon, 2 Jun 2025 02:34:19 +0200 Subject: [PATCH] update requirements --- kvant_start_local.sh | 12 ++---------- llama_stack/distribution/ui/requirements.txt | 4 ++-- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/kvant_start_local.sh b/kvant_start_local.sh index 2bae3e9c3..db5bff84a 100755 --- a/kvant_start_local.sh +++ b/kvant_start_local.sh @@ -1,12 +1,7 @@ #!/usr/bin/env bash -export INFERENCE_MODEL="inference-llama4-maverick" -export EMBEDDING_MODEL="inference-bge-m3" -export EMBEDDING_DIMENSION="1024" export LLAMA_STACK_PORT=8321 -export OPENAI_BASE_URL=https://maas.ai-2.kvant.cloud/v1 -# OPENAI_API_KEY= env file -export VLLM_MAX_TOKENS=125000 +# VLLM_API_TOKEN= env file # KEYCLOAK_CLIENT_SECRET= env file @@ -19,7 +14,4 @@ docker run -it \ distribution-kvant:dev \ -m llama_stack.distribution.server.server --config /root/.llama/config.yaml \ --port $LLAMA_STACK_PORT \ - --env VLLM_URL=$OPENAI_BASE_URL \ - --env INFERENCE_MODEL=$INFERENCE_MODEL \ - --env EMBEDDING_MODEL=$EMBEDDING_MODEL \ - --env EMBEDDING_DIMENSION=$EMBEDDING_DIMENSION \ + diff --git a/llama_stack/distribution/ui/requirements.txt b/llama_stack/distribution/ui/requirements.txt index dac342fe1..390b36a9b 100644 --- a/llama_stack/distribution/ui/requirements.txt +++ b/llama_stack/distribution/ui/requirements.txt @@ -1,5 +1,5 @@ -llama-stack>=0.2.1 -llama-stack-client>=0.2.1 +llama-stack>=0.2.9 +llama-stack-client>=0.2.9 pandas streamlit streamlit-option-menu