forked from phoenix-oss/llama-stack-mirror
use auth for kvant
This commit is contained in:
parent
3bde47e562
commit
96003b55de
4 changed files with 2342 additions and 2305 deletions
|
@@ -5,14 +5,18 @@ export EMBEDDING_MODEL="inference-bge-m3"
|
|||
export EMBEDDING_DIMENSION="1024"
|
||||
export LLAMA_STACK_PORT=8321
|
||||
export OPENAI_BASE_URL=https://maas.ai-2.kvant.cloud/v1
|
||||
# SECURITY: never commit API keys — OPENAI_API_KEY must be supplied via the .env file loaded below
|
||||
# OPENAI_API_KEY= env file
|
||||
export VLLM_MAX_TOKENS=125000
|
||||
# KEYCLOAK_CLIENT_SECRET= env file
|
||||
export $(cat .env | xargs)
|
||||
|
||||
|
||||
docker run -it \
|
||||
-p $LLAMA_STACK_PORT:$LLAMA_STACK_PORT \
|
||||
-v $(pwd)/data:/root/.llama \
|
||||
--mount type=bind,source="$(pwd)"/llama_stack/templates/kvant/run.yaml,target=/root/.llama/config.yaml,readonly \
|
||||
--entrypoint python \
|
||||
--env-file ./.env \
|
||||
distribution-kvant:dev \
|
||||
-m llama_stack.distribution.server.server --config /root/.llama/config.yaml \
|
||||
--port $LLAMA_STACK_PORT \
|
||||
|
@@ -23,3 +27,4 @@ docker run -it \
|
|||
--env INFERENCE_MODEL=$INFERENCE_MODEL \
|
||||
--env EMBEDDING_MODEL=$EMBEDDING_MODEL \
|
||||
--env EMBEDDING_DIMENSION=$EMBEDDING_DIMENSION \
|
||||
--env KEYCLOAK_CLIENT_SECRET=$KEYCLOAK_CLIENT_SECRET \
|
||||
|
|
|
@@ -154,3 +154,17 @@ tool_groups:
|
|||
provider_id: rag-runtime
|
||||
server:
|
||||
port: 8321
|
||||
auth:
|
||||
provider_type: "oauth2_token"
|
||||
config:
|
||||
jwks:
|
||||
introspection:
|
||||
# NOTE(review): env var name "KEYCLOAK_INSTROSPECT" looks like a typo for "KEYCLOAK_INTROSPECT" — confirm deployments before renaming
url: ${env.KEYCLOAK_INSTROSPECT:https://iam.phoenix-systems.ch/realms/kvant/protocol/openid-connect/token/introspect}
|
||||
client_id: ${env.KEYCLOAK_CLIENT_ID:llama-stack}
|
||||
client_secret: ${env.KEYCLOAK_CLIENT_SECRET}
|
||||
claims_mapping:
|
||||
sub: projects
|
||||
scope: roles
|
||||
#groups: teams
|
||||
customer/id: teams
|
||||
aud: namespaces
|
||||
|
|
|
@@ -1,3 +1,7 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
export KEYCLOAK_URL="https://iam.phoenix-systems.ch"
|
||||
export KEYCLOAK_REALM="kvant"
|
||||
export KEYCLOAK_CLIENT_ID="llama-stack-playground"
|
||||
|
||||
uv run --with ".[ui]" streamlit run llama_stack/distribution/ui/app.py
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue