From f4dc29070546c59392d83f84c94d9b59191871ae Mon Sep 17 00:00:00 2001
From: Jamie Land <38305141+jland-redhat@users.noreply.github.com>
Date: Tue, 18 Mar 2025 12:26:49 -0400
Subject: [PATCH] feat: Created Playground Containerfile and Image Workflow
 (#1256)

# What does this PR do?
Adds a container file that can be used to build the playground UI.

This file will be built by this PR in the stack-ops repo:
https://github.com/meta-llama/llama-stack-ops/pull/9

Docker command in the docs will need to change once I know the address of
the official repository.

## Test Plan
Tested image on my local Openshift Instance using this helm chart:
https://github.com/Jaland/llama-stack-helm/tree/main/llama-stack

[//]: # (## Documentation)

---------

Co-authored-by: Jamie Land
---
 docs/source/playground/index.md           | 27 +++++++++++++++++++++++
 llama_stack/distribution/ui/Containerfile | 11 +++++++++
 llama_stack/distribution/ui/README.md     | 10 +++++++++
 3 files changed, 48 insertions(+)
 create mode 100644 llama_stack/distribution/ui/Containerfile

diff --git a/docs/source/playground/index.md b/docs/source/playground/index.md
index 9691609ab..1d52de73f 100644
--- a/docs/source/playground/index.md
+++ b/docs/source/playground/index.md
@@ -92,6 +92,8 @@ Interactive pages for users to play with and explore Llama Stack API capabilitie
 
 ## Starting the Llama Stack Playground
 
+### Llama CLI
+
 To start the Llama Stack Playground, run the following commands:
 
 1. Start up the Llama Stack API server
@@ -107,3 +109,28 @@ cd llama_stack/distribution/ui
 pip install -r requirements.txt
 streamlit run app.py
 ```
+
+### Docker
+
+The Playground can also be run in a Docker container:
+
+```sh
+export LLAMA_STACK_URL=http://localhost:8321
+
+docker run \
+  -p 8501:8501 \
+  -e LLAMA_STACK_ENDPOINT=$LLAMA_STACK_URL \
+  quay.io/jland/llama-stack-playground
+```
+
+## Environment Variables
+
+The playground supports the following environment variables:
+
+| Environment Variable       | Description                        | Default Value             |
+|----------------------------|------------------------------------|---------------------------|
+| LLAMA_STACK_ENDPOINT       | The endpoint for the Llama Stack   | http://localhost:8321     |
+| FIREWORKS_API_KEY          | API key for Fireworks provider     | (empty string)            |
+| TOGETHER_API_KEY           | API key for Together provider      | (empty string)            |
+| SAMBANOVA_API_KEY          | API key for SambaNova provider     | (empty string)            |
+| OPENAI_API_KEY             | API key for OpenAI provider        | (empty string)            |
diff --git a/llama_stack/distribution/ui/Containerfile b/llama_stack/distribution/ui/Containerfile
new file mode 100644
index 000000000..a97f25753
--- /dev/null
+++ b/llama_stack/distribution/ui/Containerfile
@@ -0,0 +1,11 @@
+# More info on playground configuration can be found here:
+# https://llama-stack.readthedocs.io/en/latest/playground
+
+FROM python:3.9-slim
+WORKDIR /app
+COPY . /app/
+RUN /usr/local/bin/python -m pip install --upgrade pip && \
+    /usr/local/bin/pip3 install -r requirements.txt
+EXPOSE 8501
+
+ENTRYPOINT ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
diff --git a/llama_stack/distribution/ui/README.md b/llama_stack/distribution/ui/README.md
index f3df3f07a..fe660544f 100644
--- a/llama_stack/distribution/ui/README.md
+++ b/llama_stack/distribution/ui/README.md
@@ -40,3 +40,13 @@ cd llama_stack/distribution/ui
 pip install -r requirements.txt
 streamlit run app.py
 ```
+
+## Environment Variables
+
+| Environment Variable       | Description                        | Default Value             |
+|----------------------------|------------------------------------|---------------------------|
+| LLAMA_STACK_ENDPOINT       | The endpoint for the Llama Stack   | http://localhost:8321     |
+| FIREWORKS_API_KEY          | API key for Fireworks provider     | (empty string)            |
+| TOGETHER_API_KEY           | API key for Together provider      | (empty string)            |
+| SAMBANOVA_API_KEY          | API key for SambaNova provider     | (empty string)            |
+| OPENAI_API_KEY             | API key for OpenAI provider        | (empty string)            |