chore(ui): add npm package and dockerfile

# What does this PR do?


## Test Plan
Commit 72a476f920 (parent a2c4c12384)
Eric Huang, 2025-11-06 22:36:56 -08:00
10 changed files with 264 additions and 13 deletions


@@ -19,3 +19,4 @@ This section provides an overview of the distributions available in Llama Stack.
- **[Starting Llama Stack Server](./starting_llama_stack_server.mdx)** - How to run distributions
- **[Importing as Library](./importing_as_library.mdx)** - Use distributions in your code
- **[Configuration Reference](./configuration.mdx)** - Configuration file format details
- **[Llama Stack UI](./llama_stack_ui.mdx)** - Web-based user interface for interacting with Llama Stack servers


@@ -0,0 +1,109 @@
---
title: Llama Stack UI
description: Web-based user interface for interacting with Llama Stack servers
sidebar_label: Llama Stack UI
sidebar_position: 8
---
# Llama Stack UI
The Llama Stack UI is a web-based interface for interacting with Llama Stack servers. Built with Next.js and React, it provides a visual way to work with agents, manage resources, and view logs.
## Features
- **Logs & Monitoring**: View chat completions, agent responses, and vector store activity
- **Vector Stores**: Create and manage vector databases for RAG (Retrieval-Augmented Generation) workflows
- **Prompt Management**: Create and manage reusable prompts
## Prerequisites
You need a running Llama Stack server. The UI is a client that connects to the Llama Stack backend.
If you don't have a Llama Stack server running yet, see the [Starting Llama Stack Server](../getting_started/starting_llama_stack_server.mdx) guide.
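To confirm the backend is reachable before launching the UI, you can query the server's health endpoint (a quick sketch assuming the default server address and its `/v1/health` route; adjust the host and port for your setup):

```bash
# Quick check that a Llama Stack server is answering on the default port
curl http://localhost:8321/v1/health
```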
## Running the UI
### Option 1: Using npx (Recommended for Quick Start)
The fastest way to get started is using `npx`:
```bash
npx llama-stack-ui
```
This will start the UI server on `http://localhost:8322` (default port).
### Option 2: Using Docker
Run the UI in a container:
```bash
docker run -p 8322:8322 llamastack/ui
```
Access the UI at `http://localhost:8322`.
## Environment Variables
The UI can be configured using the following environment variables:
| Variable | Description | Default |
|----------|-------------|---------|
| `LLAMA_STACK_BACKEND_URL` | URL of your Llama Stack server | `http://localhost:8321` |
| `LLAMA_STACK_UI_PORT` | Port for the UI server | `8322` |
If the Llama Stack server is running with authentication enabled, you can configure the UI to use it by setting the following environment variables:
| Variable | Description | Default |
|----------|-------------|---------|
| `NEXTAUTH_URL` | NextAuth URL for authentication | `http://localhost:8322` |
| `GITHUB_CLIENT_ID` | GitHub OAuth client ID (optional, for authentication) | - |
| `GITHUB_CLIENT_SECRET` | GitHub OAuth client secret (optional, for authentication) | - |
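For example, to enable GitHub sign-in when launching with `npx` (a sketch; the client ID and secret are placeholders taken from a GitHub OAuth app you register yourself):

```bash
# Sketch: GitHub OAuth sign-in for the UI (values are placeholders)
NEXTAUTH_URL=http://localhost:8322 \
GITHUB_CLIENT_ID=<your-oauth-client-id> \
GITHUB_CLIENT_SECRET=<your-oauth-client-secret> \
npx llama-stack-ui
```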
### Setting Environment Variables
#### For npx:
```bash
LLAMA_STACK_BACKEND_URL=http://localhost:8321 \
LLAMA_STACK_UI_PORT=8080 \
npx llama-stack-ui
```
#### For Docker:
```bash
docker run -p 8080:8080 \
-e LLAMA_STACK_BACKEND_URL=http://localhost:8321 \
-e LLAMA_STACK_UI_PORT=8080 \
llamastack/ui
```
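If you prefer Compose, here is a minimal sketch that runs only the published UI image and points it at a Llama Stack server already running on the host (`host.docker.internal` resolves out of the box on Docker Desktop; the `extra_hosts` entry makes it work on Linux as well):

```yaml
# Sketch: docker-compose.yml for the UI, assuming the backend runs on the host at port 8321
services:
  llama-stack-ui:
    image: llamastack/ui
    ports:
      - "8322:8322"
    environment:
      LLAMA_STACK_BACKEND_URL: http://host.docker.internal:8321
    extra_hosts:
      - "host.docker.internal:host-gateway"
```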
## Using the UI
### Managing Resources
- **Vector Stores**: Create vector databases for RAG workflows, view stored documents and embeddings
- **Prompts**: Create and manage reusable prompt templates
- **Chat Completions**: View history of chat interactions
- **Responses**: Browse detailed agent responses and tool calls
## Development
If you want to run the UI from source for development:
```bash
# From the project root
cd src/llama_stack_ui
# Install dependencies
npm install
# Set environment variables
export LLAMA_STACK_BACKEND_URL=http://localhost:8321
# Start the development server
npm run dev
```
The development server will start on `http://localhost:8322` with hot reloading enabled.
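To exercise the production build path that the npm package ships, you can build and then launch the standalone server through the CLI entry point (a sketch based on the `build` script and `bin/cli.js` added in this PR; `npm run build` also runs `scripts/postbuild.js` to assemble the standalone output):

```bash
# Build the standalone output, then launch it via the package's CLI entry point
npm run build
LLAMA_STACK_UI_PORT=8322 node bin/cli.js
```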


@@ -57,6 +57,7 @@ const sidebars: SidebarsConfig = {
'distributions/importing_as_library',
'distributions/configuration',
'distributions/starting_llama_stack_server',
'distributions/llama_stack_ui',
{
type: 'category',
label: 'Self-Hosted Distributions',


@@ -0,0 +1,20 @@
.git
.gitignore
.env.local
.env.*.local
.next
node_modules
npm-debug.log
*.md
.DS_Store
.vscode
.idea
playwright-report
e2e
jest.config.ts
jest.setup.ts
eslint.config.mjs
.prettierrc
.prettierignore
.nvmrc
playwright.config.ts


@@ -0,0 +1,18 @@
FROM node:22.5.1-alpine
ENV NODE_ENV=production
# Install dumb-init for proper signal handling
RUN apk add --no-cache dumb-init
# Create non-root user for security
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
# Install llama-stack-ui from npm
RUN npm install -g llama-stack-ui
USER nextjs
ENTRYPOINT ["dumb-init", "--"]
CMD ["llama-stack-ui"]
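For reference, a local build-and-run sketch for this image (the `llamastack/ui` tag mirrors the one used in the docs; any tag works for local builds):

```bash
# Build the image from this Dockerfile and run it against a backend on the host
docker build -t llamastack/ui .
docker run --rm -p 8322:8322 \
  -e LLAMA_STACK_BACKEND_URL=http://host.docker.internal:8321 \
  llamastack/ui
```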

src/llama_stack_ui/bin/cli.js (new executable file)

@@ -0,0 +1,34 @@
#!/usr/bin/env node
const { spawn } = require('child_process');
const path = require('path');
const port = process.env.LLAMA_STACK_UI_PORT || 8322;
const uiDir = path.resolve(__dirname, '..');
// Entry point of the standalone server emitted by `next build` (scripts/postbuild.js copies static assets next to it)
const serverPath = path.join(uiDir, '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', 'server.js');
const serverDir = path.dirname(serverPath);
console.log(`Starting Llama Stack UI on http://localhost:${port}`);
const child = spawn(process.execPath, [serverPath], {
cwd: serverDir,
stdio: 'inherit',
env: {
...process.env,
PORT: port,
},
});
process.on('SIGINT', () => {
child.kill('SIGINT');
process.exit(0);
});
process.on('SIGTERM', () => {
child.kill('SIGTERM');
process.exit(0);
});
child.on('exit', (code) => {
process.exit(code);
});


@@ -1,7 +1,13 @@
import type { NextConfig } from "next";
const nextConfig: NextConfig = {
- /* config options here */
+ typescript: {
+ ignoreBuildErrors: true,
+ },
+ output: "standalone",
+ images: {
+ unoptimized: true,
+ },
};
export default nextConfig;


@@ -1,12 +1,13 @@
{
- "name": "ui",
- "version": "0.1.0",
+ "name": "llama-stack-ui",
+ "version": "0.4.0-alpha.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
- "name": "ui",
- "version": "0.1.0",
+ "name": "llama-stack-ui",
+ "version": "0.4.0-alpha.1",
+ "license": "MIT",
"dependencies": {
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-dialog": "^1.1.15",
@@ -20,7 +21,7 @@
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"framer-motion": "^12.23.24",
- "llama-stack-client": "github:llamastack/llama-stack-client-typescript",
+ "llama-stack-client": "^0.3.1",
"lucide-react": "^0.545.0",
"next": "15.5.4",
"next-auth": "^4.24.11",
@@ -9684,8 +9685,9 @@
"license": "MIT"
},
"node_modules/llama-stack-client": {
- "version": "0.4.0-alpha.1",
- "resolved": "git+ssh://git@github.com/llamastack/llama-stack-client-typescript.git#78de4862c4b7d77939ac210fa9f9bde77a2c5c5f",
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.1.tgz",
+ "integrity": "sha512-4aYoF2aAQiBSfxyZEtczeQmJn8q9T22ePDqGhR+ej5RG6a8wvl5B3v7ZoKuFkft+vcP/kbJ58GQZEPLekxekZA==",
"license": "MIT",
"dependencies": {
"@types/node": "^18.11.18",


@@ -1,11 +1,31 @@
{
- "name": "ui",
- "version": "0.1.0",
- "private": true,
+ "name": "llama-stack-ui",
+ "version": "0.4.0-alpha.4",
+ "description": "Web UI for Llama Stack",
+ "license": "MIT",
+ "author": "Llama Stack <llamastack@meta.com>",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/llamastack/llama-stack.git",
+ "directory": "llama_stack_ui"
+ },
+ "bin": {
+ "llama-stack-ui": "bin/cli.js"
+ },
+ "files": [
+ "bin",
+ ".next",
+ "public",
+ "next.config.ts",
+ "instrumentation.ts",
+ "tsconfig.json",
+ "package.json"
+ ],
"scripts": {
"dev": "next dev --turbopack --port ${LLAMA_STACK_UI_PORT:-8322}",
- "build": "next build",
+ "build": "next build && node scripts/postbuild.js",
"start": "next start",
+ "prepublishOnly": "npm run build",
"lint": "next lint",
"format": "prettier --write \"./**/*.{ts,tsx}\"",
"format:check": "prettier --check \"./**/*.{ts,tsx}\"",
@@ -25,7 +45,7 @@
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"framer-motion": "^12.23.24",
- "llama-stack-client": "github:llamastack/llama-stack-client-typescript",
+ "llama-stack-client": "^0.3.1",
"lucide-react": "^0.545.0",
"next": "15.5.4",
"next-auth": "^4.24.11",


@@ -0,0 +1,40 @@
const fs = require('fs');
const path = require('path');
// Copy public directory to standalone
const publicSrc = path.join(__dirname, '..', 'public');
const publicDest = path.join(__dirname, '..', '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', 'public');
if (fs.existsSync(publicSrc) && !fs.existsSync(publicDest)) {
console.log('Copying public directory to standalone...');
copyDir(publicSrc, publicDest);
}
// Copy .next/static to standalone
const staticSrc = path.join(__dirname, '..', '.next', 'static');
const staticDest = path.join(__dirname, '..', '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', '.next', 'static');
if (fs.existsSync(staticSrc) && !fs.existsSync(staticDest)) {
console.log('Copying .next/static to standalone...');
copyDir(staticSrc, staticDest);
}
function copyDir(src, dest) {
if (!fs.existsSync(dest)) {
fs.mkdirSync(dest, { recursive: true });
}
const files = fs.readdirSync(src);
files.forEach((file) => {
const srcFile = path.join(src, file);
const destFile = path.join(dest, file);
if (fs.statSync(srcFile).isDirectory()) {
copyDir(srcFile, destFile);
} else {
fs.copyFileSync(srcFile, destFile);
}
});
}
console.log('Postbuild complete!');