From 71b328fc4bddd672a0665a15484ee64e5464c62a Mon Sep 17 00:00:00 2001 From: ehhuang Date: Tue, 11 Nov 2025 10:40:31 -0800 Subject: [PATCH] chore(ui): add npm package and dockerfile (#4100) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # What does this PR do? - sets up package.json for npm `llama-stack-ui` package (will update llama-stack-ops) - adds dockerfile for UI docker image ## Test Plan npx: npm build && npm pack LLAMA_STACK_UI_PORT=8322 npx /Users/erichuang/projects/ui/src/llama_stack_ui/llama-stack-ui-0.4.0-alpha.2.tgz docker: cd src/llama_stack_ui docker build . -f Dockerfile --tag test_ui --no-cache ❯ docker run -p 8322:8322 \ -e LLAMA_STACK_UI_PORT=8322 \ test_ui:latest --- docs/docs/distributions/index.mdx | 1 + docs/docs/distributions/llama_stack_ui.mdx | 109 +++++++++++++++++++++ docs/sidebars.ts | 1 + src/llama_stack_ui/.dockerignore | 20 ++++ src/llama_stack_ui/Containerfile | 18 ++++ src/llama_stack_ui/bin/cli.js | 34 +++++++ src/llama_stack_ui/next.config.ts | 8 +- src/llama_stack_ui/package-lock.json | 16 +-- src/llama_stack_ui/package.json | 30 +++++- src/llama_stack_ui/scripts/postbuild.js | 40 ++++++++ 10 files changed, 264 insertions(+), 13 deletions(-) create mode 100644 docs/docs/distributions/llama_stack_ui.mdx create mode 100644 src/llama_stack_ui/.dockerignore create mode 100644 src/llama_stack_ui/Containerfile create mode 100755 src/llama_stack_ui/bin/cli.js create mode 100644 src/llama_stack_ui/scripts/postbuild.js diff --git a/docs/docs/distributions/index.mdx b/docs/docs/distributions/index.mdx index 0149f143f..ebf4bd6ce 100644 --- a/docs/docs/distributions/index.mdx +++ b/docs/docs/distributions/index.mdx @@ -19,3 +19,4 @@ This section provides an overview of the distributions available in Llama Stack. 
- **[Starting Llama Stack Server](./starting_llama_stack_server.mdx)** - How to run distributions - **[Importing as Library](./importing_as_library.mdx)** - Use distributions in your code - **[Configuration Reference](./configuration.mdx)** - Configuration file format details +- **[Llama Stack UI](./llama_stack_ui.mdx)** - Web-based user interface for interacting with Llama Stack servers diff --git a/docs/docs/distributions/llama_stack_ui.mdx b/docs/docs/distributions/llama_stack_ui.mdx new file mode 100644 index 000000000..7ba47ea4d --- /dev/null +++ b/docs/docs/distributions/llama_stack_ui.mdx @@ -0,0 +1,109 @@ +--- +title: Llama Stack UI +description: Web-based user interface for interacting with Llama Stack servers +sidebar_label: Llama Stack UI +sidebar_position: 8 +--- + +# Llama Stack UI + +The Llama Stack UI is a web-based interface for interacting with Llama Stack servers. Built with Next.js and React, it provides a visual way to work with agents, manage resources, and view logs. + +## Features + +- **Logs & Monitoring**: View chat completions, agent responses, and vector store activity +- **Vector Stores**: Create and manage vector databases for RAG (Retrieval-Augmented Generation) workflows +- **Prompt Management**: Create and manage reusable prompts + +## Prerequisites + +You need a running Llama Stack server. The UI is a client that connects to the Llama Stack backend. + +If you don't have a Llama Stack server running yet, see the [Starting Llama Stack Server](./starting_llama_stack_server.mdx) guide. + +## Running the UI + +### Option 1: Using npx (Recommended for Quick Start) + +The fastest way to get started is using `npx`: + +```bash +npx llama-stack-ui +``` + +This will start the UI server on `http://localhost:8322` (default port). + +### Option 2: Using Docker + +Run the UI in a container: + +```bash +docker run -p 8322:8322 llamastack/ui +``` + +Access the UI at `http://localhost:8322`. 
+ +## Environment Variables + +The UI can be configured using the following environment variables: + +| Variable | Description | Default | +|----------|-------------|---------| +| `LLAMA_STACK_BACKEND_URL` | URL of your Llama Stack server | `http://localhost:8321` | +| `LLAMA_STACK_UI_PORT` | Port for the UI server | `8322` | + +If the Llama Stack server is running with authentication enabled, you can configure the UI to use it by setting the following environment variables: + +| Variable | Description | Default | +|----------|-------------|---------| +| `NEXTAUTH_URL` | NextAuth URL for authentication | `http://localhost:8322` | +| `GITHUB_CLIENT_ID` | GitHub OAuth client ID (optional, for authentication) | - | +| `GITHUB_CLIENT_SECRET` | GitHub OAuth client secret (optional, for authentication) | - | + +### Setting Environment Variables + +#### For npx: + +```bash +LLAMA_STACK_BACKEND_URL=http://localhost:8321 \ +LLAMA_STACK_UI_PORT=8080 \ +npx llama-stack-ui +``` + +#### For Docker: + +```bash +docker run -p 8080:8080 \ + -e LLAMA_STACK_BACKEND_URL=http://localhost:8321 \ + -e LLAMA_STACK_UI_PORT=8080 \ + llamastack/ui +``` + +## Using the UI + +### Managing Resources + +- **Vector Stores**: Create vector databases for RAG workflows, view stored documents and embeddings +- **Prompts**: Create and manage reusable prompt templates +- **Chat Completions**: View history of chat interactions +- **Responses**: Browse detailed agent responses and tool calls + +## Development + +If you want to run the UI from source for development: + +```bash +# From the project root +cd src/llama_stack_ui + +# Install dependencies +npm install + +# Set environment variables +export LLAMA_STACK_BACKEND_URL=http://localhost:8321 + +# Start the development server +npm run dev +``` + +The development server will start on `http://localhost:8322` with hot reloading enabled. 
diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 641c2eed3..7b4ac5ac8 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -57,6 +57,7 @@ const sidebars: SidebarsConfig = { 'distributions/importing_as_library', 'distributions/configuration', 'distributions/starting_llama_stack_server', + 'distributions/llama_stack_ui', { type: 'category', label: 'Self-Hosted Distributions', diff --git a/src/llama_stack_ui/.dockerignore b/src/llama_stack_ui/.dockerignore new file mode 100644 index 000000000..e3d1daae6 --- /dev/null +++ b/src/llama_stack_ui/.dockerignore @@ -0,0 +1,20 @@ +.git +.gitignore +.env.local +.env.*.local +.next +node_modules +npm-debug.log +*.md +.DS_Store +.vscode +.idea +playwright-report +e2e +jest.config.ts +jest.setup.ts +eslint.config.mjs +.prettierrc +.prettierignore +.nvmrc +playwright.config.ts diff --git a/src/llama_stack_ui/Containerfile b/src/llama_stack_ui/Containerfile new file mode 100644 index 000000000..6aea3dbfd --- /dev/null +++ b/src/llama_stack_ui/Containerfile @@ -0,0 +1,18 @@ +FROM node:22.5.1-alpine + +ENV NODE_ENV=production + +# Install dumb-init for proper signal handling +RUN apk add --no-cache dumb-init + +# Create non-root user for security +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +# Install llama-stack-ui from npm +RUN npm install -g llama-stack-ui + +USER nextjs + +ENTRYPOINT ["dumb-init", "--"] +CMD ["llama-stack-ui"] diff --git a/src/llama_stack_ui/bin/cli.js b/src/llama_stack_ui/bin/cli.js new file mode 100755 index 000000000..6069d2f22 --- /dev/null +++ b/src/llama_stack_ui/bin/cli.js @@ -0,0 +1,34 @@ +#!/usr/bin/env node + +const { spawn } = require('child_process'); +const path = require('path'); + +const port = process.env.LLAMA_STACK_UI_PORT || 8322; +const uiDir = path.resolve(__dirname, '..'); +const serverPath = path.join(uiDir, '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', 'server.js'); +const serverDir = path.dirname(serverPath); + +console.log(`Starting 
Llama Stack UI on http://localhost:${port}`); + +const child = spawn(process.execPath, [serverPath], { + cwd: serverDir, + stdio: 'inherit', + env: { + ...process.env, + PORT: port, + }, +}); + +process.on('SIGINT', () => { + child.kill('SIGINT'); + process.exit(0); +}); + +process.on('SIGTERM', () => { + child.kill('SIGTERM'); + process.exit(0); +}); + +child.on('exit', (code) => { + process.exit(code); +}); diff --git a/src/llama_stack_ui/next.config.ts b/src/llama_stack_ui/next.config.ts index e9ffa3083..9f4a74eca 100644 --- a/src/llama_stack_ui/next.config.ts +++ b/src/llama_stack_ui/next.config.ts @@ -1,7 +1,13 @@ import type { NextConfig } from "next"; const nextConfig: NextConfig = { - /* config options here */ + typescript: { + ignoreBuildErrors: true, + }, + output: "standalone", + images: { + unoptimized: true, + }, }; export default nextConfig; diff --git a/src/llama_stack_ui/package-lock.json b/src/llama_stack_ui/package-lock.json index 14e34b720..aa8b2ac26 100644 --- a/src/llama_stack_ui/package-lock.json +++ b/src/llama_stack_ui/package-lock.json @@ -1,12 +1,13 @@ { - "name": "ui", - "version": "0.1.0", + "name": "llama-stack-ui", + "version": "0.4.0-alpha.1", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "ui", - "version": "0.1.0", + "name": "llama-stack-ui", + "version": "0.4.0-alpha.1", + "license": "MIT", "dependencies": { "@radix-ui/react-collapsible": "^1.1.12", "@radix-ui/react-dialog": "^1.1.15", @@ -20,7 +21,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "framer-motion": "^12.23.24", - "llama-stack-client": "github:llamastack/llama-stack-client-typescript", + "llama-stack-client": "^0.3.1", "lucide-react": "^0.545.0", "next": "15.5.4", "next-auth": "^4.24.11", @@ -9684,8 +9685,9 @@ "license": "MIT" }, "node_modules/llama-stack-client": { - "version": "0.4.0-alpha.1", - "resolved": "git+ssh://git@github.com/llamastack/llama-stack-client-typescript.git#78de4862c4b7d77939ac210fa9f9bde77a2c5c5f", + "version": 
"0.3.1", + "resolved": "https://registry.npmjs.org/llama-stack-client/-/llama-stack-client-0.3.1.tgz", + "integrity": "sha512-4aYoF2aAQiBSfxyZEtczeQmJn8q9T22ePDqGhR+ej5RG6a8wvl5B3v7ZoKuFkft+vcP/kbJ58GQZEPLekxekZA==", "license": "MIT", "dependencies": { "@types/node": "^18.11.18", diff --git a/src/llama_stack_ui/package.json b/src/llama_stack_ui/package.json index fb7dbee75..41afc9a11 100644 --- a/src/llama_stack_ui/package.json +++ b/src/llama_stack_ui/package.json @@ -1,11 +1,31 @@ { - "name": "ui", - "version": "0.1.0", - "private": true, + "name": "llama-stack-ui", + "version": "0.4.0-alpha.4", + "description": "Web UI for Llama Stack", + "license": "MIT", + "author": "Llama Stack ", + "repository": { + "type": "git", + "url": "https://github.com/llamastack/llama-stack.git", + "directory": "src/llama_stack_ui" + }, + "bin": { + "llama-stack-ui": "bin/cli.js" + }, + "files": [ + "bin", + ".next", + "public", + "next.config.ts", + "instrumentation.ts", + "tsconfig.json", + "package.json" + ], "scripts": { "dev": "next dev --turbopack --port ${LLAMA_STACK_UI_PORT:-8322}", - "build": "next build", + "build": "next build && node scripts/postbuild.js", "start": "next start", + "prepublishOnly": "npm run build", "lint": "next lint", "format": "prettier --write \"./**/*.{ts,tsx}\"", "format:check": "prettier --check \"./**/*.{ts,tsx}\"", @@ -25,7 +45,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "framer-motion": "^12.23.24", - "llama-stack-client": "github:llamastack/llama-stack-client-typescript", + "llama-stack-client": "^0.3.1", "lucide-react": "^0.545.0", "next": "15.5.4", "next-auth": "^4.24.11", diff --git a/src/llama_stack_ui/scripts/postbuild.js b/src/llama_stack_ui/scripts/postbuild.js new file mode 100644 index 000000000..4b4dbdf5d --- /dev/null +++ b/src/llama_stack_ui/scripts/postbuild.js @@ -0,0 +1,40 @@ +const fs = require('fs'); +const path = require('path'); + +// Copy public directory to standalone +const publicSrc = path.join(__dirname, 
'..', 'public'); +const publicDest = path.join(__dirname, '..', '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', 'public'); + +if (fs.existsSync(publicSrc) && !fs.existsSync(publicDest)) { + console.log('Copying public directory to standalone...'); + copyDir(publicSrc, publicDest); +} + +// Copy .next/static to standalone +const staticSrc = path.join(__dirname, '..', '.next', 'static'); +const staticDest = path.join(__dirname, '..', '.next', 'standalone', 'ui', 'src', 'llama_stack_ui', '.next', 'static'); + +if (fs.existsSync(staticSrc) && !fs.existsSync(staticDest)) { + console.log('Copying .next/static to standalone...'); + copyDir(staticSrc, staticDest); +} + +function copyDir(src, dest) { + if (!fs.existsSync(dest)) { + fs.mkdirSync(dest, { recursive: true }); + } + + const files = fs.readdirSync(src); + files.forEach((file) => { + const srcFile = path.join(src, file); + const destFile = path.join(dest, file); + + if (fs.statSync(srcFile).isDirectory()) { + copyDir(srcFile, destFile); + } else { + fs.copyFileSync(srcFile, destFile); + } + }); +} + +console.log('Postbuild complete!');