From f3d5459647ed0baed269ebf372240a77357e4255 Mon Sep 17 00:00:00 2001
From: Francisco Arceo
Date: Wed, 30 Jul 2025 22:44:16 -0400
Subject: [PATCH] feat(UI): adding MVP playground UI (#2828)

# What does this PR do?
I've been tinkering with a simple chat playground in the UI, so I'm opening this PR as a work in progress. The first commit contains the bulk of the changes; the rest of the changed files come from installing the `shadcn` components.

Note this is still missing a lot, e.g.:
- sessions
- document upload
- audio (the shadcn components install these by default from https://shadcn-chatbot-kit.vercel.app/docs/components/chat)

I still need to wire up a lot more to make it fully functional, but it already does basic chat using the Llama Stack TypeScript client (see the minimal client sketch below, after the diffstat).

Basic demo: (screenshots)

## Test Plan

---------

Signed-off-by: Francisco Javier Arceo
---
 llama_stack/ui/app/chat-playground/page.tsx   |  223 ++
 llama_stack/ui/components.json                |    2 +-
 .../chat-completions/chat-messasge-item.tsx   |    2 +-
 .../chat-playground/chat-message.tsx          |  405 ++++
 .../ui/components/chat-playground/chat.tsx    |  349 +++
 .../chat-playground/interrupt-prompt.tsx      |   41 +
 .../chat-playground/markdown-renderer.tsx     |  195 ++
 .../message-components.tsx                    |    0
 .../chat-playground/message-input.tsx         |  466 ++++
 .../chat-playground/message-list.tsx          |   45 +
 .../chat-playground/prompt-suggestions.tsx    |   28 +
 .../chat-playground/typing-indicator.tsx      |   15 +
 .../ui/components/layout/app-sidebar.tsx      |   37 +
 .../responses/items/function-call-item.tsx    |    2 +-
 .../responses/items/generic-item.tsx          |    2 +-
 .../items/grouped-function-call-item.tsx      |    2 +-
 .../responses/items/message-item.tsx          |    2 +-
 .../responses/items/web-search-item.tsx       |    2 +-
 .../ui/components/ui/audio-visualizer.tsx     |  198 ++
 llama_stack/ui/components/ui/button.tsx       |   20 +-
 llama_stack/ui/components/ui/collapsible.tsx  |   33 +
 llama_stack/ui/components/ui/copy-button.tsx  |   44 +
 llama_stack/ui/components/ui/file-preview.tsx |  153 ++
 llama_stack/ui/components/ui/select.tsx       |  185 ++
 llama_stack/ui/components/ui/sonner.tsx       |   25 +
 llama_stack/ui/hooks/use-audio-recording.ts   |   93 +
 llama_stack/ui/hooks/use-auto-scroll.ts       |   73 +
 llama_stack/ui/hooks/use-autosize-textarea.ts |   39 +
 llama_stack/ui/hooks/use-copy-to-clipboard.ts |   36 +
 llama_stack/ui/lib/audio-utils.ts             |   50 +
 llama_stack/ui/package-lock.json              | 2130 ++++++++++++++++-
 llama_stack/ui/package.json                   |   10 +-
 32 files changed, 4876 insertions(+), 31 deletions(-)
 create mode 100644 llama_stack/ui/app/chat-playground/page.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/chat-message.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/chat.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/interrupt-prompt.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/markdown-renderer.tsx
 rename llama_stack/ui/components/{ui => chat-playground}/message-components.tsx (100%)
 create mode 100644 llama_stack/ui/components/chat-playground/message-input.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/message-list.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/prompt-suggestions.tsx
 create mode 100644 llama_stack/ui/components/chat-playground/typing-indicator.tsx
 create mode 100644 llama_stack/ui/components/ui/audio-visualizer.tsx
 create mode 100644 llama_stack/ui/components/ui/collapsible.tsx
 create mode 100644 llama_stack/ui/components/ui/copy-button.tsx
 create mode 100644 llama_stack/ui/components/ui/file-preview.tsx
 create mode 100644
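For reviewers who haven't used the TypeScript client: the playground boils down to two calls, `client.models.list()` to populate the model picker and a streamed `client.chat.completions.create()` for the conversation. A minimal sketch outside React follows; constructing the client directly and the base URL are assumptions for illustration, since the page actually gets its client from `useAuthClient()`.

```ts
import LlamaStackClient from "llama-stack-client";

// Assumed constructor options; the UI resolves its client via useAuthClient() instead.
const client = new LlamaStackClient({ baseURL: "http://localhost:8321" });

async function main() {
  // Only LLM-type models are offered in the playground's model picker.
  const models = await client.models.list();
  const llms = models.filter((m) => m.model_type === "llm");

  // Streamed chat completion; deltas arrive chunk by chunk.
  const stream = await client.chat.completions.create({
    model: llms[0].identifier,
    messages: [{ role: "user", content: "Hello!" }],
    stream: true,
  });

  let full = "";
  for await (const chunk of stream) {
    const delta = chunk.choices?.[0]?.delta?.content;
    if (delta) {
      full += delta;
      process.stdout.write(delta); // Node-only; the page appends to React state instead
    }
  }
  return full;
}

main().catch(console.error);
```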
llama_stack/ui/components/ui/select.tsx create mode 100644 llama_stack/ui/components/ui/sonner.tsx create mode 100644 llama_stack/ui/hooks/use-audio-recording.ts create mode 100644 llama_stack/ui/hooks/use-auto-scroll.ts create mode 100644 llama_stack/ui/hooks/use-autosize-textarea.ts create mode 100644 llama_stack/ui/hooks/use-copy-to-clipboard.ts create mode 100644 llama_stack/ui/lib/audio-utils.ts diff --git a/llama_stack/ui/app/chat-playground/page.tsx b/llama_stack/ui/app/chat-playground/page.tsx new file mode 100644 index 000000000..c31248b78 --- /dev/null +++ b/llama_stack/ui/app/chat-playground/page.tsx @@ -0,0 +1,223 @@ +"use client"; + +import { useState, useEffect } from "react"; +import { flushSync } from "react-dom"; +import { Button } from "@/components/ui/button"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Chat } from "@/components/chat-playground/chat"; +import { type Message } from "@/components/chat-playground/chat-message"; +import { useAuthClient } from "@/hooks/use-auth-client"; +import type { CompletionCreateParams } from "llama-stack-client/resources/chat/completions"; +import type { Model } from "llama-stack-client/resources/models"; + +export default function ChatPlaygroundPage() { + const [messages, setMessages] = useState([]); + const [input, setInput] = useState(""); + const [isGenerating, setIsGenerating] = useState(false); + const [error, setError] = useState(null); + const [models, setModels] = useState([]); + const [selectedModel, setSelectedModel] = useState(""); + const [modelsLoading, setModelsLoading] = useState(true); + const [modelsError, setModelsError] = useState(null); + const client = useAuthClient(); + + const isModelsLoading = modelsLoading ?? true; + + + useEffect(() => { + const fetchModels = async () => { + try { + setModelsLoading(true); + setModelsError(null); + const modelList = await client.models.list(); + const llmModels = modelList.filter(model => model.model_type === 'llm'); + setModels(llmModels); + if (llmModels.length > 0) { + setSelectedModel(llmModels[0].identifier); + } + } catch (err) { + console.error("Error fetching models:", err); + setModelsError("Failed to fetch available models"); + } finally { + setModelsLoading(false); + } + }; + + fetchModels(); + }, [client]); + + const extractTextContent = (content: unknown): string => { + if (typeof content === 'string') { + return content; + } + if (Array.isArray(content)) { + return content + .filter(item => item && typeof item === 'object' && 'type' in item && item.type === 'text') + .map(item => (item && typeof item === 'object' && 'text' in item) ? 
String(item.text) : '') + .join(''); + } + if (content && typeof content === 'object' && 'type' in content && content.type === 'text' && 'text' in content) { + return String(content.text) || ''; + } + return ''; + }; + + const handleInputChange = (e: React.ChangeEvent) => { + setInput(e.target.value); + }; + +const handleSubmit = async (event?: { preventDefault?: () => void }) => { + event?.preventDefault?.(); + if (!input.trim()) return; + + // Add user message to chat + const userMessage: Message = { + id: Date.now().toString(), + role: "user", + content: input.trim(), + createdAt: new Date(), + }; + + setMessages(prev => [...prev, userMessage]); + setInput(""); + + // Use the helper function with the content + await handleSubmitWithContent(userMessage.content); +}; + +const handleSubmitWithContent = async (content: string) => { + setIsGenerating(true); + setError(null); + + try { + const messageParams: CompletionCreateParams["messages"] = [ + ...messages.map(msg => { + const msgContent = typeof msg.content === 'string' ? msg.content : extractTextContent(msg.content); + if (msg.role === "user") { + return { role: "user" as const, content: msgContent }; + } else if (msg.role === "assistant") { + return { role: "assistant" as const, content: msgContent }; + } else { + return { role: "system" as const, content: msgContent }; + } + }), + { role: "user" as const, content } + ]; + + const response = await client.chat.completions.create({ + model: selectedModel, + messages: messageParams, + stream: true, + }); + + const assistantMessage: Message = { + id: (Date.now() + 1).toString(), + role: "assistant", + content: "", + createdAt: new Date(), + }; + + setMessages(prev => [...prev, assistantMessage]); + let fullContent = ""; + for await (const chunk of response) { + if (chunk.choices && chunk.choices[0]?.delta?.content) { + const deltaContent = chunk.choices[0].delta.content; + fullContent += deltaContent; + + flushSync(() => { + setMessages(prev => { + const newMessages = [...prev]; + const lastMessage = newMessages[newMessages.length - 1]; + if (lastMessage.role === "assistant") { + lastMessage.content = fullContent; + } + return newMessages; + }); + }); + } + } + } catch (err) { + console.error("Error sending message:", err); + setError("Failed to send message. Please try again."); + setMessages(prev => prev.slice(0, -1)); + } finally { + setIsGenerating(false); + } +}; + const suggestions = [ + "Write a Python function that prints 'Hello, World!'", + "Explain step-by-step how to solve this math problem: If x² + 6x + 9 = 25, what is x?", + "Design a simple algorithm to find the longest palindrome in a string.", + ]; + + const append = (message: { role: "user"; content: string }) => { + const newMessage: Message = { + id: Date.now().toString(), + role: message.role, + content: message.content, + createdAt: new Date(), + }; + setMessages(prev => [...prev, newMessage]) + handleSubmitWithContent(newMessage.content); + }; + + const clearChat = () => { + setMessages([]); + setError(null); + }; + + return ( +
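The streaming handler above appends an empty assistant message and then, for every delta, rewrites that message's `content` inside `flushSync` so the text renders as it arrives. A pure sketch of that updater (`ChatTurn` is a stand-in for the playground's `Message` type; the page mutates the last message in place, but the copy-based version below renders the same):

```ts
interface ChatTurn {
  id: string;
  role: "user" | "assistant";
  content: string;
}

// Same idea as the updater passed to setMessages() while streaming:
// overwrite the trailing assistant message with the text accumulated so far.
function applyStreamedContent(messages: ChatTurn[], fullContent: string): ChatTurn[] {
  const next = [...messages];
  const last = next[next.length - 1];
  if (last?.role === "assistant") {
    next[next.length - 1] = { ...last, content: fullContent };
  }
  return next;
}

// Simulate three streamed chunks.
let state: ChatTurn[] = [
  { id: "1", role: "user", content: "Hi" },
  { id: "2", role: "assistant", content: "" },
];
let full = "";
for (const delta of ["Hel", "lo", "!"]) {
  full += delta;
  state = applyStreamedContent(state, full);
}
console.log(state[1].content); // "Hello!"
```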
+
+

Chat Playground

+
+ + +
+
+ + {modelsError && ( +
+

{modelsError}

+
+ )} + + {error && ( +
+

{error}

+
+ )} + + +
+ ); +} diff --git a/llama_stack/ui/components.json b/llama_stack/ui/components.json index 4ee62ee10..cef815d9e 100644 --- a/llama_stack/ui/components.json +++ b/llama_stack/ui/components.json @@ -13,7 +13,7 @@ "aliases": { "components": "@/components", "utils": "@/lib/utils", - "ui": "@/components/ui", + "chat": "@/components/chat", "lib": "@/lib", "hooks": "@/hooks" }, diff --git a/llama_stack/ui/components/chat-completions/chat-messasge-item.tsx b/llama_stack/ui/components/chat-completions/chat-messasge-item.tsx index 2e8593bfb..6170e816e 100644 --- a/llama_stack/ui/components/chat-completions/chat-messasge-item.tsx +++ b/llama_stack/ui/components/chat-completions/chat-messasge-item.tsx @@ -7,7 +7,7 @@ import { extractTextFromContentPart } from "@/lib/format-message-content"; import { MessageBlock, ToolCallBlock, -} from "@/components/ui/message-components"; +} from "@/components/chat-playground/message-components"; interface ChatMessageItemProps { message: ChatMessage; diff --git a/llama_stack/ui/components/chat-playground/chat-message.tsx b/llama_stack/ui/components/chat-playground/chat-message.tsx new file mode 100644 index 000000000..e5d621c81 --- /dev/null +++ b/llama_stack/ui/components/chat-playground/chat-message.tsx @@ -0,0 +1,405 @@ +"use client" + +import React, { useMemo, useState } from "react" +import { cva, type VariantProps } from "class-variance-authority" +import { motion } from "framer-motion" +import { Ban, ChevronRight, Code2, Loader2, Terminal } from "lucide-react" + +import { cn } from "@/lib/utils" +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@/components/ui/collapsible" +import { FilePreview } from "@/components/ui/file-preview" +import { MarkdownRenderer } from "@/components/chat-playground/markdown-renderer" + +const chatBubbleVariants = cva( + "group/message relative break-words rounded-lg p-3 text-sm sm:max-w-[70%]", + { + variants: { + isUser: { + true: "bg-primary text-primary-foreground", + false: "bg-muted text-foreground", + }, + animation: { + none: "", + slide: "duration-300 animate-in fade-in-0", + scale: "duration-300 animate-in fade-in-0 zoom-in-75", + fade: "duration-500 animate-in fade-in-0", + }, + }, + compoundVariants: [ + { + isUser: true, + animation: "slide", + class: "slide-in-from-right", + }, + { + isUser: false, + animation: "slide", + class: "slide-in-from-left", + }, + { + isUser: true, + animation: "scale", + class: "origin-bottom-right", + }, + { + isUser: false, + animation: "scale", + class: "origin-bottom-left", + }, + ], + } +) + +type Animation = VariantProps["animation"] + +interface Attachment { + name?: string + contentType?: string + url: string +} + +interface PartialToolCall { + state: "partial-call" + toolName: string +} + +interface ToolCall { + state: "call" + toolName: string +} + +interface ToolResult { + state: "result" + toolName: string + result: { + __cancelled?: boolean + [key: string]: any + } +} + +type ToolInvocation = PartialToolCall | ToolCall | ToolResult + +interface ReasoningPart { + type: "reasoning" + reasoning: string +} + +interface ToolInvocationPart { + type: "tool-invocation" + toolInvocation: ToolInvocation +} + +interface TextPart { + type: "text" + text: string +} + +// For compatibility with AI SDK types, not used +interface SourcePart { + type: "source" + source?: any +} + +interface FilePart { + type: "file" + mimeType: string + data: string +} + +interface StepStartPart { + type: "step-start" +} + +type MessagePart = + | TextPart + | ReasoningPart + | 
ToolInvocationPart + | SourcePart + | FilePart + | StepStartPart + +export interface Message { + id: string + role: "user" | "assistant" | (string & {}) + content: string + createdAt?: Date + experimental_attachments?: Attachment[] + toolInvocations?: ToolInvocation[] + parts?: MessagePart[] +} + +export interface ChatMessageProps extends Message { + showTimeStamp?: boolean + animation?: Animation + actions?: React.ReactNode +} + +export const ChatMessage: React.FC = ({ + role, + content, + createdAt, + showTimeStamp = false, + animation = "scale", + actions, + experimental_attachments, + toolInvocations, + parts, +}) => { + const files = useMemo(() => { + return experimental_attachments?.map((attachment) => { + const dataArray = dataUrlToUint8Array(attachment.url) + const file = new File([dataArray], attachment.name ?? "Unknown", { + type: attachment.contentType, + }) + return file + }) + }, [experimental_attachments]) + + const isUser = role === "user" + + const formattedTime = createdAt?.toLocaleTimeString("en-US", { + hour: "2-digit", + minute: "2-digit", + }) + + if (isUser) { + return ( +
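The `Message` interface above accepts either a plain `content` string or a `parts` array mixing text, reasoning, and tool invocations, which is exactly what the branches of `ChatMessage` below render. A hypothetical example value (the tool name and result are made up; only the shapes come from the types above):

```ts
const assistantTurn: Message = {
  id: "42",
  role: "assistant",
  content: "",
  createdAt: new Date(),
  parts: [
    { type: "reasoning", reasoning: "The user asked about the weather, so call the weather tool." },
    {
      type: "tool-invocation",
      toolInvocation: {
        state: "result",
        toolName: "get_weather", // hypothetical tool
        result: { temperature_c: 21 },
      },
    },
    { type: "text", text: "It is currently 21 °C." },
  ],
};

console.log(assistantTurn.parts?.length); // 3
```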
+ {files ? ( +
+ {files.map((file, index) => { + return + })} +
+ ) : null} + +
+ {content} +
+ + {showTimeStamp && createdAt ? ( + + ) : null} +
+ ) + } + + if (parts && parts.length > 0) { + return parts.map((part, index) => { + if (part.type === "text") { + return ( +
+
+ {part.text} + {actions ? ( +
+ {actions} +
+ ) : null} +
+ + {showTimeStamp && createdAt ? ( + + ) : null} +
+ ) + } else if (part.type === "reasoning") { + return + } else if (part.type === "tool-invocation") { + return ( + + ) + } + return null + }) + } + + if (toolInvocations && toolInvocations.length > 0) { + return + } + + return ( +
+
+ {content} + {actions ? ( +
+ {actions} +
+ ) : null} +
+ + {showTimeStamp && createdAt ? ( + + ) : null} +
+ ) +} + +function dataUrlToUint8Array(data: string) { + const base64 = data.split(",")[1] + const buf = Buffer.from(base64, "base64") + return new Uint8Array(buf) +} + +const ReasoningBlock = ({ part }: { part: ReasoningPart }) => { + const [isOpen, setIsOpen] = useState(false) + + return ( +
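`dataUrlToUint8Array` above is what turns an attachment's data URL back into bytes so the `files` memo in `ChatMessage` can rebuild a `File` for preview. A usage sketch (note that `Buffer` relies on the Node/Next.js polyfill; in a plain browser `atob` would do the same job):

```ts
// Mirrors the files useMemo in ChatMessage: data URL -> bytes -> File.
const attachment = {
  name: "hello.txt",
  contentType: "text/plain",
  // "SGVsbG8sIHdvcmxkIQ==" is base64 for "Hello, world!"
  url: "data:text/plain;base64,SGVsbG8sIHdvcmxkIQ==",
};

const bytes = dataUrlToUint8Array(attachment.url);
const file = new File([bytes], attachment.name ?? "Unknown", {
  type: attachment.contentType,
});

console.log(file.name, file.size); // "hello.txt" 13
```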
+ +
+ + + +
+ + +
+
+ {part.reasoning} +
+
+
+
+
+
+ ) +} + +function ToolCall({ + toolInvocations, +}: Pick) { + if (!toolInvocations?.length) return null + + return ( +
+ {toolInvocations.map((invocation, index) => { + const isCancelled = + invocation.state === "result" && + invocation.result.__cancelled === true + + if (isCancelled) { + return ( +
+ + + Cancelled{" "} + + {"`"} + {invocation.toolName} + {"`"} + + +
+ ) + } + + switch (invocation.state) { + case "partial-call": + case "call": + return ( +
+ + + Calling{" "} + + {"`"} + {invocation.toolName} + {"`"} + + ... + + +
+ ) + case "result": + return ( +
+
+ + + Result from{" "} + + {"`"} + {invocation.toolName} + {"`"} + + +
+
+                  {JSON.stringify(invocation.result, null, 2)}
+                
+
+ ) + default: + return null + } + })} +
+ ) +} diff --git a/llama_stack/ui/components/chat-playground/chat.tsx b/llama_stack/ui/components/chat-playground/chat.tsx new file mode 100644 index 000000000..ee83fd9bb --- /dev/null +++ b/llama_stack/ui/components/chat-playground/chat.tsx @@ -0,0 +1,349 @@ +"use client" + +import { + forwardRef, + useCallback, + useRef, + useState, + type ReactElement, +} from "react" +import { ArrowDown, ThumbsDown, ThumbsUp } from "lucide-react" + +import { cn } from "@/lib/utils" +import { useAutoScroll } from "@/hooks/use-auto-scroll" +import { Button } from "@/components/ui/button" +import { type Message } from "@/components/chat-playground/chat-message" +import { CopyButton } from "@/components/ui/copy-button" +import { MessageInput } from "@/components/chat-playground/message-input" +import { MessageList } from "@/components/chat-playground/message-list" +import { PromptSuggestions } from "@/components/chat-playground/prompt-suggestions" + +interface ChatPropsBase { + handleSubmit: ( + event?: { preventDefault?: () => void }, + options?: { experimental_attachments?: FileList } + ) => void + messages: Array + input: string + className?: string + handleInputChange: React.ChangeEventHandler + isGenerating: boolean + stop?: () => void + onRateResponse?: ( + messageId: string, + rating: "thumbs-up" | "thumbs-down" + ) => void + setMessages?: (messages: any[]) => void + transcribeAudio?: (blob: Blob) => Promise +} + +interface ChatPropsWithoutSuggestions extends ChatPropsBase { + append?: never + suggestions?: never +} + +interface ChatPropsWithSuggestions extends ChatPropsBase { + append: (message: { role: "user"; content: string }) => void + suggestions: string[] +} + +type ChatProps = ChatPropsWithoutSuggestions | ChatPropsWithSuggestions + +export function Chat({ + messages, + handleSubmit, + input, + handleInputChange, + stop, + isGenerating, + append, + suggestions, + className, + onRateResponse, + setMessages, + transcribeAudio, +}: ChatProps) { + const lastMessage = messages.at(-1) + const isEmpty = messages.length === 0 + const isTyping = lastMessage?.role === "user" + + const messagesRef = useRef(messages) + messagesRef.current = messages + + // Enhanced stop function that marks pending tool calls as cancelled + const handleStop = useCallback(() => { + stop?.() + + if (!setMessages) return + + const latestMessages = [...messagesRef.current] + const lastAssistantMessage = latestMessages.findLast( + (m) => m.role === "assistant" + ) + + if (!lastAssistantMessage) return + + let needsUpdate = false + let updatedMessage = { ...lastAssistantMessage } + + if (lastAssistantMessage.toolInvocations) { + const updatedToolInvocations = lastAssistantMessage.toolInvocations.map( + (toolInvocation) => { + if (toolInvocation.state === "call") { + needsUpdate = true + return { + ...toolInvocation, + state: "result", + result: { + content: "Tool execution was cancelled", + __cancelled: true, // Special marker to indicate cancellation + }, + } as const + } + return toolInvocation + } + ) + + if (needsUpdate) { + updatedMessage = { + ...updatedMessage, + toolInvocations: updatedToolInvocations, + } + } + } + + if (lastAssistantMessage.parts && lastAssistantMessage.parts.length > 0) { + const updatedParts = lastAssistantMessage.parts.map((part: any) => { + if ( + part.type === "tool-invocation" && + part.toolInvocation && + part.toolInvocation.state === "call" + ) { + needsUpdate = true + return { + ...part, + toolInvocation: { + ...part.toolInvocation, + state: "result", + result: { + content: "Tool execution 
was cancelled", + __cancelled: true, + }, + }, + } + } + return part + }) + + if (needsUpdate) { + updatedMessage = { + ...updatedMessage, + parts: updatedParts, + } + } + } + + if (needsUpdate) { + const messageIndex = latestMessages.findIndex( + (m) => m.id === lastAssistantMessage.id + ) + if (messageIndex !== -1) { + latestMessages[messageIndex] = updatedMessage + setMessages(latestMessages) + } + } + }, [stop, setMessages, messagesRef]) + + const messageOptions = useCallback( + (message: Message) => ({ + actions: onRateResponse ? ( + <> +
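`handleStop` above promotes any tool invocation still in the "call" state to a synthetic "result" carrying `__cancelled: true`, which is what makes `ChatMessage` show the Cancelled badge instead of a spinner. The transformation in isolation (the local type name is a stand-in for the one in chat-message.tsx; the tool name is hypothetical):

```ts
type Invocation =
  | { state: "partial-call"; toolName: string }
  | { state: "call"; toolName: string }
  | { state: "result"; toolName: string; result: { __cancelled?: boolean; [key: string]: unknown } };

// Mark still-pending calls as cancelled results, the same shape handleStop writes back.
function cancelPendingCalls(invocations: Invocation[]): Invocation[] {
  return invocations.map((inv) =>
    inv.state === "call"
      ? {
          state: "result" as const,
          toolName: inv.toolName,
          result: { content: "Tool execution was cancelled", __cancelled: true },
        }
      : inv
  );
}

console.log(cancelPendingCalls([{ state: "call", toolName: "web_search" }]));
// -> [{ state: "result", toolName: "web_search", result: { content: "...", __cancelled: true } }]
```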
+ +
+ + + + ) : ( + + ), + }), + [onRateResponse] + ) + + return ( + +
+ {isEmpty && append && suggestions ? ( +
+ +
+ ) : null} + + {messages.length > 0 ? ( + + + + ) : null} +
+ +
+
+ + {({ files, setFiles }) => ( + + )} + +
+
+
+ ) +} +Chat.displayName = "Chat" + +export function ChatMessages({ + messages, + children, +}: React.PropsWithChildren<{ + messages: Message[] +}>) { + const { + containerRef, + scrollToBottom, + handleScroll, + shouldAutoScroll, + handleTouchStart, + } = useAutoScroll([messages]) + + return ( +
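`ChatMessages` consumes `useAutoScroll([messages])`, whose implementation lives in `hooks/use-auto-scroll.ts` and is not part of this hunk. Purely as an assumption about the contract it exposes (a container ref, scroll handlers, and a `shouldAutoScroll` flag), a minimal version of that pattern could look like the sketch below; it is not the PR's actual hook.

```ts
import { useEffect, useRef, useState } from "react";

// Assumed sketch of a "stick to the bottom unless the user scrolled up" hook;
// the real use-auto-scroll.ts may differ in details.
function useAutoScrollSketch(dependencies: unknown[]) {
  const containerRef = useRef<HTMLDivElement>(null);
  const [shouldAutoScroll, setShouldAutoScroll] = useState(true);

  const scrollToBottom = () => {
    const el = containerRef.current;
    if (el) el.scrollTop = el.scrollHeight;
  };

  // Re-pin to the bottom when new messages arrive, unless the user scrolled away.
  useEffect(() => {
    if (shouldAutoScroll) scrollToBottom();
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, dependencies);

  const handleScroll = () => {
    const el = containerRef.current;
    if (!el) return;
    const distanceFromBottom = el.scrollHeight - el.scrollTop - el.clientHeight;
    setShouldAutoScroll(distanceFromBottom < 20);
  };

  const handleTouchStart = () => setShouldAutoScroll(false);

  return { containerRef, scrollToBottom, handleScroll, shouldAutoScroll, handleTouchStart };
}

export default useAutoScrollSketch;
```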
+
+ {children} +
+ + {!shouldAutoScroll && ( +
+
+ +
+
+ )} +
+ ) +} + +export const ChatContainer = forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => { + return ( +
+ ) +}) +ChatContainer.displayName = "ChatContainer" + +interface ChatFormProps { + className?: string + isPending: boolean + handleSubmit: ( + event?: { preventDefault?: () => void }, + options?: { experimental_attachments?: FileList } + ) => void + children: (props: { + files: File[] | null + setFiles: React.Dispatch> + }) => ReactElement +} + +export const ChatForm = forwardRef( + ({ children, handleSubmit, isPending, className }, ref) => { + const [files, setFiles] = useState(null) + + const onSubmit = (event: React.FormEvent) => { + // if (isPending) { + // event.preventDefault() + // return + // } + + if (!files) { + handleSubmit(event) + return + } + + const fileList = createFileList(files) + handleSubmit(event, { experimental_attachments: fileList }) + setFiles(null) + } + + return ( +
+ {children({ files, setFiles })} +
+ ) + } +) +ChatForm.displayName = "ChatForm" + +function createFileList(files: File[] | FileList): FileList { + const dataTransfer = new DataTransfer() + for (const file of Array.from(files)) { + dataTransfer.items.add(file) + } + return dataTransfer.files +} diff --git a/llama_stack/ui/components/chat-playground/interrupt-prompt.tsx b/llama_stack/ui/components/chat-playground/interrupt-prompt.tsx new file mode 100644 index 000000000..757863c62 --- /dev/null +++ b/llama_stack/ui/components/chat-playground/interrupt-prompt.tsx @@ -0,0 +1,41 @@ +"use client" + +import { AnimatePresence, motion } from "framer-motion" +import { X } from "lucide-react" + +interface InterruptPromptProps { + isOpen: boolean + close: () => void +} + +export function InterruptPrompt({ isOpen, close }: InterruptPromptProps) { + return ( + + {isOpen && ( + + Press Enter again to interrupt + + + )} + + ) +} diff --git a/llama_stack/ui/components/chat-playground/markdown-renderer.tsx b/llama_stack/ui/components/chat-playground/markdown-renderer.tsx new file mode 100644 index 000000000..374f687df --- /dev/null +++ b/llama_stack/ui/components/chat-playground/markdown-renderer.tsx @@ -0,0 +1,195 @@ +import React, { Suspense } from "react" +import Markdown from "react-markdown" +import remarkGfm from "remark-gfm" + +import { cn } from "@/lib/utils" +import { CopyButton } from "@/components/ui/copy-button" + +interface MarkdownRendererProps { + children: string +} + +export function MarkdownRenderer({ children }: MarkdownRendererProps) { + return ( +
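`createFileList` above exists because `FileList` has no public constructor, so files are routed through a `DataTransfer` and its `.files` is handed to `handleSubmit` as `experimental_attachments`. A quick browser-only usage sketch:

```ts
// Browser-only: DataTransfer is a DOM API and is not available in Node.
const files = [
  new File(["hello"], "hello.txt", { type: "text/plain" }),
  new File(["{}"], "empty.json", { type: "application/json" }),
];

const fileList = createFileList(files); // helper defined above
console.log(fileList.length); // 2
console.log(fileList instanceof FileList); // true

// This is what ChatForm does on submit when attachments are present:
// handleSubmit(event, { experimental_attachments: fileList })
```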
+ + {children} + +
+ ) +} + +interface HighlightedPre extends React.HTMLAttributes { + children: string + language: string +} + +const HighlightedPre = React.memo( + async ({ children, language, ...props }: HighlightedPre) => { + const { codeToTokens, bundledLanguages } = await import("shiki") + + if (!(language in bundledLanguages)) { + return
{children}
+ } + + const { tokens } = await codeToTokens(children, { + lang: language as keyof typeof bundledLanguages, + defaultColor: false, + themes: { + light: "github-light", + dark: "github-dark", + }, + }) + + return ( +
+        
+          {tokens.map((line, lineIndex) => (
+            <>
+              
+                {line.map((token, tokenIndex) => {
+                  const style =
+                    typeof token.htmlStyle === "string"
+                      ? undefined
+                      : token.htmlStyle
+
+                  return (
+                    
+                      {token.content}
+                    
+                  )
+                })}
+              
+              {lineIndex !== tokens.length - 1 && "\n"}
+            
+          ))}
+        
+      
+ ) + } +) +HighlightedPre.displayName = "HighlightedCode" + +interface CodeBlockProps extends React.HTMLAttributes { + children: React.ReactNode + className?: string + language: string +} + +const CodeBlock = ({ + children, + className, + language, + ...restProps +}: CodeBlockProps) => { + const code = + typeof children === "string" + ? children + : childrenTakeAllStringContents(children) + + const preClass = cn( + "overflow-x-scroll rounded-md border bg-background/50 p-4 font-mono text-sm [scrollbar-width:none]", + className + ) + + return ( +
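`HighlightedPre` above lazy-loads `shiki`, falls back to a plain block for languages outside `bundledLanguages`, and renders the token grid itself with paired light/dark themes. The same `codeToTokens` call in standalone form (theme names as used above):

```ts
// Standalone sketch of the highlighting call HighlightedPre makes.
async function tokensFor(code: string, language: string) {
  const { codeToTokens, bundledLanguages } = await import("shiki");

  // Unknown languages fall back to unhighlighted text, as in the component.
  if (!(language in bundledLanguages)) return null;

  const { tokens } = await codeToTokens(code, {
    lang: language as keyof typeof bundledLanguages,
    defaultColor: false,
    themes: { light: "github-light", dark: "github-dark" },
  });

  // tokens is a per-line grid; each token carries .content plus theme styling.
  return tokens;
}

tokensFor('console.log("hi")', "ts").then((tokens) =>
  console.log(tokens?.[0]?.map((t) => t.content).join(""))
);
```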
+ + {children} + + } + > + + {code} + + + +
+ +
+
+ ) +} + +function childrenTakeAllStringContents(element: any): string { + if (typeof element === "string") { + return element + } + + if (element?.props?.children) { + let children = element.props.children + + if (Array.isArray(children)) { + return children + .map((child) => childrenTakeAllStringContents(child)) + .join("") + } else { + return childrenTakeAllStringContents(children) + } + } + + return "" +} + +const COMPONENTS = { + h1: withClass("h1", "text-2xl font-semibold"), + h2: withClass("h2", "font-semibold text-xl"), + h3: withClass("h3", "font-semibold text-lg"), + h4: withClass("h4", "font-semibold text-base"), + h5: withClass("h5", "font-medium"), + strong: withClass("strong", "font-semibold"), + a: withClass("a", "text-primary underline underline-offset-2"), + blockquote: withClass("blockquote", "border-l-2 border-primary pl-4"), + code: ({ children, className, node, ...rest }: any) => { + const match = /language-(\w+)/.exec(className || "") + return match ? ( + + {children} + + ) : ( + &]:rounded-md [:not(pre)>&]:bg-background/50 [:not(pre)>&]:px-1 [:not(pre)>&]:py-0.5" + )} + {...rest} + > + {children} + + ) + }, + pre: ({ children }: any) => children, + ol: withClass("ol", "list-decimal space-y-2 pl-6"), + ul: withClass("ul", "list-disc space-y-2 pl-6"), + li: withClass("li", "my-1.5"), + table: withClass( + "table", + "w-full border-collapse overflow-y-auto rounded-md border border-foreground/20" + ), + th: withClass( + "th", + "border border-foreground/20 px-4 py-2 text-left font-bold [&[align=center]]:text-center [&[align=right]]:text-right" + ), + td: withClass( + "td", + "border border-foreground/20 px-4 py-2 text-left [&[align=center]]:text-center [&[align=right]]:text-right" + ), + tr: withClass("tr", "m-0 border-t p-0 even:bg-muted"), + p: withClass("p", "whitespace-pre-wrap"), + hr: withClass("hr", "border-foreground/20"), +} + +function withClass(Tag: keyof JSX.IntrinsicElements, classes: string) { + const Component = ({ node, ...props }: any) => ( + + ) + Component.displayName = Tag + return Component +} + +export default MarkdownRenderer diff --git a/llama_stack/ui/components/ui/message-components.tsx b/llama_stack/ui/components/chat-playground/message-components.tsx similarity index 100% rename from llama_stack/ui/components/ui/message-components.tsx rename to llama_stack/ui/components/chat-playground/message-components.tsx diff --git a/llama_stack/ui/components/chat-playground/message-input.tsx b/llama_stack/ui/components/chat-playground/message-input.tsx new file mode 100644 index 000000000..4a29386d9 --- /dev/null +++ b/llama_stack/ui/components/chat-playground/message-input.tsx @@ -0,0 +1,466 @@ +"use client" + +import React, { useEffect, useRef, useState } from "react" +import { AnimatePresence, motion } from "framer-motion" +import { ArrowUp, Info, Loader2, Mic, Paperclip, Square } from "lucide-react" +import { omit } from "remeda" + +import { cn } from "@/lib/utils" +import { useAudioRecording } from "@/hooks/use-audio-recording" +import { useAutosizeTextArea } from "@/hooks/use-autosize-textarea" +import { AudioVisualizer } from "@/components/ui/audio-visualizer" +import { Button } from "@/components/ui/button" +import { FilePreview } from "@/components/ui/file-preview" +import { InterruptPrompt } from "@/components/chat-playground/interrupt-prompt" + +interface MessageInputBaseProps + extends React.TextareaHTMLAttributes { + value: string + submitOnEnter?: boolean + stop?: () => void + isGenerating: boolean + enableInterrupt?: boolean + 
transcribeAudio?: (blob: Blob) => Promise +} + +interface MessageInputWithoutAttachmentProps extends MessageInputBaseProps { + allowAttachments?: false +} + +interface MessageInputWithAttachmentsProps extends MessageInputBaseProps { + allowAttachments: true + files: File[] | null + setFiles: React.Dispatch> +} + +type MessageInputProps = + | MessageInputWithoutAttachmentProps + | MessageInputWithAttachmentsProps + +export function MessageInput({ + placeholder = "Ask AI...", + className, + onKeyDown: onKeyDownProp, + submitOnEnter = true, + stop, + isGenerating, + enableInterrupt = true, + transcribeAudio, + ...props +}: MessageInputProps) { + const [isDragging, setIsDragging] = useState(false) + const [showInterruptPrompt, setShowInterruptPrompt] = useState(false) + + const { + isListening, + isSpeechSupported, + isRecording, + isTranscribing, + audioStream, + toggleListening, + stopRecording, + } = useAudioRecording({ + transcribeAudio, + onTranscriptionComplete: (text) => { + props.onChange?.({ target: { value: text } } as any) + }, + }) + + useEffect(() => { + if (!isGenerating) { + setShowInterruptPrompt(false) + } + }, [isGenerating]) + + const addFiles = (files: File[] | null) => { + if (props.allowAttachments) { + props.setFiles((currentFiles) => { + if (currentFiles === null) { + return files + } + + if (files === null) { + return currentFiles + } + + return [...currentFiles, ...files] + }) + } + } + + const onDragOver = (event: React.DragEvent) => { + if (props.allowAttachments !== true) return + event.preventDefault() + setIsDragging(true) + } + + const onDragLeave = (event: React.DragEvent) => { + if (props.allowAttachments !== true) return + event.preventDefault() + setIsDragging(false) + } + + const onDrop = (event: React.DragEvent) => { + setIsDragging(false) + if (props.allowAttachments !== true) return + event.preventDefault() + const dataTransfer = event.dataTransfer + if (dataTransfer.files.length) { + addFiles(Array.from(dataTransfer.files)) + } + } + + const onPaste = (event: React.ClipboardEvent) => { + const items = event.clipboardData?.items + if (!items) return + + const text = event.clipboardData.getData("text") + if (text && text.length > 500 && props.allowAttachments) { + event.preventDefault() + const blob = new Blob([text], { type: "text/plain" }) + const file = new File([blob], "Pasted text", { + type: "text/plain", + lastModified: Date.now(), + }) + addFiles([file]) + return + } + + const files = Array.from(items) + .map((item) => item.getAsFile()) + .filter((file) => file !== null) + + if (props.allowAttachments && files.length > 0) { + addFiles(files) + } + } + + const onKeyDown = (event: React.KeyboardEvent) => { + if (submitOnEnter && event.key === "Enter" && !event.shiftKey) { + event.preventDefault() + + if (isGenerating && stop && enableInterrupt) { + if (showInterruptPrompt) { + stop() + setShowInterruptPrompt(false) + event.currentTarget.form?.requestSubmit() + } else if ( + props.value || + (props.allowAttachments && props.files?.length) + ) { + setShowInterruptPrompt(true) + return + } + } + + event.currentTarget.form?.requestSubmit() + } + + onKeyDownProp?.(event) + } + + const textAreaRef = useRef(null) + const [textAreaHeight, setTextAreaHeight] = useState(0) + + useEffect(() => { + if (textAreaRef.current) { + setTextAreaHeight(textAreaRef.current.offsetHeight) + } + }, [props.value]) + + const showFileList = + props.allowAttachments && props.files && props.files.length > 0 + + + useAutosizeTextArea({ + ref: textAreaRef, + maxHeight: 240, + 
borderWidth: 1, + dependencies: [props.value, showFileList], + }) + + return ( +
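One behavior of `MessageInput` above worth highlighting: when attachments are enabled, `onPaste` converts any pasted text longer than 500 characters into a `text/plain` attachment named "Pasted text" instead of dumping it into the textarea. That conversion in isolation (the helper name below is illustrative, not from the component):

```ts
// Long pasted text becomes a synthetic "Pasted text" file, mirroring onPaste above.
function pastedTextToFile(text: string): File | null {
  if (text.length <= 500) return null; // short pastes stay in the textarea
  const blob = new Blob([text], { type: "text/plain" });
  return new File([blob], "Pasted text", {
    type: "text/plain",
    lastModified: Date.now(),
  });
}

const file = pastedTextToFile("x".repeat(600));
console.log(file?.name, file?.size); // "Pasted text" 600
```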
+ {enableInterrupt && ( + setShowInterruptPrompt(false)} + /> + )} + + + +
+
+