# What does this PR do?

I've been tinkering a little with a simple chat playground in the UI, so I'm opening this PR as what's still a WIP. The first commit contains the bulk of the changes; the rest of the changed files come from installing the `shadcn` components.

Note this is missing a lot, e.g.:

- sessions
- document upload
- audio (the shadcn components install these by default from https://shadcn-chatbot-kit.vercel.app/docs/components/chat)

I still need to wire up a lot more to make it fully functional, but it already does basic chat using the LS TypeScript Client.

Basic demo:

<img width="1329" height="1430" alt="Image" src="https://github.com/user-attachments/assets/917a2096-36d4-4925-b83b-f1f2cda98698" />
<img width="1319" height="1424" alt="Image" src="https://github.com/user-attachments/assets/fab1583b-1c72-4bf3-baf2-405aee13c6bb" />

## Test Plan

---------

Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
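For context, here is a minimal sketch of how the `MessageInput` component below is meant to be driven by a parent chat form. The wrapper component, state names, submit handler, and import path are illustrative assumptions rather than code from this PR; only the `MessageInput` props come from the component itself (Enter submits the enclosing `<form>`, so the send logic lives in `onSubmit`).

```tsx
"use client"

import React, { useState } from "react"

// Path assumed to mirror the chat-playground components referenced below.
import { MessageInput } from "@/components/chat-playground/message-input"

// Illustrative parent component (not part of this PR). MessageInput submits
// its enclosing <form> on Enter, so sending happens in the onSubmit handler.
export function ChatComposer() {
  const [input, setInput] = useState("")
  const [isGenerating, setIsGenerating] = useState(false)

  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    if (!input) return
    setIsGenerating(true)
    try {
      // Send `input` to the backend here (e.g. via the LS TypeScript client),
      // then clear the composer.
      setInput("")
    } finally {
      setIsGenerating(false)
    }
  }

  return (
    <form onSubmit={handleSubmit}>
      <MessageInput
        value={input}
        onChange={(e) => setInput(e.target.value)}
        isGenerating={isGenerating}
        stop={() => setIsGenerating(false)}
      />
    </form>
  )
}
```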
466 lines · 13 KiB · TypeScript
"use client"
|
|
|
|
import React, { useEffect, useRef, useState } from "react"
|
|
import { AnimatePresence, motion } from "framer-motion"
|
|
import { ArrowUp, Info, Loader2, Mic, Paperclip, Square } from "lucide-react"
|
|
import { omit } from "remeda"
|
|
|
|
import { cn } from "@/lib/utils"
|
|
import { useAudioRecording } from "@/hooks/use-audio-recording"
|
|
import { useAutosizeTextArea } from "@/hooks/use-autosize-textarea"
|
|
import { AudioVisualizer } from "@/components/ui/audio-visualizer"
|
|
import { Button } from "@/components/ui/button"
|
|
import { FilePreview } from "@/components/ui/file-preview"
|
|
import { InterruptPrompt } from "@/components/chat-playground/interrupt-prompt"
|
|
|
|
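// Prop contracts for MessageInput. Both variants extend the native <textarea>
// attributes, which are spread onto the underlying textarea element.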
interface MessageInputBaseProps
  extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {
  value: string
  submitOnEnter?: boolean
  stop?: () => void
  isGenerating: boolean
  enableInterrupt?: boolean
  transcribeAudio?: (blob: Blob) => Promise<string>
}

interface MessageInputWithoutAttachmentProps extends MessageInputBaseProps {
  allowAttachments?: false
}

interface MessageInputWithAttachmentsProps extends MessageInputBaseProps {
  allowAttachments: true
  files: File[] | null
  setFiles: React.Dispatch<React.SetStateAction<File[] | null>>
}

type MessageInputProps =
  | MessageInputWithoutAttachmentProps
  | MessageInputWithAttachmentsProps

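// `allowAttachments` is the discriminant of the union: when it is true,
// `files` and `setFiles` must be provided so the component can render and
// update attachment previews.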
export function MessageInput({
  placeholder = "Ask AI...",
  className,
  onKeyDown: onKeyDownProp,
  submitOnEnter = true,
  stop,
  isGenerating,
  enableInterrupt = true,
  transcribeAudio,
  ...props
}: MessageInputProps) {
  const [isDragging, setIsDragging] = useState(false)
  const [showInterruptPrompt, setShowInterruptPrompt] = useState(false)

  const {
    isListening,
    isSpeechSupported,
    isRecording,
    isTranscribing,
    audioStream,
    toggleListening,
    stopRecording,
  } = useAudioRecording({
    transcribeAudio,
    onTranscriptionComplete: (text) => {
      props.onChange?.({ target: { value: text } } as any)
    },
  })

  useEffect(() => {
    if (!isGenerating) {
      setShowInterruptPrompt(false)
    }
  }, [isGenerating])

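  // Attachments can arrive three ways: the (currently disabled) file dialog,
  // drag-and-drop, or paste. addFiles merges new files into the existing list.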
  const addFiles = (files: File[] | null) => {
    if (props.allowAttachments) {
      props.setFiles((currentFiles) => {
        if (currentFiles === null) {
          return files
        }

        if (files === null) {
          return currentFiles
        }

        return [...currentFiles, ...files]
      })
    }
  }

  const onDragOver = (event: React.DragEvent) => {
    if (props.allowAttachments !== true) return
    event.preventDefault()
    setIsDragging(true)
  }

  const onDragLeave = (event: React.DragEvent) => {
    if (props.allowAttachments !== true) return
    event.preventDefault()
    setIsDragging(false)
  }

  const onDrop = (event: React.DragEvent) => {
    setIsDragging(false)
    if (props.allowAttachments !== true) return
    event.preventDefault()
    const dataTransfer = event.dataTransfer
    if (dataTransfer.files.length) {
      addFiles(Array.from(dataTransfer.files))
    }
  }

  const onPaste = (event: React.ClipboardEvent) => {
    const items = event.clipboardData?.items
    if (!items) return

    const text = event.clipboardData.getData("text")
    if (text && text.length > 500 && props.allowAttachments) {
      event.preventDefault()
      const blob = new Blob([text], { type: "text/plain" })
      const file = new File([blob], "Pasted text", {
        type: "text/plain",
        lastModified: Date.now(),
      })
      addFiles([file])
      return
    }

    const files = Array.from(items)
      .map((item) => item.getAsFile())
      .filter((file) => file !== null)

    if (props.allowAttachments && files.length > 0) {
      addFiles(files)
    }
  }

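  // Enter submits the enclosing form; Shift+Enter inserts a newline. While a
  // response is generating, the first Enter shows the interrupt prompt and the
  // second stops generation and submits.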
  const onKeyDown = (event: React.KeyboardEvent<HTMLTextAreaElement>) => {
    if (submitOnEnter && event.key === "Enter" && !event.shiftKey) {
      event.preventDefault()

      if (isGenerating && stop && enableInterrupt) {
        if (showInterruptPrompt) {
          stop()
          setShowInterruptPrompt(false)
          event.currentTarget.form?.requestSubmit()
        } else if (
          props.value ||
          (props.allowAttachments && props.files?.length)
        ) {
          setShowInterruptPrompt(true)
          return
        }
      }

      event.currentTarget.form?.requestSubmit()
    }

    onKeyDownProp?.(event)
  }

  const textAreaRef = useRef<HTMLTextAreaElement>(null)
  const [textAreaHeight, setTextAreaHeight] = useState<number>(0)

  useEffect(() => {
    if (textAreaRef.current) {
      setTextAreaHeight(textAreaRef.current.offsetHeight)
    }
  }, [props.value])

  const showFileList =
    props.allowAttachments && props.files && props.files.length > 0

  useAutosizeTextArea({
    ref: textAreaRef,
    maxHeight: 240,
    borderWidth: 1,
    dependencies: [props.value, showFileList],
  })

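  // Layout: the textarea with inline file previews, action buttons pinned to
  // the top-right, plus overlays for drag-and-drop and recording/transcription.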
  return (
    <div
      className="relative flex w-full"
      onDragOver={onDragOver}
      onDragLeave={onDragLeave}
      onDrop={onDrop}
    >
      {enableInterrupt && (
        <InterruptPrompt
          isOpen={showInterruptPrompt}
          close={() => setShowInterruptPrompt(false)}
        />
      )}

      <RecordingPrompt
        isVisible={isRecording}
        onStopRecording={stopRecording}
      />

      <div className="relative flex w-full items-center space-x-2">
        <div className="relative flex-1">
          <textarea
            aria-label="Write your prompt here"
            placeholder={placeholder}
            ref={textAreaRef}
            onPaste={onPaste}
            onKeyDown={onKeyDown}
            className={cn(
              "z-10 w-full grow resize-none rounded-xl border border-input bg-background p-3 pr-24 text-sm ring-offset-background transition-[border] placeholder:text-muted-foreground focus-visible:border-primary focus-visible:outline-none disabled:cursor-not-allowed disabled:opacity-50",
              showFileList && "pb-16",
              className
            )}
            {...(props.allowAttachments
              ? omit(props, ["allowAttachments", "files", "setFiles"])
              : omit(props, ["allowAttachments"]))}
          />

          {props.allowAttachments && (
            <div className="absolute inset-x-3 bottom-0 z-20 overflow-x-scroll py-3">
              <div className="flex space-x-3">
                <AnimatePresence mode="popLayout">
                  {props.files?.map((file) => {
                    return (
                      <FilePreview
                        key={file.name + String(file.lastModified)}
                        file={file}
                        onRemove={() => {
                          props.setFiles((files) => {
                            if (!files) return null

                            const filtered = Array.from(files).filter(
                              (f) => f !== file
                            )
                            if (filtered.length === 0) return null
                            return filtered
                          })
                        }}
                      />
                    )
                  })}
                </AnimatePresence>
              </div>
            </div>
          )}
        </div>
      </div>

      <div className="absolute right-3 top-3 z-20 flex gap-2">
        {props.allowAttachments && (
          <Button
            type="button"
            size="icon"
            variant="outline"
            className="h-8 w-8"
            aria-label="Attach a file"
            disabled={true}
            onClick={async () => {
              const files = await showFileUploadDialog()
              addFiles(files)
            }}
          >
            <Paperclip className="h-4 w-4" />
          </Button>
        )}
        {isSpeechSupported && (
          <Button
            type="button"
            variant="outline"
            className={cn("h-8 w-8", isListening && "text-primary")}
            aria-label="Voice input"
            size="icon"
            onClick={toggleListening}
          >
            <Mic className="h-4 w-4" />
          </Button>
        )}
        {isGenerating && stop ? (
          <Button
            type="button"
            size="icon"
            className="h-8 w-8"
            aria-label="Stop generating"
            onClick={stop}
          >
            <Square className="h-3 w-3 animate-pulse" fill="currentColor" />
          </Button>
        ) : (
          <Button
            type="submit"
            size="icon"
            className="h-8 w-8 transition-opacity"
            aria-label="Send message"
            disabled={props.value === "" || isGenerating}
          >
            <ArrowUp className="h-5 w-5" />
          </Button>
        )}
      </div>

      {props.allowAttachments && <FileUploadOverlay isDragging={isDragging} />}

      <RecordingControls
        isRecording={isRecording}
        isTranscribing={isTranscribing}
        audioStream={audioStream}
        textAreaHeight={textAreaHeight}
        onStopRecording={stopRecording}
      />
    </div>
  )
}
MessageInput.displayName = "MessageInput"

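// Helper UI used above: the drag-and-drop overlay, the native file picker,
// and the recording/transcription overlays.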
interface FileUploadOverlayProps {
  isDragging: boolean
}

function FileUploadOverlay({ isDragging }: FileUploadOverlayProps) {
  return (
    <AnimatePresence>
      {isDragging && (
        <motion.div
          className="pointer-events-none absolute inset-0 z-20 flex items-center justify-center space-x-2 rounded-xl border border-dashed border-border bg-background text-sm text-muted-foreground"
          initial={{ opacity: 0 }}
          animate={{ opacity: 1 }}
          exit={{ opacity: 0 }}
          transition={{ duration: 0.2 }}
          aria-hidden
        >
          <Paperclip className="h-4 w-4" />
          <span>Drop your files here to attach them.</span>
        </motion.div>
      )}
    </AnimatePresence>
  )
}

function showFileUploadDialog() {
  const input = document.createElement("input")

  input.type = "file"
  input.multiple = true
  input.accept = "*/*"
  input.click()

  return new Promise<File[] | null>((resolve) => {
    input.onchange = (e) => {
      const files = (e.currentTarget as HTMLInputElement).files

      if (files) {
        resolve(Array.from(files))
        return
      }

      resolve(null)
    }
  })
}

function TranscribingOverlay() {
  return (
    <motion.div
      className="flex h-full w-full flex-col items-center justify-center rounded-xl bg-background/80 backdrop-blur-sm"
      initial={{ opacity: 0 }}
      animate={{ opacity: 1 }}
      exit={{ opacity: 0 }}
      transition={{ duration: 0.2 }}
    >
      <div className="relative">
        <Loader2 className="h-8 w-8 animate-spin text-primary" />
        <motion.div
          className="absolute inset-0 h-8 w-8 animate-pulse rounded-full bg-primary/20"
          initial={{ scale: 0.8, opacity: 0 }}
          animate={{ scale: 1.2, opacity: 1 }}
          transition={{
            duration: 1,
            repeat: Infinity,
            repeatType: "reverse",
            ease: "easeInOut",
          }}
        />
      </div>
      <p className="mt-4 text-sm font-medium text-muted-foreground">
        Transcribing audio...
      </p>
    </motion.div>
  )
}

interface RecordingPromptProps {
  isVisible: boolean
  onStopRecording: () => void
}

function RecordingPrompt({ isVisible, onStopRecording }: RecordingPromptProps) {
  return (
    <AnimatePresence>
      {isVisible && (
        <motion.div
          initial={{ top: 0, filter: "blur(5px)" }}
          animate={{
            top: -40,
            filter: "blur(0px)",
            transition: {
              type: "spring",
              filter: { type: "tween" },
            },
          }}
          exit={{ top: 0, filter: "blur(5px)" }}
          className="absolute left-1/2 flex -translate-x-1/2 cursor-pointer overflow-hidden whitespace-nowrap rounded-full border bg-background py-1 text-center text-sm text-muted-foreground"
          onClick={onStopRecording}
        >
          <span className="mx-2.5 flex items-center">
            <Info className="mr-2 h-3 w-3" />
            Click to finish recording
          </span>
        </motion.div>
      )}
    </AnimatePresence>
  )
}

interface RecordingControlsProps {
  isRecording: boolean
  isTranscribing: boolean
  audioStream: MediaStream | null
  textAreaHeight: number
  onStopRecording: () => void
}

function RecordingControls({
  isRecording,
  isTranscribing,
  audioStream,
  textAreaHeight,
  onStopRecording,
}: RecordingControlsProps) {
  if (isRecording) {
    return (
      <div
        className="absolute inset-[1px] z-50 overflow-hidden rounded-xl"
        style={{ height: textAreaHeight - 2 }}
      >
        <AudioVisualizer
          stream={audioStream}
          isRecording={isRecording}
          onClick={onStopRecording}
        />
      </div>
    )
  }

  if (isTranscribing) {
    return (
      <div
        className="absolute inset-[1px] z-50 overflow-hidden rounded-xl"
        style={{ height: textAreaHeight - 2 }}
      >
        <TranscribingOverlay />
      </div>
    )
  }

  return null
}