Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-06 04:34:57 +00:00
feat(UI): Adding linter and prettier for UI (#3156)
Commit e69acbafbf (parent 61582f327c)
73 changed files with 1452 additions and 1226 deletions
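The diff below is almost entirely mechanical Prettier reformatting: statements that previously relied on automatic semicolon insertion now end with explicit semicolons. The PR's actual configuration files are not shown in this excerpt, so the following prettier.config.js is only a minimal, hypothetical sketch of settings that would produce exactly this kind of change (Prettier's defaults already do).

// Hypothetical prettier.config.js — a sketch, not the PR's actual config
// (the real config files are presumably among the 73 changed files but are not shown here).
/** @type {import("prettier").Config} */
const config = {
  semi: true, // Prettier default: append semicolons ("use client" -> "use client";)
  singleQuote: false, // Prettier default: keep double quotes, as in import { useEffect } from "react";
};

module.exports = config;

With a setup like this, `npx prettier --write .` applies the formatting and `npx prettier --check .` verifies it (for example in CI); both are standard Prettier CLI commands, not something visible in this excerpt.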
Diff excerpt for the AudioVisualizer component (one of the 73 changed files):

@@ -1,6 +1,6 @@
-"use client"
+"use client";

-import { useEffect, useRef } from "react"
+import { useEffect, useRef } from "react";

 // Configuration constants for the audio analyzer
 const AUDIO_CONFIG = {
@@ -14,12 +14,12 @@ const AUDIO_CONFIG = {
     MAX_INTENSITY: 255, // Maximum gray value (brighter)
     INTENSITY_RANGE: 155, // MAX_INTENSITY - MIN_INTENSITY
   },
-} as const
+} as const;

 interface AudioVisualizerProps {
-  stream: MediaStream | null
-  isRecording: boolean
-  onClick: () => void
+  stream: MediaStream | null;
+  isRecording: boolean;
+  onClick: () => void;
 }

 export function AudioVisualizer({
@@ -28,91 +28,91 @@ export function AudioVisualizer({
   onClick,
 }: AudioVisualizerProps) {
   // Refs for managing audio context and animation
-  const canvasRef = useRef<HTMLCanvasElement>(null)
-  const audioContextRef = useRef<AudioContext | null>(null)
-  const analyserRef = useRef<AnalyserNode | null>(null)
-  const animationFrameRef = useRef<number>()
-  const containerRef = useRef<HTMLDivElement>(null)
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+  const audioContextRef = useRef<AudioContext | null>(null);
+  const analyserRef = useRef<AnalyserNode | null>(null);
+  const animationFrameRef = useRef<number>();
+  const containerRef = useRef<HTMLDivElement>(null);

   // Cleanup function to stop visualization and close audio context
   const cleanup = () => {
     if (animationFrameRef.current) {
-      cancelAnimationFrame(animationFrameRef.current)
+      cancelAnimationFrame(animationFrameRef.current);
     }
     if (audioContextRef.current) {
-      audioContextRef.current.close()
+      audioContextRef.current.close();
     }
-  }
+  };

   // Cleanup on unmount
   useEffect(() => {
-    return cleanup
-  }, [])
+    return cleanup;
+  }, []);

   // Start or stop visualization based on recording state
   useEffect(() => {
     if (stream && isRecording) {
-      startVisualization()
+      startVisualization();
     } else {
-      cleanup()
+      cleanup();
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [stream, isRecording])
+  }, [stream, isRecording]);

   // Handle window resize
   useEffect(() => {
     const handleResize = () => {
       if (canvasRef.current && containerRef.current) {
-        const container = containerRef.current
-        const canvas = canvasRef.current
-        const dpr = window.devicePixelRatio || 1
+        const container = containerRef.current;
+        const canvas = canvasRef.current;
+        const dpr = window.devicePixelRatio || 1;

         // Set canvas size based on container and device pixel ratio
-        const rect = container.getBoundingClientRect()
+        const rect = container.getBoundingClientRect();
         // Account for the 2px total margin (1px on each side)
-        canvas.width = (rect.width - 2) * dpr
-        canvas.height = (rect.height - 2) * dpr
+        canvas.width = (rect.width - 2) * dpr;
+        canvas.height = (rect.height - 2) * dpr;

         // Scale canvas CSS size to match container minus margins
-        canvas.style.width = `${rect.width - 2}px`
-        canvas.style.height = `${rect.height - 2}px`
+        canvas.style.width = `${rect.width - 2}px`;
+        canvas.style.height = `${rect.height - 2}px`;
       }
-    }
+    };

-    window.addEventListener("resize", handleResize)
+    window.addEventListener("resize", handleResize);
     // Initial setup
-    handleResize()
+    handleResize();

-    return () => window.removeEventListener("resize", handleResize)
-  }, [])
+    return () => window.removeEventListener("resize", handleResize);
+  }, []);

   // Initialize audio context and start visualization
   const startVisualization = async () => {
     try {
-      const audioContext = new AudioContext()
-      audioContextRef.current = audioContext
+      const audioContext = new AudioContext();
+      audioContextRef.current = audioContext;

-      const analyser = audioContext.createAnalyser()
-      analyser.fftSize = AUDIO_CONFIG.FFT_SIZE
-      analyser.smoothingTimeConstant = AUDIO_CONFIG.SMOOTHING
-      analyserRef.current = analyser
+      const analyser = audioContext.createAnalyser();
+      analyser.fftSize = AUDIO_CONFIG.FFT_SIZE;
+      analyser.smoothingTimeConstant = AUDIO_CONFIG.SMOOTHING;
+      analyserRef.current = analyser;

-      const source = audioContext.createMediaStreamSource(stream!)
-      source.connect(analyser)
+      const source = audioContext.createMediaStreamSource(stream!);
+      source.connect(analyser);

-      draw()
+      draw();
     } catch (error) {
-      console.error("Error starting visualization:", error)
+      console.error("Error starting visualization:", error);
     }
-  }
+  };

   // Calculate the color intensity based on bar height
   const getBarColor = (normalizedHeight: number) => {
     const intensity =
       Math.floor(normalizedHeight * AUDIO_CONFIG.COLOR.INTENSITY_RANGE) +
-      AUDIO_CONFIG.COLOR.MIN_INTENSITY
-    return `rgb(${intensity}, ${intensity}, ${intensity})`
-  }
+      AUDIO_CONFIG.COLOR.MIN_INTENSITY;
+    return `rgb(${intensity}, ${intensity}, ${intensity})`;
+  };

   // Draw a single bar of the visualizer
   const drawBar = (
@@ -123,52 +123,52 @@ export function AudioVisualizer({
     height: number,
     color: string
   ) => {
-    ctx.fillStyle = color
+    ctx.fillStyle = color;
     // Draw upper bar (above center)
-    ctx.fillRect(x, centerY - height, width, height)
+    ctx.fillRect(x, centerY - height, width, height);
     // Draw lower bar (below center)
-    ctx.fillRect(x, centerY, width, height)
-  }
+    ctx.fillRect(x, centerY, width, height);
+  };

   // Main drawing function
   const draw = () => {
-    if (!isRecording) return
+    if (!isRecording) return;

-    const canvas = canvasRef.current
-    const ctx = canvas?.getContext("2d")
-    if (!canvas || !ctx || !analyserRef.current) return
+    const canvas = canvasRef.current;
+    const ctx = canvas?.getContext("2d");
+    if (!canvas || !ctx || !analyserRef.current) return;

-    const dpr = window.devicePixelRatio || 1
-    ctx.scale(dpr, dpr)
+    const dpr = window.devicePixelRatio || 1;
+    ctx.scale(dpr, dpr);

-    const analyser = analyserRef.current
-    const bufferLength = analyser.frequencyBinCount
-    const frequencyData = new Uint8Array(bufferLength)
+    const analyser = analyserRef.current;
+    const bufferLength = analyser.frequencyBinCount;
+    const frequencyData = new Uint8Array(bufferLength);

     const drawFrame = () => {
-      animationFrameRef.current = requestAnimationFrame(drawFrame)
+      animationFrameRef.current = requestAnimationFrame(drawFrame);

       // Get current frequency data
-      analyser.getByteFrequencyData(frequencyData)
+      analyser.getByteFrequencyData(frequencyData);

       // Clear canvas - use CSS pixels for clearing
-      ctx.clearRect(0, 0, canvas.width / dpr, canvas.height / dpr)
+      ctx.clearRect(0, 0, canvas.width / dpr, canvas.height / dpr);

       // Calculate dimensions in CSS pixels
       const barWidth = Math.max(
         AUDIO_CONFIG.MIN_BAR_WIDTH,
         canvas.width / dpr / bufferLength - AUDIO_CONFIG.BAR_SPACING
-      )
-      const centerY = canvas.height / dpr / 2
-      let x = 0
+      );
+      const centerY = canvas.height / dpr / 2;
+      let x = 0;

       // Draw each frequency bar
       for (let i = 0; i < bufferLength; i++) {
-        const normalizedHeight = frequencyData[i] / 255 // Convert to 0-1 range
+        const normalizedHeight = frequencyData[i] / 255; // Convert to 0-1 range
         const barHeight = Math.max(
           AUDIO_CONFIG.MIN_BAR_HEIGHT,
           normalizedHeight * centerY
-        )
+        );

         drawBar(
           ctx,
@@ -177,14 +177,14 @@ export function AudioVisualizer({
           barWidth,
           barHeight,
           getBarColor(normalizedHeight)
-        )
+        );

-        x += barWidth + AUDIO_CONFIG.BAR_SPACING
+        x += barWidth + AUDIO_CONFIG.BAR_SPACING;
       }
-    }
+    };

-    drawFrame()
-  }
+    drawFrame();
+  };

   return (
     <div
@@ -194,5 +194,5 @@ export function AudioVisualizer({
     >
       <canvas ref={canvasRef} className="h-full w-full" />
     </div>
-  )
+  );
 }
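For reference, the semicolon changes above can be reproduced with Prettier's own API. The snippet below is a standalone illustration, not code from this PR; it assumes Prettier v3, where format() is asynchronous, and an ESM context for top-level await.

// Standalone illustration: format a small snippet the same way this diff was formatted.
import * as prettier from "prettier";

const before = `"use client"
import { useEffect, useRef } from "react"
`;

const after = await prettier.format(before, {
  parser: "typescript", // parse the snippet as TypeScript
  semi: true, // Prettier default: terminate statements with semicolons
});

console.log(after); // prints the same lines with semicolons appended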