|
| 1 | +"use client" |
| 2 | + |
| 3 | +import { useEffect, useRef } from "react" |
| 4 | + |
| 5 | +// Configuration constants for the audio analyzer |
// Configuration constants for the audio analyzer
const AUDIO_CONFIG = {
  FFT_SIZE: 512, // AnalyserNode fftSize; frequencyBinCount = FFT_SIZE / 2
  SMOOTHING: 0.8, // AnalyserNode smoothingTimeConstant (0..1, higher = smoother)
  MIN_BAR_HEIGHT: 2, // Floor (CSS px) so silent bins still render a visible bar
  MIN_BAR_WIDTH: 2, // Floor (CSS px) for bar width on narrow canvases
  BAR_SPACING: 1, // Horizontal gap between bars (CSS px)
  COLOR: {
    MIN_INTENSITY: 100, // Minimum gray value (darker)
    MAX_INTENSITY: 255, // Maximum gray value (brighter)
    // Derived rather than hard-coded so the three values cannot drift apart.
    INTENSITY_RANGE: 255 - 100,
  },
} as const
| 18 | + |
/** Props for the {@link AudioVisualizer} component. */
interface AudioVisualizerProps {
  /** Audio stream to visualize; null when no capture is active. */
  stream: MediaStream | null
  /** Whether recording is in progress; drives starting/stopping the animation. */
  isRecording: boolean
  /** Click handler for the whole visualizer surface. */
  onClick: () => void
}
| 24 | + |
/**
 * Click-to-toggle audio spectrum visualizer.
 *
 * While `isRecording` is true and a `stream` is available, renders a mirrored
 * grayscale frequency-bar display onto a canvas via a Web Audio AnalyserNode.
 * The canvas backing store tracks container size and devicePixelRatio.
 */
export function AudioVisualizer({
  stream,
  isRecording,
  onClick,
}: AudioVisualizerProps) {
  // Refs for managing audio context and animation
  const canvasRef = useRef<HTMLCanvasElement>(null)
  const audioContextRef = useRef<AudioContext | null>(null)
  const analyserRef = useRef<AnalyserNode | null>(null)
  // Explicit initial value: React 19's useRef requires an argument.
  const animationFrameRef = useRef<number | undefined>(undefined)
  const containerRef = useRef<HTMLDivElement>(null)

  // Stop the animation loop and release audio resources.
  // Safe to call repeatedly (every recording toggle, then again on unmount).
  const cleanup = () => {
    if (animationFrameRef.current !== undefined) {
      cancelAnimationFrame(animationFrameRef.current)
      animationFrameRef.current = undefined
    }
    const audioContext = audioContextRef.current
    // close() on an already-closed AudioContext rejects; guard on state so a
    // second cleanup call doesn't surface an unhandled promise rejection.
    if (audioContext && audioContext.state !== "closed") {
      void audioContext.close().catch(() => {})
    }
    audioContextRef.current = null
    analyserRef.current = null
  }

  // Cleanup on unmount
  useEffect(() => {
    return cleanup
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  // Start or stop visualization based on recording state
  useEffect(() => {
    if (stream && isRecording) {
      startVisualization()
    } else {
      cleanup()
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [stream, isRecording])

  // Keep the canvas backing store in sync with container size and DPR.
  useEffect(() => {
    const handleResize = () => {
      if (canvasRef.current && containerRef.current) {
        const container = containerRef.current
        const canvas = canvasRef.current
        const dpr = window.devicePixelRatio || 1

        // Set canvas size based on container and device pixel ratio
        const rect = container.getBoundingClientRect()
        // Account for the 2px total margin (1px on each side)
        canvas.width = (rect.width - 2) * dpr
        canvas.height = (rect.height - 2) * dpr

        // Scale canvas CSS size to match container minus margins
        canvas.style.width = `${rect.width - 2}px`
        canvas.style.height = `${rect.height - 2}px`
      }
    }

    window.addEventListener("resize", handleResize)
    // Initial setup
    handleResize()

    return () => window.removeEventListener("resize", handleResize)
  }, [])

  // Initialize audio context, wire the stream into an analyser, start drawing.
  const startVisualization = async () => {
    // Guard instead of a non-null assertion: the stream prop can change
    // between render and this async call.
    if (!stream) return
    try {
      const audioContext = new AudioContext()
      audioContextRef.current = audioContext

      const analyser = audioContext.createAnalyser()
      analyser.fftSize = AUDIO_CONFIG.FFT_SIZE
      analyser.smoothingTimeConstant = AUDIO_CONFIG.SMOOTHING
      analyserRef.current = analyser

      const source = audioContext.createMediaStreamSource(stream)
      source.connect(analyser)

      draw()
    } catch (error) {
      console.error("Error starting visualization:", error)
    }
  }

  // Map a normalized (0-1) bar height to a grayscale color: louder = brighter.
  const getBarColor = (normalizedHeight: number) => {
    const intensity =
      Math.floor(normalizedHeight * AUDIO_CONFIG.COLOR.INTENSITY_RANGE) +
      AUDIO_CONFIG.COLOR.MIN_INTENSITY
    return `rgb(${intensity}, ${intensity}, ${intensity})`
  }

  // Draw a single bar, mirrored above and below the horizontal center line.
  const drawBar = (
    ctx: CanvasRenderingContext2D,
    x: number,
    centerY: number,
    width: number,
    height: number,
    color: string
  ) => {
    ctx.fillStyle = color
    // Draw upper bar (above center)
    ctx.fillRect(x, centerY - height, width, height)
    // Draw lower bar (below center)
    ctx.fillRect(x, centerY, width, height)
  }

  // Main drawing loop: one mirrored bar per frequency bin, per animation frame.
  const draw = () => {
    if (!isRecording) return

    const canvas = canvasRef.current
    const ctx = canvas?.getContext("2d")
    if (!canvas || !ctx || !analyserRef.current) return

    const analyser = analyserRef.current
    const bufferLength = analyser.frequencyBinCount
    const frequencyData = new Uint8Array(bufferLength)

    const drawFrame = () => {
      animationFrameRef.current = requestAnimationFrame(drawFrame)

      const dpr = window.devicePixelRatio || 1
      // setTransform, not scale(): scale() compounds on the persistent 2D
      // context across start/stop cycles, and resizing the canvas resets the
      // transform mid-recording. Re-asserting the absolute transform each
      // frame keeps all drawing in CSS pixels.
      ctx.setTransform(dpr, 0, 0, dpr, 0, 0)

      // Get current frequency data
      analyser.getByteFrequencyData(frequencyData)

      // Clear canvas - use CSS pixels for clearing
      ctx.clearRect(0, 0, canvas.width / dpr, canvas.height / dpr)

      // Calculate dimensions in CSS pixels
      const barWidth = Math.max(
        AUDIO_CONFIG.MIN_BAR_WIDTH,
        canvas.width / dpr / bufferLength - AUDIO_CONFIG.BAR_SPACING
      )
      const centerY = canvas.height / dpr / 2
      let x = 0

      // Draw each frequency bar
      for (let i = 0; i < bufferLength; i++) {
        const normalizedHeight = frequencyData[i] / 255 // Convert to 0-1 range
        const barHeight = Math.max(
          AUDIO_CONFIG.MIN_BAR_HEIGHT,
          normalizedHeight * centerY
        )

        drawBar(
          ctx,
          x,
          centerY,
          barWidth,
          barHeight,
          getBarColor(normalizedHeight)
        )

        x += barWidth + AUDIO_CONFIG.BAR_SPACING
      }
    }

    drawFrame()
  }

  return (
    <div
      ref={containerRef}
      className="h-full w-full cursor-pointer rounded-lg bg-background/80 backdrop-blur"
      onClick={onClick}
    >
      <canvas ref={canvasRef} className="h-full w-full" />
    </div>
  )
}
0 commit comments