audio-ui/components/editor/Waveform.tsx
Sebastian Krüger ebfb4d3fff feat: implement Phase 2 - Web Audio API engine and waveform visualization
Phase 2 Complete Features:
- Web Audio API context management with browser compatibility
- Audio file upload with drag-and-drop support
- Audio decoding for multiple formats (WAV, MP3, OGG, FLAC, AAC, M4A); a decoding sketch follows this list
- AudioPlayer class with full playback control
- Waveform visualization using Canvas API
- Real-time waveform rendering with progress indicator
- Playback controls (play, pause, stop, seek)
- Volume control with mute/unmute
- Timeline scrubbing
- Audio file information display
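
A minimal sketch of that decoding step, assuming lib/audio/decoder.ts wraps the standard promise-based decodeAudioData call; the function name decodeAudioFile is a placeholder, not necessarily the real export:

// Hypothetical sketch of lib/audio/decoder.ts (the actual file is not shown in this view).
// decodeAudioData handles any format the browser itself can decode (WAV, MP3, OGG, FLAC, AAC, M4A).
export async function decodeAudioFile(
  ctx: AudioContext,
  file: File
): Promise<AudioBuffer> {
  // Read the uploaded file's raw bytes.
  const arrayBuffer = await file.arrayBuffer();
  // Decode to PCM; the promise rejects if the format is unsupported.
  return ctx.decodeAudioData(arrayBuffer);
}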

Components:
- AudioEditor: Main editor container
- FileUpload: Drag-and-drop file upload component
- AudioInfo: Display audio file metadata
- Waveform: Canvas-based waveform visualization
- PlaybackControls: Transport controls with volume slider

Audio Engine:
- lib/audio/context.ts: AudioContext management
- lib/audio/decoder.ts: Audio file decoding utilities
- lib/audio/player.ts: AudioPlayer class for playback
- lib/waveform/peaks.ts: Waveform peak generation
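
lib/waveform/peaks.ts is not shown in this view, but its shape can be inferred from the call site in Waveform.tsx below: generateMinMaxPeaks(audioBuffer, width, 0) returns { min, max } arrays with one entry per pixel column. A rough sketch under that assumption (treating the third argument as a channel index is a guess):

// Inferred sketch of lib/waveform/peaks.ts; only the signature is taken from the call site.
export function generateMinMaxPeaks(
  buffer: AudioBuffer,
  buckets: number,
  channel = 0 // assumed to be a channel index
): { min: Float32Array; max: Float32Array } {
  const samples = buffer.getChannelData(channel);
  const samplesPerBucket = Math.floor(samples.length / buckets);
  const min = new Float32Array(buckets);
  const max = new Float32Array(buckets);

  // For each pixel column, keep the lowest and highest sample in its slice of audio.
  for (let i = 0; i < buckets; i++) {
    const start = i * samplesPerBucket;
    const end = Math.min(start + samplesPerBucket, samples.length);
    let lo = 0;
    let hi = 0;
    for (let j = start; j < end; j++) {
      const s = samples[j];
      if (s < lo) lo = s;
      if (s > hi) hi = s;
    }
    min[i] = lo; // in [-1, 0]
    max[i] = hi; // in [0, 1]
  }
  return { min, max };
}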

Hooks:
- useAudioPlayer: Complete audio player state management
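
The hook itself is not part of this file; a hedged sketch of how it could feed the Waveform component shown further down (the destructured field names and import paths are assumptions, only the Waveform props come from this file):

// Hypothetical wiring; the real return shape of useAudioPlayer is not visible here.
import { useAudioPlayer } from '@/hooks/useAudioPlayer'; // assumed path
import { Waveform } from '@/components/editor/Waveform';

export function EditorWaveformSection() {
  // Guessed field names chosen to line up with the Waveform props below.
  const { audioBuffer, currentTime, duration, seek } = useAudioPlayer();

  return (
    <Waveform
      audioBuffer={audioBuffer}
      currentTime={currentTime}
      duration={duration}
      onSeek={seek}
    />
  );
}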

Types:
- types/audio.ts: TypeScript definitions for audio types

Features Working:
✓ Load audio files via drag-and-drop or file picker
✓ Display waveform with real-time progress
✓ Play/pause/stop controls
✓ Seek by clicking on waveform or using timeline slider
✓ Volume control with visual feedback
✓ Audio file metadata display (duration, sample rate, channels)
✓ Toast notifications for user feedback
✓ SSR-safe audio context initialization
✓ Dark/light theme support
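
The SSR-safe initialization and browser-compatibility items above would live in lib/audio/context.ts; a sketch under that assumption (the lazy-singleton getAudioContext pattern is illustrative, not taken from the source):

// Sketch of an SSR-safe, lazily created AudioContext; lib/audio/context.ts is not shown in this view.
let audioContext: AudioContext | null = null;

export function getAudioContext(): AudioContext | null {
  // No window on the server, so return null instead of throwing during SSR.
  if (typeof window === 'undefined') return null;
  if (!audioContext) {
    // Fall back to the prefixed constructor for older WebKit browsers.
    const Ctor =
      window.AudioContext ??
      (window as unknown as { webkitAudioContext?: typeof AudioContext }).webkitAudioContext;
    if (!Ctor) return null;
    audioContext = new Ctor();
  }
  return audioContext;
}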

Tech Stack:
- Web Audio API for playback
- Canvas API for waveform rendering
- React 19 hooks for state management
- TypeScript for type safety

Build verified and working ✓

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-17 15:32:00 +01:00


'use client';

import * as React from 'react';

import { cn } from '@/lib/utils/cn';
import { generateMinMaxPeaks } from '@/lib/waveform/peaks';

export interface WaveformProps {
  audioBuffer: AudioBuffer | null;
  currentTime: number;
  duration: number;
  onSeek?: (time: number) => void;
  className?: string;
  height?: number;
}

export function Waveform({
  audioBuffer,
  currentTime,
  duration,
  onSeek,
  className,
  height = 128,
}: WaveformProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);
  const [width, setWidth] = React.useState(800);

  // Handle resize
  React.useEffect(() => {
    const handleResize = () => {
      if (containerRef.current) {
        setWidth(containerRef.current.clientWidth);
      }
    };

    handleResize();
    window.addEventListener('resize', handleResize);
    return () => window.removeEventListener('resize', handleResize);
  }, []);

  // Draw waveform
  React.useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas || !audioBuffer) return;

    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Set canvas size
    const dpr = window.devicePixelRatio || 1;
    canvas.width = width * dpr;
    canvas.height = height * dpr;
    canvas.style.width = `${width}px`;
    canvas.style.height = `${height}px`;
    ctx.scale(dpr, dpr);

    // Clear canvas
    ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || '#f5f5f5';
    ctx.fillRect(0, 0, width, height);

    // Generate peaks
    const { min, max } = generateMinMaxPeaks(audioBuffer, width, 0);

    // Draw waveform
    const middle = height / 2;
    const scale = height / 2;

    // Waveform color
    const waveformColor = getComputedStyle(canvas).getPropertyValue('--color-waveform') || '#3b82f6';
    const progressColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-progress') || '#10b981';

    // Calculate progress position
    const progressX = duration > 0 ? (currentTime / duration) * width : 0;

    // Draw waveform
    for (let i = 0; i < width; i++) {
      const minVal = min[i] * scale;
      const maxVal = max[i] * scale;

      // Use different color for played portion
      ctx.fillStyle = i < progressX ? progressColor : waveformColor;
      ctx.fillRect(
        i,
        middle + minVal,
        1,
        Math.max(1, maxVal - minVal)
      );
    }

    // Draw center line
    ctx.strokeStyle = 'rgba(0, 0, 0, 0.1)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, middle);
    ctx.lineTo(width, middle);
    ctx.stroke();

    // Draw playhead
    if (progressX > 0) {
      ctx.strokeStyle = '#ef4444';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(progressX, 0);
      ctx.lineTo(progressX, height);
      ctx.stroke();
    }
  }, [audioBuffer, width, height, currentTime, duration]);

  const handleClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!onSeek || !duration) return;

    const canvas = canvasRef.current;
    if (!canvas) return;

    const rect = canvas.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickedTime = (x / width) * duration;
    onSeek(clickedTime);
  };

  return (
    <div ref={containerRef} className={cn('w-full', className)}>
      {audioBuffer ? (
        <canvas
          ref={canvasRef}
          onClick={handleClick}
          className="w-full cursor-pointer rounded-lg border border-border"
          style={{ height: `${height}px` }}
        />
      ) : (
        <div
          className="flex items-center justify-center rounded-lg border-2 border-dashed border-border bg-muted/30"
          style={{ height: `${height}px` }}
        >
          <p className="text-sm text-muted-foreground">
            Load an audio file to see waveform
          </p>
        </div>
      )}
    </div>
  );
}