Implemented three major medium-effort features to enhance the audio editor: **1. Region Markers System** - Add marker type definitions supporting point markers and regions - Create useMarkers hook for marker state management - Build MarkerTimeline component for visual marker display - Create MarkerDialog component for adding/editing markers - Add keyboard shortcuts: M (add marker), Shift+M (next), Shift+Ctrl+M (previous) - Support marker navigation, editing, and deletion **2. Web Worker for Computations** - Create audio worker for offloading heavy computations - Implement worker functions: generatePeaks, generateMinMaxPeaks, normalizePeaks, analyzeAudio, findPeak - Build useAudioWorker hook for easy worker integration - Integrate worker into Waveform component with peak caching - Significantly improve UI responsiveness during waveform generation **3. Bezier Curve Automation** - Enhance interpolateAutomationValue to support Bezier curves - Implement cubic Bezier interpolation with control handles - Add createSmoothHandles for auto-smooth curve generation - Add generateBezierCurvePoints for smooth curve rendering - Support Bezier alongside existing linear and step curves All features are type-safe and integrate seamlessly with the existing codebase. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
335 lines
9.9 KiB
TypeScript
335 lines
9.9 KiB
TypeScript
'use client';
|
|
|
|
import * as React from 'react';
|
|
import { cn } from '@/lib/utils/cn';
|
|
import { useAudioWorker } from '@/lib/hooks/useAudioWorker';
|
|
import type { Selection } from '@/types/selection';
|
|
|
|
/**
 * Props for the Waveform visualization component.
 */
export interface WaveformProps {
  /** Decoded audio to render; `null` shows an empty-state placeholder instead of a canvas. */
  audioBuffer: AudioBuffer | null;
  /** Current playback position in seconds (drives the playhead and progress coloring). */
  currentTime: number;
  /** Total audio duration in seconds. */
  duration: number;
  /** Invoked when the user clicks to seek; `autoPlay` is passed `true` on click-to-seek. */
  onSeek?: (time: number, autoPlay?: boolean) => void;
  /** Extra class names merged onto the outer container. */
  className?: string;
  /** Canvas height in CSS pixels. Defaults to 128. */
  height?: number;
  /** Horizontal zoom factor; rendered waveform width is container width × zoom. Defaults to 1. */
  zoom?: number;
  /** Horizontal scroll position in pixels into the zoomed waveform. Defaults to 0. */
  scrollOffset?: number;
  /** Vertical amplitude multiplier applied when drawing peaks. Defaults to 1. */
  amplitudeScale?: number;
  /** Current time-range selection (start/end in seconds), or `null` when nothing is selected. */
  selection?: Selection | null;
  /** Invoked as the user drags to create a selection, or with `null` when it is cleared. */
  onSelectionChange?: (selection: Selection | null) => void;
}
|
|
|
|
export function Waveform({
|
|
audioBuffer,
|
|
currentTime,
|
|
duration,
|
|
onSeek,
|
|
className,
|
|
height = 128,
|
|
zoom = 1,
|
|
scrollOffset = 0,
|
|
amplitudeScale = 1,
|
|
selection = null,
|
|
onSelectionChange,
|
|
}: WaveformProps) {
|
|
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
|
const containerRef = React.useRef<HTMLDivElement>(null);
|
|
const [width, setWidth] = React.useState(800);
|
|
const [isDragging, setIsDragging] = React.useState(false);
|
|
const [isSelecting, setIsSelecting] = React.useState(false);
|
|
const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
|
|
|
|
// Worker for peak generation
|
|
const worker = useAudioWorker();
|
|
|
|
// Cache peaks to avoid regenerating on every render
|
|
const [peaksCache, setPeaksCache] = React.useState<{
|
|
width: number;
|
|
min: Float32Array;
|
|
max: Float32Array;
|
|
} | null>(null);
|
|
|
|
// Handle resize
|
|
React.useEffect(() => {
|
|
const handleResize = () => {
|
|
if (containerRef.current) {
|
|
setWidth(containerRef.current.clientWidth);
|
|
}
|
|
};
|
|
|
|
handleResize();
|
|
window.addEventListener('resize', handleResize);
|
|
return () => window.removeEventListener('resize', handleResize);
|
|
}, []);
|
|
|
|
// Generate peaks in worker when audioBuffer or zoom changes
|
|
React.useEffect(() => {
|
|
if (!audioBuffer) {
|
|
setPeaksCache(null);
|
|
return;
|
|
}
|
|
|
|
const visibleWidth = Math.floor(width * zoom);
|
|
|
|
// Check if we already have peaks for this width
|
|
if (peaksCache && peaksCache.width === visibleWidth) {
|
|
return;
|
|
}
|
|
|
|
// Generate peaks in worker
|
|
const channelData = audioBuffer.getChannelData(0);
|
|
worker.generateMinMaxPeaks(channelData, visibleWidth).then((peaks) => {
|
|
setPeaksCache({
|
|
width: visibleWidth,
|
|
min: peaks.min,
|
|
max: peaks.max,
|
|
});
|
|
});
|
|
}, [audioBuffer, width, zoom, worker, peaksCache]);
|
|
|
|
// Draw waveform
|
|
React.useEffect(() => {
|
|
const canvas = canvasRef.current;
|
|
if (!canvas || !audioBuffer || !peaksCache) return;
|
|
|
|
const ctx = canvas.getContext('2d');
|
|
if (!ctx) return;
|
|
|
|
// Set canvas size
|
|
const dpr = window.devicePixelRatio || 1;
|
|
canvas.width = width * dpr;
|
|
canvas.height = height * dpr;
|
|
canvas.style.width = `${width}px`;
|
|
canvas.style.height = `${height}px`;
|
|
ctx.scale(dpr, dpr);
|
|
|
|
// Clear canvas
|
|
ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || '#f5f5f5';
|
|
ctx.fillRect(0, 0, width, height);
|
|
|
|
// Calculate visible width based on zoom
|
|
const visibleWidth = Math.floor(width * zoom);
|
|
|
|
// Use cached peaks
|
|
const { min, max } = peaksCache;
|
|
|
|
// Draw waveform
|
|
const middle = height / 2;
|
|
const baseScale = (height / 2) * amplitudeScale;
|
|
|
|
// Waveform color
|
|
const waveformColor = getComputedStyle(canvas).getPropertyValue('--color-waveform') || '#3b82f6';
|
|
const progressColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-progress') || '#10b981';
|
|
|
|
// Calculate progress position
|
|
const progressX = duration > 0 ? ((currentTime / duration) * visibleWidth) - scrollOffset : 0;
|
|
|
|
// Draw grid lines (every 1 second)
|
|
ctx.strokeStyle = 'rgba(128, 128, 128, 0.2)';
|
|
ctx.lineWidth = 1;
|
|
const secondsPerPixel = duration / visibleWidth;
|
|
const pixelsPerSecond = visibleWidth / duration;
|
|
|
|
for (let sec = 0; sec < duration; sec++) {
|
|
const x = (sec * pixelsPerSecond) - scrollOffset;
|
|
if (x >= 0 && x <= width) {
|
|
ctx.beginPath();
|
|
ctx.moveTo(x, 0);
|
|
ctx.lineTo(x, height);
|
|
ctx.stroke();
|
|
}
|
|
}
|
|
|
|
// Draw waveform with scroll offset
|
|
const startIdx = Math.max(0, Math.floor(scrollOffset));
|
|
const endIdx = Math.min(visibleWidth, Math.floor(scrollOffset + width));
|
|
|
|
for (let i = startIdx; i < endIdx; i++) {
|
|
const x = i - scrollOffset;
|
|
if (x < 0 || x >= width) continue;
|
|
|
|
const minVal = min[i] * baseScale;
|
|
const maxVal = max[i] * baseScale;
|
|
|
|
// Use different color for played portion
|
|
ctx.fillStyle = x < progressX ? progressColor : waveformColor;
|
|
|
|
ctx.fillRect(
|
|
x,
|
|
middle + minVal,
|
|
1,
|
|
Math.max(1, maxVal - minVal)
|
|
);
|
|
}
|
|
|
|
// Draw center line
|
|
ctx.strokeStyle = 'rgba(0, 0, 0, 0.1)';
|
|
ctx.lineWidth = 1;
|
|
ctx.beginPath();
|
|
ctx.moveTo(0, middle);
|
|
ctx.lineTo(width, middle);
|
|
ctx.stroke();
|
|
|
|
// Draw selection
|
|
if (selection) {
|
|
const selectionStartX = ((selection.start / duration) * visibleWidth) - scrollOffset;
|
|
const selectionEndX = ((selection.end / duration) * visibleWidth) - scrollOffset;
|
|
|
|
if (selectionEndX >= 0 && selectionStartX <= width) {
|
|
const clampedStart = Math.max(0, selectionStartX);
|
|
const clampedEnd = Math.min(width, selectionEndX);
|
|
|
|
ctx.fillStyle = 'rgba(59, 130, 246, 0.3)';
|
|
ctx.fillRect(clampedStart, 0, clampedEnd - clampedStart, height);
|
|
|
|
// Selection borders
|
|
ctx.strokeStyle = '#3b82f6';
|
|
ctx.lineWidth = 2;
|
|
|
|
if (selectionStartX >= 0 && selectionStartX <= width) {
|
|
ctx.beginPath();
|
|
ctx.moveTo(selectionStartX, 0);
|
|
ctx.lineTo(selectionStartX, height);
|
|
ctx.stroke();
|
|
}
|
|
|
|
if (selectionEndX >= 0 && selectionEndX <= width) {
|
|
ctx.beginPath();
|
|
ctx.moveTo(selectionEndX, 0);
|
|
ctx.lineTo(selectionEndX, height);
|
|
ctx.stroke();
|
|
}
|
|
}
|
|
}
|
|
|
|
// Draw playhead
|
|
if (progressX >= 0 && progressX <= width) {
|
|
ctx.strokeStyle = '#ef4444';
|
|
ctx.lineWidth = 2;
|
|
ctx.beginPath();
|
|
ctx.moveTo(progressX, 0);
|
|
ctx.lineTo(progressX, height);
|
|
ctx.stroke();
|
|
}
|
|
}, [audioBuffer, width, height, currentTime, duration, zoom, scrollOffset, amplitudeScale, selection, peaksCache]);
|
|
|
|
const handleClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!onSeek || !duration || isDragging) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
if (!canvas) return;
|
|
|
|
const rect = canvas.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
|
|
// Account for zoom and scroll
|
|
const visibleWidth = width * zoom;
|
|
const actualX = x + scrollOffset;
|
|
const clickedTime = (actualX / visibleWidth) * duration;
|
|
|
|
onSeek(clickedTime);
|
|
};
|
|
|
|
const handleMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!duration) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
if (!canvas) return;
|
|
|
|
const rect = canvas.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const visibleWidth = width * zoom;
|
|
const actualX = x + scrollOffset;
|
|
const clickedTime = (actualX / visibleWidth) * duration;
|
|
|
|
// Start selection on drag
|
|
setIsSelecting(true);
|
|
setSelectionStart(clickedTime);
|
|
if (onSelectionChange) {
|
|
onSelectionChange({ start: clickedTime, end: clickedTime });
|
|
}
|
|
};
|
|
|
|
const handleMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!duration) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
if (!canvas) return;
|
|
|
|
const rect = canvas.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const visibleWidth = width * zoom;
|
|
const actualX = x + scrollOffset;
|
|
const currentTime = (actualX / visibleWidth) * duration;
|
|
const clampedTime = Math.max(0, Math.min(duration, currentTime));
|
|
|
|
// Handle selection dragging
|
|
if (isSelecting && onSelectionChange && selectionStart !== null) {
|
|
setIsDragging(true); // Mark that we're dragging
|
|
const start = Math.min(selectionStart, clampedTime);
|
|
const end = Math.max(selectionStart, clampedTime);
|
|
onSelectionChange({ start, end });
|
|
}
|
|
};
|
|
|
|
const handleMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
// If we didn't drag (just clicked), seek to that position and clear selection
|
|
if (!isDragging && onSeek) {
|
|
const canvas = canvasRef.current;
|
|
if (canvas) {
|
|
const rect = canvas.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const visibleWidth = width * zoom;
|
|
const actualX = x + scrollOffset;
|
|
const clickTime = (actualX / visibleWidth) * duration;
|
|
const clampedTime = Math.max(0, Math.min(duration, clickTime));
|
|
// Seek and auto-play
|
|
onSeek(clampedTime, true);
|
|
// Clear selection on click
|
|
if (onSelectionChange) {
|
|
onSelectionChange(null);
|
|
}
|
|
}
|
|
}
|
|
// If we dragged, the selection is already set via handleMouseMove
|
|
|
|
setIsDragging(false);
|
|
setIsSelecting(false);
|
|
setSelectionStart(null);
|
|
};
|
|
|
|
const handleMouseLeave = () => {
|
|
setIsDragging(false);
|
|
setIsSelecting(false);
|
|
setSelectionStart(null);
|
|
};
|
|
|
|
return (
|
|
<div ref={containerRef} className={cn('w-full', className)}>
|
|
{audioBuffer ? (
|
|
<canvas
|
|
ref={canvasRef}
|
|
onMouseDown={handleMouseDown}
|
|
onMouseMove={handleMouseMove}
|
|
onMouseUp={handleMouseUp}
|
|
onMouseLeave={handleMouseLeave}
|
|
className={cn(
|
|
'w-full rounded-lg border border-border',
|
|
isDragging ? 'cursor-grabbing' : isSelecting ? 'cursor-text' : 'cursor-pointer'
|
|
)}
|
|
style={{ height: `${height}px` }}
|
|
/>
|
|
) : (
|
|
<div
|
|
className="flex items-center justify-center rounded-lg border-2 border-dashed border-border bg-muted/30"
|
|
style={{ height: `${height}px` }}
|
|
>
|
|
<p className="text-sm text-muted-foreground">
|
|
Load an audio file to see waveform
|
|
</p>
|
|
</div>
|
|
)}
|
|
</div>
|
|
);
|
|
}
|