audio-ui/components/tracks/Track.tsx

feat: implement Phase 7.1-7.2 multi-track infrastructure

Added core multi-track support with track management and controls:

**Track Types & Utilities:**
- Track interface with audio buffer, controls (volume/pan/solo/mute)
- Track utility functions for creation, mixing, and gain calculation
- Track color system with 9 preset colors
- Configurable track heights (60-300px)

**Components:**
- TrackHeader: Collapsible track controls with inline name editing
  - Solo/Mute buttons with visual feedback
  - Volume slider (0-100%) and Pan control (L-C-R)
  - Track color indicator and remove button
- Track: Waveform display component with canvas rendering
  - Click-to-seek on waveform
  - Playhead visualization
  - Support for collapsed state
- TrackList: Container managing multiple tracks
  - Scrollable track list with custom scrollbar
  - Add track button
  - Empty state UI

**State Management:**
- useMultiTrack hook with localStorage persistence
- Add/remove/update/reorder track operations
- Track buffer management

Features implemented:
- ✅ Track creation and removal
- ✅ Track naming (editable)
- ✅ Track colors
- ✅ Solo/Mute per track
- ✅ Volume fader per track (0-100%)
- ✅ Pan control per track (L-C-R)
- ✅ Track collapse/expand
- ✅ Track height configuration
- ✅ Waveform visualization per track
- ✅ Multi-track audio mixing utilities

Next: Integrate into AudioEditor and implement multi-track playback

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-17 20:59:36 +01:00
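
The mixing and gain utilities referenced above live elsewhere in the repo and are not shown here. As a minimal sketch of what a per-track gain computation could look like, assuming an equal-power pan law and the volume/pan/solo/mute controls listed in the commit (the name `trackGain` and its exact signature are hypothetical):

```ts
// Hypothetical sketch, not the repo's actual utility: per-channel gains
// for one track, honoring mute and the global solo state.
function trackGain(
  track: { volume: number; pan: number; muted: boolean; soloed: boolean },
  anySoloed: boolean
): { left: number; right: number } {
  // A muted track, or any non-soloed track while a solo is active, is silent.
  if (track.muted || (anySoloed && !track.soloed)) {
    return { left: 0, right: 0 };
  }
  // Equal-power pan: map pan in [-1 (L), 1 (R)] to an angle in [0, PI/2]
  // so that left^2 + right^2 stays constant across the sweep.
  const angle = ((track.pan + 1) / 2) * (Math.PI / 2);
  return {
    left: track.volume * Math.cos(angle),
    right: track.volume * Math.sin(angle),
  };
}
```

An equal-power law keeps perceived loudness roughly constant as a track sweeps from L to R; a plain linear crossfade dips about 3 dB in power at center.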
'use client';
import * as React from 'react';
import type { Track as TrackType } from '@/types/track';
import { TrackHeader } from './TrackHeader';
import { cn } from '@/lib/utils/cn';
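
// The Track model lives in @/types/track and is not shown in this file.
// Based on the fields this component reads and the commit message above, it
// presumably looks roughly like the sketch below; field names beyond the ones
// used here (e.g. `id`, `volume`, `pan`) are assumptions, not the actual
// definition:
//
//   interface Track {
//     id: string;
//     name: string;
//     color: string;                   // one of the 9 preset colors
//     height: number;                  // configurable, 60-300px
//     audioBuffer: AudioBuffer | null;
//     volume: number;                  // fader value (0-100% in the UI)
//     pan: number;                     // L-C-R pan position
//     muted: boolean;
//     soloed: boolean;
//     collapsed: boolean;
//     selected: boolean;
//   }
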
export interface TrackProps {
  track: TrackType;
  zoom: number;
  currentTime: number;
  duration: number;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  onSeek?: (time: number) => void;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onSeek,
}: TrackProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);

  // Draw the waveform whenever the buffer, appearance, zoom, or playhead
  // changes. Note: this redraws the whole waveform on every currentTime tick;
  // a separate playhead overlay would avoid that, but correctness is fine.
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current || track.collapsed) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Size the backing store for the device pixel ratio so lines stay crisp.
    // Assigning width/height also resets the context transform, so the
    // scale() below does not accumulate across re-runs of this effect.
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);

    const width = rect.width;
    const height = rect.height;

    // Clear canvas
    ctx.fillStyle = 'rgb(15, 23, 42)';
    ctx.fillRect(0, 0, width, height);

    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    // Clamp to at least 1: at high zoom, Math.floor alone can yield 0, which
    // would leave each column's min/max at their sentinel values below.
    const samplesPerPixel = Math.max(
      1,
      Math.floor(buffer.length / (width * zoom))
    );

    // Draw the waveform as one vertical min/max peak line per pixel column.
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;

    for (let x = 0; x < width; x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      const endSample = Math.floor((x + 1) * samplesPerPixel);
      let min = 1.0;
      let max = -1.0;
      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }
      // Map the [-1, 1] sample range to canvas y coordinates (y grows down).
      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);
      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }

    // Draw center line
    ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();

    // Draw playhead
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
    // track.height is read indirectly via getBoundingClientRect, so include
    // it in the deps to redraw when the track is resized while paused.
  }, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration]);

  // Map a click's x offset within the canvas to a time in [0, duration]
  // and report it upstream.
  const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!onSeek || !duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;
    onSeek(clickTime);
  };

  if (track.collapsed) {
    return (
      <div className="border-b border-border">
        <TrackHeader
          track={track}
          onToggleMute={onToggleMute}
          onToggleSolo={onToggleSolo}
          onToggleCollapse={onToggleCollapse}
          onVolumeChange={onVolumeChange}
          onPanChange={onPanChange}
          onRemove={onRemove}
          onNameChange={onNameChange}
        />
      </div>
    );
  }

  return (
    <div
      ref={containerRef}
      className={cn(
        'border-b border-border',
        track.selected && 'ring-2 ring-primary ring-inset'
      )}
    >
      <TrackHeader
        track={track}
        onToggleMute={onToggleMute}
        onToggleSolo={onToggleSolo}
        onToggleCollapse={onToggleCollapse}
        onVolumeChange={onVolumeChange}
        onPanChange={onPanChange}
        onRemove={onRemove}
        onNameChange={onNameChange}
      />
      {/* Numeric height is treated as pixels by React. */}
      <div className="relative" style={{ height: track.height }}>
        {track.audioBuffer ? (
          <canvas
            ref={canvasRef}
            className="w-full h-full cursor-pointer"
            onClick={handleCanvasClick}
          />
        ) : (
          <div className="flex items-center justify-center h-full text-sm text-muted-foreground">
            No audio loaded
          </div>
        )}
      </div>
    </div>
  );
}
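
// Example wiring from a parent component (hypothetical sketch: the TrackList
// container and useMultiTrack hook are described in the commit message but not
// shown in this file, so the exact API below is assumed, not actual):
//
//   const { tracks, updateTrack, removeTrack } = useMultiTrack();
//
//   {tracks.map((t) => (
//     <Track
//       key={t.id}
//       track={t}
//       zoom={zoom}
//       currentTime={currentTime}
//       duration={duration}
//       onToggleMute={() => updateTrack(t.id, { muted: !t.muted })}
//       onToggleSolo={() => updateTrack(t.id, { soloed: !t.soloed })}
//       onToggleCollapse={() => updateTrack(t.id, { collapsed: !t.collapsed })}
//       onVolumeChange={(volume) => updateTrack(t.id, { volume })}
//       onPanChange={(pan) => updateTrack(t.id, { pan })}
//       onRemove={() => removeTrack(t.id)}
//       onNameChange={(name) => updateTrack(t.id, { name })}
//       onSeek={(time) => seek(time)}
//     />
//   ))}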