'use client';

import * as React from 'react';

import type { Track as TrackType } from '@/types/track';
import { TrackHeader } from './TrackHeader';
import { cn } from '@/lib/utils/cn';

export interface TrackProps {
  track: TrackType;
  zoom: number;
  currentTime: number;
  duration: number;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  onSeek?: (time: number) => void;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onSeek,
}: TrackProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);

  // Redraw the waveform whenever the audio, zoom, or playhead changes. The
  // playhead is painted in the same pass, which is why currentTime is a
  // dependency.
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current || track.collapsed) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Size the backing bitmap to device pixels so the waveform stays crisp
    // on high-DPI displays; all drawing below uses CSS-pixel coordinates.
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);

    const width = rect.width;
    const height = rect.height;

    // Clear canvas
    ctx.fillStyle = 'rgb(15, 23, 42)';
    ctx.fillRect(0, 0, width, height);

    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    // Clamp to at least one sample per pixel so the bucket loop below never
    // collapses to an empty range on short buffers or high zoom levels.
    const samplesPerPixel = Math.max(
      1,
      Math.floor(buffer.length / (width * zoom))
    );

    // Draw the waveform: one vertical line per pixel column, spanning the
    // min/max sample values in that column's bucket.
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;

    for (let x = 0; x < width; x++) {
      const startSample = x * samplesPerPixel;
      const endSample = (x + 1) * samplesPerPixel;

      let min = 1.0;
      let max = -1.0;
      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }

      // Map sample space [-1, 1] to canvas space [height, 0].
      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);

      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }

    // Draw center line
    ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();

    // Draw playhead
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
  }, [track.audioBuffer, track.color, track.collapsed, zoom, currentTime, duration]);

  // Translate a click's x offset into a time and delegate to the parent.
  const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!onSeek || !duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;
    onSeek(clickTime);
  };

  if (track.collapsed) {
    return (
      <div ref={containerRef} className="border-b border-slate-800">
        <TrackHeader
          track={track}
          onToggleMute={onToggleMute}
          onToggleSolo={onToggleSolo}
          onToggleCollapse={onToggleCollapse}
          onVolumeChange={onVolumeChange}
          onPanChange={onPanChange}
          onRemove={onRemove}
          onNameChange={onNameChange}
        />
      </div>
    );
  }

  return (
    <div
      ref={containerRef}
      // Dim the whole lane while muted (assumes a `muted` flag on TrackType).
      className={cn('border-b border-slate-800', track.muted && 'opacity-60')}
    >
      <TrackHeader
        track={track}
        onToggleMute={onToggleMute}
        onToggleSolo={onToggleSolo}
        onToggleCollapse={onToggleCollapse}
        onVolumeChange={onVolumeChange}
        onPanChange={onPanChange}
        onRemove={onRemove}
        onNameChange={onNameChange}
      />
      {track.audioBuffer ? (
        <canvas
          ref={canvasRef}
          // Sized by CSS; the draw effect rescales the bitmap to match.
          className="h-24 w-full cursor-pointer"
          onClick={handleCanvasClick}
        />
      ) : (
        <div className="flex h-24 items-center justify-center text-sm text-slate-500">
          No audio loaded
        </div>
      )}
    </div>
  );
}
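
/*
 * Usage sketch (illustrative only, not part of this module): a parent mixer
 * view owns the track list and playback state and passes per-track callbacks
 * down. The helpers referenced here (`toggleMute`, `setVolume`, `seek`, and
 * friends) and the `track.id` field are assumptions about the surrounding
 * app, not exports of this file.
 *
 *   {tracks.map((track) => (
 *     <Track
 *       key={track.id}
 *       track={track}
 *       zoom={zoom}
 *       currentTime={currentTime}
 *       duration={duration}
 *       onToggleMute={() => toggleMute(track.id)}
 *       onToggleSolo={() => toggleSolo(track.id)}
 *       onToggleCollapse={() => toggleCollapse(track.id)}
 *       onVolumeChange={(volume) => setVolume(track.id, volume)}
 *       onPanChange={(pan) => setPan(track.id, pan)}
 *       onRemove={() => removeTrack(track.id)}
 *       onNameChange={(name) => renameTrack(track.id, name)}
 *       onSeek={seek}
 *     />
 *   ))}
 */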