// NOTE(review): DAW-style track row component — waveform canvas, mute/solo/volume/pan
// controls, a collapsible effects rack, and recording settings.
//
// WARNING(review): this source arrived mangled. The code was collapsed onto a few very
// long lines, and the JSX element tags in the render section (after `return (`) have
// been stripped — only attributes, expression children and {/* */} comments remain.
// Generic type arguments that resembled HTML tags (e.g. `useRef<HTMLCanvasElement>`,
// `ChangeEvent<HTMLInputElement>`) also appear stripped. The logic above the `return`
// has been re-flowed below with comments only (all code tokens unchanged); the JSX
// after `return (` is preserved verbatim and WILL NOT compile — recover the original
// markup from version control before editing this file further.
'use client';

import * as React from 'react';
import { Volume2, VolumeX, Headphones, Trash2, ChevronDown, ChevronRight, UnfoldHorizontal, Upload, Plus, Mic, Gauge } from 'lucide-react';
import type { Track as TrackType } from '@/types/track';
import { Button } from '@/components/ui/Button';
import { Slider } from '@/components/ui/Slider';
import { cn } from '@/lib/utils/cn';
import { EffectBrowser } from '@/components/effects/EffectBrowser';
import { EffectDevice } from '@/components/effects/EffectDevice';
import { createEffect, type EffectType } from '@/lib/audio/effects/chain';
import { InputLevelMeter } from '@/components/recording/InputLevelMeter';
import { RecordingSettings } from '@/components/recording/RecordingSettings';
import type { RecordingSettings as RecordingSettingsType } from '@/lib/hooks/useRecording';

/**
 * Props for the {@link Track} row. All mutations go through callbacks — the parent
 * timeline/arranger owns the actual track state; this component is presentational
 * plus local interaction state (name editing, drag-selection, drag-and-drop).
 */
export interface TrackProps {
  track: TrackType;
  /** Horizontal zoom factor used when sampling the waveform for drawing. */
  zoom: number;
  /** Transport position in seconds; drives the playhead overlay. */
  currentTime: number;
  /** Total duration in seconds; 0 disables seeking and selection. */
  duration: number;
  isSelected?: boolean;
  onSelect?: () => void;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  onSeek?: (time: number) => void;
  /** Receives the decoded buffer after a file is picked or dropped onto the track. */
  onLoadAudio?: (buffer: AudioBuffer) => void;
  onToggleEffect?: (effectId: string) => void;
  onRemoveEffect?: (effectId: string) => void;
  // NOTE(review): `any` defeats type checking here — consider a per-effect
  // parameters type derived from the effect chain module.
  onUpdateEffect?: (effectId: string, parameters: any) => void;
  onAddEffect?: (effectType: EffectType) => void;
  /** Time-range selection on this track in seconds, or null to clear. */
  onSelectionChange?: (selection: { start: number; end: number } | null) => void;
  onToggleRecordEnable?: () => void;
  isRecording?: boolean;
  /** Normalized 0–1 input level while recording (converted back to dB for display). */
  recordingLevel?: number;
  /** Normalized 0–1 output level during playback. */
  playbackLevel?: number;
  recordingSettings?: RecordingSettingsType;
  onInputGainChange?: (gain: number) => void;
  onRecordMonoChange?: (mono: boolean) => void;
  onSampleRateChange?: (sampleRate: number) => void;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  isSelected,
  onSelect,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onSeek,
  onLoadAudio,
  onToggleEffect,
  onRemoveEffect,
  onUpdateEffect,
  onAddEffect,
  onSelectionChange,
  onToggleRecordEnable,
  isRecording = false,
  recordingLevel = 0,
  playbackLevel = 0,
  recordingSettings,
  onInputGainChange,
  onRecordMonoChange,
  onSampleRateChange,
}: TrackProps) {
  // NOTE(review): these refs were presumably typed (e.g. useRef<HTMLCanvasElement | null>)
  // before the tag-stripping corruption; as written, `current` is inferred as `null`,
  // which cannot typecheck against the `.getContext` / `.focus` / `.click` calls below.
  const canvasRef = React.useRef(null);
  const containerRef = React.useRef(null);
  const fileInputRef = React.useRef(null);
  const [isEditingName, setIsEditingName] = React.useState(false);
  const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
  const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
  const [showEffects, setShowEffects] = React.useState(false);
  // Bumped whenever the document theme class changes, to force a waveform redraw.
  const [themeKey, setThemeKey] = React.useState(0);
  const inputRef = React.useRef(null);
  // Selection state
  const [isSelecting, setIsSelecting] = React.useState(false);
  // NOTE(review): likely useState<number | null>(null) originally — type arg stripped.
  const [selectionStart, setSelectionStart] = React.useState(null);
  const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
  const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);

  // Switch the name label into an editable input, seeded with the current name.
  const handleNameClick = () => {
    setIsEditingName(true);
    setNameInput(String(track.name || 'Untitled Track'));
  };

  // Commit a non-empty trimmed name on blur; otherwise revert to the stored name.
  const handleNameBlur = () => {
    setIsEditingName(false);
    if (nameInput.trim()) {
      onNameChange(nameInput.trim());
    } else {
      setNameInput(String(track.name || 'Untitled Track'));
    }
  };

  // Enter commits (via blur -> handleNameBlur); Escape reverts and exits edit mode.
  const handleNameKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter') {
      inputRef.current?.blur();
    } else if (e.key === 'Escape') {
      setNameInput(String(track.name || 'Untitled Track'));
      setIsEditingName(false);
    }
  };

  // Focus and select-all the name input whenever editing starts.
  React.useEffect(() => {
    if (isEditingName && inputRef.current) {
      inputRef.current.focus();
      inputRef.current.select();
    }
  }, [isEditingName]);

  // Listen for theme changes
  React.useEffect(() => {
    const observer = new MutationObserver(() => {
      // Increment key to force waveform redraw
      setThemeKey((prev) => prev + 1);
    });
    // Watch for class changes on document element (dark mode toggle)
    observer.observe(document.documentElement, { attributes: true, attributeFilter: ['class'], });
    return () => observer.disconnect();
  }, []);

  // Draw waveform: min/max peaks per pixel column from channel 0, plus selection
  // overlay and playhead. Redraws on any dependency change (including themeKey).
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current) return;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;
    // Use parent container's size since canvas is absolute positioned
    const parent = canvas.parentElement;
    if (!parent) return;
    // Scale the backing store by devicePixelRatio for crisp rendering on HiDPI.
    const dpr = window.devicePixelRatio || 1;
    const rect = parent.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);
    const width = rect.width;
    const height = rect.height;
    // Clear canvas with theme color
    const bgColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || 'rgb(15, 23, 42)';
    ctx.fillStyle = bgColor;
    ctx.fillRect(0, 0, width, height);
    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    const samplesPerPixel = Math.floor(buffer.length / (width * zoom));
    // Draw waveform
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;
    for (let x = 0; x < width; x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      const endSample = Math.floor((x + 1) * samplesPerPixel);
      // Track the min/max sample in this column; drawn as one vertical line.
      let min = 1.0;
      let max = -1.0;
      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }
      // Map sample range [-1, 1] to canvas y (0 at top, height at bottom).
      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);
      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }
    // Draw center line
    ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();
    // Draw selection overlay
    if (track.selection && duration > 0) {
      const selStartX = (track.selection.start / duration) * width;
      const selEndX = (track.selection.end / duration) * width;
      // Draw selection background
      ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
      ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
      // Draw selection borders
      ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
      ctx.lineWidth = 2;
      // Start border
      ctx.beginPath();
      ctx.moveTo(selStartX, 0);
      ctx.lineTo(selStartX, height);
      ctx.stroke();
      // End border
      ctx.beginPath();
      ctx.moveTo(selEndX, 0);
      ctx.lineTo(selEndX, height);
      ctx.stroke();
    }
    // Draw playhead
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
  }, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration, themeKey, track.selection]);

  // Mouse-down arms a potential drag-selection; whether it becomes a selection or a
  // plain seek-click is decided in move/up handlers via a 3px movement threshold.
  const handleCanvasMouseDown = (e: React.MouseEvent) => {
    if (!duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    // NOTE(review): `y` is computed but unused below — candidate for removal.
    const y = e.clientY - rect.top;
    const clickTime = (x / rect.width) * duration;
    // Store drag start position
    setDragStartPos({ x: e.clientX, y: e.clientY });
    setIsSelectingByDrag(false);
    // Start selection immediately (will be used if user drags)
    setIsSelecting(true);
    setSelectionStart(clickTime);
  };

  const handleCanvasMouseMove = (e: React.MouseEvent) => {
    if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    // NOTE(review): this local shadows the `currentTime` prop — rename for clarity.
    const currentTime = (x / rect.width) * duration;
    // Check if user has moved enough to be considered dragging (threshold: 3 pixels)
    const dragDistance = Math.sqrt( Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2) );
    if (dragDistance > 3) {
      setIsSelectingByDrag(true);
    }
    // If dragging, update selection
    if (isSelectingByDrag || dragDistance > 3) {
      // Clamp to valid time range
      const clampedTime = Math.max(0, Math.min(duration, currentTime));
      // Update selection (ensure start < end)
      const start = Math.min(selectionStart, clampedTime);
      const end = Math.max(selectionStart, clampedTime);
      onSelectionChange?.({ start, end });
    }
  };

  const handleCanvasMouseUp = (e: React.MouseEvent) => {
    if (!duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;
    // Check if user actually dragged (check distance directly, not state)
    const didDrag = dragStartPos ? Math.sqrt( Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2) ) > 3 : false;
    // If user didn't drag (just clicked), clear selection and seek
    if (!didDrag) {
      onSelectionChange?.(null);
      if (onSeek) {
        onSeek(clickTime);
      }
    }
    // Reset drag state
    setIsSelecting(false);
    setIsSelectingByDrag(false);
    setDragStartPos(null);
  };

  // Handle mouse leaving canvas during selection
  React.useEffect(() => {
    const handleGlobalMouseUp = () => {
      if (isSelecting) {
        setIsSelecting(false);
        setIsSelectingByDrag(false);
        setDragStartPos(null);
      }
    };
    window.addEventListener('mouseup', handleGlobalMouseUp);
    return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
  }, [isSelecting]);

  // Decode a picked file and hand the AudioBuffer up to the parent.
  // NOTE(review): likely React.ChangeEvent<HTMLInputElement> originally — the type arg
  // appears stripped, so `e.target.files` / `e.target.value` won't typecheck as-is.
  // NOTE(review): a new AudioContext is created per decode and never closed — consider
  // reusing one context or calling close() after decodeAudioData.
  const handleFileChange = async (e: React.ChangeEvent) => {
    const file = e.target.files?.[0];
    if (!file || !onLoadAudio) return;
    try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      onLoadAudio(audioBuffer);
      // Update track name to filename if it's still default
      if (track.name === 'New Track' || track.name === 'Untitled Track') {
        const fileName = file.name.replace(/\.[^/.]+$/, '');
        onNameChange(fileName);
      }
    } catch (error) {
      console.error('Failed to load audio file:', error);
    }
    // Reset input
    e.target.value = '';
  };

  // Open the hidden file input from the "load audio" placeholder.
  const handleLoadAudioClick = () => {
    fileInputRef.current?.click();
  };

  // Drag-and-drop visual state for the empty-track drop target.
  const [isDragging, setIsDragging] = React.useState(false);

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(true);
  };

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
  };

  // Same decode path as handleFileChange, but fed from a drop event and guarded by
  // a MIME-type check.
  const handleDrop = async (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
    const file = e.dataTransfer.files?.[0];
    if (!file || !onLoadAudio) return;
    // Check if it's an audio file
    if (!file.type.startsWith('audio/')) {
      console.warn('Dropped file is not an audio file');
      return;
    }
    try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      onLoadAudio(audioBuffer);
      // Update track name to filename if it's still default
      if (track.name === 'New Track' || track.name === 'Untitled Track') {
        const fileName = file.name.replace(/\.[^/.]+$/, '');
        onNameChange(fileName);
      }
    } catch (error) {
      console.error('Failed to load audio file:', error);
    }
  };

  // Collapsed tracks render at a fixed 48px; otherwise use the track's own height.
  const trackHeight = track.collapsed ? 48 : track.height;

  // NOTE(review): EVERYTHING BELOW IS CORRUPTED JSX — the element tags were stripped
  // from the source; only attributes, expression children and {/* */} comments remain.
  // Preserved verbatim. Do not attempt to reconstruct by hand; restore from VCS.
  return (
{/* Top: Track Row (Control Panel + Waveform) */}
{/* Left: Track Control Panel (Fixed Width) */}
e.stopPropagation()} > {/* Track Name & Collapse Toggle */}
{isEditingName ? ( setNameInput(e.target.value)} onBlur={handleNameBlur} onKeyDown={handleNameKeyDown} className="w-full px-2 py-1 text-sm font-medium bg-background border border-border rounded" /> ) : (
{String(track.name || 'Untitled Track')}
)}
{/* Record Enable Button */} {onToggleRecordEnable && ( )} {/* Solo Button */} {/* Mute Button */} {/* Remove Button */}
{/* Track Controls - Only show when not collapsed */} {!track.collapsed && ( <> {/* Volume */}
{Math.round(track.volume * 100)}%
{/* Pan */}
{track.pan === 0 ? 'C' : track.pan < 0 ? `L${Math.abs(Math.round(track.pan * 100))}` : `R${Math.round(track.pan * 100)}`}
{/* Level Meter (shows input when recording, output level otherwise) */}
{(() => { const level = track.recordEnabled || isRecording ? recordingLevel : playbackLevel; // Convert normalized (0-1) back to dB // normalized = (dB - (-60)) / 60, so dB = (normalized * 60) - 60 const db = (level * 60) - 60; return level === 0 ? '-∞' : `${db.toFixed(0)}`; })()}
{/* Recording Settings - Show when track is armed */} {track.recordEnabled && recordingSettings && onInputGainChange && onRecordMonoChange && onSampleRateChange && ( )} )}
{/* Right: Waveform Area (Flexible Width) */}
{track.audioBuffer ? ( ) : ( !track.collapsed && ( <>
{ e.stopPropagation(); handleLoadAudioClick(); }} onDragOver={handleDragOver} onDragLeave={handleDragLeave} onDrop={handleDrop} >

{isDragging ? 'Drop audio file here' : 'Click to load audio file'}

or drag & drop

) )}
{/* Bottom: Effects Section (Collapsible, Full Width) */} {!track.collapsed && (
{/* Effects Header - clickable to toggle */}
setShowEffects(!showEffects)} > {showEffects ? ( ) : ( )} {/* Show mini effect chain when collapsed */} {!showEffects && track.effectChain.effects.length > 0 ? (
{track.effectChain.effects.map((effect) => (
{effect.name}
))}
) : ( Devices ({track.effectChain.effects.length}) )}
{/* Horizontal scrolling device rack - expanded state */} {showEffects && (
{track.effectChain.effects.length === 0 ? (
No devices. Click + to add an effect.
) : ( track.effectChain.effects.map((effect) => ( onToggleEffect?.(effect.id)} onRemove={() => onRemoveEffect?.(effect.id)} onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)} /> )) )}
)}
)} {/* Effect Browser Dialog */} setEffectBrowserOpen(false)} onSelectEffect={(effectType) => { if (onAddEffect) { onAddEffect(effectType); } }} />
); }