'use client';

// NOTE(review): this file was recovered from a copy in which every `<Tag …>`
// sequence had been stripped — all JSX element tags and several simple generic
// type arguments (e.g. `Partial<…>`, `useRef<…>`) were lost, and original line
// breaks were collapsed. The hook/handler logic below is transcribed from the
// recovered source; the JSX in the render section is a best-effort
// reconstruction guided by the surviving comments, attributes and children.
// Verify markup (class names, component prop lists) against the original
// before merging.

import * as React from 'react';
import {
  Volume2,
  VolumeX,
  Headphones,
  Trash2,
  ChevronDown,
  ChevronRight,
  ChevronUp,
  UnfoldHorizontal,
  Upload,
  Mic,
  Gauge,
  Circle,
  Sparkles,
} from 'lucide-react';
import type { Track as TrackType } from '@/types/track';
import { COLLAPSED_TRACK_HEIGHT, MIN_TRACK_HEIGHT, MAX_TRACK_HEIGHT } from '@/types/track';
import { Button } from '@/components/ui/Button';
import { Slider } from '@/components/ui/Slider';
import { cn } from '@/lib/utils/cn';
import type { EffectType } from '@/lib/audio/effects/chain';
import { TrackControls } from './TrackControls';
import { AutomationLane } from '@/components/automation/AutomationLane';
import type {
  AutomationLane as AutomationLaneType,
  AutomationPoint as AutomationPointType,
} from '@/types/automation';
import { createAutomationPoint } from '@/lib/audio/automation/utils';
import { createAutomationLane } from '@/lib/audio/automation-utils';
import { EffectDevice } from '@/components/effects/EffectDevice';
import { EffectBrowser } from '@/components/effects/EffectBrowser';

/**
 * Props for a single DAW-style track row: audio/waveform display, mixer
 * controls, automation lane and per-track effect chain.
 */
export interface TrackProps {
  track: TrackType;
  /** Horizontal zoom factor applied to the waveform rendering. */
  zoom: number;
  /** Transport position in seconds (drives the playhead). */
  currentTime: number;
  /** Total project duration in seconds; 0 disables click-to-seek/selection. */
  duration: number;
  isSelected?: boolean;
  onSelect?: () => void;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  // Fixed: recovered source had bare `Partial` (missing type argument).
  onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
  onSeek?: (time: number) => void;
  onLoadAudio?: (buffer: AudioBuffer) => void;
  onToggleEffect?: (effectId: string) => void;
  onRemoveEffect?: (effectId: string) => void;
  // `parameters` shape is effect-specific; typed `any` in the original API.
  onUpdateEffect?: (effectId: string, parameters: any) => void;
  onAddEffect?: (effectType: EffectType) => void;
  onSelectionChange?: (selection: { start: number; end: number } | null) => void;
  onToggleRecordEnable?: () => void;
  isRecording?: boolean;
  recordingLevel?: number;
  playbackLevel?: number;
  /** Touch/latch automation recording: reports when a fader/knob is grabbed. */
  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
  isPlaying?: boolean;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  isSelected,
  onSelect,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onUpdateTrack,
  onSeek,
  onLoadAudio,
  onToggleEffect,
  onRemoveEffect,
  onUpdateEffect,
  onAddEffect,
  onSelectionChange,
  onToggleRecordEnable,
  isRecording = false,
  recordingLevel = 0,
  playbackLevel = 0,
  onParameterTouched,
  isPlaying = false,
}: TrackProps) {
  // Ref generics restored (stripped in the recovered copy) so `.current` is typed.
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);
  const fileInputRef = React.useRef<HTMLInputElement>(null);
  const [isEditingName, setIsEditingName] = React.useState(false);
  const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
  // Bumped on theme change to force the waveform effect to re-run.
  const [themeKey, setThemeKey] = React.useState(0);
  const inputRef = React.useRef<HTMLInputElement>(null);
  const [isResizing, setIsResizing] = React.useState(false);
  const resizeStartRef = React.useRef({ y: 0, height: 0 });
  const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);

  // Selection state
  const [isSelecting, setIsSelecting] = React.useState(false);
  // Fixed: was `useState(null)` (generic stripped); a number is assigned later.
  const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
  const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
  const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);

  // Touch callbacks for automation recording.
  // Only fire while playing, and only for lanes in touch/latch mode.
  // queueMicrotask defers the parent callback out of the event handler.
  const handlePanTouchStart = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const panLane = track.automation.lanes.find((l) => l.parameterId === 'pan');
      if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
        queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handlePanTouchEnd = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const panLane = track.automation.lanes.find((l) => l.parameterId === 'pan');
      if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
        queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handleVolumeTouchStart = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const volumeLane = track.automation.lanes.find((l) => l.parameterId === 'volume');
      if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
        queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handleVolumeTouchEnd = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const volumeLane = track.automation.lanes.find((l) => l.parameterId === 'volume');
      if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
        queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, false));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  // Auto-create an automation lane for the selected parameter if it doesn't
  // exist yet (lane creation must not happen during render).
  React.useEffect(() => {
    if (!track.automation?.showAutomation) return;
    const selectedParameterId = track.automation.selectedParameterId || 'volume';
    const laneExists = track.automation.lanes.some(
      (lane) => lane.parameterId === selectedParameterId
    );
    if (laneExists) return;

    // Build list of available parameters: built-ins plus one per effect param.
    const availableParameters: Array<{ id: string; name: string }> = [
      { id: 'volume', name: 'Volume' },
      { id: 'pan', name: 'Pan' },
    ];
    track.effectChain.effects.forEach((effect) => {
      if (effect.parameters) {
        Object.keys(effect.parameters).forEach((paramKey) => {
          const parameterId = `effect.${effect.id}.${paramKey}`;
          const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
          availableParameters.push({ id: parameterId, name: paramName });
        });
      }
    });

    const paramInfo = availableParameters.find((p) => p.id === selectedParameterId);
    if (!paramInfo) return;

    // Determine value range based on parameter type.
    let valueRange = { min: 0, max: 1 };
    let unit = '';
    let formatter: ((value: number) => string) | undefined;
    if (selectedParameterId === 'volume') {
      unit = 'dB';
    } else if (selectedParameterId === 'pan') {
      // 0.5 is center; display as e.g. "50L" / "C" / "50R".
      formatter = (value: number) => {
        if (value === 0.5) return 'C';
        if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
        return `${((value - 0.5) * 200).toFixed(0)}R`;
      };
    } else if (selectedParameterId.startsWith('effect.')) {
      // Parse effect parameter id: effect.{effectId}.{paramName}
      const parts = selectedParameterId.split('.');
      if (parts.length === 3) {
        const paramName = parts[2];
        // Set ranges based on parameter name.
        if (paramName === 'frequency') {
          valueRange = { min: 20, max: 20000 };
          unit = 'Hz';
        } else if (paramName === 'Q') {
          valueRange = { min: 0.1, max: 20 };
        } else if (paramName === 'gain') {
          valueRange = { min: -40, max: 40 };
          unit = 'dB';
        }
      }
    }

    const newLane = createAutomationLane(track.id, selectedParameterId, paramInfo.name, {
      min: valueRange.min,
      max: valueRange.max,
      unit,
      formatter,
    });
    onUpdateTrack(track.id, {
      automation: {
        ...track.automation,
        lanes: [...track.automation.lanes, newLane],
        selectedParameterId,
      },
    });
  }, [
    track.automation?.showAutomation,
    track.automation?.selectedParameterId,
    track.automation?.lanes,
    track.effectChain.effects,
    track.id,
    onUpdateTrack,
  ]);

  // --- Track-name inline editing ---------------------------------------------
  const handleNameClick = () => {
    setIsEditingName(true);
    setNameInput(String(track.name || 'Untitled Track'));
  };

  const handleNameBlur = () => {
    setIsEditingName(false);
    if (nameInput.trim()) {
      onNameChange(nameInput.trim());
    } else {
      // Empty input: revert to the current name instead of committing.
      setNameInput(String(track.name || 'Untitled Track'));
    }
  };

  const handleNameKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter') {
      inputRef.current?.blur(); // blur commits via handleNameBlur
    } else if (e.key === 'Escape') {
      setNameInput(String(track.name || 'Untitled Track'));
      setIsEditingName(false);
    }
  };

  React.useEffect(() => {
    if (isEditingName && inputRef.current) {
      inputRef.current.focus();
      inputRef.current.select();
    }
  }, [isEditingName]);

  // Listen for theme changes: watch class mutations on <html> (dark-mode
  // toggle) and bump themeKey to force a waveform redraw with new CSS vars.
  React.useEffect(() => {
    const observer = new MutationObserver(() => {
      setThemeKey((prev) => prev + 1);
    });
    observer.observe(document.documentElement, {
      attributes: true,
      attributeFilter: ['class'],
    });
    return () => observer.disconnect();
  }, []);

  // Draw waveform, selection overlay and playhead onto the canvas.
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current) return;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Use the parent container's size since the canvas is absolutely positioned.
    const parent = canvas.parentElement;
    if (!parent) return;
    const dpr = window.devicePixelRatio || 1;
    const rect = parent.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);
    const width = rect.width;
    const height = rect.height;

    // Clear canvas with theme color.
    const bgColor =
      getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || 'rgb(15, 23, 42)';
    ctx.fillStyle = bgColor;
    ctx.fillRect(0, 0, width, height);

    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    // Fixed: clamp to >= 1 — at high zoom Math.floor could yield 0, leaving the
    // min/max sentinels untouched and painting a full-height bar at every x.
    const samplesPerPixel = Math.max(1, Math.floor(buffer.length / (width * zoom)));

    // Min/max peak rendering: one vertical line per pixel column.
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;
    for (let x = 0; x < width; x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      const endSample = Math.floor((x + 1) * samplesPerPixel);
      let min = 1.0;
      let max = -1.0;
      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }
      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);
      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }

    // Center (zero-amplitude) line.
    ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();

    // Selection overlay (translucent fill + solid borders).
    if (track.selection && duration > 0) {
      const selStartX = (track.selection.start / duration) * width;
      const selEndX = (track.selection.end / duration) * width;
      ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
      ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
      ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(selStartX, 0);
      ctx.lineTo(selStartX, height);
      ctx.stroke();
      ctx.beginPath();
      ctx.moveTo(selEndX, 0);
      ctx.lineTo(selEndX, height);
      ctx.stroke();
    }

    // Playhead.
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
  }, [
    track.audioBuffer,
    track.color,
    track.collapsed,
    track.height,
    zoom,
    currentTime,
    duration,
    themeKey,
    track.selection,
  ]);

  // --- Canvas click / drag-to-select -----------------------------------------
  const handleCanvasMouseDown = (e: React.MouseEvent) => {
    if (!duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;
    // Remember where the gesture started; whether it becomes a drag-selection
    // or a click-to-seek is decided by movement distance.
    setDragStartPos({ x: e.clientX, y: e.clientY });
    setIsSelectingByDrag(false);
    setIsSelecting(true);
    setSelectionStart(clickTime);
  };

  const handleCanvasMouseMove = (e: React.MouseEvent) => {
    if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    // Renamed from `currentTime` (shadowed the transport-position prop).
    const pointerTime = (x / rect.width) * duration;

    // 3px threshold distinguishes a drag from a plain click.
    const dragDistance = Math.sqrt(
      Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
    );
    if (dragDistance > 3) {
      setIsSelectingByDrag(true);
    }

    // If dragging, update selection continuously.
    if (isSelectingByDrag || dragDistance > 3) {
      const clampedTime = Math.max(0, Math.min(duration, pointerTime));
      // Normalize so start < end regardless of drag direction.
      const start = Math.min(selectionStart, clampedTime);
      const end = Math.max(selectionStart, clampedTime);
      onSelectionChange?.({ start, end });
    }
  };

  const handleCanvasMouseUp = (e: React.MouseEvent) => {
    if (!duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;

    // Re-measure distance directly (state may lag one render behind).
    const didDrag = dragStartPos
      ? Math.sqrt(
          Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
        ) > 3
      : false;

    // Plain click: clear any selection and seek to the clicked time.
    if (!didDrag) {
      onSelectionChange?.(null);
      if (onSeek) {
        onSeek(clickTime);
      }
    }

    setIsSelecting(false);
    setIsSelectingByDrag(false);
    setDragStartPos(null);
  };

  // Reset selection gesture if the mouse is released outside the canvas.
  React.useEffect(() => {
    const handleGlobalMouseUp = () => {
      if (isSelecting) {
        setIsSelecting(false);
        setIsSelectingByDrag(false);
        setDragStartPos(null);
      }
    };
    window.addEventListener('mouseup', handleGlobalMouseUp);
    return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
  }, [isSelecting]);

  // --- Audio loading (file picker + drag & drop) -----------------------------

  /**
   * Decode `file` into an AudioBuffer and hand it to the parent. Renames the
   * track to the file name when it still carries a default name.
   * Fixed: the throwaway AudioContext is now closed (was leaked per load).
   */
  const loadAudioFromFile = React.useCallback(
    async (file: File) => {
      if (!onLoadAudio) return;
      const audioContext = new AudioContext();
      try {
        const arrayBuffer = await file.arrayBuffer();
        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
        onLoadAudio(audioBuffer);
        if (track.name === 'New Track' || track.name === 'Untitled Track') {
          const fileName = file.name.replace(/\.[^/.]+$/, '');
          onNameChange(fileName);
        }
      } catch (error) {
        console.error('Failed to load audio file:', error);
      } finally {
        // Release audio resources of the decode-only context.
        void audioContext.close();
      }
    },
    [onLoadAudio, onNameChange, track.name]
  );

  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file || !onLoadAudio) return;
    await loadAudioFromFile(file);
    // Reset input so re-selecting the same file fires onChange again.
    e.target.value = '';
  };

  const handleLoadAudioClick = () => {
    fileInputRef.current?.click();
  };

  const [isDragging, setIsDragging] = React.useState(false);

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(true);
  };

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
  };

  const handleDrop = async (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
    const file = e.dataTransfer.files?.[0];
    if (!file || !onLoadAudio) return;
    // Only the drop path can receive arbitrary files, so check the MIME type.
    if (!file.type.startsWith('audio/')) {
      console.warn('Dropped file is not an audio file');
      return;
    }
    await loadAudioFromFile(file);
  };

  const trackHeight = track.collapsed ? COLLAPSED_TRACK_HEIGHT : track.height;

  // --- Track height resize ---------------------------------------------------
  const handleResizeStart = React.useCallback(
    (e: React.MouseEvent) => {
      if (track.collapsed) return;
      e.preventDefault();
      e.stopPropagation();
      setIsResizing(true);
      resizeStartRef.current = { y: e.clientY, height: track.height };
    },
    [track.collapsed, track.height]
  );

  React.useEffect(() => {
    if (!isResizing) return;
    const handleMouseMove = (e: MouseEvent) => {
      const delta = e.clientY - resizeStartRef.current.y;
      const newHeight = Math.max(
        MIN_TRACK_HEIGHT,
        Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta)
      );
      onUpdateTrack(track.id, { height: newHeight });
    };
    const handleMouseUp = () => {
      setIsResizing(false);
    };
    window.addEventListener('mousemove', handleMouseMove);
    window.addEventListener('mouseup', handleMouseUp);
    return () => {
      window.removeEventListener('mousemove', handleMouseMove);
      window.removeEventListener('mouseup', handleMouseUp);
    };
  }, [isResizing, onUpdateTrack, track.id]);

  // NOTE(review): everything below this point is RECONSTRUCTED markup. The
  // original JSX element tags were stripped from the recovered source; the
  // structure, handlers and string literals follow the surviving comments and
  // attribute fragments, but element types, class names and some component
  // prop names (marked inline) must be confirmed against the original.
  return (
    <div
      ref={containerRef}
      className={cn(
        'flex flex-col border-b border-border bg-background',
        isSelected && 'ring-1 ring-primary'
      )}
    >
      {/* Top: Track Row (Control Panel + Waveform) */}
      <div className="flex" style={{ height: trackHeight }}>
        {/* Left: Track Control Panel (Fixed Width) - Ableton Style */}
        <div
          className="w-48 shrink-0 border-r border-border bg-muted/30"
          onClick={(e) => {
            e.stopPropagation();
            if (onSelect) onSelect();
          }}
        >
          {/* Track Name Row - Integrated collapse (DAW style) */}
          <div
            className="flex cursor-pointer items-center gap-1 px-1"
            onClick={(e) => {
              if (!isEditingName) {
                e.stopPropagation();
                onToggleCollapse();
              }
            }}
            title={track.collapsed ? 'Expand track' : 'Collapse track'}
          >
            {/* Small triangle indicator */}
            {track.collapsed ? (
              <ChevronRight className="h-3 w-3 shrink-0" />
            ) : (
              <ChevronDown className="h-3 w-3 shrink-0" />
            )}
            {/* Color stripe (thicker when selected) */}
            <div
              className={cn('self-stretch rounded-sm', isSelected ? 'w-1.5' : 'w-1')}
              style={{ backgroundColor: track.color }}
            />
            {/* Track name (editable) */}
            {isEditingName ? (
              <input
                ref={inputRef}
                value={nameInput}
                onChange={(e) => setNameInput(e.target.value)}
                onBlur={handleNameBlur}
                onKeyDown={handleNameKeyDown}
                onClick={(e) => e.stopPropagation()}
                className="w-full px-1 py-0.5 text-xs font-semibold bg-background border border-border rounded"
              />
            ) : (
              <span
                onClick={(e) => {
                  e.stopPropagation();
                  handleNameClick();
                }}
                className="px-1 py-0.5 text-xs font-semibold text-foreground truncate"
                title={String(track.name || 'Untitled Track')}
              >
                {String(track.name || 'Untitled Track')}
              </span>
            )}
          </div>

          {/* Track Controls - Only show when not collapsed */}
          {!track.collapsed && (
            /* Integrated Track Controls (Pan + Fader + Buttons).
               NOTE(review): prop list before onAutomationClick reconstructed —
               confirm against TrackControls' actual props. */
            <TrackControls
              track={track}
              onToggleMute={onToggleMute}
              onToggleSolo={onToggleSolo}
              onVolumeChange={onVolumeChange}
              onPanChange={onPanChange}
              onToggleRecordEnable={onToggleRecordEnable}
              isRecording={isRecording}
              recordingLevel={recordingLevel}
              playbackLevel={playbackLevel}
              onAutomationClick={() => {
                onUpdateTrack(track.id, {
                  automation: {
                    ...track.automation,
                    showAutomation: !track.automation?.showAutomation,
                  },
                });
              }}
              onEffectsClick={() => {
                onUpdateTrack(track.id, {
                  showEffects: !track.showEffects,
                });
              }}
              onVolumeTouchStart={handleVolumeTouchStart}
              onVolumeTouchEnd={handleVolumeTouchEnd}
              onPanTouchStart={handlePanTouchStart}
              onPanTouchEnd={handlePanTouchEnd}
            />
          )}
        </div>

        {/* Right: Waveform Area (Flexible Width) */}
        <div className="relative flex-1 overflow-hidden">
          {/* Delete Button - Top Right Overlay */}
          <Button
            variant="ghost"
            size="icon"
            className="absolute right-1 top-1 z-10"
            onClick={(e) => {
              e.stopPropagation();
              onRemove();
            }}
            title="Delete track"
          >
            <Trash2 className="h-3 w-3" />
          </Button>

          {track.audioBuffer ? (
            <>
              {/* Waveform Canvas */}
              <canvas
                ref={canvasRef}
                className="absolute inset-0 h-full w-full cursor-crosshair"
                onMouseDown={handleCanvasMouseDown}
                onMouseMove={handleCanvasMouseMove}
                onMouseUp={handleCanvasMouseUp}
              />
            </>
          ) : (
            !track.collapsed && (
              <>
                <div
                  className={cn(
                    'flex h-full flex-col items-center justify-center gap-1 text-muted-foreground',
                    isDragging && 'bg-primary/10'
                  )}
                  onClick={(e) => {
                    e.stopPropagation();
                    handleLoadAudioClick();
                  }}
                  onDragOver={handleDragOver}
                  onDragLeave={handleDragLeave}
                  onDrop={handleDrop}
                >
                  <Upload className="h-4 w-4" />
                  <span className="text-xs">
                    {isDragging ? 'Drop audio file here' : 'Click to load audio file'}
                  </span>
                  <span className="text-[10px]">or drag & drop</span>
                </div>
                <input
                  ref={fileInputRef}
                  type="file"
                  accept="audio/*"
                  className="hidden"
                  onChange={handleFileChange}
                />
              </>
            )
          )}
        </div>
      </div>

      {/* Automation Lane */}
      {!track.collapsed &&
        track.automation?.showAutomation &&
        (() => {
          // Build list of available parameters from track and effects.
          const availableParameters: Array<{ id: string; name: string }> = [
            { id: 'volume', name: 'Volume' },
            { id: 'pan', name: 'Pan' },
          ];
          // Add effect parameters.
          track.effectChain.effects.forEach((effect) => {
            if (effect.parameters) {
              Object.keys(effect.parameters).forEach((paramKey) => {
                const parameterId = `effect.${effect.id}.${paramKey}`;
                const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
                availableParameters.push({ id: parameterId, name: paramName });
              });
            }
          });

          // Get selected parameter ID (default to volume if not set).
          const selectedParameterId = track.automation.selectedParameterId || 'volume';
          // Find lane for selected parameter. If it doesn't exist yet it is
          // created by the useEffect above, not during render.
          const selectedLane = track.automation.lanes.find(
            (lane) => lane.parameterId === selectedParameterId
          );

          const modes: Array<{ value: string; label: string; color: string }> = [
            { value: 'read', label: 'R', color: 'text-muted-foreground' },
            { value: 'write', label: 'W', color: 'text-red-500' },
            { value: 'touch', label: 'T', color: 'text-yellow-500' },
            { value: 'latch', label: 'L', color: 'text-orange-500' },
          ];
          const currentModeIndex = modes.findIndex((m) => m.value === selectedLane?.mode);

          return selectedLane ? (
            <div className="flex border-t border-border">
              {/* Left: Automation Controls (matching track controls width - w-48 = 192px) */}
              <div className="w-48 shrink-0 border-r border-border p-1">
                {/* Parameter selector dropdown */}
                <select
                  className="w-full text-xs"
                  value={selectedParameterId}
                  onChange={(e) =>
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, selectedParameterId: e.target.value },
                    })
                  }
                >
                  {availableParameters.map((p) => (
                    <option key={p.id} value={p.id}>
                      {p.name}
                    </option>
                  ))}
                </select>
                {/* Automation mode cycle button (R → W → T → L) */}
                <Button
                  variant="ghost"
                  size="sm"
                  className={modes[currentModeIndex]?.color}
                  onClick={() => {
                    const next = modes[(currentModeIndex + 1) % modes.length];
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id
                        ? { ...l, mode: next.value as AutomationLaneType['mode'] }
                        : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                >
                  {modes[currentModeIndex]?.label ?? 'R'}
                </Button>
                {/* Height controls — NOTE(review): original markup lost; only
                    the comment survived. Restore from the original if needed. */}
              </div>

              {/* Right: Automation Lane Canvas (matching waveform width) */}
              <div className="relative flex-1">
                <AutomationLane
                  lane={selectedLane}
                  currentTime={currentTime}
                  duration={duration}
                  // NOTE(review): prop name reconstructed — the callback body
                  // below is original, its attribute name was stripped.
                  onUpdateLane={(updates: Partial<AutomationLaneType>) => {
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id ? { ...l, ...updates } : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                  onAddPoint={(time: number, value: number) => {
                    const newPoint = createAutomationPoint({
                      time,
                      value,
                      curve: 'linear',
                    });
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id ? { ...l, points: [...l.points, newPoint] } : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                  onUpdatePoint={(pointId: string, updates: Partial<AutomationPointType>) => {
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id
                        ? {
                            ...l,
                            points: l.points.map((p) =>
                              p.id === pointId ? { ...p, ...updates } : p
                            ),
                          }
                        : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                  onRemovePoint={(pointId: string) => {
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id
                        ? { ...l, points: l.points.filter((p) => p.id !== pointId) }
                        : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                />
              </div>
            </div>
          ) : null;
        })()}

      {/* Per-Track Effects Panel */}
      {!track.collapsed && track.showEffects && (
        <div className="border-t border-border">
          {/* NOTE(review): an "add effect" affordance must exist —
              setEffectBrowserOpen(true) had no surviving call site. */}
          <div className="flex items-center justify-between px-2 py-1">
            <span className="text-xs font-semibold text-muted-foreground">Effects</span>
            <Button variant="ghost" size="sm" onClick={() => setEffectBrowserOpen(true)}>
              <Sparkles className="h-3 w-3" />
              Add Effect
            </Button>
          </div>

          {track.effectChain.effects.length === 0 ? (
            <div className="px-2 py-1 text-xs text-muted-foreground">
              No effects on this track
            </div>
          ) : (
            <div className="flex flex-col gap-1 p-1">
              {track.effectChain.effects.map((effect) => (
                <EffectDevice
                  key={effect.id}
                  effect={effect}
                  onToggle={() => onToggleEffect?.(effect.id)}
                  onRemove={() => onRemoveEffect?.(effect.id)}
                  onUpdateParameters={(params: any) => onUpdateEffect?.(effect.id, params)}
                  onToggleExpanded={() => {
                    const updatedEffects = track.effectChain.effects.map((e) =>
                      e.id === effect.id ? { ...e, expanded: !e.expanded } : e
                    );
                    onUpdateTrack(track.id, {
                      effectChain: { ...track.effectChain, effects: updatedEffects },
                    });
                  }}
                  trackId={track.id}
                  isPlaying={isPlaying}
                  onParameterTouched={onParameterTouched}
                  automationLanes={track.automation.lanes}
                />
              ))}
            </div>
          )}

          {/* Effect Browser Dialog */}
          <EffectBrowser
            open={effectBrowserOpen}
            onClose={() => setEffectBrowserOpen(false)}
            onSelectEffect={(effectType: EffectType) => {
              onAddEffect?.(effectType);
              setEffectBrowserOpen(false);
            }}
          />
        </div>
      )}

      {/* Track Height Resize Handle */}
      {!track.collapsed && (
        <div
          className={cn(
            'h-1 w-full cursor-ns-resize transition-colors hover:bg-primary/40',
            isResizing && 'bg-primary/40'
          )}
          onMouseDown={handleResizeStart}
          title="Drag to resize track"
        />
      )}
    </div>
  );
}