1063 lines
38 KiB
TypeScript
1063 lines
38 KiB
TypeScript
'use client';
|
|
|
|
import * as React from 'react';
|
|
import { Volume2, VolumeX, Headphones, Trash2, ChevronDown, ChevronRight, ChevronUp, UnfoldHorizontal, Upload, Mic, Gauge, Circle, Sparkles } from 'lucide-react';
|
|
import type { Track as TrackType } from '@/types/track';
|
|
import { COLLAPSED_TRACK_HEIGHT, MIN_TRACK_HEIGHT, MAX_TRACK_HEIGHT } from '@/types/track';
|
|
import { Button } from '@/components/ui/Button';
|
|
import { Slider } from '@/components/ui/Slider';
|
|
import { cn } from '@/lib/utils/cn';
|
|
import type { EffectType } from '@/lib/audio/effects/chain';
|
|
import { TrackControls } from './TrackControls';
|
|
import { AutomationLane } from '@/components/automation/AutomationLane';
|
|
import type { AutomationLane as AutomationLaneType, AutomationPoint as AutomationPointType } from '@/types/automation';
|
|
import { createAutomationPoint } from '@/lib/audio/automation/utils';
|
|
import { createAutomationLane } from '@/lib/audio/automation-utils';
|
|
import { EffectDevice } from '@/components/effects/EffectDevice';
|
|
import { EffectBrowser } from '@/components/effects/EffectBrowser';
|
|
import { ImportDialog } from '@/components/dialogs/ImportDialog';
|
|
import { importAudioFile, type ImportOptions } from '@/lib/audio/decoder';
|
|
|
|
/**
 * Props for the {@link Track} component.
 *
 * The component is fully controlled: all track mutations flow out through the
 * callbacks below (the parent owns the track state and passes it back in).
 */
export interface TrackProps {
  /** The track model to render (audio buffer, effects, automation, UI flags). */
  track: TrackType;
  /** Horizontal zoom factor applied to the waveform/automation width. */
  zoom: number;
  /** Current playback position in seconds (drives the playhead). */
  currentTime: number;
  /** Total project duration in seconds; 0 disables seek/selection handling. */
  duration: number;
  isSelected?: boolean;
  onSelect?: () => void;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  /** Remove this track entirely. */
  onRemove: () => void;
  onNameChange: (name: string) => void;
  /** Partial-update escape hatch used for automation/effect-chain/height edits. */
  onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
  /** Seek playback to a time (invoked on plain clicks in the waveform). */
  onSeek?: (time: number) => void;
  /** Receives the decoded buffer after a file is loaded/imported/dropped. */
  onLoadAudio?: (buffer: AudioBuffer) => void;
  onToggleEffect?: (effectId: string) => void;
  onRemoveEffect?: (effectId: string) => void;
  onUpdateEffect?: (effectId: string, parameters: any) => void;
  onAddEffect?: (effectType: EffectType) => void;
  /** Time-range selection on the waveform; null clears the selection. */
  onSelectionChange?: (selection: { start: number; end: number } | null) => void;
  onToggleRecordEnable?: () => void;
  isRecording?: boolean;
  /** Input meter level shown while record-enabled/recording. */
  recordingLevel?: number;
  /** Output meter level shown during playback. */
  playbackLevel?: number;
  /** Notifies automation recording that a control was grabbed/released. */
  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
  isPlaying?: boolean;
}
|
|
|
|
/**
 * Renders one audio track row: control panel (name, mute/solo/record, fader),
 * waveform canvas with click-to-seek and drag-to-select, an optional
 * automation lane, and an optional per-track effects panel.
 */
export function Track({
  track,
  zoom,
  currentTime,
  duration,
  isSelected,
  onSelect,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onUpdateTrack,
  onSeek,
  onLoadAudio,
  onToggleEffect,
  onRemoveEffect,
  onUpdateEffect,
  onAddEffect,
  onSelectionChange,
  onToggleRecordEnable,
  isRecording = false,
  recordingLevel = 0,
  playbackLevel = 0,
  onParameterTouched,
  isPlaying = false,
}: TrackProps) {
  // Waveform canvas and layout refs.
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);
  const fileInputRef = React.useRef<HTMLInputElement>(null);
  // Inline name editing: edit-mode flag and the draft value being typed.
  const [isEditingName, setIsEditingName] = React.useState(false);
  const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
  // Bumped when the document theme class changes, forcing a waveform redraw.
  const [themeKey, setThemeKey] = React.useState(0);
  const inputRef = React.useRef<HTMLInputElement>(null);
  // Track-height drag-resize state; the ref snapshots the drag origin.
  const [isResizing, setIsResizing] = React.useState(false);
  const resizeStartRef = React.useRef({ y: 0, height: 0 });
  const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);

  // Import dialog state
  const [showImportDialog, setShowImportDialog] = React.useState(false);
  const [pendingFile, setPendingFile] = React.useState<File | null>(null);
  const [fileMetadata, setFileMetadata] = React.useState<{ sampleRate?: number; channels?: number }>({});

  // Selection state
  const [isSelecting, setIsSelecting] = React.useState(false);
  const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
  // True once the pointer has moved past the drag threshold (vs. a plain click).
  const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
  const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
|
// Touch callbacks for automation recording
|
|
const handlePanTouchStart = React.useCallback(() => {
|
|
if (isPlaying && onParameterTouched) {
|
|
const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
|
|
if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
|
|
queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
|
|
}
|
|
}
|
|
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
|
|
|
const handlePanTouchEnd = React.useCallback(() => {
|
|
if (isPlaying && onParameterTouched) {
|
|
const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
|
|
if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
|
|
queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
|
|
}
|
|
}
|
|
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
|
|
|
const handleVolumeTouchStart = React.useCallback(() => {
|
|
if (isPlaying && onParameterTouched) {
|
|
const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
|
|
if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
|
|
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
|
|
}
|
|
}
|
|
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
|
|
|
const handleVolumeTouchEnd = React.useCallback(() => {
|
|
if (isPlaying && onParameterTouched) {
|
|
const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
|
|
if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
|
|
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, false));
|
|
}
|
|
}
|
|
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
|
|
|
  // Auto-create automation lane for selected parameter if it doesn't exist.
  // Runs whenever the automation panel is open: if the currently selected
  // parameter has no lane yet, a lane is created with a value range/unit/
  // formatter appropriate to that parameter and pushed up via onUpdateTrack.
  React.useEffect(() => {
    if (!track.automation?.showAutomation) return;

    const selectedParameterId = track.automation.selectedParameterId || 'volume';
    const laneExists = track.automation.lanes.some(lane => lane.parameterId === selectedParameterId);

    if (!laneExists) {
      // Build list of available parameters: the two built-ins plus one entry
      // per parameter of every effect in the chain.
      const availableParameters: Array<{ id: string; name: string }> = [
        { id: 'volume', name: 'Volume' },
        { id: 'pan', name: 'Pan' },
      ];

      track.effectChain.effects.forEach((effect) => {
        if (effect.parameters) {
          Object.keys(effect.parameters).forEach((paramKey) => {
            // Effect parameter ids are encoded as "effect.{effectId}.{paramKey}".
            const parameterId = `effect.${effect.id}.${paramKey}`;
            const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
            availableParameters.push({ id: parameterId, name: paramName });
          });
        }
      });

      const paramInfo = availableParameters.find(p => p.id === selectedParameterId);
      if (paramInfo) {
        // Determine value range based on parameter type; default is normalized 0..1.
        let valueRange = { min: 0, max: 1 };
        let unit = '';
        let formatter: ((value: number) => string) | undefined;

        if (selectedParameterId === 'volume') {
          unit = 'dB';
        } else if (selectedParameterId === 'pan') {
          // Pan is stored normalized: 0.5 is center, 0 is full left, 1 full right.
          formatter = (value: number) => {
            if (value === 0.5) return 'C';
            if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
            return `${((value - 0.5) * 200).toFixed(0)}R`;
          };
        } else if (selectedParameterId.startsWith('effect.')) {
          // Parse effect parameter: effect.{effectId}.{paramName}
          const parts = selectedParameterId.split('.');
          if (parts.length === 3) {
            const paramName = parts[2];
            // Set ranges based on parameter name; unknown parameters keep the
            // default 0..1 normalized range.
            if (paramName === 'frequency') {
              valueRange = { min: 20, max: 20000 };
              unit = 'Hz';
            } else if (paramName === 'Q') {
              valueRange = { min: 0.1, max: 20 };
            } else if (paramName === 'gain') {
              valueRange = { min: -40, max: 40 };
              unit = 'dB';
            }
          }
        }

        const newLane = createAutomationLane(
          track.id,
          selectedParameterId,
          paramInfo.name,
          {
            min: valueRange.min,
            max: valueRange.max,
            unit,
            formatter,
          }
        );

        // Append the new lane and persist the selection in one update.
        onUpdateTrack(track.id, {
          automation: {
            ...track.automation,
            lanes: [...track.automation.lanes, newLane],
            selectedParameterId,
          },
        });
      }
    }
  }, [track.automation?.showAutomation, track.automation?.selectedParameterId, track.automation?.lanes, track.effectChain.effects, track.id, onUpdateTrack]);
|
|
|
|
const handleNameClick = () => {
|
|
setIsEditingName(true);
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
};
|
|
|
|
const handleNameBlur = () => {
|
|
setIsEditingName(false);
|
|
if (nameInput.trim()) {
|
|
onNameChange(nameInput.trim());
|
|
} else {
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
}
|
|
};
|
|
|
|
const handleNameKeyDown = (e: React.KeyboardEvent) => {
|
|
if (e.key === 'Enter') {
|
|
inputRef.current?.blur();
|
|
} else if (e.key === 'Escape') {
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
setIsEditingName(false);
|
|
}
|
|
};
|
|
|
|
  // When entering name-edit mode, focus the input and select its contents so
  // typing immediately replaces the old name.
  React.useEffect(() => {
    if (isEditingName && inputRef.current) {
      inputRef.current.focus();
      inputRef.current.select();
    }
  }, [isEditingName]);
|
|
|
|
  // Listen for theme changes. The waveform is painted with CSS custom
  // properties, so a light/dark toggle must trigger a repaint.
  React.useEffect(() => {
    const observer = new MutationObserver(() => {
      // Increment key to force waveform redraw (themeKey is a dependency of
      // the draw effect below).
      setThemeKey((prev) => prev + 1);
    });

    // Watch for class changes on document element (dark mode toggle)
    observer.observe(document.documentElement, {
      attributes: true,
      attributeFilter: ['class'],
    });

    return () => observer.disconnect();
  }, []);
|
|
|
|
// Draw waveform
|
|
React.useEffect(() => {
|
|
if (!track.audioBuffer || !canvasRef.current) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
const ctx = canvas.getContext('2d');
|
|
if (!ctx) return;
|
|
|
|
// Use parent container's size since canvas is absolute positioned
|
|
const parent = canvas.parentElement;
|
|
if (!parent) return;
|
|
|
|
const dpr = window.devicePixelRatio || 1;
|
|
const rect = parent.getBoundingClientRect();
|
|
|
|
canvas.width = rect.width * dpr;
|
|
canvas.height = rect.height * dpr;
|
|
ctx.scale(dpr, dpr);
|
|
|
|
const width = rect.width;
|
|
const height = rect.height;
|
|
|
|
// Clear canvas with theme color
|
|
const bgColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || 'rgb(15, 23, 42)';
|
|
ctx.fillStyle = bgColor;
|
|
ctx.fillRect(0, 0, width, height);
|
|
|
|
const buffer = track.audioBuffer;
|
|
const channelData = buffer.getChannelData(0);
|
|
const samplesPerPixel = Math.floor(buffer.length / (width * zoom));
|
|
|
|
// Draw waveform
|
|
ctx.fillStyle = track.color;
|
|
ctx.strokeStyle = track.color;
|
|
ctx.lineWidth = 1;
|
|
|
|
for (let x = 0; x < width; x++) {
|
|
const startSample = Math.floor(x * samplesPerPixel);
|
|
const endSample = Math.floor((x + 1) * samplesPerPixel);
|
|
|
|
let min = 1.0;
|
|
let max = -1.0;
|
|
|
|
for (let i = startSample; i < endSample && i < channelData.length; i++) {
|
|
const sample = channelData[i];
|
|
if (sample < min) min = sample;
|
|
if (sample > max) max = sample;
|
|
}
|
|
|
|
const y1 = (height / 2) * (1 - max);
|
|
const y2 = (height / 2) * (1 - min);
|
|
|
|
ctx.beginPath();
|
|
ctx.moveTo(x, y1);
|
|
ctx.lineTo(x, y2);
|
|
ctx.stroke();
|
|
}
|
|
|
|
// Draw center line
|
|
ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
|
|
ctx.lineWidth = 1;
|
|
ctx.beginPath();
|
|
ctx.moveTo(0, height / 2);
|
|
ctx.lineTo(width, height / 2);
|
|
ctx.stroke();
|
|
|
|
// Draw selection overlay
|
|
if (track.selection && duration > 0) {
|
|
const selStartX = (track.selection.start / duration) * width;
|
|
const selEndX = (track.selection.end / duration) * width;
|
|
|
|
// Draw selection background
|
|
ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
|
|
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
|
|
|
|
// Draw selection borders
|
|
ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
|
|
ctx.lineWidth = 2;
|
|
|
|
// Start border
|
|
ctx.beginPath();
|
|
ctx.moveTo(selStartX, 0);
|
|
ctx.lineTo(selStartX, height);
|
|
ctx.stroke();
|
|
|
|
// End border
|
|
ctx.beginPath();
|
|
ctx.moveTo(selEndX, 0);
|
|
ctx.lineTo(selEndX, height);
|
|
ctx.stroke();
|
|
}
|
|
|
|
// Draw playhead
|
|
if (duration > 0) {
|
|
const playheadX = (currentTime / duration) * width;
|
|
ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
|
|
ctx.lineWidth = 2;
|
|
ctx.beginPath();
|
|
ctx.moveTo(playheadX, 0);
|
|
ctx.lineTo(playheadX, height);
|
|
ctx.stroke();
|
|
}
|
|
}, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration, themeKey, track.selection]);
|
|
|
|
  // Begin a potential selection/seek gesture on the waveform. Whether this
  // turns into a drag-selection or a plain click-to-seek is decided later by
  // the move/up handlers using a pixel-distance threshold.
  const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!duration) return;

    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const y = e.clientY - rect.top;
    // Canvas x position mapped proportionally to project time.
    const clickTime = (x / rect.width) * duration;

    // Store drag start position
    setDragStartPos({ x: e.clientX, y: e.clientY });
    setIsSelectingByDrag(false);

    // Start selection immediately (will be used if user drags)
    setIsSelecting(true);
    setSelectionStart(clickTime);
  };
|
|
|
|
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
|
|
|
|
const rect = e.currentTarget.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const currentTime = (x / rect.width) * duration;
|
|
|
|
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
|
|
const dragDistance = Math.sqrt(
|
|
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
|
|
);
|
|
|
|
if (dragDistance > 3) {
|
|
setIsSelectingByDrag(true);
|
|
}
|
|
|
|
// If dragging, update selection
|
|
if (isSelectingByDrag || dragDistance > 3) {
|
|
// Clamp to valid time range
|
|
const clampedTime = Math.max(0, Math.min(duration, currentTime));
|
|
|
|
// Update selection (ensure start < end)
|
|
const start = Math.min(selectionStart, clampedTime);
|
|
const end = Math.max(selectionStart, clampedTime);
|
|
|
|
onSelectionChange?.({ start, end });
|
|
}
|
|
};
|
|
|
|
  // Finish the gesture: a plain click (no drag past the threshold) clears the
  // selection and seeks; a drag leaves the selection made during mousemove.
  const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!duration) return;

    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;

    // Check if user actually dragged (check distance directly, not state —
    // isSelectingByDrag may not have flushed yet)
    const didDrag = dragStartPos
      ? Math.sqrt(
          Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
        ) > 3
      : false;

    // If user didn't drag (just clicked), clear selection and seek
    if (!didDrag) {
      onSelectionChange?.(null);
      if (onSeek) {
        onSeek(clickTime);
      }
    }

    // Reset drag state
    setIsSelecting(false);
    setIsSelectingByDrag(false);
    setDragStartPos(null);
  };
|
|
|
|
  // Handle mouse leaving canvas during selection: a mouseup anywhere in the
  // window cancels the gesture so the selection state can't get stuck.
  React.useEffect(() => {
    const handleGlobalMouseUp = () => {
      if (isSelecting) {
        setIsSelecting(false);
        setIsSelectingByDrag(false);
        setDragStartPos(null);
      }
    };

    window.addEventListener('mouseup', handleGlobalMouseUp);
    return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
  }, [isSelecting]);
|
|
|
|
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
|
const file = e.target.files?.[0];
|
|
if (!file || !onLoadAudio) return;
|
|
|
|
try {
|
|
// Decode to get basic metadata before showing dialog
|
|
const arrayBuffer = await file.arrayBuffer();
|
|
const audioContext = new AudioContext();
|
|
const tempBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
|
|
// Set metadata and show import dialog
|
|
setFileMetadata({
|
|
sampleRate: tempBuffer.sampleRate,
|
|
channels: tempBuffer.numberOfChannels,
|
|
});
|
|
setPendingFile(file);
|
|
setShowImportDialog(true);
|
|
} catch (error) {
|
|
console.error('Failed to read audio file metadata:', error);
|
|
}
|
|
|
|
// Reset input
|
|
e.target.value = '';
|
|
};
|
|
|
|
  // Run the actual import with the options chosen in the dialog, hand the
  // decoded buffer to the parent, and (for still-default names) rename the
  // track after the imported file.
  const handleImport = async (options: ImportOptions) => {
    if (!pendingFile || !onLoadAudio) return;

    try {
      setShowImportDialog(false);
      const { buffer, metadata } = await importAudioFile(pendingFile, options);
      onLoadAudio(buffer);

      // Update track name to filename (extension stripped) if it's still default
      if (track.name === 'New Track' || track.name === 'Untitled Track') {
        const fileName = metadata.fileName.replace(/\.[^/.]+$/, '');
        onNameChange(fileName);
      }

      console.log('Audio imported:', metadata);
    } catch (error) {
      console.error('Failed to import audio file:', error);
    } finally {
      // Always clear the pending file/metadata, success or failure.
      setPendingFile(null);
      setFileMetadata({});
    }
  };
|
|
|
|
  // Close the import dialog and discard the pending file and its metadata.
  const handleImportCancel = () => {
    setShowImportDialog(false);
    setPendingFile(null);
    setFileMetadata({});
  };

  // Open the hidden file input (rendered in the empty-waveform placeholder).
  const handleLoadAudioClick = () => {
    fileInputRef.current?.click();
  };
|
|
|
|
const [isDragging, setIsDragging] = React.useState(false);
|
|
|
|
const handleDragOver = (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(true);
|
|
};
|
|
|
|
const handleDragLeave = (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(false);
|
|
};
|
|
|
|
const handleDrop = async (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(false);
|
|
|
|
const file = e.dataTransfer.files?.[0];
|
|
if (!file || !onLoadAudio) return;
|
|
|
|
// Check if it's an audio file
|
|
if (!file.type.startsWith('audio/')) {
|
|
console.warn('Dropped file is not an audio file');
|
|
return;
|
|
}
|
|
|
|
try {
|
|
const arrayBuffer = await file.arrayBuffer();
|
|
const audioContext = new AudioContext();
|
|
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
onLoadAudio(audioBuffer);
|
|
|
|
// Update track name to filename if it's still default
|
|
if (track.name === 'New Track' || track.name === 'Untitled Track') {
|
|
const fileName = file.name.replace(/\.[^/.]+$/, '');
|
|
onNameChange(fileName);
|
|
}
|
|
} catch (error) {
|
|
console.error('Failed to load audio file:', error);
|
|
}
|
|
};
|
|
|
|
  // Effective row height: collapsed tracks use the fixed collapsed height.
  const trackHeight = track.collapsed ? COLLAPSED_TRACK_HEIGHT : track.height;

  // Track height resize handlers: snapshot the pointer y and current height
  // at drag start; the effect below applies the delta on every mousemove.
  const handleResizeStart = React.useCallback(
    (e: React.MouseEvent) => {
      if (track.collapsed) return;
      e.preventDefault();
      e.stopPropagation();
      setIsResizing(true);
      resizeStartRef.current = { y: e.clientY, height: track.height };
    },
    [track.collapsed, track.height]
  );
|
|
|
|
  // While resizing, track the mouse globally (so the drag keeps working when
  // the pointer leaves the handle) and clamp the new height to the allowed
  // MIN/MAX range; mouseup ends the drag.
  React.useEffect(() => {
    if (!isResizing) return;

    const handleMouseMove = (e: MouseEvent) => {
      const delta = e.clientY - resizeStartRef.current.y;
      const newHeight = Math.max(
        MIN_TRACK_HEIGHT,
        Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta)
      );
      onUpdateTrack(track.id, { height: newHeight });
    };

    const handleMouseUp = () => {
      setIsResizing(false);
    };

    window.addEventListener('mousemove', handleMouseMove);
    window.addEventListener('mouseup', handleMouseUp);

    return () => {
      window.removeEventListener('mousemove', handleMouseMove);
      window.removeEventListener('mouseup', handleMouseUp);
    };
  }, [isResizing, onUpdateTrack, track.id]);
|
|
|
|
  return (
    <div
      ref={containerRef}
      className={cn(
        'flex flex-col transition-all duration-200 relative',
        isSelected && 'bg-primary/5'
      )}
    >
      {/* Top: Track Row (Control Panel + Waveform) */}
      <div className="flex" style={{ height: trackHeight }}>
        {/* Left: Track Control Panel (Fixed Width) - Ableton Style */}
        <div
          className={cn(
            "w-48 flex-shrink-0 border-b border-r-4 p-2 flex flex-col gap-2 min-h-0 transition-all duration-200 cursor-pointer border-border",
            isSelected
              ? "bg-primary/10 border-r-primary"
              : "bg-card border-r-transparent hover:bg-accent/30"
          )}
          onClick={(e) => {
            e.stopPropagation();
            if (onSelect) onSelect();
          }}
        >
          {/* Track Name Row - Integrated collapse (DAW style) */}
          <div
            className={cn(
              "group flex items-center gap-1.5 px-1 py-0.5 rounded cursor-pointer transition-colors",
              isSelected ? "bg-primary/10" : "hover:bg-accent/50"
            )}
            onClick={(e) => {
              {/* While editing the name, let clicks reach the input instead of collapsing */}
              if (!isEditingName) {
                e.stopPropagation();
                onToggleCollapse();
              }
            }}
            title={track.collapsed ? 'Expand track' : 'Collapse track'}
          >
            {/* Small triangle indicator */}
            <div className={cn(
              "flex-shrink-0 transition-colors",
              isSelected ? "text-primary" : "text-muted-foreground group-hover:text-foreground"
            )}>
              {track.collapsed ? (
                <ChevronRight className="h-3 w-3" />
              ) : (
                <ChevronDown className="h-3 w-3" />
              )}
            </div>

            {/* Color stripe (thicker when selected) */}
            <div
              className={cn(
                "h-5 rounded-full flex-shrink-0 transition-all",
                isSelected ? "w-1" : "w-0.5"
              )}
              style={{ backgroundColor: track.color }}
            />

            {/* Track name (editable) */}
            <div className="flex-1 min-w-0">
              {isEditingName ? (
                <input
                  ref={inputRef}
                  type="text"
                  value={nameInput}
                  onChange={(e) => setNameInput(e.target.value)}
                  onBlur={handleNameBlur}
                  onKeyDown={handleNameKeyDown}
                  onClick={(e) => e.stopPropagation()}
                  className="w-full px-1 py-0.5 text-xs font-semibold bg-background border border-border rounded"
                />
              ) : (
                <div
                  onClick={(e) => {
                    e.stopPropagation();
                    handleNameClick();
                  }}
                  className="px-1 py-0.5 text-xs font-semibold text-foreground truncate"
                  title={String(track.name || 'Untitled Track')}
                >
                  {String(track.name || 'Untitled Track')}
                </div>
              )}
            </div>
          </div>

          {/* Track Controls - Only show when not collapsed */}
          {!track.collapsed && (
            <div className="flex-1 flex flex-col items-center justify-between min-h-0 overflow-hidden">
              {/* Integrated Track Controls (Pan + Fader + Buttons).
                  Meters show the input level while record-enabled/recording,
                  otherwise the playback level. */}
              <TrackControls
                volume={track.volume}
                pan={track.pan}
                peakLevel={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
                rmsLevel={track.recordEnabled || isRecording ? recordingLevel * 0.7 : playbackLevel * 0.7}
                isMuted={track.mute}
                isSolo={track.solo}
                isRecordEnabled={track.recordEnabled}
                showAutomation={track.automation?.showAutomation}
                showEffects={track.showEffects}
                isRecording={isRecording}
                onVolumeChange={onVolumeChange}
                onPanChange={onPanChange}
                onMuteToggle={onToggleMute}
                onSoloToggle={onToggleSolo}
                onRecordToggle={onToggleRecordEnable}
                onAutomationToggle={() => {
                  onUpdateTrack(track.id, {
                    automation: {
                      ...track.automation,
                      showAutomation: !track.automation?.showAutomation,
                    },
                  });
                }}
                onEffectsClick={() => {
                  onUpdateTrack(track.id, {
                    showEffects: !track.showEffects,
                  });
                }}
                onVolumeTouchStart={handleVolumeTouchStart}
                onVolumeTouchEnd={handleVolumeTouchEnd}
                onPanTouchStart={handlePanTouchStart}
                onPanTouchEnd={handlePanTouchEnd}
              />
            </div>
          )}
        </div>

        {/* Right: Waveform Area (Flexible Width).
            NOTE(review): minWidth assumes 100px per second at zoom 1 — confirm
            this matches the timeline ruler's pixels-per-second scale. */}
        <div
          className="flex-1 relative bg-waveform-bg border-b border-l border-border"
          style={{
            minWidth: track.audioBuffer ? `${duration * zoom * 100}px` : undefined
          }}
        >
          {/* Delete Button - Top Right Overlay */}
          <button
            onClick={(e) => {
              e.stopPropagation();
              onRemove();
            }}
            className={cn(
              'absolute top-2 right-2 z-20 h-6 w-6 rounded flex items-center justify-center transition-all',
              'bg-card/80 hover:bg-destructive/90 text-muted-foreground hover:text-white',
              'border border-border/50 hover:border-destructive',
              'backdrop-blur-sm shadow-sm hover:shadow-md'
            )}
            title="Remove track"
          >
            <Trash2 className="h-3 w-3" />
          </button>

          {track.audioBuffer ? (
            <>
              {/* Waveform Canvas */}
              <canvas
                ref={canvasRef}
                className="absolute inset-0 w-full h-full cursor-pointer"
                onMouseDown={handleCanvasMouseDown}
                onMouseMove={handleCanvasMouseMove}
                onMouseUp={handleCanvasMouseUp}
              />
            </>
          ) : (
            !track.collapsed && (
              <>
                {/* Empty-track placeholder: click-to-browse or drag-and-drop target */}
                <div
                  className={cn(
                    "absolute inset-0 flex flex-col items-center justify-center text-sm text-muted-foreground hover:text-foreground transition-colors cursor-pointer group",
                    isDragging ? "bg-primary/20 text-primary border-2 border-primary border-dashed" : "hover:bg-accent/50"
                  )}
                  onClick={(e) => {
                    e.stopPropagation();
                    handleLoadAudioClick();
                  }}
                  onDragOver={handleDragOver}
                  onDragLeave={handleDragLeave}
                  onDrop={handleDrop}
                >
                  <Upload className="h-6 w-6 mb-2 opacity-50 group-hover:opacity-100" />
                  <p>{isDragging ? 'Drop audio file here' : 'Click to load audio file'}</p>
                  <p className="text-xs opacity-75 mt-1">or drag & drop</p>
                </div>
                <input
                  ref={fileInputRef}
                  type="file"
                  accept="audio/*"
                  onChange={handleFileChange}
                  className="hidden"
                />
              </>
            )
          )}
        </div>
      </div>

      {/* Automation Lane. The IIFE recomputes the derived UI state (available
          parameters, selected lane, mode table) on every render. */}
      {!track.collapsed && track.automation?.showAutomation && (() => {
        // Build list of available parameters from track and effects
        const availableParameters: Array<{ id: string; name: string }> = [
          { id: 'volume', name: 'Volume' },
          { id: 'pan', name: 'Pan' },
        ];

        // Add effect parameters
        track.effectChain.effects.forEach((effect) => {
          if (effect.parameters) {
            Object.keys(effect.parameters).forEach((paramKey) => {
              const parameterId = `effect.${effect.id}.${paramKey}`;
              const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
              availableParameters.push({ id: parameterId, name: paramName });
            });
          }
        });

        // Get selected parameter ID (default to volume if not set)
        const selectedParameterId = track.automation.selectedParameterId || 'volume';

        // Find or create lane for selected parameter
        let selectedLane = track.automation.lanes.find(lane => lane.parameterId === selectedParameterId);

        // If lane doesn't exist yet, we need to create it (but not during render)
        // This will be handled by a useEffect instead

        const modes: Array<{ value: string; label: string; color: string }> = [
          { value: 'read', label: 'R', color: 'text-muted-foreground' },
          { value: 'write', label: 'W', color: 'text-red-500' },
          { value: 'touch', label: 'T', color: 'text-yellow-500' },
          { value: 'latch', label: 'L', color: 'text-orange-500' },
        ];
        const currentModeIndex = modes.findIndex(m => m.value === selectedLane?.mode);

        return selectedLane ? (
          <div className="flex border-b border-border">
            {/* Left: Automation Controls (matching track controls width - w-48 = 192px) */}
            <div className="w-48 flex-shrink-0 bg-muted/30 border-r border-border/50 p-2 flex flex-col gap-2">
              {/* Parameter selector dropdown */}
              <select
                value={selectedParameterId}
                onChange={(e) => {
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, selectedParameterId: e.target.value },
                  });
                }}
                className="w-full text-xs font-medium text-foreground bg-background/80 border border-border/30 rounded px-2 py-1 hover:bg-background focus:outline-none focus:ring-1 focus:ring-primary"
              >
                {availableParameters.map((param) => (
                  <option key={param.id} value={param.id}>
                    {param.name}
                  </option>
                ))}
              </select>

              {/* Automation mode cycle button (read -> write -> touch -> latch) */}
              <button
                onClick={() => {
                  const nextIndex = (currentModeIndex + 1) % modes.length;
                  const updatedLanes = track.automation.lanes.map((l) =>
                    l.id === selectedLane.id ? { ...l, mode: modes[nextIndex].value as any } : l
                  );
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, lanes: updatedLanes },
                  });
                }}
                className={cn(
                  'w-full px-2 py-1 text-xs font-bold rounded transition-colors border border-border/30',
                  'bg-background/50 hover:bg-background',
                  modes[currentModeIndex]?.color
                )}
                title={`Mode: ${selectedLane.mode} (click to cycle)`}
              >
                {modes[currentModeIndex]?.label} - {selectedLane.mode.toUpperCase()}
              </button>

              {/* Height controls (lane height clamped to 60..180 in 20px steps) */}
              <div className="flex gap-1">
                <button
                  onClick={() => {
                    const newHeight = Math.max(60, Math.min(180, selectedLane.height + 20));
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id ? { ...l, height: newHeight } : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                  className="flex-1 px-2 py-1 text-xs bg-background/50 hover:bg-background border border-border/30 rounded transition-colors"
                  title="Increase lane height"
                >
                  <ChevronUp className="h-3 w-3 mx-auto" />
                </button>
                <button
                  onClick={() => {
                    const newHeight = Math.max(60, Math.min(180, selectedLane.height - 20));
                    const updatedLanes = track.automation.lanes.map((l) =>
                      l.id === selectedLane.id ? { ...l, height: newHeight } : l
                    );
                    onUpdateTrack(track.id, {
                      automation: { ...track.automation, lanes: updatedLanes },
                    });
                  }}
                  className="flex-1 px-2 py-1 text-xs bg-background/50 hover:bg-background border border-border/30 rounded transition-colors"
                  title="Decrease lane height"
                >
                  <ChevronDown className="h-3 w-3 mx-auto" />
                </button>
              </div>
            </div>

            {/* Right: Automation Lane Canvas (matching waveform width) */}
            <div className="flex-1 border-l border-border/50">
              <AutomationLane
                key={selectedLane.id}
                lane={selectedLane}
                duration={duration}
                zoom={zoom}
                currentTime={currentTime}
                onUpdateLane={(updates) => {
                  const updatedLanes = track.automation.lanes.map((l) =>
                    l.id === selectedLane.id ? { ...l, ...updates } : l
                  );
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, lanes: updatedLanes },
                  });
                }}
                onAddPoint={(time, value) => {
                  const newPoint = createAutomationPoint({
                    time,
                    value,
                    curve: 'linear',
                  });
                  const updatedLanes = track.automation.lanes.map((l) =>
                    l.id === selectedLane.id
                      ? { ...l, points: [...l.points, newPoint] }
                      : l
                  );
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, lanes: updatedLanes },
                  });
                }}
                onUpdatePoint={(pointId, updates) => {
                  const updatedLanes = track.automation.lanes.map((l) =>
                    l.id === selectedLane.id
                      ? {
                          ...l,
                          points: l.points.map((p) =>
                            p.id === pointId ? { ...p, ...updates } : p
                          ),
                        }
                      : l
                  );
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, lanes: updatedLanes },
                  });
                }}
                onRemovePoint={(pointId) => {
                  const updatedLanes = track.automation.lanes.map((l) =>
                    l.id === selectedLane.id
                      ? { ...l, points: l.points.filter((p) => p.id !== pointId) }
                      : l
                  );
                  onUpdateTrack(track.id, {
                    automation: { ...track.automation, lanes: updatedLanes },
                  });
                }}
              />
            </div>
          </div>
        ) : null;
      })()}

      {/* Per-Track Effects Panel */}
      {!track.collapsed && track.showEffects && (
        <div className="bg-background/30 border-t border-border/50 p-3 h-[280px] flex flex-col">
          {track.effectChain.effects.length === 0 ? (
            <div className="flex flex-col flex-1">
              <Button
                variant="ghost"
                size="sm"
                onClick={() => setEffectBrowserOpen(true)}
                className="h-7 px-2 text-xs self-start mb-2"
              >
                <Sparkles className="h-3 w-3 mr-1" />
                Add Effect
              </Button>
              <div className="text-center text-sm text-muted-foreground flex-1 flex items-center justify-center">
                No effects on this track
              </div>
            </div>
          ) : (
            <div className="flex flex-col flex-1">
              <Button
                variant="ghost"
                size="sm"
                onClick={() => setEffectBrowserOpen(true)}
                className="h-7 px-2 text-xs self-start mb-2"
              >
                <Sparkles className="h-3 w-3 mr-1" />
                Add Effect
              </Button>
              <div className="flex gap-3 overflow-x-auto overflow-y-auto custom-scrollbar flex-1">
                {track.effectChain.effects.map((effect) => (
                  <EffectDevice
                    key={effect.id}
                    effect={effect}
                    onToggleEnabled={() => onToggleEffect?.(effect.id)}
                    onRemove={() => onRemoveEffect?.(effect.id)}
                    onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
                    onToggleExpanded={() => {
                      const updatedEffects = track.effectChain.effects.map((e) =>
                        e.id === effect.id ? { ...e, expanded: !e.expanded } : e
                      );
                      onUpdateTrack(track.id, {
                        effectChain: { ...track.effectChain, effects: updatedEffects },
                      });
                    }}
                    trackId={track.id}
                    isPlaying={isPlaying}
                    onParameterTouched={onParameterTouched}
                    automationLanes={track.automation.lanes}
                  />
                ))}
              </div>
            </div>
          )}

          {/* Effect Browser Dialog */}
          <EffectBrowser
            open={effectBrowserOpen}
            onClose={() => setEffectBrowserOpen(false)}
            onSelectEffect={(effectType) => {
              onAddEffect?.(effectType);
              setEffectBrowserOpen(false);
            }}
          />
        </div>
      )}

      {/* Track Height Resize Handle */}
      {!track.collapsed && (
        <div
          className={cn(
            'absolute bottom-0 left-0 right-0 h-1 cursor-ns-resize hover:bg-primary/50 transition-all duration-200 z-20 group',
            isResizing && 'bg-primary/50 h-1.5'
          )}
          onMouseDown={handleResizeStart}
          title="Drag to resize track height"
        >
          <div className="absolute inset-x-0 bottom-0 h-px bg-border group-hover:bg-primary transition-colors duration-200" />
        </div>
      )}

      {/* Import Dialog */}
      {showImportDialog && pendingFile && (
        <ImportDialog
          fileName={pendingFile.name}
          originalSampleRate={fileMetadata.sampleRate}
          originalChannels={fileMetadata.channels}
          onImport={handleImport}
          onCancel={handleImportCancel}
        />
      )}
    </div>
  );
}
|