audio-ui/components/editor/AudioEditor.tsx
Sebastian Krüger 38a2b2962d feat: add export scope options (project/selection/tracks)
Implemented Phase 11.3 - Export Regions:
- Added export scope selector in ExportDialog
  - Entire Project: Mix all tracks into single file
  - Selected Region: Export only the selected region (disabled if no selection)
  - Individual Tracks: Export each track as separate file
- Updated ExportSettings interface with scope property
- Refactored handleExport to support all three export modes:
  - Project mode: Mix all tracks (existing behavior)
  - Selection mode: Extract selection from all tracks and mix
  - Tracks mode: Loop through tracks and export separately with sanitized filenames
- Added hasSelection prop to ExportDialog to enable/disable selection option
- Created helper function convertAndDownload to reduce code duplication
- Selection mode uses extractBufferSegment for precise region extraction
- Track names are sanitized for filenames (special characters replaced with underscores)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-19 07:47:56 +01:00
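
The three scopes described above map to three branches in the refactored handleExport (the full implementation is in the file below). A minimal sketch of that dispatch, assuming stand-in types and helper signatures (exportByScope, SketchTrack, mix, extract and convertAndDownload are illustrative names here, not the component's actual API):

// Sketch only: scope-driven export dispatch, mirroring the behaviour described in the commit message.
type ExportScope = 'project' | 'selection' | 'tracks';

interface SketchTrack {
  name: string;
  audioBuffer: AudioBuffer | null;
  selection: { start: number; end: number } | null;
}

async function exportByScope(
  scope: ExportScope,
  tracks: SketchTrack[],
  baseFilename: string,
  convertAndDownload: (buffer: AudioBuffer, filename: string) => Promise<string>,
  mix: (tracks: SketchTrack[]) => AudioBuffer,
  extract: (buffer: AudioBuffer, start: number, end: number) => AudioBuffer
): Promise<void> {
  if (scope === 'tracks') {
    // Individual Tracks: one file per track, track name sanitized for use in a filename
    for (const track of tracks) {
      if (!track.audioBuffer) continue;
      const safeName = track.name.replace(/[^a-z0-9]/gi, '_');
      await convertAndDownload(track.audioBuffer, `${baseFilename}_${safeName}`);
    }
  } else if (scope === 'selection') {
    // Selected Region: extract the region from every track, then mix the extracts
    const ref = tracks.find((t) => t.selection);
    if (!ref?.selection) return; // the dialog disables this option when nothing is selected
    const { start, end } = ref.selection;
    const clipped = tracks.map((t) => ({
      ...t,
      audioBuffer: t.audioBuffer ? extract(t.audioBuffer, start, end) : null,
    }));
    await convertAndDownload(mix(clipped), baseFilename);
  } else {
    // Entire Project: mix all tracks into a single file
    await convertAndDownload(mix(tracks), baseFilename);
  }
}

In the component itself, convertAndDownload additionally selects WAV, MP3 or FLAC encoding based on the chosen format, bit depth and bitrate before triggering the download.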


'use client';
import * as React from 'react';
import { Music, Plus, Upload, Trash2, Settings, Download } from 'lucide-react';
import { PlaybackControls } from './PlaybackControls';
import { MasterControls } from '@/components/controls/MasterControls';
import { FrequencyAnalyzer } from '@/components/analysis/FrequencyAnalyzer';
import { Spectrogram } from '@/components/analysis/Spectrogram';
import { PhaseCorrelationMeter } from '@/components/analysis/PhaseCorrelationMeter';
import { LUFSMeter } from '@/components/analysis/LUFSMeter';
import { AudioStatistics } from '@/components/analysis/AudioStatistics';
import { ThemeToggle } from '@/components/layout/ThemeToggle';
import { CommandPalette } from '@/components/ui/CommandPalette';
import { GlobalSettingsDialog } from '@/components/settings/GlobalSettingsDialog';
import { ExportDialog, type ExportSettings } from '@/components/dialogs/ExportDialog';
import { Button } from '@/components/ui/Button';
import type { CommandAction } from '@/components/ui/CommandPalette';
import { useMultiTrack } from '@/lib/hooks/useMultiTrack';
import { useMultiTrackPlayer } from '@/lib/hooks/useMultiTrackPlayer';
import { useEffectChain } from '@/lib/hooks/useEffectChain';
import { useToast } from '@/components/ui/Toast';
import { TrackList } from '@/components/tracks/TrackList';
import { ImportTrackDialog } from '@/components/tracks/ImportTrackDialog';
import { formatDuration } from '@/lib/audio/decoder';
import { useHistory } from '@/lib/hooks/useHistory';
import { useRecording } from '@/lib/hooks/useRecording';
import type { EffectType } from '@/lib/audio/effects/chain';
import {
createMultiTrackCutCommand,
createMultiTrackCopyCommand,
createMultiTrackDeleteCommand,
createMultiTrackPasteCommand,
createMultiTrackDuplicateCommand,
} from '@/lib/history/commands/multi-track-edit-command';
import { extractBufferSegment } from '@/lib/audio/buffer-utils';
import { mixTracks, getMaxTrackDuration } from '@/lib/audio/track-utils';
import { audioBufferToWav, audioBufferToMp3, audioBufferToFlac, downloadArrayBuffer } from '@/lib/audio/export';
export function AudioEditor() {
const [importDialogOpen, setImportDialogOpen] = React.useState(false);
const [selectedTrackId, setSelectedTrackId] = React.useState<string | null>(null);
const [zoom, setZoom] = React.useState(1);
const [masterVolume, setMasterVolume] = React.useState(0.8);
const [masterPan, setMasterPan] = React.useState(0);
const [isMasterMuted, setIsMasterMuted] = React.useState(false);
const [clipboard, setClipboard] = React.useState<AudioBuffer | null>(null);
const [recordingTrackId, setRecordingTrackId] = React.useState<string | null>(null);
const [punchInEnabled, setPunchInEnabled] = React.useState(false);
const [punchInTime, setPunchInTime] = React.useState(0);
const [punchOutTime, setPunchOutTime] = React.useState(0);
const [overdubEnabled, setOverdubEnabled] = React.useState(false);
const [settingsDialogOpen, setSettingsDialogOpen] = React.useState(false);
const [exportDialogOpen, setExportDialogOpen] = React.useState(false);
const [isExporting, setIsExporting] = React.useState(false);
const [analyzerView, setAnalyzerView] = React.useState<'frequency' | 'spectrogram' | 'phase' | 'lufs' | 'stats'>('frequency');
const { addToast } = useToast();
// Command history for undo/redo
const { execute: executeCommand, undo, redo, state: historyState } = useHistory();
const canUndo = historyState.canUndo;
const canRedo = historyState.canRedo;
// Recording hook
const {
state: recordingState,
settings: recordingSettings,
startRecording,
stopRecording,
requestPermission,
setInputGain,
setRecordMono,
setSampleRate,
} = useRecording();
// Multi-track hooks
const {
tracks,
addTrack: addTrackOriginal,
addTrackFromBuffer: addTrackFromBufferOriginal,
removeTrack,
updateTrack,
clearTracks,
} = useMultiTrack();
// Track whether we should auto-select on next add (when project is empty)
const shouldAutoSelectRef = React.useRef(true);
React.useEffect(() => {
// Update auto-select flag based on track count
shouldAutoSelectRef.current = tracks.length === 0;
}, [tracks.length]);
// Wrap addTrack to auto-select first track when adding to empty project
const addTrack = React.useCallback((name?: string) => {
const shouldAutoSelect = shouldAutoSelectRef.current;
const track = addTrackOriginal(name);
if (shouldAutoSelect) {
setSelectedTrackId(track.id);
shouldAutoSelectRef.current = false; // Only auto-select once
}
return track;
}, [addTrackOriginal]);
// Wrap addTrackFromBuffer to auto-select first track when adding to empty project
const addTrackFromBuffer = React.useCallback((buffer: AudioBuffer, name?: string) => {
console.log(`[AudioEditor] addTrackFromBuffer wrapper called: ${name}, shouldAutoSelect: ${shouldAutoSelectRef.current}`);
const shouldAutoSelect = shouldAutoSelectRef.current;
const track = addTrackFromBufferOriginal(buffer, name);
console.log(`[AudioEditor] Track created: ${track.name} (${track.id})`);
if (shouldAutoSelect) {
console.log(`[AudioEditor] Auto-selecting track: ${track.id}`);
setSelectedTrackId(track.id);
shouldAutoSelectRef.current = false; // Only auto-select once
}
return track;
}, [addTrackFromBufferOriginal]);
// Track which parameters are being touched (for touch/latch modes)
const [touchedParameters, setTouchedParameters] = React.useState<Set<string>>(new Set());
const [latchTriggered, setLatchTriggered] = React.useState<Set<string>>(new Set());
// Track last recorded values to detect changes
const lastRecordedValuesRef = React.useRef<Map<string, { value: number; time: number }>>(new Map());
// Automation recording callback
const handleAutomationRecording = React.useCallback((
trackId: string,
laneId: string,
currentTime: number,
value: number
) => {
const track = tracks.find(t => t.id === trackId);
if (!track) return;
const lane = track.automation.lanes.find(l => l.id === laneId);
if (!lane) return;
const paramKey = `${trackId}-${laneId}`;
let shouldRecord = false;
// Determine if we should record based on mode
switch (lane.mode) {
case 'write':
// Always record in write mode
shouldRecord = true;
break;
case 'touch':
// Only record when parameter is being touched
shouldRecord = touchedParameters.has(paramKey);
break;
case 'latch':
// Record from first touch until stop
if (touchedParameters.has(paramKey)) {
setLatchTriggered(prev => new Set(prev).add(paramKey));
}
shouldRecord = latchTriggered.has(paramKey);
break;
default:
shouldRecord = false;
}
if (!shouldRecord) return;
// Throttle to avoid flooding the lane with points: within a 100 ms window,
// only record again if the value has changed meaningfully
const lastRecorded = lastRecordedValuesRef.current.get(paramKey);
if (lastRecorded && currentTime - lastRecorded.time < 0.1) {
// Check if value has changed significantly
const valueChanged = Math.abs(lastRecorded.value - value) > 0.001;
if (!valueChanged) {
// Skip if value hasn't changed and we recorded recently
return;
}
}
// Update last recorded value
lastRecordedValuesRef.current.set(paramKey, { value, time: currentTime });
// Create new automation point
const newPoint = {
id: `point-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`,
time: currentTime,
value,
curve: 'linear' as const,
};
// In write mode, remove existing points near this time (overwrites)
const updatedPoints = lane.mode === 'write'
? [...lane.points.filter(p => Math.abs(p.time - currentTime) > 0.05), newPoint]
: [...lane.points, newPoint];
updatedPoints.sort((a, b) => a.time - b.time);
// Update the lane with new points
const updatedLanes = track.automation.lanes.map(l =>
l.id === laneId ? { ...l, points: updatedPoints } : l
);
updateTrack(trackId, {
automation: {
...track.automation,
lanes: updatedLanes,
},
});
}, [tracks, updateTrack, touchedParameters, latchTriggered]);
// Helper to mark parameter as touched (for touch/latch modes)
const setParameterTouched = React.useCallback((trackId: string, laneId: string, touched: boolean) => {
const paramKey = `${trackId}-${laneId}`;
setTouchedParameters(prev => {
const next = new Set(prev);
if (touched) {
next.add(paramKey);
} else {
next.delete(paramKey);
}
return next;
});
}, []);
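// Multi-track playback engine: drives transport state, per-track and master level
// metering, and invokes the automation-recording callback defined above during playback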
const {
isPlaying,
currentTime,
duration,
trackLevels,
masterPeakLevel,
masterRmsLevel,
masterIsClipping,
masterAnalyser,
resetClipIndicator,
play,
pause,
stop,
seek,
togglePlayPause,
} = useMultiTrackPlayer(tracks, masterVolume, handleAutomationRecording);
// Reset latch triggered state when playback stops
React.useEffect(() => {
if (!isPlaying) {
setLatchTriggered(new Set());
lastRecordedValuesRef.current.clear();
}
}, [isPlaying]);
// Record effect parameter values while touched
React.useEffect(() => {
if (!isPlaying) return;
const recordEffectParams = () => {
const time = currentTime;
touchedParameters.forEach(paramKey => {
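// NOTE: paramKey is `${trackId}-${laneId}`; this split assumes neither id contains
// a '-' of its own (otherwise the track/lane lookup below will miss)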
const [trackId, laneId] = paramKey.split('-');
const track = tracks.find(t => t.id === trackId);
if (!track) return;
const lane = track.automation.lanes.find(l => l.id === laneId);
if (!lane || !lane.parameterId.startsWith('effect.')) return;
// Parse effect parameter ID: effect.{effectId}.{paramName}
const parts = lane.parameterId.split('.');
if (parts.length !== 3) return;
const effectId = parts[1];
const paramName = parts[2];
const effect = track.effectChain.effects.find(e => e.id === effectId);
if (!effect || !effect.parameters) return;
const currentValue = (effect.parameters as any)[paramName];
if (currentValue === undefined) return;
// Normalize value to 0-1 range
const range = lane.valueRange.max - lane.valueRange.min;
const normalizedValue = (currentValue - lane.valueRange.min) / range;
// Record the automation
handleAutomationRecording(trackId, laneId, time, normalizedValue);
});
};
const interval = setInterval(recordEffectParams, 50); // Record every 50ms while touched
return () => clearInterval(interval);
}, [isPlaying, currentTime, touchedParameters, tracks, handleAutomationRecording]);
// Master effect chain
const {
chain: masterEffectChain,
presets: masterEffectPresets,
toggleEffectEnabled: toggleMasterEffect,
removeEffect: removeMasterEffect,
reorder: reorderMasterEffects,
clearChain: clearMasterChain,
savePreset: saveMasterPreset,
loadPresetToChain: loadMasterPreset,
deletePreset: deleteMasterPreset,
} = useEffectChain();
// Multi-track handlers
const handleImportTracks = () => {
setImportDialogOpen(true);
};
const handleImportTrack = (buffer: AudioBuffer, name: string) => {
console.log(`[AudioEditor] handleImportTrack called: ${name}`);
addTrackFromBuffer(buffer, name);
};
const handleClearTracks = () => {
clearTracks();
setSelectedTrackId(null);
addToast({
title: 'Tracks Cleared',
description: 'All tracks have been removed',
variant: 'info',
duration: 2000,
});
};
const handleRemoveTrack = (trackId: string) => {
removeTrack(trackId);
if (selectedTrackId === trackId) {
setSelectedTrackId(null);
}
};
// Per-track effect chain handlers
const handleToggleTrackEffect = (effectId: string) => {
if (!selectedTrack) return;
const updatedChain = {
...selectedTrack.effectChain,
effects: selectedTrack.effectChain.effects.map((e) =>
e.id === effectId ? { ...e, enabled: !e.enabled } : e
),
};
updateTrack(selectedTrack.id, { effectChain: updatedChain });
};
const handleRemoveTrackEffect = (effectId: string) => {
if (!selectedTrack) return;
const updatedChain = {
...selectedTrack.effectChain,
effects: selectedTrack.effectChain.effects.filter((e) => e.id !== effectId),
};
updateTrack(selectedTrack.id, { effectChain: updatedChain });
};
const handleReorderTrackEffects = (fromIndex: number, toIndex: number) => {
if (!selectedTrack) return;
const effects = [...selectedTrack.effectChain.effects];
const [removed] = effects.splice(fromIndex, 1);
effects.splice(toIndex, 0, removed);
const updatedChain = {
...selectedTrack.effectChain,
effects,
};
updateTrack(selectedTrack.id, { effectChain: updatedChain });
};
const handleClearTrackChain = () => {
if (!selectedTrack) return;
const updatedChain = {
...selectedTrack.effectChain,
effects: [],
};
updateTrack(selectedTrack.id, { effectChain: updatedChain });
};
// Effects Panel handlers
const handleAddEffect = React.useCallback((effectType: EffectType) => {
if (!selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
// Import createEffect and EFFECT_NAMES dynamically
import('@/lib/audio/effects/chain').then(({ createEffect, EFFECT_NAMES }) => {
const newEffect = createEffect(effectType, EFFECT_NAMES[effectType]);
const updatedChain = {
...track.effectChain,
effects: [...track.effectChain.effects, newEffect],
};
updateTrack(selectedTrackId, { effectChain: updatedChain });
});
}, [selectedTrackId, tracks, updateTrack]);
const handleToggleEffect = React.useCallback((effectId: string) => {
if (!selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
const updatedChain = {
...track.effectChain,
effects: track.effectChain.effects.map((e) =>
e.id === effectId ? { ...e, enabled: !e.enabled } : e
),
};
updateTrack(selectedTrackId, { effectChain: updatedChain });
}, [selectedTrackId, tracks, updateTrack]);
const handleRemoveEffect = React.useCallback((effectId: string) => {
if (!selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
const updatedChain = {
...track.effectChain,
effects: track.effectChain.effects.filter((e) => e.id !== effectId),
};
updateTrack(selectedTrackId, { effectChain: updatedChain });
}, [selectedTrackId, tracks, updateTrack]);
const handleUpdateEffect = React.useCallback((effectId: string, parameters: any) => {
if (!selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
const updatedChain = {
...track.effectChain,
effects: track.effectChain.effects.map((e) =>
e.id === effectId ? { ...e, parameters } : e
),
};
updateTrack(selectedTrackId, { effectChain: updatedChain });
}, [selectedTrackId, tracks, updateTrack]);
const handleToggleEffectExpanded = React.useCallback((effectId: string) => {
if (!selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
const updatedChain = {
...track.effectChain,
effects: track.effectChain.effects.map((e) =>
e.id === effectId ? { ...e, expanded: !e.expanded } : e
),
};
updateTrack(selectedTrackId, { effectChain: updatedChain });
}, [selectedTrackId, tracks, updateTrack]);
// Preserve effects panel state - don't auto-open/close on track selection
// Selection handler
const handleSelectionChange = (trackId: string, selection: { start: number; end: number } | null) => {
updateTrack(trackId, { selection });
};
// Recording handlers
const handleToggleRecordEnable = React.useCallback((trackId: string) => {
const track = tracks.find((t) => t.id === trackId);
if (!track) return;
// Toggle record enable
updateTrack(trackId, { recordEnabled: !track.recordEnabled });
}, [tracks, updateTrack]);
const handleStartRecording = React.useCallback(async () => {
// Find first armed track
const armedTrack = tracks.find((t) => t.recordEnabled);
if (!armedTrack) {
addToast({
title: 'No Track Armed',
description: 'Please arm a track for recording first',
variant: 'warning',
duration: 3000,
});
return;
}
// Request permission if needed
const hasPermission = await requestPermission();
if (!hasPermission) {
addToast({
title: 'Microphone Access Denied',
description: 'Please allow microphone access to record',
variant: 'error',
duration: 3000,
});
return;
}
try {
await startRecording();
setRecordingTrackId(armedTrack.id);
addToast({
title: 'Recording Started',
description: `Recording to ${armedTrack.name}`,
variant: 'success',
duration: 2000,
});
} catch (error) {
console.error('Failed to start recording:', error);
addToast({
title: 'Recording Failed',
description: 'Failed to start recording',
variant: 'error',
duration: 3000,
});
}
}, [tracks, startRecording, requestPermission, addToast]);
const handleStopRecording = React.useCallback(async () => {
if (!recordingTrackId) return;
try {
const recordedBuffer = await stopRecording();
if (recordedBuffer) {
const track = tracks.find((t) => t.id === recordingTrackId);
// Check if overdub mode is enabled and track has existing audio
if (overdubEnabled && track?.audioBuffer) {
// Mix recorded audio with existing audio
const audioContext = new AudioContext();
const existingBuffer = track.audioBuffer;
// Create a new buffer that's long enough for both
const maxDuration = Math.max(existingBuffer.duration, recordedBuffer.duration);
const maxChannels = Math.max(existingBuffer.numberOfChannels, recordedBuffer.numberOfChannels);
const mixedBuffer = audioContext.createBuffer(
maxChannels,
Math.floor(maxDuration * existingBuffer.sampleRate),
existingBuffer.sampleRate
);
// Mix each channel
for (let channel = 0; channel < maxChannels; channel++) {
const mixedData = mixedBuffer.getChannelData(channel);
const existingData = channel < existingBuffer.numberOfChannels
? existingBuffer.getChannelData(channel)
: new Float32Array(mixedData.length);
const recordedData = channel < recordedBuffer.numberOfChannels
? recordedBuffer.getChannelData(channel)
: new Float32Array(mixedData.length);
// Mix the samples (average them to avoid clipping)
for (let i = 0; i < mixedData.length; i++) {
const existingSample = i < existingData.length ? existingData[i] : 0;
const recordedSample = i < recordedData.length ? recordedData[i] : 0;
mixedData[i] = (existingSample + recordedSample) / 2;
}
}
// Close the temporary AudioContext now that the mixed buffer has been allocated
audioContext.close();
updateTrack(recordingTrackId, { audioBuffer: mixedBuffer });
addToast({
title: 'Recording Complete (Overdub)',
description: `Mixed ${recordedBuffer.duration.toFixed(2)}s with existing audio`,
variant: 'success',
duration: 3000,
});
} else {
// Normal mode - replace existing audio
updateTrack(recordingTrackId, { audioBuffer: recordedBuffer });
addToast({
title: 'Recording Complete',
description: `Recorded ${recordedBuffer.duration.toFixed(2)}s of audio`,
variant: 'success',
duration: 3000,
});
}
}
setRecordingTrackId(null);
} catch (error) {
console.error('Failed to stop recording:', error);
addToast({
title: 'Recording Error',
description: 'Failed to save recording',
variant: 'error',
duration: 3000,
});
setRecordingTrackId(null);
}
}, [recordingTrackId, stopRecording, updateTrack, addToast, overdubEnabled, tracks]);
// Edit handlers
const handleCut = React.useCallback(() => {
const track = tracks.find((t) => t.selection);
if (!track || !track.audioBuffer || !track.selection) return;
// Extract to clipboard
const extracted = extractBufferSegment(
track.audioBuffer,
track.selection.start,
track.selection.end
);
setClipboard(extracted);
// Execute cut command
const command = createMultiTrackCutCommand(
track.id,
track.audioBuffer,
track.selection,
(trackId, buffer, selection) => {
updateTrack(trackId, { audioBuffer: buffer, selection });
}
);
executeCommand(command);
addToast({
title: 'Cut',
description: 'Selection cut to clipboard',
variant: 'success',
duration: 2000,
});
}, [tracks, executeCommand, updateTrack, addToast]);
const handleCopy = React.useCallback(() => {
const track = tracks.find((t) => t.selection);
if (!track || !track.audioBuffer || !track.selection) return;
// Extract to clipboard
const extracted = extractBufferSegment(
track.audioBuffer,
track.selection.start,
track.selection.end
);
setClipboard(extracted);
// Execute copy command (doesn't modify buffer, just for undo history)
const command = createMultiTrackCopyCommand(
track.id,
track.audioBuffer,
track.selection,
(trackId, buffer, selection) => {
updateTrack(trackId, { audioBuffer: buffer, selection });
}
);
executeCommand(command);
addToast({
title: 'Copy',
description: 'Selection copied to clipboard',
variant: 'success',
duration: 2000,
});
}, [tracks, executeCommand, updateTrack, addToast]);
const handlePaste = React.useCallback(() => {
if (!clipboard || !selectedTrackId) return;
const track = tracks.find((t) => t.id === selectedTrackId);
if (!track) return;
// Paste at the playhead position, falling back to the end of the existing buffer when the playhead is at 0
const pastePosition = currentTime || track.audioBuffer?.duration || 0;
const command = createMultiTrackPasteCommand(
track.id,
track.audioBuffer,
clipboard,
pastePosition,
(trackId, buffer, selection) => {
updateTrack(trackId, { audioBuffer: buffer, selection });
}
);
executeCommand(command);
addToast({
title: 'Paste',
description: 'Clipboard content pasted',
variant: 'success',
duration: 2000,
});
}, [clipboard, selectedTrackId, tracks, currentTime, executeCommand, updateTrack, addToast]);
const handleDelete = React.useCallback(() => {
const track = tracks.find((t) => t.selection);
if (!track || !track.audioBuffer || !track.selection) return;
const command = createMultiTrackDeleteCommand(
track.id,
track.audioBuffer,
track.selection,
(trackId, buffer, selection) => {
updateTrack(trackId, { audioBuffer: buffer, selection });
}
);
executeCommand(command);
addToast({
title: 'Delete',
description: 'Selection deleted',
variant: 'success',
duration: 2000,
});
}, [tracks, executeCommand, updateTrack, addToast]);
const handleDuplicate = React.useCallback(() => {
const track = tracks.find((t) => t.selection);
if (!track || !track.audioBuffer || !track.selection) return;
const command = createMultiTrackDuplicateCommand(
track.id,
track.audioBuffer,
track.selection,
(trackId, buffer, selection) => {
updateTrack(trackId, { audioBuffer: buffer, selection });
}
);
executeCommand(command);
addToast({
title: 'Duplicate',
description: 'Selection duplicated',
variant: 'success',
duration: 2000,
});
}, [tracks, executeCommand, updateTrack, addToast]);
// Export handler
const handleExport = React.useCallback(async (settings: ExportSettings) => {
if (tracks.length === 0) {
addToast({
title: 'No Tracks',
description: 'Add some tracks before exporting',
variant: 'warning',
duration: 3000,
});
return;
}
setIsExporting(true);
try {
const sampleRate = tracks[0]?.audioBuffer?.sampleRate || 44100;
// Helper function to convert and download a buffer
const convertAndDownload = async (buffer: AudioBuffer, filename: string) => {
let exportedBuffer: ArrayBuffer;
let mimeType: string;
let fileExtension: string;
if (settings.format === 'mp3') {
exportedBuffer = await audioBufferToMp3(buffer, {
format: 'mp3',
bitrate: settings.bitrate,
normalize: settings.normalize,
});
mimeType = 'audio/mpeg';
fileExtension = 'mp3';
} else if (settings.format === 'flac') {
exportedBuffer = await audioBufferToFlac(buffer, {
format: 'flac',
bitDepth: settings.bitDepth,
quality: settings.quality,
normalize: settings.normalize,
});
mimeType = 'application/octet-stream';
fileExtension = 'flac';
} else {
exportedBuffer = audioBufferToWav(buffer, {
format: 'wav',
bitDepth: settings.bitDepth,
normalize: settings.normalize,
});
mimeType = 'audio/wav';
fileExtension = 'wav';
}
const fullFilename = `${filename}.${fileExtension}`;
downloadArrayBuffer(exportedBuffer, fullFilename, mimeType);
return fullFilename;
};
if (settings.scope === 'tracks') {
// Export each track individually
let exportedCount = 0;
for (const track of tracks) {
if (!track.audioBuffer) continue;
const trackFilename = `${settings.filename}_${track.name.replace(/[^a-z0-9]/gi, '_')}`;
await convertAndDownload(track.audioBuffer, trackFilename);
exportedCount++;
}
addToast({
title: 'Export Complete',
description: `Exported ${exportedCount} track${exportedCount !== 1 ? 's' : ''}`,
variant: 'success',
duration: 3000,
});
} else if (settings.scope === 'selection') {
// Export selected region
const selectedTrack = tracks.find(t => t.selection);
if (!selectedTrack || !selectedTrack.selection) {
addToast({
title: 'No Selection',
description: 'No region selected for export',
variant: 'warning',
duration: 3000,
});
setIsExporting(false);
return;
}
// Extract selection from all tracks and mix
const selectionStart = selectedTrack.selection.start;
const selectionEnd = selectedTrack.selection.end;
const selectionDuration = selectionEnd - selectionStart;
// Create tracks with only the selected region
const selectedTracks = tracks.map(track => ({
...track,
audioBuffer: track.audioBuffer
? extractBufferSegment(track.audioBuffer, selectionStart, selectionEnd)
: null,
}));
const mixedBuffer = mixTracks(selectedTracks, sampleRate, selectionDuration);
const filename = await convertAndDownload(mixedBuffer, settings.filename);
addToast({
title: 'Export Complete',
description: `Exported ${filename}`,
variant: 'success',
duration: 3000,
});
} else {
// Export entire project (mix all tracks)
const maxDuration = getMaxTrackDuration(tracks);
const mixedBuffer = mixTracks(tracks, sampleRate, maxDuration);
const filename = await convertAndDownload(mixedBuffer, settings.filename);
addToast({
title: 'Export Complete',
description: `Exported ${filename}`,
variant: 'success',
duration: 3000,
});
}
setExportDialogOpen(false);
} catch (error) {
console.error('Export failed:', error);
addToast({
title: 'Export Failed',
description: 'Failed to export audio',
variant: 'error',
duration: 3000,
});
} finally {
setIsExporting(false);
}
}, [tracks, addToast]);
// Zoom controls
const handleZoomIn = () => {
setZoom((prev) => Math.min(20, prev + 1));
};
const handleZoomOut = () => {
setZoom((prev) => Math.max(1, prev - 1));
};
const handleFitToView = () => {
setZoom(1);
};
// Find selected track
const selectedTrack = tracks.find((t) => t.id === selectedTrackId);
// Command palette actions
const commandActions: CommandAction[] = React.useMemo(() => {
const actions: CommandAction[] = [
// Playback
{
id: 'play',
label: 'Play',
description: 'Start playback',
shortcut: 'Space',
category: 'playback',
action: play,
},
{
id: 'pause',
label: 'Pause',
description: 'Pause playback',
shortcut: 'Space',
category: 'playback',
action: pause,
},
{
id: 'stop',
label: 'Stop',
description: 'Stop playback',
category: 'playback',
action: stop,
},
// View
{
id: 'zoom-in',
label: 'Zoom In',
description: 'Zoom in on waveforms',
category: 'view',
action: handleZoomIn,
},
{
id: 'zoom-out',
label: 'Zoom Out',
description: 'Zoom out on waveforms',
category: 'view',
action: handleZoomOut,
},
{
id: 'fit-to-view',
label: 'Fit to View',
description: 'Reset zoom to fit all tracks',
category: 'view',
action: handleFitToView,
},
// Tracks
{
id: 'add-track',
label: 'Add Empty Track',
description: 'Create a new empty track',
category: 'tracks',
action: () => addTrack(),
},
{
id: 'import-tracks',
label: 'Import Audio Files',
description: 'Import multiple audio files as tracks',
category: 'tracks',
action: handleImportTracks,
},
{
id: 'clear-tracks',
label: 'Clear All Tracks',
description: 'Remove all tracks',
category: 'tracks',
action: handleClearTracks,
},
];
return actions;
}, [play, pause, stop, handleZoomIn, handleZoomOut, handleFitToView, handleImportTracks, handleClearTracks, addTrack]);
// Keyboard shortcuts
React.useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
// Prevent shortcuts if typing in an input
const target = e.target as HTMLElement;
const isTyping = target instanceof HTMLInputElement || target instanceof HTMLTextAreaElement;
// Spacebar: Play/Pause (unless typing or interacting with a button/form control)
const isButtonLike = target instanceof HTMLButtonElement || target.getAttribute('role') === 'button';
if (e.code === 'Space' && !isTyping && !isButtonLike) {
e.preventDefault();
togglePlayPause();
return;
}
if (isTyping) return;
// Ctrl/Cmd+Z: Undo
if ((e.ctrlKey || e.metaKey) && e.key.toLowerCase() === 'z' && !e.shiftKey) {
e.preventDefault();
if (canUndo) {
undo();
}
return;
}
// Ctrl/Cmd+Shift+Z or Ctrl/Cmd+Y: Redo
if (((e.ctrlKey || e.metaKey) && e.key.toLowerCase() === 'z' && e.shiftKey) || ((e.ctrlKey || e.metaKey) && e.key.toLowerCase() === 'y')) {
e.preventDefault();
if (canRedo) {
redo();
}
return;
}
// Ctrl/Cmd+X: Cut
if ((e.ctrlKey || e.metaKey) && e.key === 'x') {
e.preventDefault();
handleCut();
return;
}
// Ctrl/Cmd+C: Copy
if ((e.ctrlKey || e.metaKey) && e.key === 'c') {
e.preventDefault();
handleCopy();
return;
}
// Ctrl/Cmd+V: Paste
if ((e.ctrlKey || e.metaKey) && e.key === 'v') {
e.preventDefault();
handlePaste();
return;
}
// Ctrl/Cmd+D: Duplicate
if ((e.ctrlKey || e.metaKey) && e.key === 'd') {
e.preventDefault();
handleDuplicate();
return;
}
// Delete or Backspace: Delete selection
if (e.key === 'Delete' || e.key === 'Backspace') {
e.preventDefault();
handleDelete();
return;
}
// Escape: Clear selection
if (e.key === 'Escape') {
e.preventDefault();
setSelectedTrackId(null);
}
};
window.addEventListener('keydown', handleKeyDown);
return () => window.removeEventListener('keydown', handleKeyDown);
}, [togglePlayPause, canUndo, canRedo, undo, redo, handleCut, handleCopy, handlePaste, handleDelete, handleDuplicate]);
return (
<>
{/* Compact Header */}
<header className="flex items-center justify-between px-4 py-2 border-b border-border bg-card flex-shrink-0 gap-4">
{/* Left: Logo */}
<div className="flex items-center gap-4 flex-shrink-0">
<div className="flex items-center gap-2">
<Music className="h-5 w-5 text-primary" />
<h1 className="text-lg font-bold text-foreground">Audio UI</h1>
</div>
{/* Track Actions */}
<div className="flex items-center gap-2 border-l border-border pl-4">
<Button variant="outline" size="sm" onClick={() => addTrack()}>
<Plus className="h-4 w-4 mr-1.5" />
Add Track
</Button>
<Button variant="outline" size="sm" onClick={handleImportTracks}>
<Upload className="h-4 w-4 mr-1.5" />
Import
</Button>
{tracks.length > 0 && (
<>
<Button variant="outline" size="sm" onClick={() => setExportDialogOpen(true)}>
<Download className="h-4 w-4 mr-1.5" />
Export
</Button>
<Button variant="outline" size="sm" onClick={handleClearTracks}>
<Trash2 className="h-4 w-4 mr-1.5 text-destructive" />
Clear All
</Button>
</>
)}
</div>
</div>
{/* Right: Command Palette + Settings + Theme Toggle */}
<div className="flex items-center gap-2 flex-shrink-0">
<CommandPalette actions={commandActions} />
<Button
variant="ghost"
size="icon"
onClick={() => setSettingsDialogOpen(true)}
title="Settings"
>
<Settings className="h-5 w-5" />
</Button>
<ThemeToggle />
</div>
</header>
{/* Main content area */}
<div className="flex flex-1 overflow-hidden">
{/* Main canvas area */}
<main className="flex-1 flex flex-col overflow-hidden bg-background">
{/* Multi-Track View */}
<div className="flex-1 flex flex-col overflow-hidden">
<TrackList
tracks={tracks}
zoom={zoom}
currentTime={currentTime}
duration={duration}
selectedTrackId={selectedTrackId}
onSelectTrack={setSelectedTrackId}
onAddTrack={addTrack}
onImportTrack={handleImportTrack}
onRemoveTrack={handleRemoveTrack}
onUpdateTrack={updateTrack}
onSeek={seek}
onSelectionChange={handleSelectionChange}
onToggleRecordEnable={handleToggleRecordEnable}
recordingTrackId={recordingTrackId}
recordingLevel={recordingState.inputLevel}
trackLevels={trackLevels}
onParameterTouched={setParameterTouched}
isPlaying={isPlaying}
/>
</div>
</main>
{/* Right Sidebar - Master Controls & Analyzers */}
<aside className="flex-shrink-0 border-l border-border bg-card flex flex-col p-4 gap-4 w-[280px]">
{/* Master Controls */}
<div className="flex items-center justify-center">
<MasterControls
volume={masterVolume}
pan={masterPan}
peakLevel={masterPeakLevel}
rmsLevel={masterRmsLevel}
isClipping={masterIsClipping}
isMuted={isMasterMuted}
onVolumeChange={setMasterVolume}
onPanChange={setMasterPan}
onMuteToggle={() => {
if (isMasterMuted) {
setMasterVolume(0.8);
setIsMasterMuted(false);
} else {
setMasterVolume(0);
setIsMasterMuted(true);
}
}}
onResetClip={resetClipIndicator}
/>
</div>
{/* Analyzer Toggle */}
<div className="grid grid-cols-5 gap-0.5 bg-muted/20 border border-border/50 rounded-md p-0.5 max-w-[192px] mx-auto">
<button
onClick={() => setAnalyzerView('frequency')}
className={`px-1 py-1 rounded text-[9px] font-bold uppercase tracking-wider transition-all ${
analyzerView === 'frequency'
? 'bg-accent text-accent-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'
}`}
title="Frequency Analyzer"
>
FFT
</button>
<button
onClick={() => setAnalyzerView('spectrogram')}
className={`px-1 py-1 rounded text-[9px] font-bold uppercase tracking-wider transition-all ${
analyzerView === 'spectrogram'
? 'bg-accent text-accent-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'
}`}
title="Spectrogram"
>
SPEC
</button>
<button
onClick={() => setAnalyzerView('phase')}
className={`px-1 py-1 rounded text-[9px] font-bold uppercase tracking-wider transition-all ${
analyzerView === 'phase'
? 'bg-accent text-accent-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'
}`}
title="Phase Correlation"
>
PHS
</button>
<button
onClick={() => setAnalyzerView('lufs')}
className={`px-1 py-1 rounded text-[9px] font-bold uppercase tracking-wider transition-all ${
analyzerView === 'lufs'
? 'bg-accent text-accent-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'
}`}
title="LUFS Loudness"
>
LUFS
</button>
<button
onClick={() => setAnalyzerView('stats')}
className={`px-1 py-1 rounded text-[9px] font-bold uppercase tracking-wider transition-all ${
analyzerView === 'stats'
? 'bg-accent text-accent-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'
}`}
title="Audio Statistics"
>
INFO
</button>
</div>
{/* Analyzer Display */}
<div className="flex-1 min-h-[360px] flex items-start justify-center">
<div className="w-[192px]">
{analyzerView === 'frequency' && <FrequencyAnalyzer analyserNode={masterAnalyser} />}
{analyzerView === 'spectrogram' && <Spectrogram analyserNode={masterAnalyser} />}
{analyzerView === 'phase' && <PhaseCorrelationMeter analyserNode={masterAnalyser} />}
{analyzerView === 'lufs' && <LUFSMeter analyserNode={masterAnalyser} />}
{analyzerView === 'stats' && <AudioStatistics tracks={tracks} />}
</div>
</div>
</aside>
</div>
{/* Transport Controls */}
<div className="border-t border-border bg-card p-3 flex justify-center">
<PlaybackControls
isPlaying={isPlaying}
isPaused={!isPlaying}
currentTime={currentTime}
duration={duration}
volume={masterVolume}
onPlay={play}
onPause={pause}
onStop={stop}
onSeek={seek}
onVolumeChange={setMasterVolume}
currentTimeFormatted={formatDuration(currentTime)}
durationFormatted={formatDuration(duration)}
isRecording={recordingState.isRecording}
onStartRecording={handleStartRecording}
onStopRecording={handleStopRecording}
punchInEnabled={punchInEnabled}
punchInTime={punchInTime}
punchOutTime={punchOutTime}
onPunchInEnabledChange={setPunchInEnabled}
onPunchInTimeChange={setPunchInTime}
onPunchOutTimeChange={setPunchOutTime}
overdubEnabled={overdubEnabled}
onOverdubEnabledChange={setOverdubEnabled}
/>
</div>
{/* Import Track Dialog */}
<ImportTrackDialog
open={importDialogOpen}
onClose={() => setImportDialogOpen(false)}
onImportTrack={handleImportTrack}
/>
{/* Global Settings Dialog */}
<GlobalSettingsDialog
open={settingsDialogOpen}
onClose={() => setSettingsDialogOpen(false)}
recordingSettings={recordingSettings}
onInputGainChange={setInputGain}
onRecordMonoChange={setRecordMono}
onSampleRateChange={setSampleRate}
/>
{/* Export Dialog */}
<ExportDialog
open={exportDialogOpen}
onClose={() => setExportDialogOpen(false)}
onExport={handleExport}
isExporting={isExporting}
hasSelection={tracks.some(t => t.selection !== null)}
/>
</>
);
}