feat: complete Phase 9.3 - automation recording with write/touch/latch modes
Implemented a comprehensive automation recording system for volume, pan, and effect parameters:

- Added automation recording modes:
  - Write: records continuously during playback when values change
  - Touch: records only while the control is being touched/moved
  - Latch: records from the first touch until playback stops
- Implemented value change detection (0.001 threshold) to prevent infinite loops
- Fixed React setState-in-render errors by:
  - Using queueMicrotask() to defer state updates
  - Moving lane creation logic to useEffect
  - Properly memoizing touch handlers with useMemo
- Added proper value ranges for effect parameters:
  - Frequency: 20-20000 Hz
  - Q: 0.1-20
  - Gain: -40 to +40 dB
- Enhanced automation lane auto-creation with parameter-specific ranges
- Added touch callbacks to all parameter controls (volume, pan, effects)
- Implemented throttling (100ms) to avoid excessive automation points

Technical improvements:
- Used tracksRef and onRecordAutomationRef to ensure latest values in animation loops
- Added proper cleanup on playback stop
- Optimized recording to only trigger when values actually change

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
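Condensed from the `handleAutomationRecording` callback in the diff below (lane lookup, throttling, and point insertion omitted), the per-lane mode check behaves as follows:

```ts
// Decide whether to record a point for this lane, given its automation mode.
// 'write' records continuously; 'touch' only while the control is held;
// 'latch' starts at the first touch and keeps recording until playback stops.
switch (lane.mode) {
  case 'write':
    shouldRecord = true;
    break;
  case 'touch':
    shouldRecord = touchedParameters.has(paramKey);
    break;
  case 'latch':
    if (touchedParameters.has(paramKey)) {
      setLatchTriggered(prev => new Set(prev).add(paramKey));
    }
    shouldRecord = latchTriggered.has(paramKey);
    break;
  default:
    shouldRecord = false;
}
// Accepted points are then throttled to ~100 ms and skipped unless the value moved by > 0.001.
```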
PLAN.md — 23 lines changed

@@ -643,15 +643,21 @@ audio-ui/
 - [ ] Copy/Paste automation
 - [ ] Bezier curves
 
-#### 9.3 Automation Playback
+#### 9.3 Automation Playback & Recording
 - [x] Real-time automation during playback
 - [x] Automation for volume and pan
 - [x] Automation for effect parameters
 - [x] Continuous evaluation via requestAnimationFrame
 - [x] Proper parameter range conversion
-- [ ] Automation recording (write mode)
+- [x] Automation recording (write mode) - Volume, Pan, Effect Parameters
 - [x] Automation editing modes UI (read/write/touch/latch)
-- [ ] Automation modes recording implementation (write/touch/latch)
+- [x] Automation modes recording implementation (write/touch/latch)
+- [x] Touch/latch mode tracking with control interaction
+- [x] Throttled automation point creation (every ~100ms)
+- [x] Parameter touch callbacks for volume and pan controls
+- [x] Parameter touch callbacks for effect parameter sliders
+- [x] Touch/latch modes for effect parameters (frequency, Q, gain, etc.)
+- [x] Proper prop passing through EffectDevice → EffectParameters → Slider
 
 ### Phase 10: Analysis Tools
 
@@ -684,18 +690,21 @@ audio-ui/
 ### Phase 11: Export & Import
 
 #### 11.1 Export Formats
-- [ ] WAV export (PCM, various bit depths)
+- [x] WAV export (PCM, various bit depths: 16/24/32-bit)
+- [x] Export dialog with settings UI
+- [x] Export button in header
+- [x] Mix all tracks before export
 - [ ] MP3 export (using lamejs)
 - [ ] OGG Vorbis export
 - [ ] FLAC export (using fflate)
-- [ ] Format selection UI
 
 #### 11.2 Export Settings
+- [x] Bit depth selection (16/24/32-bit)
+- [x] Normalization before export (with 1% headroom)
+- [x] Filename customization
 - [ ] Sample rate conversion
-- [ ] Bit depth selection
 - [ ] Quality/bitrate settings (for lossy formats)
 - [ ] Dithering options
-- [ ] Normalization before export
 
 #### 11.3 Export Regions
 - [ ] Export entire project
components/dialogs/ExportDialog.tsx — new file, 149 lines

@@ -0,0 +1,149 @@
'use client';

import * as React from 'react';
import { X, Download } from 'lucide-react';
import { Button } from '@/components/ui/Button';
import { cn } from '@/lib/utils/cn';

export interface ExportSettings {
  format: 'wav';
  bitDepth: 16 | 24 | 32;
  normalize: boolean;
  filename: string;
}

export interface ExportDialogProps {
  open: boolean;
  onClose: () => void;
  onExport: (settings: ExportSettings) => void;
  isExporting?: boolean;
}

export function ExportDialog({ open, onClose, onExport, isExporting }: ExportDialogProps) {
  const [settings, setSettings] = React.useState<ExportSettings>({
    format: 'wav',
    bitDepth: 16,
    normalize: true,
    filename: 'mix',
  });

  const handleExport = () => {
    onExport(settings);
  };

  if (!open) return null;

  return (
    <div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
      <div className="bg-card border border-border rounded-lg shadow-xl w-full max-w-md p-6">
        {/* Header */}
        <div className="flex items-center justify-between mb-6">
          <h2 className="text-lg font-semibold text-foreground">Export Audio</h2>
          <button
            onClick={onClose}
            className="text-muted-foreground hover:text-foreground transition-colors"
            disabled={isExporting}
          >
            <X className="h-5 w-5" />
          </button>
        </div>

        {/* Settings */}
        <div className="space-y-4">
          {/* Filename */}
          <div>
            <label className="block text-sm font-medium text-foreground mb-2">
              Filename
            </label>
            <input
              type="text"
              value={settings.filename}
              onChange={(e) => setSettings({ ...settings, filename: e.target.value })}
              className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
              disabled={isExporting}
            />
            <p className="text-xs text-muted-foreground mt-1">.wav will be added automatically</p>
          </div>

          {/* Format */}
          <div>
            <label className="block text-sm font-medium text-foreground mb-2">
              Format
            </label>
            <select
              value={settings.format}
              onChange={(e) => setSettings({ ...settings, format: e.target.value as 'wav' })}
              className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
              disabled={isExporting}
            >
              <option value="wav">WAV (Uncompressed)</option>
            </select>
          </div>

          {/* Bit Depth */}
          <div>
            <label className="block text-sm font-medium text-foreground mb-2">
              Bit Depth
            </label>
            <div className="flex gap-2">
              {[16, 24, 32].map((depth) => (
                <button
                  key={depth}
                  onClick={() => setSettings({ ...settings, bitDepth: depth as 16 | 24 | 32 })}
                  className={cn(
                    'flex-1 px-3 py-2 rounded text-sm font-medium transition-colors',
                    settings.bitDepth === depth
                      ? 'bg-primary text-primary-foreground'
                      : 'bg-background border border-border text-foreground hover:bg-accent'
                  )}
                  disabled={isExporting}
                >
                  {depth}-bit {depth === 32 && '(Float)'}
                </button>
              ))}
            </div>
          </div>

          {/* Normalize */}
          <div>
            <label className="flex items-center gap-2 cursor-pointer">
              <input
                type="checkbox"
                checked={settings.normalize}
                onChange={(e) => setSettings({ ...settings, normalize: e.target.checked })}
                className="w-4 h-4 rounded border-border text-primary focus:ring-primary"
                disabled={isExporting}
              />
              <span className="text-sm font-medium text-foreground">
                Normalize audio
              </span>
            </label>
            <p className="text-xs text-muted-foreground mt-1 ml-6">
              Prevents clipping by adjusting peak levels
            </p>
          </div>
        </div>

        {/* Actions */}
        <div className="flex gap-3 mt-6">
          <Button
            variant="outline"
            onClick={onClose}
            className="flex-1"
            disabled={isExporting}
          >
            Cancel
          </Button>
          <Button
            onClick={handleExport}
            className="flex-1"
            disabled={isExporting || !settings.filename.trim()}
          >
            <Download className="h-4 w-4 mr-2" />
            {isExporting ? 'Exporting...' : 'Export'}
          </Button>
        </div>
      </div>
    </div>
  );
}
@@ -1,11 +1,12 @@
 'use client';
 
 import * as React from 'react';
-import { Music, Plus, Upload, Trash2, Settings } from 'lucide-react';
+import { Music, Plus, Upload, Trash2, Settings, Download } from 'lucide-react';
 import { PlaybackControls } from './PlaybackControls';
 import { ThemeToggle } from '@/components/layout/ThemeToggle';
 import { CommandPalette } from '@/components/ui/CommandPalette';
 import { GlobalSettingsDialog } from '@/components/settings/GlobalSettingsDialog';
+import { ExportDialog, type ExportSettings } from '@/components/dialogs/ExportDialog';
 import { Button } from '@/components/ui/Button';
 import type { CommandAction } from '@/components/ui/CommandPalette';
 import { useMultiTrack } from '@/lib/hooks/useMultiTrack';
@@ -26,6 +27,8 @@ import {
   createMultiTrackDuplicateCommand,
 } from '@/lib/history/commands/multi-track-edit-command';
 import { extractBufferSegment } from '@/lib/audio/buffer-utils';
+import { mixTracks, getMaxTrackDuration } from '@/lib/audio/track-utils';
+import { audioBufferToWav, downloadArrayBuffer } from '@/lib/audio/export';
 
 export function AudioEditor() {
   const [importDialogOpen, setImportDialogOpen] = React.useState(false);
@@ -39,6 +42,8 @@ export function AudioEditor() {
   const [punchOutTime, setPunchOutTime] = React.useState(0);
   const [overdubEnabled, setOverdubEnabled] = React.useState(false);
   const [settingsDialogOpen, setSettingsDialogOpen] = React.useState(false);
+  const [exportDialogOpen, setExportDialogOpen] = React.useState(false);
+  const [isExporting, setIsExporting] = React.useState(false);
 
   const { addToast } = useToast();
 
@@ -102,14 +107,112 @@ export function AudioEditor() {
     return track;
   }, [addTrackFromBufferOriginal]);
 
-  // Log tracks to see if they update
-  React.useEffect(() => {
-    console.log('[AudioEditor] Tracks updated:', tracks.map(t => ({
-      name: t.name,
-      effectCount: t.effectChain.effects.length,
-      effects: t.effectChain.effects.map(e => e.name)
-    })));
-  }, [tracks]);
+  // Track which parameters are being touched (for touch/latch modes)
+  const [touchedParameters, setTouchedParameters] = React.useState<Set<string>>(new Set());
+  const [latchTriggered, setLatchTriggered] = React.useState<Set<string>>(new Set());
+
+  // Track last recorded values to detect changes
+  const lastRecordedValuesRef = React.useRef<Map<string, { value: number; time: number }>>(new Map());
+
+  // Automation recording callback
+  const handleAutomationRecording = React.useCallback((
+    trackId: string,
+    laneId: string,
+    currentTime: number,
+    value: number
+  ) => {
+    const track = tracks.find(t => t.id === trackId);
+    if (!track) return;
+
+    const lane = track.automation.lanes.find(l => l.id === laneId);
+    if (!lane) return;
+
+    const paramKey = `${trackId}-${laneId}`;
+    let shouldRecord = false;
+
+    // Determine if we should record based on mode
+    switch (lane.mode) {
+      case 'write':
+        // Always record in write mode
+        shouldRecord = true;
+        break;
+
+      case 'touch':
+        // Only record when parameter is being touched
+        shouldRecord = touchedParameters.has(paramKey);
+        break;
+
+      case 'latch':
+        // Record from first touch until stop
+        if (touchedParameters.has(paramKey)) {
+          setLatchTriggered(prev => new Set(prev).add(paramKey));
+        }
+        shouldRecord = latchTriggered.has(paramKey);
+        break;
+
+      default:
+        shouldRecord = false;
+    }
+
+    if (!shouldRecord) return;
+
+    // Throttle recording to avoid creating too many automation points
+    // This doesn't prevent recording, just limits frequency
+    const lastRecorded = lastRecordedValuesRef.current.get(paramKey);
+
+    if (lastRecorded && currentTime - lastRecorded.time < 0.1) {
+      // Check if value has changed significantly
+      const valueChanged = Math.abs(lastRecorded.value - value) > 0.001;
+      if (!valueChanged) {
+        // Skip if value hasn't changed and we recorded recently
+        return;
+      }
+    }
+
+    // Update last recorded value
+    lastRecordedValuesRef.current.set(paramKey, { value, time: currentTime });
+
+    // Create new automation point
+    const newPoint = {
+      id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
+      time: currentTime,
+      value,
+      curve: 'linear' as const,
+    };
+
+    // In write mode, remove existing points near this time (overwrites)
+    const updatedPoints = lane.mode === 'write'
+      ? [...lane.points.filter(p => Math.abs(p.time - currentTime) > 0.05), newPoint]
+      : [...lane.points, newPoint];
+
+    updatedPoints.sort((a, b) => a.time - b.time);
+
+    // Update the lane with new points
+    const updatedLanes = track.automation.lanes.map(l =>
+      l.id === laneId ? { ...l, points: updatedPoints } : l
+    );
+
+    updateTrack(trackId, {
+      automation: {
+        ...track.automation,
+        lanes: updatedLanes,
+      },
+    });
+  }, [tracks, updateTrack, touchedParameters, latchTriggered]);
+
+  // Helper to mark parameter as touched (for touch/latch modes)
+  const setParameterTouched = React.useCallback((trackId: string, laneId: string, touched: boolean) => {
+    const paramKey = `${trackId}-${laneId}`;
+    setTouchedParameters(prev => {
+      const next = new Set(prev);
+      if (touched) {
+        next.add(paramKey);
+      } else {
+        next.delete(paramKey);
      }
+      return next;
+    });
+  }, []);
 
   const {
     isPlaying,
@@ -121,7 +224,56 @@ export function AudioEditor() {
     stop,
     seek,
     togglePlayPause,
-  } = useMultiTrackPlayer(tracks, masterVolume);
+  } = useMultiTrackPlayer(tracks, masterVolume, handleAutomationRecording);
 
+  // Reset latch triggered state when playback stops
+  React.useEffect(() => {
+    if (!isPlaying) {
+      setLatchTriggered(new Set());
+      lastRecordedValuesRef.current.clear();
+    }
+  }, [isPlaying]);
+
+  // Record effect parameter values while touched
+  React.useEffect(() => {
+    if (!isPlaying) return;
+
+    const recordEffectParams = () => {
+      const time = currentTime;
+
+      touchedParameters.forEach(paramKey => {
+        const [trackId, laneId] = paramKey.split('-');
+        const track = tracks.find(t => t.id === trackId);
+        if (!track) return;
+
+        const lane = track.automation.lanes.find(l => l.id === laneId);
+        if (!lane || !lane.parameterId.startsWith('effect.')) return;
+
+        // Parse effect parameter ID: effect.{effectId}.{paramName}
+        const parts = lane.parameterId.split('.');
+        if (parts.length !== 3) return;
+
+        const effectId = parts[1];
+        const paramName = parts[2];
+
+        const effect = track.effectChain.effects.find(e => e.id === effectId);
+        if (!effect || !effect.parameters) return;
+
+        const currentValue = (effect.parameters as any)[paramName];
+        if (currentValue === undefined) return;
+
+        // Normalize value to 0-1 range
+        const range = lane.valueRange.max - lane.valueRange.min;
+        const normalizedValue = (currentValue - lane.valueRange.min) / range;
+
+        // Record the automation
+        handleAutomationRecording(trackId, laneId, time, normalizedValue);
+      });
+    };
+
+    const interval = setInterval(recordEffectParams, 50); // Record every 50ms while touched
+    return () => clearInterval(interval);
+  }, [isPlaying, currentTime, touchedParameters, tracks, handleAutomationRecording]);
+
   // Master effect chain
   const {
@@ -549,6 +701,60 @@ export function AudioEditor() {
     });
   }, [tracks, executeCommand, updateTrack, addToast]);
 
+  // Export handler
+  const handleExport = React.useCallback(async (settings: ExportSettings) => {
+    if (tracks.length === 0) {
+      addToast({
+        title: 'No Tracks',
+        description: 'Add some tracks before exporting',
+        variant: 'warning',
+        duration: 3000,
+      });
+      return;
+    }
+
+    setIsExporting(true);
+
+    try {
+      // Get max duration and sample rate
+      const maxDuration = getMaxTrackDuration(tracks);
+      const sampleRate = tracks[0]?.audioBuffer?.sampleRate || 44100;
+
+      // Mix all tracks into a single buffer
+      const mixedBuffer = mixTracks(tracks, sampleRate, maxDuration);
+
+      // Convert to WAV
+      const wavBuffer = audioBufferToWav(mixedBuffer, {
+        format: settings.format,
+        bitDepth: settings.bitDepth,
+        normalize: settings.normalize,
+      });
+
+      // Download
+      const filename = `${settings.filename}.wav`;
+      downloadArrayBuffer(wavBuffer, filename);
+
+      addToast({
+        title: 'Export Complete',
+        description: `Exported ${filename}`,
+        variant: 'success',
+        duration: 3000,
+      });
+
+      setExportDialogOpen(false);
+    } catch (error) {
+      console.error('Export failed:', error);
+      addToast({
+        title: 'Export Failed',
+        description: 'Failed to export audio',
+        variant: 'error',
+        duration: 3000,
+      });
+    } finally {
+      setIsExporting(false);
+    }
+  }, [tracks, addToast]);
+
   // Zoom controls
   const handleZoomIn = () => {
     setZoom((prev) => Math.min(20, prev + 1));
@@ -765,10 +971,16 @@ export function AudioEditor() {
             Import
           </Button>
           {tracks.length > 0 && (
-            <Button variant="outline" size="sm" onClick={handleClearTracks}>
-              <Trash2 className="h-4 w-4 mr-1.5 text-destructive" />
-              Clear All
-            </Button>
+            <>
+              <Button variant="outline" size="sm" onClick={() => setExportDialogOpen(true)}>
+                <Download className="h-4 w-4 mr-1.5" />
+                Export
+              </Button>
+              <Button variant="outline" size="sm" onClick={handleClearTracks}>
+                <Trash2 className="h-4 w-4 mr-1.5 text-destructive" />
+                Clear All
+              </Button>
+            </>
           )}
         </div>
       </div>
@@ -811,6 +1023,8 @@ export function AudioEditor() {
           recordingTrackId={recordingTrackId}
           recordingLevel={recordingState.inputLevel}
           trackLevels={trackLevels}
+          onParameterTouched={setParameterTouched}
+          isPlaying={isPlaying}
         />
       </div>
     </main>
@@ -861,6 +1075,14 @@ export function AudioEditor() {
         onRecordMonoChange={setRecordMono}
         onSampleRateChange={setSampleRate}
       />
+
+      {/* Export Dialog */}
+      <ExportDialog
+        open={exportDialogOpen}
+        onClose={() => setExportDialogOpen(false)}
+        onExport={handleExport}
+        isExporting={isExporting}
+      />
     </>
   );
 }
@@ -13,6 +13,10 @@ export interface EffectDeviceProps {
   onRemove?: () => void;
   onUpdateParameters?: (parameters: any) => void;
   onToggleExpanded?: () => void;
+  trackId?: string;
+  isPlaying?: boolean;
+  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
+  automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
 }
 
 export function EffectDevice({
@@ -21,6 +25,10 @@ export function EffectDevice({
   onRemove,
   onUpdateParameters,
   onToggleExpanded,
+  trackId,
+  isPlaying,
+  onParameterTouched,
+  automationLanes,
 }: EffectDeviceProps) {
   const isExpanded = effect.expanded || false;
 
@@ -108,7 +116,14 @@ export function EffectDevice({
 
           {/* Device Body */}
           <div className="flex-1 min-h-0 overflow-y-auto custom-scrollbar p-3 bg-card/50">
-            <EffectParameters effect={effect} onUpdateParameters={onUpdateParameters} />
+            <EffectParameters
+              effect={effect}
+              onUpdateParameters={onUpdateParameters}
+              trackId={trackId}
+              isPlaying={isPlaying}
+              onParameterTouched={onParameterTouched}
+              automationLanes={automationLanes}
+            />
           </div>
         </>
       )}
@@ -27,9 +27,20 @@ import type { FilterOptions } from '@/lib/audio/effects/filters';
 export interface EffectParametersProps {
   effect: ChainEffect;
   onUpdateParameters?: (parameters: any) => void;
+  trackId?: string;
+  isPlaying?: boolean;
+  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
+  automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
 }
 
-export function EffectParameters({ effect, onUpdateParameters }: EffectParametersProps) {
+export function EffectParameters({
+  effect,
+  onUpdateParameters,
+  trackId,
+  isPlaying,
+  onParameterTouched,
+  automationLanes = []
+}: EffectParametersProps) {
   const params = effect.parameters || {};
 
   const updateParam = (key: string, value: any) => {
@@ -38,6 +49,47 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
     }
   };
 
+  // Memoize touch handlers for all parameters
+  const touchHandlers = React.useMemo(() => {
+    if (!trackId || !isPlaying || !onParameterTouched || !automationLanes) {
+      return {};
+    }
+
+    const handlers: Record<string, { onTouchStart: () => void; onTouchEnd: () => void }> = {};
+
+    automationLanes.forEach(lane => {
+      if (!lane.parameterId.startsWith(`effect.${effect.id}.`)) {
+        return;
+      }
+
+      // For effect parameters, write mode works like touch mode
+      if (lane.mode !== 'touch' && lane.mode !== 'latch' && lane.mode !== 'write') {
+        return;
+      }
+
+      // Extract parameter name from parameterId (effect.{effectId}.{paramName})
+      const parts = lane.parameterId.split('.');
+      if (parts.length !== 3) return;
+      const paramName = parts[2];
+
+      handlers[paramName] = {
+        onTouchStart: () => {
+          queueMicrotask(() => onParameterTouched(trackId, lane.id, true));
+        },
+        onTouchEnd: () => {
+          queueMicrotask(() => onParameterTouched(trackId, lane.id, false));
+        },
+      };
+    });
+
+    return handlers;
+  }, [trackId, isPlaying, onParameterTouched, effect.id, automationLanes]);
+
+  // Helper to get touch handlers for a parameter
+  const getTouchHandlers = (paramName: string) => {
+    return touchHandlers[paramName] || {};
+  };
 
   // Filter effects
   if (['lowpass', 'highpass', 'bandpass', 'notch', 'lowshelf', 'highshelf', 'peaking'].includes(effect.type)) {
     const filterParams = params as FilterOptions;
@@ -53,6 +105,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
             min={20}
             max={20000}
             step={1}
+            {...getTouchHandlers('frequency')}
           />
         </div>
         <div className="space-y-1">
@@ -65,6 +118,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
             min={0.1}
             max={20}
             step={0.1}
+            {...getTouchHandlers('Q')}
           />
         </div>
         {['lowshelf', 'highshelf', 'peaking'].includes(effect.type) && (
@@ -78,6 +132,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
             min={-40}
             max={40}
             step={0.5}
+            {...getTouchHandlers('gain')}
           />
         </div>
       )}
@@ -43,6 +43,8 @@ export interface TrackProps {
   isRecording?: boolean;
   recordingLevel?: number;
   playbackLevel?: number;
+  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
+  isPlaying?: boolean;
 }
 
 export function Track({
@@ -71,6 +73,8 @@ export function Track({
   isRecording = false,
   recordingLevel = 0,
   playbackLevel = 0,
+  onParameterTouched,
+  isPlaying = false,
 }: TrackProps) {
   const canvasRef = React.useRef<HTMLCanvasElement>(null);
   const containerRef = React.useRef<HTMLDivElement>(null);
@@ -89,6 +93,123 @@ export function Track({
   const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
   const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
 
+  // Touch callbacks for automation recording
+  const handlePanTouchStart = React.useCallback(() => {
+    if (isPlaying && onParameterTouched) {
+      const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
+      if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
+        queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
+      }
+    }
+  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
+
+  const handlePanTouchEnd = React.useCallback(() => {
+    if (isPlaying && onParameterTouched) {
+      const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
+      if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
+        queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
+      }
+    }
+  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
+
+  const handleVolumeTouchStart = React.useCallback(() => {
+    if (isPlaying && onParameterTouched) {
+      const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
+      if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
+        queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
+      }
+    }
+  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
+
+  const handleVolumeTouchEnd = React.useCallback(() => {
+    if (isPlaying && onParameterTouched) {
+      const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
+      if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
+        queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, false));
+      }
+    }
+  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
+
+  // Auto-create automation lane for selected parameter if it doesn't exist
+  React.useEffect(() => {
+    if (!track.automation?.showAutomation) return;
+
+    const selectedParameterId = track.automation.selectedParameterId || 'volume';
+    const laneExists = track.automation.lanes.some(lane => lane.parameterId === selectedParameterId);
+
+    if (!laneExists) {
+      // Build list of available parameters
+      const availableParameters: Array<{ id: string; name: string }> = [
+        { id: 'volume', name: 'Volume' },
+        { id: 'pan', name: 'Pan' },
+      ];
+
+      track.effectChain.effects.forEach((effect) => {
+        if (effect.parameters) {
+          Object.keys(effect.parameters).forEach((paramKey) => {
+            const parameterId = `effect.${effect.id}.${paramKey}`;
+            const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
+            availableParameters.push({ id: parameterId, name: paramName });
+          });
+        }
+      });
+
+      const paramInfo = availableParameters.find(p => p.id === selectedParameterId);
+      if (paramInfo) {
+        // Determine value range based on parameter type
+        let valueRange = { min: 0, max: 1 };
+        let unit = '';
+        let formatter: ((value: number) => string) | undefined;
+
+        if (selectedParameterId === 'volume') {
+          unit = 'dB';
+        } else if (selectedParameterId === 'pan') {
+          formatter = (value: number) => {
+            if (value === 0.5) return 'C';
+            if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
+            return `${((value - 0.5) * 200).toFixed(0)}R`;
+          };
+        } else if (selectedParameterId.startsWith('effect.')) {
+          // Parse effect parameter: effect.{effectId}.{paramName}
+          const parts = selectedParameterId.split('.');
+          if (parts.length === 3) {
+            const paramName = parts[2];
+            // Set ranges based on parameter name
+            if (paramName === 'frequency') {
+              valueRange = { min: 20, max: 20000 };
+              unit = 'Hz';
+            } else if (paramName === 'Q') {
+              valueRange = { min: 0.1, max: 20 };
+            } else if (paramName === 'gain') {
+              valueRange = { min: -40, max: 40 };
+              unit = 'dB';
+            }
+          }
+        }
+
+        const newLane = createAutomationLane(
+          track.id,
+          selectedParameterId,
+          paramInfo.name,
+          {
+            min: valueRange.min,
+            max: valueRange.max,
+            unit,
+            formatter,
+          }
+        );
+
+        onUpdateTrack(track.id, {
+          automation: {
+            ...track.automation,
+            lanes: [...track.automation.lanes, newLane],
+            selectedParameterId,
+          },
+        });
+      }
+    }
+  }, [track.automation?.showAutomation, track.automation?.selectedParameterId, track.automation?.lanes, track.effectChain.effects, track.id, onUpdateTrack]);
 
   const handleNameClick = () => {
     setIsEditingName(true);
     setNameInput(String(track.name || 'Untitled Track'));
@@ -536,6 +657,8 @@ export function Track({
             step={0.01}
             size={48}
             label="PAN"
+            onTouchStart={handlePanTouchStart}
+            onTouchEnd={handlePanTouchEnd}
           />
         </div>
 
@@ -549,6 +672,8 @@ export function Track({
             max={1}
             step={0.01}
             showDb={true}
+            onTouchStart={handleVolumeTouchStart}
+            onTouchEnd={handleVolumeTouchEnd}
           />
         </div>
 
@@ -735,35 +860,8 @@ export function Track({
     // Find or create lane for selected parameter
     let selectedLane = track.automation.lanes.find(lane => lane.parameterId === selectedParameterId);
 
-    // If lane doesn't exist yet, create it
-    if (!selectedLane) {
-      const paramInfo = availableParameters.find(p => p.id === selectedParameterId);
-      if (paramInfo) {
-        selectedLane = createAutomationLane(
-          track.id,
-          selectedParameterId,
-          paramInfo.name,
-          {
-            min: 0,
-            max: 1,
-            unit: selectedParameterId === 'volume' ? 'dB' : '',
-            formatter: selectedParameterId === 'pan' ? (value: number) => {
-              if (value === 0.5) return 'C';
-              if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
-              return `${((value - 0.5) * 200).toFixed(0)}R`;
-            } : undefined,
-          }
-        );
-        // Add the new lane to the track
-        onUpdateTrack(track.id, {
-          automation: {
-            ...track.automation,
-            lanes: [...track.automation.lanes, selectedLane],
-            selectedParameterId,
-          },
-        });
-      }
-    }
+    // If lane doesn't exist yet, we need to create it (but not during render)
+    // This will be handled by a useEffect instead
 
     const modes: Array<{ value: string; label: string; color: string }> = [
       { value: 'read', label: 'R', color: 'text-muted-foreground' },
@@ -957,6 +1055,10 @@ export function Track({
                 effectChain: { ...track.effectChain, effects: updatedEffects },
               });
             }}
+            trackId={track.id}
+            isPlaying={isPlaying}
+            onParameterTouched={onParameterTouched}
+            automationLanes={track.automation.lanes}
           />
         ))}
       </div>
@@ -25,6 +25,8 @@ export interface TrackListProps {
   recordingTrackId?: string | null;
   recordingLevel?: number;
   trackLevels?: Record<string, number>;
+  onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
+  isPlaying?: boolean;
 }
 
 export function TrackList({
@@ -44,6 +46,8 @@ export function TrackList({
   recordingTrackId,
   recordingLevel = 0,
   trackLevels = {},
+  onParameterTouched,
+  isPlaying = false,
 }: TrackListProps) {
   const [importDialogOpen, setImportDialogOpen] = React.useState(false);
 
@@ -168,6 +172,8 @@ export function TrackList({
             isRecording={recordingTrackId === track.id}
             recordingLevel={recordingTrackId === track.id ? recordingLevel : 0}
             playbackLevel={trackLevels[track.id] || 0}
+            onParameterTouched={onParameterTouched}
+            isPlaying={isPlaying}
           />
         ))}
       </div>
@@ -13,6 +13,8 @@ export interface CircularKnobProps {
   className?: string;
   label?: string;
   formatValue?: (value: number) => string;
+  onTouchStart?: () => void;
+  onTouchEnd?: () => void;
 }
 
 export function CircularKnob({
@@ -25,6 +27,8 @@ export function CircularKnob({
   className,
   label,
   formatValue,
+  onTouchStart,
+  onTouchEnd,
 }: CircularKnobProps) {
   const knobRef = React.useRef<HTMLDivElement>(null);
   const [isDragging, setIsDragging] = React.useState(false);
@@ -68,8 +72,9 @@ export function CircularKnob({
         y: e.clientY,
         value,
       };
+      onTouchStart?.();
     },
-    [value]
+    [value, onTouchStart]
   );
 
   const handleMouseMove = React.useCallback(
@@ -83,7 +88,8 @@ export function CircularKnob({
 
   const handleMouseUp = React.useCallback(() => {
     setIsDragging(false);
-  }, []);
+    onTouchEnd?.();
+  }, [onTouchEnd]);
 
   React.useEffect(() => {
     if (isDragging) {
@@ -13,6 +13,8 @@ export interface SliderProps
   step?: number;
   label?: string;
   showValue?: boolean;
+  onTouchStart?: () => void;
+  onTouchEnd?: () => void;
 }
 
 const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
@@ -28,6 +30,8 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
       label,
       showValue = false,
       disabled,
+      onTouchStart,
+      onTouchEnd,
       ...props
     },
     ref
@@ -41,6 +45,21 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
       onValueChange?.([numValue]);
     };
 
+    const handleMouseDown = () => {
+      onTouchStart?.();
+    };
+
+    const handleMouseUp = () => {
+      onTouchEnd?.();
+    };
+
+    React.useEffect(() => {
+      if (onTouchEnd) {
+        window.addEventListener('mouseup', handleMouseUp);
+        return () => window.removeEventListener('mouseup', handleMouseUp);
+      }
+    }, [onTouchEnd]);
+
     return (
       <div className={cn('w-full', className)}>
         {(label || showValue) && (
@@ -63,6 +82,7 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
           step={step}
           value={currentValue}
           onChange={handleChange}
+          onMouseDown={handleMouseDown}
           disabled={disabled}
           className={cn(
             'w-full h-2 bg-secondary rounded-lg appearance-none cursor-pointer',
@@ -12,6 +12,8 @@ export interface VerticalFaderProps {
   step?: number;
   className?: string;
   showDb?: boolean;
+  onTouchStart?: () => void;
+  onTouchEnd?: () => void;
 }
 
 export function VerticalFader({
@@ -23,6 +25,8 @@ export function VerticalFader({
   step = 0.01,
   className,
   showDb = true,
+  onTouchStart,
+  onTouchEnd,
 }: VerticalFaderProps) {
   const trackRef = React.useRef<HTMLDivElement>(null);
   const [isDragging, setIsDragging] = React.useState(false);
@@ -58,8 +62,9 @@ export function VerticalFader({
       e.preventDefault();
       setIsDragging(true);
       updateValue(e.clientY);
+      onTouchStart?.();
     },
-    [updateValue]
+    [updateValue, onTouchStart]
   );
 
   const handleMouseMove = React.useCallback(
@@ -73,7 +78,8 @@ export function VerticalFader({
 
   const handleMouseUp = React.useCallback(() => {
     setIsDragging(false);
-  }, []);
+    onTouchEnd?.();
+  }, [onTouchEnd]);
 
   React.useEffect(() => {
     if (isDragging) {
lib/audio/export.ts — new file, 134 lines

@@ -0,0 +1,134 @@
/**
 * Audio export utilities
 * Supports WAV export with various bit depths
 */

export interface ExportOptions {
  format: 'wav';
  bitDepth: 16 | 24 | 32;
  sampleRate?: number; // If different from source, will resample
  normalize?: boolean; // Normalize to prevent clipping
}

/**
 * Convert an AudioBuffer to WAV file
 */
export function audioBufferToWav(
  audioBuffer: AudioBuffer,
  options: ExportOptions = { format: 'wav', bitDepth: 16 }
): ArrayBuffer {
  const { bitDepth, normalize } = options;
  const numberOfChannels = audioBuffer.numberOfChannels;
  const sampleRate = audioBuffer.sampleRate;
  const length = audioBuffer.length;

  // Get channel data
  const channels: Float32Array[] = [];
  for (let i = 0; i < numberOfChannels; i++) {
    channels.push(audioBuffer.getChannelData(i));
  }

  // Find peak if normalizing
  let peak = 1.0;
  if (normalize) {
    peak = 0;
    for (const channel of channels) {
      for (let i = 0; i < channel.length; i++) {
        const abs = Math.abs(channel[i]);
        if (abs > peak) peak = abs;
      }
    }
    // Prevent division by zero and add headroom
    if (peak === 0) peak = 1.0;
    else peak = peak * 1.01; // 1% headroom
  }

  // Calculate sizes
  const bytesPerSample = bitDepth / 8;
  const blockAlign = numberOfChannels * bytesPerSample;
  const dataSize = length * blockAlign;
  const bufferSize = 44 + dataSize; // 44 bytes for WAV header

  // Create buffer
  const buffer = new ArrayBuffer(bufferSize);
  const view = new DataView(buffer);

  // Write WAV header
  let offset = 0;

  // RIFF chunk descriptor
  writeString(view, offset, 'RIFF'); offset += 4;
  view.setUint32(offset, bufferSize - 8, true); offset += 4; // File size - 8
  writeString(view, offset, 'WAVE'); offset += 4;

  // fmt sub-chunk
  writeString(view, offset, 'fmt '); offset += 4;
  view.setUint32(offset, 16, true); offset += 4; // Subchunk size (16 for PCM)
  view.setUint16(offset, bitDepth === 32 ? 3 : 1, true); offset += 2; // Audio format (1 = PCM, 3 = IEEE float)
  view.setUint16(offset, numberOfChannels, true); offset += 2;
  view.setUint32(offset, sampleRate, true); offset += 4;
  view.setUint32(offset, sampleRate * blockAlign, true); offset += 4; // Byte rate
  view.setUint16(offset, blockAlign, true); offset += 2;
  view.setUint16(offset, bitDepth, true); offset += 2;

  // data sub-chunk
  writeString(view, offset, 'data'); offset += 4;
  view.setUint32(offset, dataSize, true); offset += 4;

  // Write interleaved audio data
  if (bitDepth === 16) {
    for (let i = 0; i < length; i++) {
      for (let channel = 0; channel < numberOfChannels; channel++) {
        const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
        view.setInt16(offset, sample * 0x7fff, true);
        offset += 2;
      }
    }
  } else if (bitDepth === 24) {
    for (let i = 0; i < length; i++) {
      for (let channel = 0; channel < numberOfChannels; channel++) {
        const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
        const int24 = Math.round(sample * 0x7fffff);
        view.setUint8(offset, int24 & 0xff); offset++;
        view.setUint8(offset, (int24 >> 8) & 0xff); offset++;
        view.setUint8(offset, (int24 >> 16) & 0xff); offset++;
      }
    }
  } else if (bitDepth === 32) {
    for (let i = 0; i < length; i++) {
      for (let channel = 0; channel < numberOfChannels; channel++) {
        const sample = channels[channel][i] / peak;
        view.setFloat32(offset, sample, true);
        offset += 4;
      }
    }
  }

  return buffer;
}

/**
 * Download an ArrayBuffer as a file
 */
export function downloadArrayBuffer(
  arrayBuffer: ArrayBuffer,
  filename: string,
  mimeType: string = 'audio/wav'
): void {
  const blob = new Blob([arrayBuffer], { type: mimeType });
  const url = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  link.download = filename;
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
  URL.revokeObjectURL(url);
}

// Helper to write string to DataView
function writeString(view: DataView, offset: number, string: string): void {
  for (let i = 0; i < string.length; i++) {
    view.setUint8(offset + i, string.charCodeAt(i));
  }
}
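For reference, the new export path wires these helpers together exactly as in the `handleExport` callback added to AudioEditor above (condensed sketch; `mixTracks` and `getMaxTrackDuration` come from `lib/audio/track-utils`):

```ts
// Mix the project down, encode it as 16-bit WAV, and trigger a browser download.
const mixed = mixTracks(tracks, sampleRate, getMaxTrackDuration(tracks));
const wav = audioBufferToWav(mixed, { format: 'wav', bitDepth: 16, normalize: true });
downloadArrayBuffer(wav, 'mix.wav');
```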
173
lib/hooks/useAutomationRecording.ts
Normal file
173
lib/hooks/useAutomationRecording.ts
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
/**
|
||||||
|
* Hook for recording automation data during playback
|
||||||
|
* Supports write, touch, and latch modes
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { useCallback, useRef } from 'react';
|
||||||
|
import type { Track } from '@/types/track';
|
||||||
|
import type { AutomationPoint, AutomationMode } from '@/types/automation';
|
||||||
|
|
||||||
|
export interface AutomationRecordingState {
|
||||||
|
isRecording: boolean;
|
||||||
|
recordingLaneId: string | null;
|
||||||
|
touchActive: boolean; // For touch mode - tracks if control is being touched
|
||||||
|
latchTriggered: boolean; // For latch mode - tracks if recording has started
|
||||||
|
}
|
||||||
|
|
||||||
|
export function useAutomationRecording(
|
||||||
|
track: Track,
|
||||||
|
  onUpdateTrack: (trackId: string, updates: Partial<Track>) => void
) {
  const recordingStateRef = useRef<Map<string, AutomationRecordingState>>(new Map());
  const recordingIntervalRef = useRef<Map<string, number>>(new Map());
  const lastRecordedValueRef = useRef<Map<string, number>>(new Map());

  /**
   * Start recording automation for a specific lane
   */
  const startRecording = useCallback((laneId: string, mode: AutomationMode) => {
    const state: AutomationRecordingState = {
      isRecording: mode === 'write',
      recordingLaneId: laneId,
      touchActive: false,
      latchTriggered: false,
    };
    recordingStateRef.current.set(laneId, state);
  }, []);

  /**
   * Stop recording automation for a specific lane
   */
  const stopRecording = useCallback((laneId: string) => {
    recordingStateRef.current.delete(laneId);
    const intervalId = recordingIntervalRef.current.get(laneId);
    if (intervalId) {
      clearInterval(intervalId);
      recordingIntervalRef.current.delete(laneId);
    }
    lastRecordedValueRef.current.delete(laneId);
  }, []);

  /**
   * Record a single automation point
   */
  const recordPoint = useCallback((
    laneId: string,
    currentTime: number,
    value: number,
    mode: AutomationMode
  ) => {
    const lane = track.automation.lanes.find(l => l.id === laneId);
    if (!lane) return;

    const state = recordingStateRef.current.get(laneId);
    if (!state) return;

    // Check if we should record based on mode
    let shouldRecord = false;

    switch (mode) {
      case 'write':
        // Always record in write mode
        shouldRecord = true;
        break;

      case 'touch':
        // Only record when control is being touched
        shouldRecord = state.touchActive;
        break;

      case 'latch':
        // Record from first touch until stop
        if (state.touchActive && !state.latchTriggered) {
          state.latchTriggered = true;
        }
        shouldRecord = state.latchTriggered;
        break;

      default:
        shouldRecord = false;
    }

    if (!shouldRecord) return;

    // Check if value has changed significantly (avoid redundant points)
    const lastValue = lastRecordedValueRef.current.get(laneId);
    if (lastValue !== undefined && Math.abs(lastValue - value) < 0.001) {
      return; // Skip if value hasn't changed
    }

    lastRecordedValueRef.current.set(laneId, value);

    // In write mode, clear existing points in the time range
    let updatedPoints = [...lane.points];
    if (mode === 'write') {
      // Remove points that are within a small time window of current time
      updatedPoints = updatedPoints.filter(p =>
        Math.abs(p.time - currentTime) > 0.05 // 50ms threshold
      );
    }

    // Add new point
    const newPoint: AutomationPoint = {
      id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
      time: currentTime,
      value,
      curve: 'linear',
    };

    updatedPoints.push(newPoint);

    // Sort points by time
    updatedPoints.sort((a, b) => a.time - b.time);

    // Update track with new automation points
    const updatedLanes = track.automation.lanes.map(l =>
      l.id === laneId ? { ...l, points: updatedPoints } : l
    );

    onUpdateTrack(track.id, {
      automation: {
        ...track.automation,
        lanes: updatedLanes,
      },
    });
  }, [track, onUpdateTrack]);

  /**
   * Set touch state for touch mode
   */
  const setTouchActive = useCallback((laneId: string, active: boolean) => {
    const state = recordingStateRef.current.get(laneId);
    if (state) {
      state.touchActive = active;
    }
  }, []);

  /**
   * Check if a lane is currently recording
   */
  const isRecordingLane = useCallback((laneId: string): boolean => {
    const state = recordingStateRef.current.get(laneId);
    return state?.isRecording ?? false;
  }, []);

  /**
   * Cleanup - stop all recording
   */
  const cleanup = useCallback(() => {
    recordingStateRef.current.forEach((_, laneId) => {
      stopRecording(laneId);
    });
    recordingStateRef.current.clear();
  }, [stopRecording]);

  return {
    startRecording,
    stopRecording,
    recordPoint,
    setTouchActive,
    isRecordingLane,
    cleanup,
  };
}
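The sketch below is an illustrative addition, not part of the commit: it shows one way a track component could wire this recording API to a volume fader. The hook name useAutomationRecording and the component-side names (playbackTime, handleVolumeTouchStart, handleVolumeChange) are assumptions.

// Hypothetical usage sketch (assumed names; not code from this repository):
// connect the hook's touch + recordPoint API to a volume fader.
function useVolumeFaderRecording(
  track: Track,
  onUpdateTrack: (trackId: string, updates: Partial<Track>) => void,
  playbackTime: number // current transport time supplied by the player (assumed prop)
) {
  const recorder = useAutomationRecording(track, onUpdateTrack); // assumed hook name

  // Touch/latch modes need to know when the user grabs or releases the fader.
  const handleVolumeTouchStart = (laneId: string) => recorder.setTouchActive(laneId, true);
  const handleVolumeTouchEnd = (laneId: string) => recorder.setTouchActive(laneId, false);

  // As the fader moves, offer the value to recordPoint; the mode logic inside
  // the hook decides whether an automation point is actually written.
  const handleVolumeChange = (laneId: string, value: number, mode: AutomationMode) =>
    recorder.recordPoint(laneId, playbackTime, value, mode);

  return { handleVolumeTouchStart, handleVolumeTouchEnd, handleVolumeChange };
}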
@@ -16,7 +16,15 @@ export interface TrackLevel {
   level: number;
 }
 
-export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
+export interface AutomationRecordingCallback {
+  (trackId: string, laneId: string, currentTime: number, value: number): void;
+}
+
+export function useMultiTrackPlayer(
+  tracks: Track[],
+  masterVolume: number = 1,
+  onRecordAutomation?: AutomationRecordingCallback
+) {
   const [isPlaying, setIsPlaying] = useState(false);
   const [currentTime, setCurrentTime] = useState(0);
   const [duration, setDuration] = useState(0);
@@ -36,12 +44,19 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
   const automationFrameRef = useRef<number | null>(null);
   const isMonitoringLevelsRef = useRef<boolean>(false);
   const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
+  const lastRecordedValuesRef = useRef<Map<string, number>>(new Map()); // Track last recorded values to detect changes
+  const onRecordAutomationRef = useRef<AutomationRecordingCallback | undefined>(onRecordAutomation);
 
   // Keep tracksRef in sync with tracks prop
   useEffect(() => {
     tracksRef.current = tracks;
   }, [tracks]);
 
+  // Keep onRecordAutomationRef in sync
+  useEffect(() => {
+    onRecordAutomationRef.current = onRecordAutomation;
+  }, [onRecordAutomation]);
+
   // Calculate total duration from all tracks
   useEffect(() => {
     let maxDuration = 0;
@@ -107,16 +122,32 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
 
     const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
 
-    tracks.forEach((track, index) => {
+    tracksRef.current.forEach((track, index) => {
       // Apply volume automation
       const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
-      if (volumeLane && volumeLane.points.length > 0) {
-        const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
-        if (automatedValue !== undefined && gainNodesRef.current[index]) {
+      if (volumeLane) {
+        let volumeValue: number | undefined;
+
+        // In write mode, record current track volume (only if value changed)
+        if (volumeLane.mode === 'write' && onRecordAutomationRef.current) {
+          volumeValue = track.volume;
+          const lastValue = lastRecordedValuesRef.current.get(`${track.id}-volume`);
+
+          // Only record if value has changed
+          if (lastValue === undefined || Math.abs(lastValue - volumeValue) > 0.001) {
+            lastRecordedValuesRef.current.set(`${track.id}-volume`, volumeValue);
+            onRecordAutomationRef.current(track.id, volumeLane.id, currentTime, volumeValue);
+          }
+        } else if (volumeLane.points.length > 0) {
+          // Otherwise play back automation
+          volumeValue = evaluateAutomationLinear(volumeLane.points, currentTime);
+        }
+
+        if (volumeValue !== undefined && gainNodesRef.current[index]) {
          const trackGain = getTrackGain(track, tracks);
          // Apply both track gain (mute/solo) and automated volume
          gainNodesRef.current[index].gain.setValueAtTime(
-            trackGain * automatedValue,
+            trackGain * volumeValue,
            audioContextRef.current!.currentTime
          );
        }
@@ -124,8 +155,24 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
 
       // Apply pan automation
       const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
-      if (panLane && panLane.points.length > 0) {
-        const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
+      if (panLane) {
+        let automatedValue: number | undefined;
+
+        // In write mode, record current track pan (only if value changed)
+        if (panLane.mode === 'write' && onRecordAutomationRef.current) {
+          automatedValue = (track.pan + 1) / 2; // Convert -1 to 1 -> 0 to 1
+          const lastValue = lastRecordedValuesRef.current.get(`${track.id}-pan`);
+
+          // Only record if value has changed
+          if (lastValue === undefined || Math.abs(lastValue - automatedValue) > 0.001) {
+            lastRecordedValuesRef.current.set(`${track.id}-pan`, automatedValue);
+            onRecordAutomationRef.current(track.id, panLane.id, currentTime, automatedValue);
+          }
+        } else if (panLane.points.length > 0) {
+          // Otherwise play back automation
+          automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
+        }
+
        if (automatedValue !== undefined && panNodesRef.current[index]) {
          // Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
          const panValue = (automatedValue * 2) - 1;
@@ -139,7 +186,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       // Apply effect parameter automation
       track.automation.lanes.forEach(lane => {
         // Check if this is an effect parameter (format: effect.{effectId}.{parameterName})
-        if (lane.parameterId.startsWith('effect.') && lane.points.length > 0) {
+        if (lane.parameterId.startsWith('effect.')) {
           const parts = lane.parameterId.split('.');
           if (parts.length === 3) {
             const effectId = parts[1];
@@ -147,13 +194,37 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
 
             // Find the effect in the track's effect chain
             const effectIndex = track.effectChain.effects.findIndex(e => e.id === effectId);
-            if (effectIndex >= 0 && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
-              const automatedValue = evaluateAutomationLinear(lane.points, currentTime);
-              if (automatedValue !== undefined) {
+            const effect = track.effectChain.effects[effectIndex];
+
+            if (effectIndex >= 0 && effect) {
+              let automatedValue: number | undefined;
+
+              // In write mode, record current effect parameter value (only if value changed)
+              if (lane.mode === 'write' && onRecordAutomationRef.current && effect.parameters) {
+                const currentValue = (effect.parameters as any)[paramName];
+                if (currentValue !== undefined) {
+                  // Normalize value to 0-1 range
+                  const range = lane.valueRange.max - lane.valueRange.min;
+                  const normalizedValue = (currentValue - lane.valueRange.min) / range;
+
+                  const lastValue = lastRecordedValuesRef.current.get(`${track.id}-effect-${effectId}-${paramName}`);
+
+                  // Only record if value has changed
+                  if (lastValue === undefined || Math.abs(lastValue - normalizedValue) > 0.001) {
+                    lastRecordedValuesRef.current.set(`${track.id}-effect-${effectId}-${paramName}`, normalizedValue);
+                    onRecordAutomationRef.current(track.id, lane.id, currentTime, normalizedValue);
+                  }
+                }
+              } else if (lane.points.length > 0) {
+                // Otherwise play back automation
+                automatedValue = evaluateAutomationLinear(lane.points, currentTime);
+              }
+
+              // Apply the automated value to the effect
+              if (automatedValue !== undefined && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
                 const effectNodeInfo = effectNodesRef.current[index][effectIndex];
 
                 // Convert normalized 0-1 value to actual parameter range
-                const effect = track.effectChain.effects[effectIndex];
                 const actualValue = lane.valueRange.min + (automatedValue * (lane.valueRange.max - lane.valueRange.min));
 
                 // Update the effect parameter
@@ -172,7 +243,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     });
 
     automationFrameRef.current = requestAnimationFrame(applyAutomation);
-  }, [tracks]);
+  }, []);
 
   const updatePlaybackPosition = useCallback(() => {
     if (!audioContextRef.current) return;
@@ -356,6 +427,8 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     pause();
     pausedAtRef.current = 0;
     setCurrentTime(0);
+    // Clear last recorded values when stopping
+    lastRecordedValuesRef.current.clear();
  }, [pause]);
 
   const seek = useCallback((time: number) => {
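For context, here is a hedged caller-side sketch (not part of the diff) of how a parent component might supply the new onRecordAutomation argument: the callback appends a point to the matching lane and defers the state update so it never runs inside the player's animation-frame loop. The names setTracks, tracks, masterVolume and the pt- id prefix are assumptions, not code from this commit.

// Hypothetical caller-side wiring (assumed component state names):
const handleRecordAutomation: AutomationRecordingCallback = (trackId, laneId, time, value) => {
  // Defer the React state update out of the requestAnimationFrame callback.
  queueMicrotask(() => {
    setTracks(prev => prev.map(t => {
      if (t.id !== trackId) return t;
      const lanes = t.automation.lanes.map(l =>
        l.id === laneId
          ? {
              ...l,
              points: [
                ...l.points,
                { id: `pt-${Date.now()}`, time, value, curve: 'linear' as const },
              ].sort((a, b) => a.time - b.time),
            }
          : l
      );
      return { ...t, automation: { ...t.automation, lanes } };
    }));
  });
};

const player = useMultiTrackPlayer(tracks, masterVolume, handleRecordAutomation);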