feat: complete Phase 9.3 - automation recording with write/touch/latch modes

Implemented comprehensive automation recording system for volume, pan, and effect parameters:

- Added automation recording modes (sketched below):
  - Write: Records continuously during playback when values change
  - Touch: Records only while control is being touched/moved
  - Latch: Records from first touch until playback stops
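
A minimal sketch of how the three modes gate recording; `shouldRecord`, `isTouched`, and `latched` are illustrative names, not the identifiers used in the diff below:

```ts
// Sketch only: which automation modes record a point at a given moment.
type AutomationMode = 'read' | 'write' | 'touch' | 'latch';

function shouldRecord(
  mode: AutomationMode,
  isTouched: boolean, // the control is currently being held or moved
  latched: boolean    // latch mode was already triggered by a first touch
): boolean {
  switch (mode) {
    case 'write':
      return true;                 // record continuously during playback
    case 'touch':
      return isTouched;            // record only while the control is touched
    case 'latch':
      return isTouched || latched; // record from the first touch until stop
    default:
      return false;                // 'read' plays automation back but never records
  }
}
```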

- Implemented value change detection (0.001 threshold) to prevent infinite loops
- Fixed React setState-in-render errors by:
  - Using queueMicrotask() to defer state updates (see the sketch after this list)
  - Moving lane creation logic to useEffect
  - Properly memoizing touch handlers with useMemo
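
A minimal sketch of the deferral pattern, assuming a parent-supplied `onParameterTouched` state setter (the helper name is illustrative):

```ts
// Sketch: wrap the parent state update in queueMicrotask so it runs after the
// current render/event pass instead of synchronously inside it.
function makeTouchHandlers(
  onParameterTouched: (trackId: string, laneId: string, touched: boolean) => void,
  trackId: string,
  laneId: string
) {
  return {
    onTouchStart: () => queueMicrotask(() => onParameterTouched(trackId, laneId, true)),
    onTouchEnd: () => queueMicrotask(() => onParameterTouched(trackId, laneId, false)),
  };
}
```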

- Added proper value ranges for effect parameters:
  - Frequency: 20-20000 Hz
  - Q: 0.1-20
  - Gain: -40 to +40 dB

- Enhanced automation lane auto-creation with parameter-specific ranges
- Added touch callbacks to all parameter controls (volume, pan, effects)
- Implemented throttling (100 ms) to avoid excessive automation points (sketched below)
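
A minimal sketch of how the 100 ms throttle and the 0.001 change threshold combine (map and function names are illustrative):

```ts
// Sketch: skip a new automation point when this parameter was recorded less
// than 100 ms ago and its value has not moved by more than 0.001.
const lastRecorded = new Map<string, { value: number; time: number }>();

function shouldWritePoint(paramKey: string, value: number, time: number): boolean {
  const prev = lastRecorded.get(paramKey);
  if (prev && time - prev.time < 0.1 && Math.abs(prev.value - value) <= 0.001) {
    return false; // too soon and effectively unchanged
  }
  lastRecorded.set(paramKey, { value, time });
  return true;
}
```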

Technical improvements:
- Used tracksRef and onRecordAutomationRef so animation loops always read the latest values (see the sketch after this list)
- Added proper cleanup on playback stop
- Optimized recording to only trigger when values actually change
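
The player hook's internals are not part of this diff; the following is a minimal sketch of the ref pattern, assuming a requestAnimationFrame-driven loop (everything beyond the ref names is an assumption):

```ts
import * as React from 'react';

// Sketch: keep the latest tracks and recording callback in refs so the
// long-running animation loop never reads stale values from an old closure.
function useAutomationLoop(
  tracks: unknown[],
  onRecordAutomation: (currentTime: number) => void,
  isPlaying: boolean
) {
  const tracksRef = React.useRef(tracks);
  const onRecordAutomationRef = React.useRef(onRecordAutomation);

  // Refresh the refs on every render.
  tracksRef.current = tracks;
  onRecordAutomationRef.current = onRecordAutomation;

  React.useEffect(() => {
    if (!isPlaying) return;
    let frame = 0;
    const tick = (timeMs: number) => {
      if (tracksRef.current.length > 0) {
        onRecordAutomationRef.current(timeMs / 1000); // always the latest callback
      }
      frame = requestAnimationFrame(tick);
    };
    frame = requestAnimationFrame(tick);
    // Cleanup when playback stops or the component unmounts.
    return () => cancelAnimationFrame(frame);
  }, [isPlaying]);
}
```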

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-18 23:29:18 +01:00
parent a1f230a6e6
commit c54d5089c5
13 changed files with 1040 additions and 70 deletions

View File

@@ -0,0 +1,149 @@
'use client';
import * as React from 'react';
import { X, Download } from 'lucide-react';
import { Button } from '@/components/ui/Button';
import { cn } from '@/lib/utils/cn';
export interface ExportSettings {
format: 'wav';
bitDepth: 16 | 24 | 32;
normalize: boolean;
filename: string;
}
export interface ExportDialogProps {
open: boolean;
onClose: () => void;
onExport: (settings: ExportSettings) => void;
isExporting?: boolean;
}
export function ExportDialog({ open, onClose, onExport, isExporting }: ExportDialogProps) {
const [settings, setSettings] = React.useState<ExportSettings>({
format: 'wav',
bitDepth: 16,
normalize: true,
filename: 'mix',
});
const handleExport = () => {
onExport(settings);
};
if (!open) return null;
return (
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
<div className="bg-card border border-border rounded-lg shadow-xl w-full max-w-md p-6">
{/* Header */}
<div className="flex items-center justify-between mb-6">
<h2 className="text-lg font-semibold text-foreground">Export Audio</h2>
<button
onClick={onClose}
className="text-muted-foreground hover:text-foreground transition-colors"
disabled={isExporting}
>
<X className="h-5 w-5" />
</button>
</div>
{/* Settings */}
<div className="space-y-4">
{/* Filename */}
<div>
<label className="block text-sm font-medium text-foreground mb-2">
Filename
</label>
<input
type="text"
value={settings.filename}
onChange={(e) => setSettings({ ...settings, filename: e.target.value })}
className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
disabled={isExporting}
/>
<p className="text-xs text-muted-foreground mt-1">.wav will be added automatically</p>
</div>
{/* Format */}
<div>
<label className="block text-sm font-medium text-foreground mb-2">
Format
</label>
<select
value={settings.format}
onChange={(e) => setSettings({ ...settings, format: e.target.value as 'wav' })}
className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
disabled={isExporting}
>
<option value="wav">WAV (Uncompressed)</option>
</select>
</div>
{/* Bit Depth */}
<div>
<label className="block text-sm font-medium text-foreground mb-2">
Bit Depth
</label>
<div className="flex gap-2">
{[16, 24, 32].map((depth) => (
<button
key={depth}
onClick={() => setSettings({ ...settings, bitDepth: depth as 16 | 24 | 32 })}
className={cn(
'flex-1 px-3 py-2 rounded text-sm font-medium transition-colors',
settings.bitDepth === depth
? 'bg-primary text-primary-foreground'
: 'bg-background border border-border text-foreground hover:bg-accent'
)}
disabled={isExporting}
>
{depth}-bit {depth === 32 && '(Float)'}
</button>
))}
</div>
</div>
{/* Normalize */}
<div>
<label className="flex items-center gap-2 cursor-pointer">
<input
type="checkbox"
checked={settings.normalize}
onChange={(e) => setSettings({ ...settings, normalize: e.target.checked })}
className="w-4 h-4 rounded border-border text-primary focus:ring-primary"
disabled={isExporting}
/>
<span className="text-sm font-medium text-foreground">
Normalize audio
</span>
</label>
<p className="text-xs text-muted-foreground mt-1 ml-6">
Prevents clipping by adjusting peak levels
</p>
</div>
</div>
{/* Actions */}
<div className="flex gap-3 mt-6">
<Button
variant="outline"
onClick={onClose}
className="flex-1"
disabled={isExporting}
>
Cancel
</Button>
<Button
onClick={handleExport}
className="flex-1"
disabled={isExporting || !settings.filename.trim()}
>
<Download className="h-4 w-4 mr-2" />
{isExporting ? 'Exporting...' : 'Export'}
</Button>
</div>
</div>
</div>
);
}

View File

@@ -1,11 +1,12 @@
'use client';
import * as React from 'react';
import { Music, Plus, Upload, Trash2, Settings } from 'lucide-react';
import { Music, Plus, Upload, Trash2, Settings, Download } from 'lucide-react';
import { PlaybackControls } from './PlaybackControls';
import { ThemeToggle } from '@/components/layout/ThemeToggle';
import { CommandPalette } from '@/components/ui/CommandPalette';
import { GlobalSettingsDialog } from '@/components/settings/GlobalSettingsDialog';
import { ExportDialog, type ExportSettings } from '@/components/dialogs/ExportDialog';
import { Button } from '@/components/ui/Button';
import type { CommandAction } from '@/components/ui/CommandPalette';
import { useMultiTrack } from '@/lib/hooks/useMultiTrack';
@@ -26,6 +27,8 @@ import {
createMultiTrackDuplicateCommand,
} from '@/lib/history/commands/multi-track-edit-command';
import { extractBufferSegment } from '@/lib/audio/buffer-utils';
import { mixTracks, getMaxTrackDuration } from '@/lib/audio/track-utils';
import { audioBufferToWav, downloadArrayBuffer } from '@/lib/audio/export';
export function AudioEditor() {
const [importDialogOpen, setImportDialogOpen] = React.useState(false);
@@ -39,6 +42,8 @@ export function AudioEditor() {
const [punchOutTime, setPunchOutTime] = React.useState(0);
const [overdubEnabled, setOverdubEnabled] = React.useState(false);
const [settingsDialogOpen, setSettingsDialogOpen] = React.useState(false);
const [exportDialogOpen, setExportDialogOpen] = React.useState(false);
const [isExporting, setIsExporting] = React.useState(false);
const { addToast } = useToast();
@@ -102,14 +107,112 @@ export function AudioEditor() {
return track;
}, [addTrackFromBufferOriginal]);
// Log tracks to see if they update
React.useEffect(() => {
console.log('[AudioEditor] Tracks updated:', tracks.map(t => ({
name: t.name,
effectCount: t.effectChain.effects.length,
effects: t.effectChain.effects.map(e => e.name)
})));
}, [tracks]);
// Track which parameters are being touched (for touch/latch modes)
const [touchedParameters, setTouchedParameters] = React.useState<Set<string>>(new Set());
const [latchTriggered, setLatchTriggered] = React.useState<Set<string>>(new Set());
// Track last recorded values to detect changes
const lastRecordedValuesRef = React.useRef<Map<string, { value: number; time: number }>>(new Map());
// Automation recording callback
const handleAutomationRecording = React.useCallback((
trackId: string,
laneId: string,
currentTime: number,
value: number
) => {
const track = tracks.find(t => t.id === trackId);
if (!track) return;
const lane = track.automation.lanes.find(l => l.id === laneId);
if (!lane) return;
const paramKey = `${trackId}-${laneId}`;
let shouldRecord = false;
// Determine if we should record based on mode
switch (lane.mode) {
case 'write':
// Always record in write mode
shouldRecord = true;
break;
case 'touch':
// Only record when parameter is being touched
shouldRecord = touchedParameters.has(paramKey);
break;
case 'latch':
// Record from first touch until stop
if (touchedParameters.has(paramKey)) {
setLatchTriggered(prev => new Set(prev).add(paramKey));
}
shouldRecord = latchTriggered.has(paramKey);
break;
default:
shouldRecord = false;
}
if (!shouldRecord) return;
// Throttle recording to avoid creating too many automation points
// This doesn't prevent recording, just limits frequency
const lastRecorded = lastRecordedValuesRef.current.get(paramKey);
if (lastRecorded && currentTime - lastRecorded.time < 0.1) {
// Check if value has changed significantly
const valueChanged = Math.abs(lastRecorded.value - value) > 0.001;
if (!valueChanged) {
// Skip if value hasn't changed and we recorded recently
return;
}
}
// Update last recorded value
lastRecordedValuesRef.current.set(paramKey, { value, time: currentTime });
// Create new automation point
const newPoint = {
id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
time: currentTime,
value,
curve: 'linear' as const,
};
// In write mode, remove existing points near this time (overwrites)
const updatedPoints = lane.mode === 'write'
? [...lane.points.filter(p => Math.abs(p.time - currentTime) > 0.05), newPoint]
: [...lane.points, newPoint];
updatedPoints.sort((a, b) => a.time - b.time);
// Update the lane with new points
const updatedLanes = track.automation.lanes.map(l =>
l.id === laneId ? { ...l, points: updatedPoints } : l
);
updateTrack(trackId, {
automation: {
...track.automation,
lanes: updatedLanes,
},
});
}, [tracks, updateTrack, touchedParameters, latchTriggered]);
// Helper to mark parameter as touched (for touch/latch modes)
const setParameterTouched = React.useCallback((trackId: string, laneId: string, touched: boolean) => {
const paramKey = `${trackId}-${laneId}`;
setTouchedParameters(prev => {
const next = new Set(prev);
if (touched) {
next.add(paramKey);
} else {
next.delete(paramKey);
}
return next;
});
}, []);
const {
isPlaying,
@@ -121,7 +224,56 @@ export function AudioEditor() {
stop,
seek,
togglePlayPause,
} = useMultiTrackPlayer(tracks, masterVolume);
} = useMultiTrackPlayer(tracks, masterVolume, handleAutomationRecording);
// Reset latch triggered state when playback stops
React.useEffect(() => {
if (!isPlaying) {
setLatchTriggered(new Set());
lastRecordedValuesRef.current.clear();
}
}, [isPlaying]);
// Record effect parameter values while touched
React.useEffect(() => {
if (!isPlaying) return;
const recordEffectParams = () => {
const time = currentTime;
touchedParameters.forEach(paramKey => {
const [trackId, laneId] = paramKey.split('-');
const track = tracks.find(t => t.id === trackId);
if (!track) return;
const lane = track.automation.lanes.find(l => l.id === laneId);
if (!lane || !lane.parameterId.startsWith('effect.')) return;
// Parse effect parameter ID: effect.{effectId}.{paramName}
const parts = lane.parameterId.split('.');
if (parts.length !== 3) return;
const effectId = parts[1];
const paramName = parts[2];
const effect = track.effectChain.effects.find(e => e.id === effectId);
if (!effect || !effect.parameters) return;
const currentValue = (effect.parameters as any)[paramName];
if (currentValue === undefined) return;
// Normalize value to 0-1 range
const range = lane.valueRange.max - lane.valueRange.min;
const normalizedValue = (currentValue - lane.valueRange.min) / range;
// Record the automation
handleAutomationRecording(trackId, laneId, time, normalizedValue);
});
};
const interval = setInterval(recordEffectParams, 50); // Record every 50ms while touched
return () => clearInterval(interval);
}, [isPlaying, currentTime, touchedParameters, tracks, handleAutomationRecording]);
// Master effect chain
const {
@@ -549,6 +701,60 @@ export function AudioEditor() {
});
}, [tracks, executeCommand, updateTrack, addToast]);
// Export handler
const handleExport = React.useCallback(async (settings: ExportSettings) => {
if (tracks.length === 0) {
addToast({
title: 'No Tracks',
description: 'Add some tracks before exporting',
variant: 'warning',
duration: 3000,
});
return;
}
setIsExporting(true);
try {
// Get max duration and sample rate
const maxDuration = getMaxTrackDuration(tracks);
const sampleRate = tracks[0]?.audioBuffer?.sampleRate || 44100;
// Mix all tracks into a single buffer
const mixedBuffer = mixTracks(tracks, sampleRate, maxDuration);
// Convert to WAV
const wavBuffer = audioBufferToWav(mixedBuffer, {
format: settings.format,
bitDepth: settings.bitDepth,
normalize: settings.normalize,
});
// Download
const filename = `${settings.filename}.wav`;
downloadArrayBuffer(wavBuffer, filename);
addToast({
title: 'Export Complete',
description: `Exported ${filename}`,
variant: 'success',
duration: 3000,
});
setExportDialogOpen(false);
} catch (error) {
console.error('Export failed:', error);
addToast({
title: 'Export Failed',
description: 'Failed to export audio',
variant: 'error',
duration: 3000,
});
} finally {
setIsExporting(false);
}
}, [tracks, addToast]);
// Zoom controls
const handleZoomIn = () => {
setZoom((prev) => Math.min(20, prev + 1));
@@ -765,10 +971,16 @@ export function AudioEditor() {
Import
</Button>
{tracks.length > 0 && (
<Button variant="outline" size="sm" onClick={handleClearTracks}>
<Trash2 className="h-4 w-4 mr-1.5 text-destructive" />
Clear All
</Button>
<>
<Button variant="outline" size="sm" onClick={() => setExportDialogOpen(true)}>
<Download className="h-4 w-4 mr-1.5" />
Export
</Button>
<Button variant="outline" size="sm" onClick={handleClearTracks}>
<Trash2 className="h-4 w-4 mr-1.5 text-destructive" />
Clear All
</Button>
</>
)}
</div>
</div>
@@ -811,6 +1023,8 @@ export function AudioEditor() {
recordingTrackId={recordingTrackId}
recordingLevel={recordingState.inputLevel}
trackLevels={trackLevels}
onParameterTouched={setParameterTouched}
isPlaying={isPlaying}
/>
</div>
</main>
@@ -861,6 +1075,14 @@ export function AudioEditor() {
onRecordMonoChange={setRecordMono}
onSampleRateChange={setSampleRate}
/>
{/* Export Dialog */}
<ExportDialog
open={exportDialogOpen}
onClose={() => setExportDialogOpen(false)}
onExport={handleExport}
isExporting={isExporting}
/>
</>
);
}

View File

@@ -13,6 +13,10 @@ export interface EffectDeviceProps {
onRemove?: () => void;
onUpdateParameters?: (parameters: any) => void;
onToggleExpanded?: () => void;
trackId?: string;
isPlaying?: boolean;
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
}
export function EffectDevice({
@@ -21,6 +25,10 @@ export function EffectDevice({
onRemove,
onUpdateParameters,
onToggleExpanded,
trackId,
isPlaying,
onParameterTouched,
automationLanes,
}: EffectDeviceProps) {
const isExpanded = effect.expanded || false;
@@ -108,7 +116,14 @@ export function EffectDevice({
{/* Device Body */}
<div className="flex-1 min-h-0 overflow-y-auto custom-scrollbar p-3 bg-card/50">
<EffectParameters effect={effect} onUpdateParameters={onUpdateParameters} />
<EffectParameters
effect={effect}
onUpdateParameters={onUpdateParameters}
trackId={trackId}
isPlaying={isPlaying}
onParameterTouched={onParameterTouched}
automationLanes={automationLanes}
/>
</div>
</>
)}

View File

@@ -27,9 +27,20 @@ import type { FilterOptions } from '@/lib/audio/effects/filters';
export interface EffectParametersProps {
effect: ChainEffect;
onUpdateParameters?: (parameters: any) => void;
trackId?: string;
isPlaying?: boolean;
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
}
export function EffectParameters({ effect, onUpdateParameters }: EffectParametersProps) {
export function EffectParameters({
effect,
onUpdateParameters,
trackId,
isPlaying,
onParameterTouched,
automationLanes = []
}: EffectParametersProps) {
const params = effect.parameters || {};
const updateParam = (key: string, value: any) => {
@@ -38,6 +49,47 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
}
};
// Memoize touch handlers for all parameters
const touchHandlers = React.useMemo(() => {
if (!trackId || !isPlaying || !onParameterTouched || !automationLanes) {
return {};
}
const handlers: Record<string, { onTouchStart: () => void; onTouchEnd: () => void }> = {};
automationLanes.forEach(lane => {
if (!lane.parameterId.startsWith(`effect.${effect.id}.`)) {
return;
}
// For effect parameters, write mode works like touch mode
if (lane.mode !== 'touch' && lane.mode !== 'latch' && lane.mode !== 'write') {
return;
}
// Extract parameter name from parameterId (effect.{effectId}.{paramName})
const parts = lane.parameterId.split('.');
if (parts.length !== 3) return;
const paramName = parts[2];
handlers[paramName] = {
onTouchStart: () => {
queueMicrotask(() => onParameterTouched(trackId, lane.id, true));
},
onTouchEnd: () => {
queueMicrotask(() => onParameterTouched(trackId, lane.id, false));
},
};
});
return handlers;
}, [trackId, isPlaying, onParameterTouched, effect.id, automationLanes]);
// Helper to get touch handlers for a parameter
const getTouchHandlers = (paramName: string) => {
return touchHandlers[paramName] || {};
};
// Filter effects
if (['lowpass', 'highpass', 'bandpass', 'notch', 'lowshelf', 'highshelf', 'peaking'].includes(effect.type)) {
const filterParams = params as FilterOptions;
@@ -53,6 +105,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
min={20}
max={20000}
step={1}
{...getTouchHandlers('frequency')}
/>
</div>
<div className="space-y-1">
@@ -65,6 +118,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
min={0.1}
max={20}
step={0.1}
{...getTouchHandlers('Q')}
/>
</div>
{['lowshelf', 'highshelf', 'peaking'].includes(effect.type) && (
@@ -78,6 +132,7 @@ export function EffectParameters({ effect, onUpdateParameters }: EffectParameter
min={-40}
max={40}
step={0.5}
{...getTouchHandlers('gain')}
/>
</div>
)}

View File

@@ -43,6 +43,8 @@ export interface TrackProps {
isRecording?: boolean;
recordingLevel?: number;
playbackLevel?: number;
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
isPlaying?: boolean;
}
export function Track({
@@ -71,6 +73,8 @@ export function Track({
isRecording = false,
recordingLevel = 0,
playbackLevel = 0,
onParameterTouched,
isPlaying = false,
}: TrackProps) {
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const containerRef = React.useRef<HTMLDivElement>(null);
@@ -89,6 +93,123 @@ export function Track({
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
// Touch callbacks for automation recording
const handlePanTouchStart = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handlePanTouchEnd = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const panLane = track.automation.lanes.find(l => l.parameterId === 'pan');
if (panLane && (panLane.mode === 'touch' || panLane.mode === 'latch')) {
queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handleVolumeTouchStart = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handleVolumeTouchEnd = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const volumeLane = track.automation.lanes.find(l => l.parameterId === 'volume');
if (volumeLane && (volumeLane.mode === 'touch' || volumeLane.mode === 'latch')) {
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, false));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
// Auto-create automation lane for selected parameter if it doesn't exist
React.useEffect(() => {
if (!track.automation?.showAutomation) return;
const selectedParameterId = track.automation.selectedParameterId || 'volume';
const laneExists = track.automation.lanes.some(lane => lane.parameterId === selectedParameterId);
if (!laneExists) {
// Build list of available parameters
const availableParameters: Array<{ id: string; name: string }> = [
{ id: 'volume', name: 'Volume' },
{ id: 'pan', name: 'Pan' },
];
track.effectChain.effects.forEach((effect) => {
if (effect.parameters) {
Object.keys(effect.parameters).forEach((paramKey) => {
const parameterId = `effect.${effect.id}.${paramKey}`;
const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
availableParameters.push({ id: parameterId, name: paramName });
});
}
});
const paramInfo = availableParameters.find(p => p.id === selectedParameterId);
if (paramInfo) {
// Determine value range based on parameter type
let valueRange = { min: 0, max: 1 };
let unit = '';
let formatter: ((value: number) => string) | undefined;
if (selectedParameterId === 'volume') {
unit = 'dB';
} else if (selectedParameterId === 'pan') {
formatter = (value: number) => {
if (value === 0.5) return 'C';
if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
return `${((value - 0.5) * 200).toFixed(0)}R`;
};
} else if (selectedParameterId.startsWith('effect.')) {
// Parse effect parameter: effect.{effectId}.{paramName}
const parts = selectedParameterId.split('.');
if (parts.length === 3) {
const paramName = parts[2];
// Set ranges based on parameter name
if (paramName === 'frequency') {
valueRange = { min: 20, max: 20000 };
unit = 'Hz';
} else if (paramName === 'Q') {
valueRange = { min: 0.1, max: 20 };
} else if (paramName === 'gain') {
valueRange = { min: -40, max: 40 };
unit = 'dB';
}
}
}
const newLane = createAutomationLane(
track.id,
selectedParameterId,
paramInfo.name,
{
min: valueRange.min,
max: valueRange.max,
unit,
formatter,
}
);
onUpdateTrack(track.id, {
automation: {
...track.automation,
lanes: [...track.automation.lanes, newLane],
selectedParameterId,
},
});
}
}
}, [track.automation?.showAutomation, track.automation?.selectedParameterId, track.automation?.lanes, track.effectChain.effects, track.id, onUpdateTrack]);
const handleNameClick = () => {
setIsEditingName(true);
setNameInput(String(track.name || 'Untitled Track'));
@@ -536,6 +657,8 @@ export function Track({
step={0.01}
size={48}
label="PAN"
onTouchStart={handlePanTouchStart}
onTouchEnd={handlePanTouchEnd}
/>
</div>
@@ -549,6 +672,8 @@ export function Track({
max={1}
step={0.01}
showDb={true}
onTouchStart={handleVolumeTouchStart}
onTouchEnd={handleVolumeTouchEnd}
/>
</div>
@@ -735,35 +860,8 @@ export function Track({
// Find or create lane for selected parameter
let selectedLane = track.automation.lanes.find(lane => lane.parameterId === selectedParameterId);
// If lane doesn't exist yet, create it
if (!selectedLane) {
const paramInfo = availableParameters.find(p => p.id === selectedParameterId);
if (paramInfo) {
selectedLane = createAutomationLane(
track.id,
selectedParameterId,
paramInfo.name,
{
min: 0,
max: 1,
unit: selectedParameterId === 'volume' ? 'dB' : '',
formatter: selectedParameterId === 'pan' ? (value: number) => {
if (value === 0.5) return 'C';
if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
return `${((value - 0.5) * 200).toFixed(0)}R`;
} : undefined,
}
);
// Add the new lane to the track
onUpdateTrack(track.id, {
automation: {
...track.automation,
lanes: [...track.automation.lanes, selectedLane],
selectedParameterId,
},
});
}
}
// If lane doesn't exist yet, we need to create it (but not during render)
// This will be handled by a useEffect instead
const modes: Array<{ value: string; label: string; color: string }> = [
{ value: 'read', label: 'R', color: 'text-muted-foreground' },
@@ -957,6 +1055,10 @@ export function Track({
effectChain: { ...track.effectChain, effects: updatedEffects },
});
}}
trackId={track.id}
isPlaying={isPlaying}
onParameterTouched={onParameterTouched}
automationLanes={track.automation.lanes}
/>
))}
</div>

View File

@@ -25,6 +25,8 @@ export interface TrackListProps {
recordingTrackId?: string | null;
recordingLevel?: number;
trackLevels?: Record<string, number>;
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
isPlaying?: boolean;
}
export function TrackList({
@@ -44,6 +46,8 @@ export function TrackList({
recordingTrackId,
recordingLevel = 0,
trackLevels = {},
onParameterTouched,
isPlaying = false,
}: TrackListProps) {
const [importDialogOpen, setImportDialogOpen] = React.useState(false);
@@ -168,6 +172,8 @@ export function TrackList({
isRecording={recordingTrackId === track.id}
recordingLevel={recordingTrackId === track.id ? recordingLevel : 0}
playbackLevel={trackLevels[track.id] || 0}
onParameterTouched={onParameterTouched}
isPlaying={isPlaying}
/>
))}
</div>

View File

@@ -13,6 +13,8 @@ export interface CircularKnobProps {
className?: string;
label?: string;
formatValue?: (value: number) => string;
onTouchStart?: () => void;
onTouchEnd?: () => void;
}
export function CircularKnob({
@@ -25,6 +27,8 @@ export function CircularKnob({
className,
label,
formatValue,
onTouchStart,
onTouchEnd,
}: CircularKnobProps) {
const knobRef = React.useRef<HTMLDivElement>(null);
const [isDragging, setIsDragging] = React.useState(false);
@@ -68,8 +72,9 @@ export function CircularKnob({
y: e.clientY,
value,
};
onTouchStart?.();
},
[value]
[value, onTouchStart]
);
const handleMouseMove = React.useCallback(
@@ -83,7 +88,8 @@ export function CircularKnob({
const handleMouseUp = React.useCallback(() => {
setIsDragging(false);
}, []);
onTouchEnd?.();
}, [onTouchEnd]);
React.useEffect(() => {
if (isDragging) {

View File

@@ -13,6 +13,8 @@ export interface SliderProps
step?: number;
label?: string;
showValue?: boolean;
onTouchStart?: () => void;
onTouchEnd?: () => void;
}
const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
@@ -28,6 +30,8 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
label,
showValue = false,
disabled,
onTouchStart,
onTouchEnd,
...props
},
ref
@@ -41,6 +45,21 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
onValueChange?.([numValue]);
};
const handleMouseDown = () => {
onTouchStart?.();
};
const handleMouseUp = () => {
onTouchEnd?.();
};
React.useEffect(() => {
if (onTouchEnd) {
window.addEventListener('mouseup', handleMouseUp);
return () => window.removeEventListener('mouseup', handleMouseUp);
}
}, [onTouchEnd]);
return (
<div className={cn('w-full', className)}>
{(label || showValue) && (
@@ -63,6 +82,7 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
step={step}
value={currentValue}
onChange={handleChange}
onMouseDown={handleMouseDown}
disabled={disabled}
className={cn(
'w-full h-2 bg-secondary rounded-lg appearance-none cursor-pointer',

View File

@@ -12,6 +12,8 @@ export interface VerticalFaderProps {
step?: number;
className?: string;
showDb?: boolean;
onTouchStart?: () => void;
onTouchEnd?: () => void;
}
export function VerticalFader({
@@ -23,6 +25,8 @@ export function VerticalFader({
step = 0.01,
className,
showDb = true,
onTouchStart,
onTouchEnd,
}: VerticalFaderProps) {
const trackRef = React.useRef<HTMLDivElement>(null);
const [isDragging, setIsDragging] = React.useState(false);
@@ -58,8 +62,9 @@ export function VerticalFader({
e.preventDefault();
setIsDragging(true);
updateValue(e.clientY);
onTouchStart?.();
},
[updateValue]
[updateValue, onTouchStart]
);
const handleMouseMove = React.useCallback(
@@ -73,7 +78,8 @@ export function VerticalFader({
const handleMouseUp = React.useCallback(() => {
setIsDragging(false);
}, []);
onTouchEnd?.();
}, [onTouchEnd]);
React.useEffect(() => {
if (isDragging) {