feat: complete Phase 9.3 - automation recording with write/touch/latch modes

Implemented comprehensive automation recording system for volume, pan, and effect parameters:

- Added automation recording modes:
  - Write: Records continuously during playback when values change
  - Touch: Records only while control is being touched/moved
  - Latch: Records from first touch until playback stops

- Implemented value change detection (0.001 threshold) to prevent infinite loops
- Fixed React setState-in-render errors by:
  - Using queueMicrotask() to defer state updates
  - Moving lane creation logic to useEffect
  - Properly memoizing touch handlers with useMemo

- Added proper value ranges for effect parameters:
  - Frequency: 20-20000 Hz
  - Q: 0.1-20
  - Gain: -40-40 dB

- Enhanced automation lane auto-creation with parameter-specific ranges
- Added touch callbacks to all parameter controls (volume, pan, effects)
- Implemented throttling (100ms) to avoid excessive automation points

Technical improvements:
- Used tracksRef and onRecordAutomationRef to ensure latest values in animation loops
- Added proper cleanup on playback stop
- Optimized recording to only trigger when values actually change

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2025-11-18 23:29:18 +01:00
parent a1f230a6e6
commit c54d5089c5
13 changed files with 1040 additions and 70 deletions

134
lib/audio/export.ts Normal file
View File

@@ -0,0 +1,134 @@
/**
* Audio export utilities
* Supports WAV export with various bit depths
*/
export interface ExportOptions {
format: 'wav';
bitDepth: 16 | 24 | 32;
sampleRate?: number; // If different from source, will resample
normalize?: boolean; // Normalize to prevent clipping
}
/**
* Convert an AudioBuffer to WAV file
*/
export function audioBufferToWav(
audioBuffer: AudioBuffer,
options: ExportOptions = { format: 'wav', bitDepth: 16 }
): ArrayBuffer {
const { bitDepth, normalize } = options;
const numberOfChannels = audioBuffer.numberOfChannels;
const sampleRate = audioBuffer.sampleRate;
const length = audioBuffer.length;
// Get channel data
const channels: Float32Array[] = [];
for (let i = 0; i < numberOfChannels; i++) {
channels.push(audioBuffer.getChannelData(i));
}
// Find peak if normalizing
let peak = 1.0;
if (normalize) {
peak = 0;
for (const channel of channels) {
for (let i = 0; i < channel.length; i++) {
const abs = Math.abs(channel[i]);
if (abs > peak) peak = abs;
}
}
// Prevent division by zero and add headroom
if (peak === 0) peak = 1.0;
else peak = peak * 1.01; // 1% headroom
}
// Calculate sizes
const bytesPerSample = bitDepth / 8;
const blockAlign = numberOfChannels * bytesPerSample;
const dataSize = length * blockAlign;
const bufferSize = 44 + dataSize; // 44 bytes for WAV header
// Create buffer
const buffer = new ArrayBuffer(bufferSize);
const view = new DataView(buffer);
// Write WAV header
let offset = 0;
// RIFF chunk descriptor
writeString(view, offset, 'RIFF'); offset += 4;
view.setUint32(offset, bufferSize - 8, true); offset += 4; // File size - 8
writeString(view, offset, 'WAVE'); offset += 4;
// fmt sub-chunk
writeString(view, offset, 'fmt '); offset += 4;
view.setUint32(offset, 16, true); offset += 4; // Subchunk size (16 for PCM)
view.setUint16(offset, bitDepth === 32 ? 3 : 1, true); offset += 2; // Audio format (1 = PCM, 3 = IEEE float)
view.setUint16(offset, numberOfChannels, true); offset += 2;
view.setUint32(offset, sampleRate, true); offset += 4;
view.setUint32(offset, sampleRate * blockAlign, true); offset += 4; // Byte rate
view.setUint16(offset, blockAlign, true); offset += 2;
view.setUint16(offset, bitDepth, true); offset += 2;
// data sub-chunk
writeString(view, offset, 'data'); offset += 4;
view.setUint32(offset, dataSize, true); offset += 4;
// Write interleaved audio data
if (bitDepth === 16) {
for (let i = 0; i < length; i++) {
for (let channel = 0; channel < numberOfChannels; channel++) {
const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
view.setInt16(offset, sample * 0x7fff, true);
offset += 2;
}
}
} else if (bitDepth === 24) {
for (let i = 0; i < length; i++) {
for (let channel = 0; channel < numberOfChannels; channel++) {
const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
const int24 = Math.round(sample * 0x7fffff);
view.setUint8(offset, int24 & 0xff); offset++;
view.setUint8(offset, (int24 >> 8) & 0xff); offset++;
view.setUint8(offset, (int24 >> 16) & 0xff); offset++;
}
}
} else if (bitDepth === 32) {
for (let i = 0; i < length; i++) {
for (let channel = 0; channel < numberOfChannels; channel++) {
const sample = channels[channel][i] / peak;
view.setFloat32(offset, sample, true);
offset += 4;
}
}
}
return buffer;
}
/**
* Download an ArrayBuffer as a file
*/
export function downloadArrayBuffer(
arrayBuffer: ArrayBuffer,
filename: string,
mimeType: string = 'audio/wav'
): void {
const blob = new Blob([arrayBuffer], { type: mimeType });
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = filename;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
URL.revokeObjectURL(url);
}
// Helper to write string to DataView
function writeString(view: DataView, offset: number, string: string): void {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}

View File

@@ -0,0 +1,173 @@
/**
* Hook for recording automation data during playback
* Supports write, touch, and latch modes
*/
import { useCallback, useRef } from 'react';
import type { Track } from '@/types/track';
import type { AutomationPoint, AutomationMode } from '@/types/automation';
export interface AutomationRecordingState {
isRecording: boolean;
recordingLaneId: string | null;
touchActive: boolean; // For touch mode - tracks if control is being touched
latchTriggered: boolean; // For latch mode - tracks if recording has started
}
export function useAutomationRecording(
track: Track,
onUpdateTrack: (trackId: string, updates: Partial<Track>) => void
) {
const recordingStateRef = useRef<Map<string, AutomationRecordingState>>(new Map());
const recordingIntervalRef = useRef<Map<string, number>>(new Map());
const lastRecordedValueRef = useRef<Map<string, number>>(new Map());
/**
* Start recording automation for a specific lane
*/
const startRecording = useCallback((laneId: string, mode: AutomationMode) => {
const state: AutomationRecordingState = {
isRecording: mode === 'write',
recordingLaneId: laneId,
touchActive: false,
latchTriggered: false,
};
recordingStateRef.current.set(laneId, state);
}, []);
/**
* Stop recording automation for a specific lane
*/
const stopRecording = useCallback((laneId: string) => {
recordingStateRef.current.delete(laneId);
const intervalId = recordingIntervalRef.current.get(laneId);
if (intervalId) {
clearInterval(intervalId);
recordingIntervalRef.current.delete(laneId);
}
lastRecordedValueRef.current.delete(laneId);
}, []);
/**
* Record a single automation point
*/
const recordPoint = useCallback((
laneId: string,
currentTime: number,
value: number,
mode: AutomationMode
) => {
const lane = track.automation.lanes.find(l => l.id === laneId);
if (!lane) return;
const state = recordingStateRef.current.get(laneId);
if (!state) return;
// Check if we should record based on mode
let shouldRecord = false;
switch (mode) {
case 'write':
// Always record in write mode
shouldRecord = true;
break;
case 'touch':
// Only record when control is being touched
shouldRecord = state.touchActive;
break;
case 'latch':
// Record from first touch until stop
if (state.touchActive && !state.latchTriggered) {
state.latchTriggered = true;
}
shouldRecord = state.latchTriggered;
break;
default:
shouldRecord = false;
}
if (!shouldRecord) return;
// Check if value has changed significantly (avoid redundant points)
const lastValue = lastRecordedValueRef.current.get(laneId);
if (lastValue !== undefined && Math.abs(lastValue - value) < 0.001) {
return; // Skip if value hasn't changed
}
lastRecordedValueRef.current.set(laneId, value);
// In write mode, clear existing points in the time range
let updatedPoints = [...lane.points];
if (mode === 'write') {
// Remove points that are within a small time window of current time
updatedPoints = updatedPoints.filter(p =>
Math.abs(p.time - currentTime) > 0.05 // 50ms threshold
);
}
// Add new point
const newPoint: AutomationPoint = {
id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
time: currentTime,
value,
curve: 'linear',
};
updatedPoints.push(newPoint);
// Sort points by time
updatedPoints.sort((a, b) => a.time - b.time);
// Update track with new automation points
const updatedLanes = track.automation.lanes.map(l =>
l.id === laneId ? { ...l, points: updatedPoints } : l
);
onUpdateTrack(track.id, {
automation: {
...track.automation,
lanes: updatedLanes,
},
});
}, [track, onUpdateTrack]);
/**
* Set touch state for touch mode
*/
const setTouchActive = useCallback((laneId: string, active: boolean) => {
const state = recordingStateRef.current.get(laneId);
if (state) {
state.touchActive = active;
}
}, []);
/**
* Check if a lane is currently recording
*/
const isRecordingLane = useCallback((laneId: string): boolean => {
const state = recordingStateRef.current.get(laneId);
return state?.isRecording ?? false;
}, []);
/**
* Cleanup - stop all recording
*/
const cleanup = useCallback(() => {
recordingStateRef.current.forEach((_, laneId) => {
stopRecording(laneId);
});
recordingStateRef.current.clear();
}, [stopRecording]);
return {
startRecording,
stopRecording,
recordPoint,
setTouchActive,
isRecordingLane,
cleanup,
};
}

View File

@@ -16,7 +16,15 @@ export interface TrackLevel {
level: number;
}
export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
export interface AutomationRecordingCallback {
(trackId: string, laneId: string, currentTime: number, value: number): void;
}
export function useMultiTrackPlayer(
tracks: Track[],
masterVolume: number = 1,
onRecordAutomation?: AutomationRecordingCallback
) {
const [isPlaying, setIsPlaying] = useState(false);
const [currentTime, setCurrentTime] = useState(0);
const [duration, setDuration] = useState(0);
@@ -36,12 +44,19 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const automationFrameRef = useRef<number | null>(null);
const isMonitoringLevelsRef = useRef<boolean>(false);
const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
const lastRecordedValuesRef = useRef<Map<string, number>>(new Map()); // Track last recorded values to detect changes
const onRecordAutomationRef = useRef<AutomationRecordingCallback | undefined>(onRecordAutomation);
// Keep tracksRef in sync with tracks prop
useEffect(() => {
tracksRef.current = tracks;
}, [tracks]);
// Keep onRecordAutomationRef in sync
useEffect(() => {
onRecordAutomationRef.current = onRecordAutomation;
}, [onRecordAutomation]);
// Calculate total duration from all tracks
useEffect(() => {
let maxDuration = 0;
@@ -107,16 +122,32 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
tracks.forEach((track, index) => {
tracksRef.current.forEach((track, index) => {
// Apply volume automation
const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
if (volumeLane && volumeLane.points.length > 0) {
const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
if (automatedValue !== undefined && gainNodesRef.current[index]) {
if (volumeLane) {
let volumeValue: number | undefined;
// In write mode, record current track volume (only if value changed)
if (volumeLane.mode === 'write' && onRecordAutomationRef.current) {
volumeValue = track.volume;
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-volume`);
// Only record if value has changed
if (lastValue === undefined || Math.abs(lastValue - volumeValue) > 0.001) {
lastRecordedValuesRef.current.set(`${track.id}-volume`, volumeValue);
onRecordAutomationRef.current(track.id, volumeLane.id, currentTime, volumeValue);
}
} else if (volumeLane.points.length > 0) {
// Otherwise play back automation
volumeValue = evaluateAutomationLinear(volumeLane.points, currentTime);
}
if (volumeValue !== undefined && gainNodesRef.current[index]) {
const trackGain = getTrackGain(track, tracks);
// Apply both track gain (mute/solo) and automated volume
gainNodesRef.current[index].gain.setValueAtTime(
trackGain * automatedValue,
trackGain * volumeValue,
audioContextRef.current!.currentTime
);
}
@@ -124,8 +155,24 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
// Apply pan automation
const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
if (panLane && panLane.points.length > 0) {
const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
if (panLane) {
let automatedValue: number | undefined;
// In write mode, record current track pan (only if value changed)
if (panLane.mode === 'write' && onRecordAutomationRef.current) {
automatedValue = (track.pan + 1) / 2; // Convert -1 to 1 -> 0 to 1
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-pan`);
// Only record if value has changed
if (lastValue === undefined || Math.abs(lastValue - automatedValue) > 0.001) {
lastRecordedValuesRef.current.set(`${track.id}-pan`, automatedValue);
onRecordAutomationRef.current(track.id, panLane.id, currentTime, automatedValue);
}
} else if (panLane.points.length > 0) {
// Otherwise play back automation
automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
}
if (automatedValue !== undefined && panNodesRef.current[index]) {
// Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
const panValue = (automatedValue * 2) - 1;
@@ -139,7 +186,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
// Apply effect parameter automation
track.automation.lanes.forEach(lane => {
// Check if this is an effect parameter (format: effect.{effectId}.{parameterName})
if (lane.parameterId.startsWith('effect.') && lane.points.length > 0) {
if (lane.parameterId.startsWith('effect.')) {
const parts = lane.parameterId.split('.');
if (parts.length === 3) {
const effectId = parts[1];
@@ -147,13 +194,37 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
// Find the effect in the track's effect chain
const effectIndex = track.effectChain.effects.findIndex(e => e.id === effectId);
if (effectIndex >= 0 && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
const automatedValue = evaluateAutomationLinear(lane.points, currentTime);
if (automatedValue !== undefined) {
const effect = track.effectChain.effects[effectIndex];
if (effectIndex >= 0 && effect) {
let automatedValue: number | undefined;
// In write mode, record current effect parameter value (only if value changed)
if (lane.mode === 'write' && onRecordAutomationRef.current && effect.parameters) {
const currentValue = (effect.parameters as any)[paramName];
if (currentValue !== undefined) {
// Normalize value to 0-1 range
const range = lane.valueRange.max - lane.valueRange.min;
const normalizedValue = (currentValue - lane.valueRange.min) / range;
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-effect-${effectId}-${paramName}`);
// Only record if value has changed
if (lastValue === undefined || Math.abs(lastValue - normalizedValue) > 0.001) {
lastRecordedValuesRef.current.set(`${track.id}-effect-${effectId}-${paramName}`, normalizedValue);
onRecordAutomationRef.current(track.id, lane.id, currentTime, normalizedValue);
}
}
} else if (lane.points.length > 0) {
// Otherwise play back automation
automatedValue = evaluateAutomationLinear(lane.points, currentTime);
}
// Apply the automated value to the effect
if (automatedValue !== undefined && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
const effectNodeInfo = effectNodesRef.current[index][effectIndex];
// Convert normalized 0-1 value to actual parameter range
const effect = track.effectChain.effects[effectIndex];
const actualValue = lane.valueRange.min + (automatedValue * (lane.valueRange.max - lane.valueRange.min));
// Update the effect parameter
@@ -172,7 +243,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
});
automationFrameRef.current = requestAnimationFrame(applyAutomation);
}, [tracks]);
}, []);
const updatePlaybackPosition = useCallback(() => {
if (!audioContextRef.current) return;
@@ -356,6 +427,8 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
pause();
pausedAtRef.current = 0;
setCurrentTime(0);
// Clear last recorded values when stopping
lastRecordedValuesRef.current.clear();
}, [pause]);
const seek = useCallback((time: number) => {