audio-ui/lib/hooks/useMultiTrackPlayer.ts
Sebastian Krüger dac8ac4723 feat: implement real-time automation playback for volume and pan
Phase 9.3 - Automation Playback:
- Added real-time automation evaluation during playback
- Automation values are applied continuously via requestAnimationFrame
- Volume automation: Interpolates between points and applies the result to each track's gain node (see the sketch after this list)
- Pan automation: Converts 0-1 values to -1 to 1 for StereoPannerNode
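
For reference, a minimal TypeScript sketch of the evaluation, assuming automation points are { time, value } pairs sorted ascending by time with values normalized 0-1; the real evaluateAutomationLinear lives in @/lib/audio/automation-utils and may differ in edge-case handling:

interface AutomationPoint {
  time: number;  // seconds from track start
  value: number; // normalized 0-1
}

function evaluateLinear(points: AutomationPoint[], t: number): number | undefined {
  if (points.length === 0) return undefined;
  // Before the first / after the last point: hold the endpoint value
  if (t <= points[0].time) return points[0].value;
  const last = points[points.length - 1];
  if (t >= last.time) return last.value;
  // Find the surrounding segment and interpolate linearly
  for (let i = 0; i < points.length - 1; i++) {
    const a = points[i];
    const b = points[i + 1];
    if (t >= a.time && t <= b.time) {
      const frac = b.time === a.time ? 0 : (t - a.time) / (b.time - a.time);
      return a.value + (b.value - a.value) * frac;
    }
  }
  return undefined; // unreachable with sorted points
}

// Pan lanes store 0-1; StereoPannerNode.pan expects -1 (hard left) to 1 (hard right)
const toStereoPan = (v: number): number => v * 2 - 1;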

Implementation details:
- New applyAutomation() function runs in a requestAnimationFrame loop alongside level monitoring (condensed sketch after this list)
- Evaluates automation at the current playback time using evaluateAutomationLinear
- Applies values with setValueAtTime once per frame; at RAF rate (~60 Hz) the discrete steps are small enough to sound smooth
- Automation loop lifecycle matches playback (start/pause/stop/cleanup)
- Respects automation mode (only applies when mode !== 'read')
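
A condensed, self-contained sketch of the loop's shape, reusing the AutomationPoint/evaluateLinear sketch above; the authoritative version is applyAutomation() in the file below:

// Per-frame automation loop for a single gain node. Returns a cancel function
// so the caller can mirror the pause/stop/unmount cleanup.
function startAutomationLoop(
  ctx: AudioContext,
  gain: GainNode,
  points: AutomationPoint[],
  trackGain: number,   // mute/solo gain, multiplied with the automated value
  startedAt: number,   // ctx.currentTime when playback (re)started
  pausedAt: number     // playback position accumulated before the last pause
): () => void {
  let rafId: number;
  const tick = () => {
    const playbackTime = pausedAt + (ctx.currentTime - startedAt);
    const v = evaluateLinear(points, playbackTime);
    if (v !== undefined) {
      gain.gain.setValueAtTime(trackGain * v, ctx.currentTime);
    }
    rafId = requestAnimationFrame(tick); // one update per rendered frame
  };
  tick();
  return () => cancelAnimationFrame(rafId);
}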

Technical improvements:
- Added automationFrameRef for RAF management
- Proper cleanup in pause(), unmount, and playback end scenarios
- Integrated with existing effect chain restart mechanism
- Volume automation multiplied with track gain (mute/solo state)

Next steps:
- Effect parameter automation (TODO in code)
- Automation recording (write mode implementation; speculative sketch after this list)
- Touch and latch modes
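
Nothing below exists in the code yet; purely as a speculative illustration of what per-frame write-mode capture might look like (names are hypothetical, reusing the AutomationPoint shape from the sketch above):

// Erase existing points in the interval just played, then append the
// current control value as a new point.
function writeSpan(
  points: AutomationPoint[],
  fromTime: number, // playback position at the previous frame
  toTime: number,   // playback position now
  value: number     // current fader/knob value, normalized 0-1
): AutomationPoint[] {
  const kept = points.filter(p => p.time < fromTime || p.time > toTime);
  kept.push({ time: toTime, value });
  return kept.sort((a, b) => a.time - b.time);
}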

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-18 19:06:02 +01:00

import { useState, useCallback, useRef, useEffect } from 'react';
import { getAudioContext } from '@/lib/audio/context';
import type { Track } from '@/types/track';
import { getTrackGain } from '@/lib/audio/track-utils';
import { applyEffectChain, updateEffectParameters, toggleEffectBypass, type EffectNodeInfo } from '@/lib/audio/effects/processor';
import { evaluateAutomationLinear } from '@/lib/audio/automation-utils';
export interface MultiTrackPlayerState {
isPlaying: boolean;
currentTime: number;
duration: number;
}
export interface TrackLevel {
trackId: string;
level: number;
}
export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const [isPlaying, setIsPlaying] = useState(false);
const [currentTime, setCurrentTime] = useState(0);
const [duration, setDuration] = useState(0);
const [trackLevels, setTrackLevels] = useState<Record<string, number>>({});
const audioContextRef = useRef<AudioContext | null>(null);
const sourceNodesRef = useRef<AudioBufferSourceNode[]>([]);
const gainNodesRef = useRef<GainNode[]>([]);
const panNodesRef = useRef<StereoPannerNode[]>([]);
const analyserNodesRef = useRef<AnalyserNode[]>([]);
const effectNodesRef = useRef<EffectNodeInfo[][]>([]); // Effect nodes per track
const masterGainNodeRef = useRef<GainNode | null>(null);
const startTimeRef = useRef<number>(0);
const pausedAtRef = useRef<number>(0);
const animationFrameRef = useRef<number | null>(null);
const levelMonitorFrameRef = useRef<number | null>(null);
const automationFrameRef = useRef<number | null>(null);
const isMonitoringLevelsRef = useRef<boolean>(false);
const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
// Keep tracksRef in sync with tracks prop
useEffect(() => {
tracksRef.current = tracks;
}, [tracks]);
// Calculate total duration from all tracks
useEffect(() => {
let maxDuration = 0;
for (const track of tracks) {
if (track.audioBuffer) {
maxDuration = Math.max(maxDuration, track.audioBuffer.duration);
}
}
setDuration(maxDuration);
}, [tracks]);
// Convert linear amplitude to dB scale normalized to 0-1 range
const linearToDbScale = (linear: number): number => {
if (linear === 0) return 0;
// Convert to dB (20 * log10(linear))
const db = 20 * Math.log10(linear);
// Normalize -60dB to 0dB range to 0-1
// -60dB or lower = 0%, 0dB = 100%
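// Example: linear 0.5 -> 20 * log10(0.5) ≈ -6 dB -> (-6 - (-60)) / 60 ≈ 0.90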
const minDb = -60;
const maxDb = 0;
const normalized = (db - minDb) / (maxDb - minDb);
// Clamp to 0-1 range
return Math.max(0, Math.min(1, normalized));
};
// Monitor playback levels for all tracks
const monitorPlaybackLevels = useCallback(() => {
if (!isMonitoringLevelsRef.current || analyserNodesRef.current.length === 0) return;
const levels: Record<string, number> = {};
analyserNodesRef.current.forEach((analyser, index) => {
const track = tracksRef.current[index];
if (!track) return;
const dataArray = new Float32Array(analyser.fftSize);
analyser.getFloatTimeDomainData(dataArray);
// Calculate peak level using float data (-1 to +1 range)
let peak = 0;
for (let i = 0; i < dataArray.length; i++) {
const abs = Math.abs(dataArray[i]);
if (abs > peak) {
peak = abs;
}
}
// Convert linear peak to logarithmic dB scale
levels[track.id] = linearToDbScale(peak);
});
setTrackLevels(levels);
levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
}, []);
// Apply automation values during playback
const applyAutomation = useCallback(() => {
if (!audioContextRef.current) return;
// Playback position = time accumulated before the last pause + audio-clock time since (re)start
const playbackTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
// Read tracks through tracksRef (as monitorPlaybackLevels does) so this
// self-scheduling RAF loop sees automation edits made during playback
// instead of the closure captured when the loop started
const currentTracks = tracksRef.current;
currentTracks.forEach((track, index) => {
// Apply volume automation
const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
if (volumeLane && volumeLane.points.length > 0 && volumeLane.mode !== 'read') {
const automatedValue = evaluateAutomationLinear(volumeLane.points, playbackTime);
if (automatedValue !== undefined && gainNodesRef.current[index]) {
const trackGain = getTrackGain(track, currentTracks);
// Apply both track gain (mute/solo) and automated volume
gainNodesRef.current[index].gain.setValueAtTime(
trackGain * automatedValue,
audioContextRef.current!.currentTime
);
}
}
// Apply pan automation
const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
if (panLane && panLane.points.length > 0 && panLane.mode !== 'read') {
const automatedValue = evaluateAutomationLinear(panLane.points, playbackTime);
if (automatedValue !== undefined && panNodesRef.current[index]) {
// Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
const panValue = (automatedValue * 2) - 1;
panNodesRef.current[index].pan.setValueAtTime(
panValue,
audioContextRef.current!.currentTime
);
}
}
// TODO: Apply effect parameter automation
});
automationFrameRef.current = requestAnimationFrame(applyAutomation);
}, []);
const updatePlaybackPosition = useCallback(() => {
if (!audioContextRef.current) return;
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
const newTime = pausedAtRef.current + elapsed;
if (newTime >= duration) {
setIsPlaying(false);
isMonitoringLevelsRef.current = false;
setCurrentTime(0);
pausedAtRef.current = 0;
setTrackLevels({});
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
if (levelMonitorFrameRef.current) {
cancelAnimationFrame(levelMonitorFrameRef.current);
levelMonitorFrameRef.current = null;
}
if (automationFrameRef.current) {
cancelAnimationFrame(automationFrameRef.current);
automationFrameRef.current = null;
}
return;
}
setCurrentTime(newTime);
animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);
}, [duration]);
const play = useCallback(() => {
if (tracks.length === 0 || tracks.every(t => !t.audioBuffer)) return;
const audioContext = getAudioContext();
audioContextRef.current = audioContext;
// Stop any existing playback
sourceNodesRef.current.forEach(node => {
try {
node.stop();
node.disconnect();
} catch (e) {
// Ignore errors from already stopped nodes
}
});
gainNodesRef.current.forEach(node => node.disconnect());
panNodesRef.current.forEach(node => node.disconnect());
if (masterGainNodeRef.current) {
masterGainNodeRef.current.disconnect();
}
sourceNodesRef.current = [];
gainNodesRef.current = [];
panNodesRef.current = [];
analyserNodesRef.current = [];
effectNodesRef.current = [];
// Create master gain node
const masterGain = audioContext.createGain();
masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
masterGain.connect(audioContext.destination);
masterGainNodeRef.current = masterGain;
// Create audio graph for each track
for (const track of tracks) {
if (!track.audioBuffer) continue;
const source = audioContext.createBufferSource();
source.buffer = track.audioBuffer;
const gainNode = audioContext.createGain();
const panNode = audioContext.createStereoPanner();
const analyserNode = audioContext.createAnalyser();
analyserNode.fftSize = 256;
analyserNode.smoothingTimeConstant = 0.8;
// Set gain based on track volume and solo/mute state
const trackGain = getTrackGain(track, tracks);
gainNode.gain.setValueAtTime(trackGain, audioContext.currentTime);
// Set pan
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
// Connect: source -> analyser -> gain -> pan -> effects -> master gain -> destination
// Analyser is before gain so it shows raw audio levels independent of volume fader
source.connect(analyserNode);
analyserNode.connect(gainNode);
gainNode.connect(panNode);
// Apply effect chain
console.log('[MultiTrackPlayer] Applying effect chain for track:', track.name);
console.log('[MultiTrackPlayer] Effect chain ID:', track.effectChain.id);
console.log('[MultiTrackPlayer] Effect chain name:', track.effectChain.name);
console.log('[MultiTrackPlayer] Number of effects:', track.effectChain.effects.length);
console.log('[MultiTrackPlayer] Effects:', track.effectChain.effects);
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
// Connect to master gain
outputNode.connect(masterGain);
console.log('[MultiTrackPlayer] Effect output connected with', effectNodes.length, 'effect nodes');
// Start playback from current position
source.start(0, pausedAtRef.current);
// Store references
sourceNodesRef.current.push(source);
gainNodesRef.current.push(gainNode);
panNodesRef.current.push(panNode);
analyserNodesRef.current.push(analyserNode);
effectNodesRef.current.push(effectNodes);
// Handle ended event
source.onended = () => {
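// onended also fires after a manual stop()/pause(); only treat it as a
// natural end of playback when the clock actually reached the full duration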
if (pausedAtRef.current + (audioContext.currentTime - startTimeRef.current) >= duration) {
setIsPlaying(false);
isMonitoringLevelsRef.current = false;
setCurrentTime(0);
pausedAtRef.current = 0;
setTrackLevels({});
}
};
}
startTimeRef.current = audioContext.currentTime;
setIsPlaying(true);
updatePlaybackPosition();
// Start level monitoring
isMonitoringLevelsRef.current = true;
monitorPlaybackLevels();
// Start automation
applyAutomation();
}, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels, applyAutomation]);
const pause = useCallback(() => {
if (!audioContextRef.current || !isPlaying) return;
// Stop all source nodes
sourceNodesRef.current.forEach(node => {
try {
node.stop();
node.disconnect();
} catch (e) {
// Ignore errors
}
});
// Update paused position
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
pausedAtRef.current = Math.min(pausedAtRef.current + elapsed, duration);
setCurrentTime(pausedAtRef.current);
setIsPlaying(false);
// Stop level monitoring
isMonitoringLevelsRef.current = false;
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
if (levelMonitorFrameRef.current) {
cancelAnimationFrame(levelMonitorFrameRef.current);
levelMonitorFrameRef.current = null;
}
if (automationFrameRef.current) {
cancelAnimationFrame(automationFrameRef.current);
automationFrameRef.current = null;
}
// Clear track levels
setTrackLevels({});
}, [isPlaying, duration]);
const stop = useCallback(() => {
pause();
pausedAtRef.current = 0;
setCurrentTime(0);
}, [pause]);
const seek = useCallback((time: number) => {
const wasPlaying = isPlaying;
if (wasPlaying) {
pause();
}
const clampedTime = Math.max(0, Math.min(time, duration));
pausedAtRef.current = clampedTime;
setCurrentTime(clampedTime);
if (wasPlaying) {
// Small delay to ensure state is updated
setTimeout(() => play(), 10);
}
}, [isPlaying, duration, pause, play]);
const togglePlayPause = useCallback(() => {
if (isPlaying) {
pause();
} else {
play();
}
}, [isPlaying, play, pause]);
// Update gain/pan when tracks change (simple updates that don't require graph rebuild)
useEffect(() => {
if (!isPlaying || !audioContextRef.current) return;
tracks.forEach((track, index) => {
if (gainNodesRef.current[index]) {
const trackGain = getTrackGain(track, tracks);
gainNodesRef.current[index].gain.setValueAtTime(
trackGain,
audioContextRef.current!.currentTime
);
}
if (panNodesRef.current[index]) {
panNodesRef.current[index].pan.setValueAtTime(
track.pan,
audioContextRef.current!.currentTime
);
}
});
}, [tracks, isPlaying]);
// Track effect chain structure to detect add/remove operations
const previousEffectStructureRef = useRef<string | null>(null);
// Detect effect chain structure changes (add/remove/reorder) and restart
useEffect(() => {
if (!isPlaying || !audioContextRef.current) return;
// Create a signature of the current effect structure (IDs and count)
const currentStructure = tracks.map(track =>
track.effectChain.effects.map(e => e.id).join(',')
).join('|');
// If structure changed (effects added/removed/reordered) while playing, restart
// Don't restart if tracks is empty (intermediate state during updates)
if (previousEffectStructureRef.current !== null &&
previousEffectStructureRef.current !== currentStructure &&
tracks.length > 0) {
console.log('[useMultiTrackPlayer] Effect chain structure changed, restarting...');
// Update the reference immediately to prevent re-triggering
previousEffectStructureRef.current = currentStructure;
// Update tracksRef with current tracks BEFORE setTimeout
tracksRef.current = tracks;
// Save current position
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
const currentPos = pausedAtRef.current + elapsed;
// Stop all source nodes
sourceNodesRef.current.forEach(node => {
try {
node.onended = null;
node.stop();
node.disconnect();
} catch (e) {
// Ignore errors
}
});
// Update position
pausedAtRef.current = currentPos;
setCurrentTime(currentPos);
setIsPlaying(false);
// Clear animation frame
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
// Restart after a brief delay
setTimeout(() => {
// Use tracksRef.current to get the latest tracks, not the stale closure
const latestTracks = tracksRef.current;
if (latestTracks.length === 0 || latestTracks.every(t => !t.audioBuffer)) return;
const audioContext = getAudioContext();
audioContextRef.current = audioContext;
// Disconnect old nodes
gainNodesRef.current.forEach(node => node.disconnect());
panNodesRef.current.forEach(node => node.disconnect());
effectNodesRef.current.forEach(trackEffects => {
trackEffects.forEach(effectNodeInfo => {
if (effectNodeInfo.node) {
try {
effectNodeInfo.node.disconnect();
} catch (e) {
// Ignore
}
}
if (effectNodeInfo.dryGain) effectNodeInfo.dryGain.disconnect();
if (effectNodeInfo.wetGain) effectNodeInfo.wetGain.disconnect();
});
});
if (masterGainNodeRef.current) {
masterGainNodeRef.current.disconnect();
}
// Reset refs
sourceNodesRef.current = [];
gainNodesRef.current = [];
panNodesRef.current = [];
analyserNodesRef.current = [];
effectNodesRef.current = [];
// Create master gain node
const masterGain = audioContext.createGain();
masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
masterGain.connect(audioContext.destination);
masterGainNodeRef.current = masterGain;
// Create audio graph for each track
for (const track of latestTracks) {
if (!track.audioBuffer) continue;
const source = audioContext.createBufferSource();
source.buffer = track.audioBuffer;
const gainNode = audioContext.createGain();
const panNode = audioContext.createStereoPanner();
const analyserNode = audioContext.createAnalyser();
analyserNode.fftSize = 256;
analyserNode.smoothingTimeConstant = 0.8;
// Set gain based on track volume and solo/mute state
const trackGain = getTrackGain(track, latestTracks);
gainNode.gain.setValueAtTime(trackGain, audioContext.currentTime);
// Set pan
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
// Connect: source -> analyser -> gain -> pan -> effects -> master gain -> destination
// Analyser is before gain so it shows raw audio levels independent of volume fader
source.connect(analyserNode);
analyserNode.connect(gainNode);
gainNode.connect(panNode);
// Apply effect chain
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
outputNode.connect(masterGain);
// Start playback from current position
source.start(0, pausedAtRef.current);
// Store references
sourceNodesRef.current.push(source);
gainNodesRef.current.push(gainNode);
panNodesRef.current.push(panNode);
analyserNodesRef.current.push(analyserNode);
effectNodesRef.current.push(effectNodes);
// Handle ended event
source.onended = () => {
if (pausedAtRef.current + (audioContext.currentTime - startTimeRef.current) >= duration) {
setIsPlaying(false);
isMonitoringLevelsRef.current = false;
setCurrentTime(0);
pausedAtRef.current = 0;
setTrackLevels({});
}
};
}
startTimeRef.current = audioContext.currentTime;
setIsPlaying(true);
// Start level monitoring
isMonitoringLevelsRef.current = true;
// Start animation frame for position updates
const updatePosition = () => {
if (!audioContextRef.current) return;
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
const newTime = pausedAtRef.current + elapsed;
if (newTime >= duration) {
setIsPlaying(false);
isMonitoringLevelsRef.current = false;
setCurrentTime(0);
pausedAtRef.current = 0;
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
if (levelMonitorFrameRef.current) {
cancelAnimationFrame(levelMonitorFrameRef.current);
levelMonitorFrameRef.current = null;
}
if (automationFrameRef.current) {
cancelAnimationFrame(automationFrameRef.current);
automationFrameRef.current = null;
}
return;
}
setCurrentTime(newTime);
animationFrameRef.current = requestAnimationFrame(updatePosition);
};
updatePosition();
monitorPlaybackLevels();
applyAutomation();
}, 10);
}
previousEffectStructureRef.current = currentStructure;
}, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels, applyAutomation]);
// Stop playback when all tracks are deleted
useEffect(() => {
if (!isPlaying) return;
// If tracks become empty or all tracks have no audio buffer, stop playback
if (tracks.length === 0 || tracks.every(t => !t.audioBuffer)) {
console.log('[useMultiTrackPlayer] All tracks deleted, stopping playback');
stop();
}
}, [tracks, isPlaying, stop]);
// Update effect parameters and bypass state in real-time
useEffect(() => {
if (!isPlaying || !audioContextRef.current) return;
tracks.forEach((track, trackIndex) => {
const effectNodes = effectNodesRef.current[trackIndex];
if (!effectNodes) return;
// Only update if we have the same number of effects (no add/remove)
if (effectNodes.length !== track.effectChain.effects.length) return;
track.effectChain.effects.forEach((effect, effectIndex) => {
const effectNodeInfo = effectNodes[effectIndex];
if (!effectNodeInfo) return;
// Update bypass state
if (effect.enabled !== effectNodeInfo.effect.enabled) {
toggleEffectBypass(audioContextRef.current!, effectNodeInfo, effect.enabled);
effectNodeInfo.effect.enabled = effect.enabled;
}
// Update parameters (only works for certain effect types)
if (JSON.stringify(effect.parameters) !== JSON.stringify(effectNodeInfo.effect.parameters)) {
updateEffectParameters(audioContextRef.current!, effectNodeInfo, effect);
effectNodeInfo.effect.parameters = effect.parameters;
}
});
});
}, [tracks, isPlaying]);
// Update master volume when it changes
useEffect(() => {
if (!isPlaying || !audioContextRef.current || !masterGainNodeRef.current) return;
masterGainNodeRef.current.gain.setValueAtTime(
masterVolume,
audioContextRef.current.currentTime
);
}, [masterVolume, isPlaying]);
// Cleanup on unmount
useEffect(() => {
return () => {
isMonitoringLevelsRef.current = false;
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
}
if (levelMonitorFrameRef.current) {
cancelAnimationFrame(levelMonitorFrameRef.current);
}
if (automationFrameRef.current) {
cancelAnimationFrame(automationFrameRef.current);
}
sourceNodesRef.current.forEach(node => {
try {
node.stop();
node.disconnect();
} catch (e) {
// Ignore
}
});
gainNodesRef.current.forEach(node => node.disconnect());
panNodesRef.current.forEach(node => node.disconnect());
analyserNodesRef.current.forEach(node => node.disconnect());
if (masterGainNodeRef.current) {
masterGainNodeRef.current.disconnect();
}
};
}, []);
return {
isPlaying,
currentTime,
duration,
trackLevels,
play,
pause,
stop,
seek,
togglePlayPause,
};
}
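// Hypothetical usage from a React component (component wiring and prop names
// below are assumptions for illustration, not part of this commit):
//
//   const { isPlaying, currentTime, duration, trackLevels,
//           play, pause, stop, seek, togglePlayPause } =
//     useMultiTrackPlayer(tracks, masterVolume);
//
//   <button onClick={togglePlayPause}>{isPlaying ? 'Pause' : 'Play'}</button>
//   <input type="range" max={duration} value={currentTime}
//          onChange={e => seek(Number(e.target.value))} />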