From dac8ac4723e40b3e504f88e710d36d1b38a93817 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?= Date: Tue, 18 Nov 2025 19:06:02 +0100 Subject: [PATCH] feat: implement real-time automation playback for volume and pan MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 9.3 - Automation Playback: - Added real-time automation evaluation during playback - Automation values are applied continuously via requestAnimationFrame - Volume automation: Interpolates between points and applies to gain nodes - Pan automation: Converts 0-1 values to -1 to 1 for StereoPannerNode Implementation details: - New applyAutomation() function runs in RAF loop alongside level monitoring - Evaluates automation at current playback time using evaluateAutomationLinear - Applies values using setValueAtTime for smooth Web Audio API parameter changes - Automation loop lifecycle matches playback (start/pause/stop/cleanup) - Respects automation mode (only applies when mode !== 'read') Technical improvements: - Added automationFrameRef for RAF management - Proper cleanup in pause(), unmount, and playback end scenarios - Integrated with existing effect chain restart mechanism - Volume automation multiplied by track gain (mute/solo state) Next steps: - Effect parameter automation (TODO in code) - Automation recording (write mode implementation) - Touch and latch modes 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- lib/hooks/useMultiTrackPlayer.ts | 67 +++++++++++++++++++++++++++++++- 1 file changed, 65 insertions(+), 2 deletions(-) diff --git a/lib/hooks/useMultiTrackPlayer.ts b/lib/hooks/useMultiTrackPlayer.ts index 6b923ad..f92cfb0 100644 --- a/lib/hooks/useMultiTrackPlayer.ts +++ b/lib/hooks/useMultiTrackPlayer.ts @@ -3,6 +3,7 @@ import { getAudioContext } from '@/lib/audio/context'; import type { Track } from '@/types/track'; import { getTrackGain } from '@/lib/audio/track-utils'; import { 
applyEffectChain, updateEffectParameters, toggleEffectBypass, type EffectNodeInfo } from '@/lib/audio/effects/processor'; +import { evaluateAutomationLinear } from '@/lib/audio/automation-utils'; export interface MultiTrackPlayerState { isPlaying: boolean; @@ -32,6 +33,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { const pausedAtRef = useRef(0); const animationFrameRef = useRef(null); const levelMonitorFrameRef = useRef(null); + const automationFrameRef = useRef(null); const isMonitoringLevelsRef = useRef(false); const tracksRef = useRef(tracks); // Always keep latest tracks @@ -99,6 +101,47 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels); }, []); + // Apply automation values during playback + const applyAutomation = useCallback(() => { + if (!audioContextRef.current) return; + + const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current); + + tracks.forEach((track, index) => { + // Apply volume automation + const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume'); + if (volumeLane && volumeLane.points.length > 0 && volumeLane.mode !== 'read') { + const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime); + if (automatedValue !== undefined && gainNodesRef.current[index]) { + const trackGain = getTrackGain(track, tracks); + // Apply both track gain (mute/solo) and automated volume + gainNodesRef.current[index].gain.setValueAtTime( + trackGain * automatedValue, + audioContextRef.current!.currentTime + ); + } + } + + // Apply pan automation + const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan'); + if (panLane && panLane.points.length > 0 && panLane.mode !== 'read') { + const automatedValue = evaluateAutomationLinear(panLane.points, currentTime); + if (automatedValue !== undefined && 
panNodesRef.current[index]) { + // Pan automation values are 0-1, but StereoPannerNode expects -1 to 1 + const panValue = (automatedValue * 2) - 1; + panNodesRef.current[index].pan.setValueAtTime( + panValue, + audioContextRef.current!.currentTime + ); + } + } + + // TODO: Apply effect parameter automation + }); + + automationFrameRef.current = requestAnimationFrame(applyAutomation); + }, [tracks]); + const updatePlaybackPosition = useCallback(() => { if (!audioContextRef.current) return; @@ -119,6 +162,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { cancelAnimationFrame(levelMonitorFrameRef.current); levelMonitorFrameRef.current = null; } + if (automationFrameRef.current) { + cancelAnimationFrame(automationFrameRef.current); + automationFrameRef.current = null; + } return; } @@ -226,7 +273,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { // Start level monitoring isMonitoringLevelsRef.current = true; monitorPlaybackLevels(); - }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels]); + + // Start automation + applyAutomation(); + }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels, applyAutomation]); const pause = useCallback(() => { if (!audioContextRef.current || !isPlaying) return; @@ -261,6 +311,11 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { levelMonitorFrameRef.current = null; } + if (automationFrameRef.current) { + cancelAnimationFrame(automationFrameRef.current); + automationFrameRef.current = null; + } + // Clear track levels setTrackLevels({}); }, [isPlaying, duration]); @@ -490,6 +545,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { cancelAnimationFrame(levelMonitorFrameRef.current); levelMonitorFrameRef.current = null; } + if (automationFrameRef.current) { + cancelAnimationFrame(automationFrameRef.current); + automationFrameRef.current = null; + 
} return; } @@ -498,11 +557,12 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { }; updatePosition(); monitorPlaybackLevels(); + applyAutomation(); }, 10); } previousEffectStructureRef.current = currentStructure; - }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels]); + }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels, applyAutomation]); // Stop playback when all tracks are deleted useEffect(() => { @@ -565,6 +625,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) { if (levelMonitorFrameRef.current) { cancelAnimationFrame(levelMonitorFrameRef.current); } + if (automationFrameRef.current) { + cancelAnimationFrame(automationFrameRef.current); + } sourceNodesRef.current.forEach(node => { try { node.stop();