feat: implement real-time automation playback for volume and pan
Phase 9.3 - Automation Playback:
- Added real-time automation evaluation during playback
- Automation values are applied continuously via requestAnimationFrame
- Volume automation: interpolates between points and applies the result to each track's gain node
- Pan automation: converts 0-1 lane values to the -1 to 1 range expected by StereoPannerNode

Implementation details:
- New applyAutomation() function runs in a RAF loop alongside level monitoring
- Evaluates automation at the current playback time using evaluateAutomationLinear
- Applies values via setValueAtTime so changes are scheduled on Web Audio API parameters
- Automation loop lifecycle matches playback (start/pause/stop/cleanup)
- Respects automation mode (only applies when mode !== 'read')

Technical improvements:
- Added automationFrameRef for RAF management
- Proper cleanup in pause(), on unmount, and when playback ends
- Integrated with the existing effect-chain restart mechanism
- Volume automation is multiplied with the track gain (mute/solo state)

Next steps:
- Effect parameter automation (TODO in code)
- Automation recording (write mode implementation)
- Touch and latch modes

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
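The diff below only imports evaluateAutomationLinear; its implementation lives in @/lib/audio/automation-utils and is not part of this change. As a rough sketch of what a linear evaluator with that signature might look like (the AutomationPoint shape and the clamping behaviour are assumptions, not taken from the repo):

```typescript
// Hypothetical sketch; the real evaluateAutomationLinear is defined in
// @/lib/audio/automation-utils and is not shown in this diff.
interface AutomationPoint {
  time: number;  // seconds from the start of the track
  value: number; // normalized 0-1
}

function evaluateAutomationLinearSketch(
  points: AutomationPoint[],
  time: number
): number | undefined {
  if (points.length === 0) return undefined;

  // Assume points are sorted by time; clamp outside the defined range.
  if (time <= points[0].time) return points[0].value;
  const last = points[points.length - 1];
  if (time >= last.time) return last.value;

  // Find the surrounding pair of points and interpolate linearly between them.
  for (let i = 0; i < points.length - 1; i++) {
    const a = points[i];
    const b = points[i + 1];
    if (time >= a.time && time <= b.time) {
      const t = (time - a.time) / (b.time - a.time);
      return a.value + (b.value - a.value) * t;
    }
  }
  return undefined;
}
```

applyAutomation in the diff calls the real helper once per animation frame for each volume and pan lane.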
@@ -3,6 +3,7 @@ import { getAudioContext } from '@/lib/audio/context';
 import type { Track } from '@/types/track';
 import { getTrackGain } from '@/lib/audio/track-utils';
 import { applyEffectChain, updateEffectParameters, toggleEffectBypass, type EffectNodeInfo } from '@/lib/audio/effects/processor';
+import { evaluateAutomationLinear } from '@/lib/audio/automation-utils';
 
 export interface MultiTrackPlayerState {
   isPlaying: boolean;
@@ -32,6 +33,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
   const pausedAtRef = useRef<number>(0);
   const animationFrameRef = useRef<number | null>(null);
   const levelMonitorFrameRef = useRef<number | null>(null);
+  const automationFrameRef = useRef<number | null>(null);
   const isMonitoringLevelsRef = useRef<boolean>(false);
   const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
 
@@ -99,6 +101,47 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
   }, []);
 
+  // Apply automation values during playback
+  const applyAutomation = useCallback(() => {
+    if (!audioContextRef.current) return;
+
+    const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
+
+    tracks.forEach((track, index) => {
+      // Apply volume automation
+      const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
+      if (volumeLane && volumeLane.points.length > 0 && volumeLane.mode !== 'read') {
+        const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
+        if (automatedValue !== undefined && gainNodesRef.current[index]) {
+          const trackGain = getTrackGain(track, tracks);
+          // Apply both track gain (mute/solo) and automated volume
+          gainNodesRef.current[index].gain.setValueAtTime(
+            trackGain * automatedValue,
+            audioContextRef.current!.currentTime
+          );
+        }
+      }
+
+      // Apply pan automation
+      const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
+      if (panLane && panLane.points.length > 0 && panLane.mode !== 'read') {
+        const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
+        if (automatedValue !== undefined && panNodesRef.current[index]) {
+          // Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
+          const panValue = (automatedValue * 2) - 1;
+          panNodesRef.current[index].pan.setValueAtTime(
+            panValue,
+            audioContextRef.current!.currentTime
+          );
+        }
+      }
+
+      // TODO: Apply effect parameter automation
+    });
+
+    automationFrameRef.current = requestAnimationFrame(applyAutomation);
+  }, [tracks]);
+
   const updatePlaybackPosition = useCallback(() => {
     if (!audioContextRef.current) return;
 
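The applyAutomation callback above reads lane data from track.automation.lanes. Only the parameterId, points, and mode fields are visible in this diff; the sketch below fills in the rest with assumed types purely to illustrate the kind of data the loop consumes:

```typescript
// Assumed shape, inferred from how applyAutomation reads the data.
// Only parameterId, points, and mode appear in the diff; everything else is illustrative.
type AutomationMode = 'read' | 'write' | 'touch' | 'latch';

interface AutomationLane {
  parameterId: string; // 'volume', 'pan', and later effect parameters
  mode: AutomationMode;
  points: { time: number; value: number }[]; // time in seconds, value normalized to 0-1
}

// Example: fade a track in over 4 seconds and sweep it from left to right.
const exampleLanes: AutomationLane[] = [
  {
    parameterId: 'volume',
    mode: 'write',
    points: [
      { time: 0, value: 0 }, // silence at the start...
      { time: 4, value: 1 }, // ...full volume by 4 s
    ],
  },
  {
    parameterId: 'pan',
    mode: 'write',
    points: [
      { time: 0, value: 0 }, // hard left after the (value * 2) - 1 mapping
      { time: 8, value: 1 }, // hard right
    ],
  },
];
```

Given lanes like these, the loop applies any lane whose mode is not 'read', evaluating it at the current playhead time on every frame.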
@@ -119,6 +162,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       cancelAnimationFrame(levelMonitorFrameRef.current);
       levelMonitorFrameRef.current = null;
     }
+    if (automationFrameRef.current) {
+      cancelAnimationFrame(automationFrameRef.current);
+      automationFrameRef.current = null;
+    }
     return;
   }
 
@@ -226,7 +273,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     // Start level monitoring
     isMonitoringLevelsRef.current = true;
     monitorPlaybackLevels();
-  }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels]);
+
+    // Start automation
+    applyAutomation();
+  }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels, applyAutomation]);
 
   const pause = useCallback(() => {
     if (!audioContextRef.current || !isPlaying) return;
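Once playback starts and the hunk above kicks off applyAutomation alongside level monitoring, each frame evaluates the lanes at a playhead time derived from pausedAtRef and startTimeRef. A worked example of that computation, with made-up numbers:

```typescript
// Illustrative values only; in the hook these come from pausedAtRef,
// startTimeRef, and audioContextRef.current.currentTime.
const pausedAt = 12.5;       // song position (s) when playback last paused
const startTime = 100.0;     // audio clock reading (s) when playback resumed
const audioClockNow = 101.25;

// Same formula as the diff: pausedAtRef.current + (ctx.currentTime - startTimeRef.current)
const playhead = pausedAt + (audioClockNow - startTime); // 13.75
// On this frame, automation lanes are evaluated at 13.75 s into the song.
```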
@@ -261,6 +311,11 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       levelMonitorFrameRef.current = null;
     }
 
+    if (automationFrameRef.current) {
+      cancelAnimationFrame(automationFrameRef.current);
+      automationFrameRef.current = null;
+    }
+
     // Clear track levels
     setTrackLevels({});
   }, [isPlaying, duration]);
@@ -490,6 +545,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         cancelAnimationFrame(levelMonitorFrameRef.current);
         levelMonitorFrameRef.current = null;
       }
+      if (automationFrameRef.current) {
+        cancelAnimationFrame(automationFrameRef.current);
+        automationFrameRef.current = null;
+      }
       return;
     }
 
@@ -498,11 +557,12 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         };
         updatePosition();
         monitorPlaybackLevels();
+        applyAutomation();
       }, 10);
     }
 
     previousEffectStructureRef.current = currentStructure;
-  }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels]);
+  }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels, applyAutomation]);
 
   // Stop playback when all tracks are deleted
   useEffect(() => {
@@ -565,6 +625,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       if (levelMonitorFrameRef.current) {
        cancelAnimationFrame(levelMonitorFrameRef.current);
       }
+      if (automationFrameRef.current) {
+        cancelAnimationFrame(automationFrameRef.current);
+      }
       sourceNodesRef.current.forEach(node => {
         try {
           node.stop();