diff --git a/lib/hooks/useMultiTrackPlayer.ts b/lib/hooks/useMultiTrackPlayer.ts
index 6b923ad..f92cfb0 100644
--- a/lib/hooks/useMultiTrackPlayer.ts
+++ b/lib/hooks/useMultiTrackPlayer.ts
@@ -3,6 +3,7 @@ import { getAudioContext } from '@/lib/audio/context';
 import type { Track } from '@/types/track';
 import { getTrackGain } from '@/lib/audio/track-utils';
 import { applyEffectChain, updateEffectParameters, toggleEffectBypass, type EffectNodeInfo } from '@/lib/audio/effects/processor';
+import { evaluateAutomationLinear } from '@/lib/audio/automation-utils';
 
 export interface MultiTrackPlayerState {
   isPlaying: boolean;
@@ -32,6 +33,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
   const pausedAtRef = useRef(0);
   const animationFrameRef = useRef(null);
   const levelMonitorFrameRef = useRef(null);
+  const automationFrameRef = useRef(null);
   const isMonitoringLevelsRef = useRef(false);
   const tracksRef = useRef(tracks); // Always keep latest tracks
 
@@ -99,6 +101,47 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
   }, []);
 
+  // Apply automation values during playback
+  const applyAutomation = useCallback(() => {
+    if (!audioContextRef.current) return;
+
+    const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
+
+    tracks.forEach((track, index) => {
+      // Apply volume automation
+      const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
+      if (volumeLane && volumeLane.points.length > 0 && volumeLane.mode !== 'read') {
+        const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
+        if (automatedValue !== undefined && gainNodesRef.current[index]) {
+          const trackGain = getTrackGain(track, tracks);
+          // Apply both track gain (mute/solo) and automated volume
+          gainNodesRef.current[index].gain.setValueAtTime(
+            trackGain * automatedValue,
+            audioContextRef.current!.currentTime
+          );
+        }
+      }
+
+      // Apply pan automation
+      const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
+      if (panLane && panLane.points.length > 0 && panLane.mode !== 'read') {
+        const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
+        if (automatedValue !== undefined && panNodesRef.current[index]) {
+          // Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
+          const panValue = (automatedValue * 2) - 1;
+          panNodesRef.current[index].pan.setValueAtTime(
+            panValue,
+            audioContextRef.current!.currentTime
+          );
+        }
+      }
+
+      // TODO: Apply effect parameter automation
+    });
+
+    automationFrameRef.current = requestAnimationFrame(applyAutomation);
+  }, [tracks]);
+
   const updatePlaybackPosition = useCallback(() => {
     if (!audioContextRef.current) return;
 
@@ -119,6 +162,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         cancelAnimationFrame(levelMonitorFrameRef.current);
         levelMonitorFrameRef.current = null;
       }
+      if (automationFrameRef.current) {
+        cancelAnimationFrame(automationFrameRef.current);
+        automationFrameRef.current = null;
+      }
       return;
     }
 
@@ -226,7 +273,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     // Start level monitoring
     isMonitoringLevelsRef.current = true;
     monitorPlaybackLevels();
-  }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels]);
+
+    // Start automation
+    applyAutomation();
+  }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels, applyAutomation]);
 
   const pause = useCallback(() => {
     if (!audioContextRef.current || !isPlaying) return;
@@ -261,6 +311,11 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       levelMonitorFrameRef.current = null;
     }
 
+    if (automationFrameRef.current) {
+      cancelAnimationFrame(automationFrameRef.current);
+      automationFrameRef.current = null;
+    }
+
     // Clear track levels
     setTrackLevels({});
   }, [isPlaying, duration]);
@@ -490,6 +545,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
             cancelAnimationFrame(levelMonitorFrameRef.current);
             levelMonitorFrameRef.current = null;
           }
+          if (automationFrameRef.current) {
+            cancelAnimationFrame(automationFrameRef.current);
+            automationFrameRef.current = null;
+          }
           return;
         }
 
@@ -498,11 +557,12 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         };
         updatePosition();
         monitorPlaybackLevels();
+        applyAutomation();
       }, 10);
     }
 
     previousEffectStructureRef.current = currentStructure;
-  }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels]);
+  }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels, applyAutomation]);
 
   // Stop playback when all tracks are deleted
   useEffect(() => {
@@ -565,6 +625,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       if (levelMonitorFrameRef.current) {
        cancelAnimationFrame(levelMonitorFrameRef.current);
       }
+      if (automationFrameRef.current) {
+        cancelAnimationFrame(automationFrameRef.current);
+      }
      sourceNodesRef.current.forEach(node => {
        try {
          node.stop();
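
Note: `evaluateAutomationLinear` is imported from `@/lib/audio/automation-utils`, but its implementation is not part of this diff. For context, here is a minimal sketch of what a linear automation evaluator with this call shape could look like, assuming points are `{ time, value }` pairs sorted by time and that times outside the automated range hold the nearest endpoint value; the real helper may differ.

// Sketch only: the actual evaluateAutomationLinear in
// '@/lib/audio/automation-utils' is not shown in this diff.
// Assumed point shape: { time: seconds, value: normalized 0-1 }, sorted by time.
export interface AutomationPoint {
  time: number;
  value: number;
}

export function evaluateAutomationLinear(
  points: AutomationPoint[],
  time: number
): number | undefined {
  if (points.length === 0) return undefined;

  // Hold the edge value outside the automated range.
  if (time <= points[0].time) return points[0].value;
  const last = points[points.length - 1];
  if (time >= last.time) return last.value;

  // Linearly interpolate within the segment that contains `time`.
  for (let i = 0; i < points.length - 1; i++) {
    const a = points[i];
    const b = points[i + 1];
    if (time >= a.time && time <= b.time) {
      const span = b.time - a.time;
      if (span === 0) return b.value;
      return a.value + ((b.value - a.value) * (time - a.time)) / span;
    }
  }
  return undefined;
}

The diff itself only relies on the helper returning `number | undefined` for a lane's points at the current playback time, which this sketch satisfies.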