feat: implement real-time playback level monitoring for all tracks
Added comprehensive playback level monitoring system that shows real-time audio levels during playback for each track.

useMultiTrackPlayer Hook:
- Added AnalyserNode for each track in audio graph
- Implemented RMS-based level calculation with requestAnimationFrame
- Added trackLevels state (Record<string, number>) tracking levels by track ID
- Insert analysers after effects chain, before master gain
- Monitor levels continuously during playback
- Clean up level monitoring on pause/stop

Audio Graph Chain:
source -> gain -> pan -> effects -> analyser -> master gain -> destination

AudioEditor Integration:
- Extract trackLevels from useMultiTrackPlayer hook
- Pass trackLevels down to TrackList component

TrackList & Track Components:
- Accept and forward trackLevels prop
- Pass playbackLevel to individual Track components
- Track component displays appropriate level:
  * Recording level (with "Input" label) when armed/recording
  * Playback level (with "Level" label) during normal playback

Visual Feedback:
- Color-coded meters: green -> yellow (70%) -> red (90%)
- Real-time percentage display
- Seamless switching between input and output modes

This completes Phase 8 (Recording) with full bidirectional level monitoring!

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
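For quick reference, the per-track level read at the heart of the monitoring loop boils down to the standalone sketch below. It assumes a Web Audio AnalyserNode (fftSize 256) already wired between a track's effect output and the master gain, as the hook does; the helper name readTrackLevel is illustrative.

// A minimal sketch of the RMS level calculation described above.
// Assumes an AnalyserNode inserted into the track's signal path.
function readTrackLevel(analyser: AnalyserNode): number {
  const dataArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteTimeDomainData(dataArray); // time-domain samples, centred at 128

  let sum = 0;
  for (let i = 0; i < dataArray.length; i++) {
    const normalized = (dataArray[i] - 128) / 128; // map 0..255 to roughly -1..1
    sum += normalized * normalized;
  }
  return Math.sqrt(sum / dataArray.length); // RMS, roughly 0..1
}

Calling something like this once per requestAnimationFrame tick for every track's analyser and storing the results in a Record<string, number> keyed by track ID is what produces the trackLevels state the UI consumes.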
@@ -71,6 +71,7 @@ export function AudioEditor() {
     isPlaying,
     currentTime,
     duration,
+    trackLevels,
     play,
     pause,
     stop,
@@ -638,6 +639,7 @@ export function AudioEditor() {
           onToggleRecordEnable={handleToggleRecordEnable}
           recordingTrackId={recordingTrackId}
           recordingLevel={recordingState.inputLevel}
+          trackLevels={trackLevels}
         />
       </div>

@@ -35,6 +35,7 @@ export interface TrackProps {
   onToggleRecordEnable?: () => void;
   isRecording?: boolean;
   recordingLevel?: number;
+  playbackLevel?: number;
 }

 export function Track({
@@ -61,6 +62,7 @@ export function Track({
   onToggleRecordEnable,
   isRecording = false,
   recordingLevel = 0,
+  playbackLevel = 0,
 }: TrackProps) {
   const canvasRef = React.useRef<HTMLCanvasElement>(null);
   const containerRef = React.useRef<HTMLDivElement>(null);
@@ -564,12 +566,12 @@ export function Track({
             </label>
             <div className="flex-1">
               <InputLevelMeter
-                level={track.recordEnabled || isRecording ? recordingLevel : 0}
+                level={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
                 orientation="horizontal"
               />
             </div>
             <span className="text-xs text-muted-foreground w-10 text-right flex-shrink-0">
-              {Math.round((track.recordEnabled || isRecording ? recordingLevel : 0) * 100)}%
+              {Math.round((track.recordEnabled || isRecording ? recordingLevel : playbackLevel) * 100)}%
             </span>
           </div>
         </>
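InputLevelMeter itself is not touched by this commit. A minimal sketch of the colour thresholds described in the commit message (green below 70%, yellow from 70%, red from 90%) might look like the following; the function and the Tailwind class names are illustrative assumptions, not the component's actual implementation.

// Illustrative only: map a 0..1 level to the meter colour bands described
// in the commit message (green -> yellow at 70% -> red at 90%).
function meterColorClass(level: number): string {
  if (level >= 0.9) return 'bg-red-500';    // clipping range
  if (level >= 0.7) return 'bg-yellow-500'; // hot signal
  return 'bg-green-500';                    // healthy level
}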
@@ -24,6 +24,7 @@ export interface TrackListProps {
   onToggleRecordEnable?: (trackId: string) => void;
   recordingTrackId?: string | null;
   recordingLevel?: number;
+  trackLevels?: Record<string, number>;
 }

 export function TrackList({
@@ -42,6 +43,7 @@ export function TrackList({
   onToggleRecordEnable,
   recordingTrackId,
   recordingLevel = 0,
+  trackLevels = {},
 }: TrackListProps) {
   const [importDialogOpen, setImportDialogOpen] = React.useState(false);

@@ -164,6 +166,7 @@ export function TrackList({
             }
             isRecording={recordingTrackId === track.id}
             recordingLevel={recordingTrackId === track.id ? recordingLevel : 0}
+            playbackLevel={trackLevels[track.id] || 0}
           />
         ))}
       </div>
@@ -10,20 +10,28 @@ export interface MultiTrackPlayerState {
   duration: number;
 }

+export interface TrackLevel {
+  trackId: string;
+  level: number;
+}
+
 export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
   const [isPlaying, setIsPlaying] = useState(false);
   const [currentTime, setCurrentTime] = useState(0);
   const [duration, setDuration] = useState(0);
+  const [trackLevels, setTrackLevels] = useState<Record<string, number>>({});

   const audioContextRef = useRef<AudioContext | null>(null);
   const sourceNodesRef = useRef<AudioBufferSourceNode[]>([]);
   const gainNodesRef = useRef<GainNode[]>([]);
   const panNodesRef = useRef<StereoPannerNode[]>([]);
+  const analyserNodesRef = useRef<AnalyserNode[]>([]);
   const effectNodesRef = useRef<EffectNodeInfo[][]>([]); // Effect nodes per track
   const masterGainNodeRef = useRef<GainNode | null>(null);
   const startTimeRef = useRef<number>(0);
   const pausedAtRef = useRef<number>(0);
   const animationFrameRef = useRef<number | null>(null);
+  const levelMonitorFrameRef = useRef<number | null>(null);
   const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks

   // Keep tracksRef in sync with tracks prop
@@ -42,6 +50,36 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     setDuration(maxDuration);
   }, [tracks]);

+  // Monitor playback levels for all tracks
+  const monitorPlaybackLevels = useCallback(() => {
+    if (!isPlaying || analyserNodesRef.current.length === 0) return;
+
+    const levels: Record<string, number> = {};
+
+    analyserNodesRef.current.forEach((analyser, index) => {
+      const track = tracksRef.current[index];
+      if (!track) return;
+
+      const dataArray = new Uint8Array(analyser.frequencyBinCount);
+      analyser.getByteTimeDomainData(dataArray);
+
+      // Calculate RMS level
+      let sum = 0;
+      for (let i = 0; i < dataArray.length; i++) {
+        const normalized = (dataArray[i] - 128) / 128;
+        sum += normalized * normalized;
+      }
+      const rms = Math.sqrt(sum / dataArray.length);
+      levels[track.id] = rms;
+    });
+
+    setTrackLevels(levels);
+
+    if (isPlaying) {
+      levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
+    }
+  }, [isPlaying]);
+
   const updatePlaybackPosition = useCallback(() => {
     if (!audioContextRef.current) return;

@@ -87,6 +125,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     sourceNodesRef.current = [];
     gainNodesRef.current = [];
     panNodesRef.current = [];
+    analyserNodesRef.current = [];
     effectNodesRef.current = [];

     // Create master gain node
@@ -104,6 +143,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {

       const gainNode = audioContext.createGain();
       const panNode = audioContext.createStereoPanner();
+      const analyserNode = audioContext.createAnalyser();
+      analyserNode.fftSize = 256;
+      analyserNode.smoothingTimeConstant = 0.3;

       // Set gain based on track volume and solo/mute state
       const trackGain = getTrackGain(track, tracks);
@@ -112,7 +154,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       // Set pan
       panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);

-      // Connect: source -> gain -> pan -> effects -> master gain -> destination
+      // Connect: source -> gain -> pan -> effects -> analyser -> master gain -> destination
       source.connect(gainNode);
       gainNode.connect(panNode);

@@ -123,7 +165,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       console.log('[MultiTrackPlayer] Number of effects:', track.effectChain.effects.length);
       console.log('[MultiTrackPlayer] Effects:', track.effectChain.effects);
       const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
-      outputNode.connect(masterGain);
+
+      // Insert analyser after effects, before master gain
+      outputNode.connect(analyserNode);
+      analyserNode.connect(masterGain);
       console.log('[MultiTrackPlayer] Effect output connected with', effectNodes.length, 'effect nodes');

       // Start playback from current position
@@ -133,6 +178,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       sourceNodesRef.current.push(source);
       gainNodesRef.current.push(gainNode);
       panNodesRef.current.push(panNode);
+      analyserNodesRef.current.push(analyserNode);
       effectNodesRef.current.push(effectNodes);

       // Handle ended event
@@ -148,7 +194,8 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     startTimeRef.current = audioContext.currentTime;
     setIsPlaying(true);
     updatePlaybackPosition();
-  }, [tracks, duration, masterVolume, updatePlaybackPosition]);
+    monitorPlaybackLevels();
+  }, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels]);

   const pause = useCallback(() => {
     if (!audioContextRef.current || !isPlaying) return;
@@ -174,6 +221,14 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       cancelAnimationFrame(animationFrameRef.current);
       animationFrameRef.current = null;
     }
+
+    if (levelMonitorFrameRef.current) {
+      cancelAnimationFrame(levelMonitorFrameRef.current);
+      levelMonitorFrameRef.current = null;
+    }
+
+    // Clear track levels
+    setTrackLevels({});
   }, [isPlaying, duration]);

   const stop = useCallback(() => {
@@ -314,6 +369,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       sourceNodesRef.current = [];
       gainNodesRef.current = [];
       panNodesRef.current = [];
+      analyserNodesRef.current = [];
       effectNodesRef.current = [];

       // Create master gain node
@@ -331,6 +387,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {

         const gainNode = audioContext.createGain();
         const panNode = audioContext.createStereoPanner();
+        const analyserNode = audioContext.createAnalyser();
+        analyserNode.fftSize = 256;
+        analyserNode.smoothingTimeConstant = 0.3;

         // Set gain based on track volume and solo/mute state
         const trackGain = getTrackGain(track, latestTracks);
@@ -339,13 +398,14 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         // Set pan
         panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);

-        // Connect: source -> gain -> pan -> effects -> master gain -> destination
+        // Connect: source -> gain -> pan -> effects -> analyser -> master gain -> destination
         source.connect(gainNode);
         gainNode.connect(panNode);

         // Apply effect chain
         const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
-        outputNode.connect(masterGain);
+        outputNode.connect(analyserNode);
+        analyserNode.connect(masterGain);

         // Start playback from current position
         source.start(0, pausedAtRef.current);
@@ -354,6 +414,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
         sourceNodesRef.current.push(source);
         gainNodesRef.current.push(gainNode);
         panNodesRef.current.push(panNode);
+        analyserNodesRef.current.push(analyserNode);
         effectNodesRef.current.push(effectNodes);

         // Handle ended event
@@ -391,11 +452,12 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
           animationFrameRef.current = requestAnimationFrame(updatePosition);
         };
         updatePosition();
+        monitorPlaybackLevels();
       }, 10);
     }

     previousEffectStructureRef.current = currentStructure;
-  }, [tracks, isPlaying, duration, masterVolume]);
+  }, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels]);

   // Stop playback when all tracks are deleted
   useEffect(() => {
@@ -454,6 +516,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       if (animationFrameRef.current) {
         cancelAnimationFrame(animationFrameRef.current);
       }
+      if (levelMonitorFrameRef.current) {
+        cancelAnimationFrame(levelMonitorFrameRef.current);
+      }
       sourceNodesRef.current.forEach(node => {
         try {
           node.stop();
@@ -464,6 +529,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
       });
       gainNodesRef.current.forEach(node => node.disconnect());
       panNodesRef.current.forEach(node => node.disconnect());
+      analyserNodesRef.current.forEach(node => node.disconnect());
       if (masterGainNodeRef.current) {
         masterGainNodeRef.current.disconnect();
       }
@@ -474,6 +540,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
     isPlaying,
     currentTime,
     duration,
+    trackLevels,
     play,
     pause,
     stop,