feat: implement real-time playback level monitoring for all tracks

Add a playback level monitoring system that displays real-time audio
levels for each track during playback.

useMultiTrackPlayer Hook:
- Add an AnalyserNode for each track in the audio graph
- Compute RMS-based levels on a requestAnimationFrame loop (see the sketch after this list)
- Add trackLevels state (Record<string, number>) tracking levels by track ID
- Insert analysers after the effects chain, before the master gain
- Monitor levels continuously during playback
- Clean up level monitoring on pause/stop
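
In essence, each animation frame reads every track's analyser and reduces
it to a single RMS value. A condensed sketch of the per-frame pass (the
loop body lives in monitorPlaybackLevels in the diff below; readRmsLevel
is an illustrative name, not a function in the codebase):

```ts
// RMS level (0..1) from one AnalyserNode's time-domain data.
function readRmsLevel(analyser: AnalyserNode): number {
  const dataArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteTimeDomainData(dataArray); // bytes centered at 128 = silence
  let sum = 0;
  for (let i = 0; i < dataArray.length; i++) {
    const normalized = (dataArray[i] - 128) / 128; // map 0..255 -> -1..1
    sum += normalized * normalized;
  }
  return Math.sqrt(sum / dataArray.length); // stored as levels[track.id]
}
```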

Audio Graph Chain:
source -> gain -> pan -> effects -> analyser -> master gain -> destination
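
In code, the per-track wiring reads as follows (condensed from the hook
diff below; gainNode, panNode, analyserNode, and masterGain are the hook's
per-track locals, and applyEffectChain is the project's existing helper
that returns the chain's output node):

```ts
// Per-track wiring: the analyser taps the post-effects signal for metering.
source.connect(gainNode);
gainNode.connect(panNode);
const { outputNode } = applyEffectChain(audioContext, panNode, track.effectChain);
outputNode.connect(analyserNode); // insert analyser after effects...
analyserNode.connect(masterGain); // ...before the master gain -> destination
```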

AudioEditor Integration:
- Extract trackLevels from useMultiTrackPlayer hook
- Pass trackLevels down to TrackList component

TrackList & Track Components:
- Accept and forward trackLevels prop
- Pass playbackLevel to individual Track components
- Track component displays the appropriate level (see the sketch after this list):
  * Recording level (with "Input" label) when armed/recording
  * Playback level (with "Level" label) during normal playback
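
The selection itself is a single ternary in the Track render (as in the
diff below):

```ts
// Armed or actively recording tracks meter the input; all others meter playback.
const level = track.recordEnabled || isRecording ? recordingLevel : playbackLevel;
```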

Visual Feedback:
- Color-coded meters: green -> yellow (70%) -> red (90%) (see the sketch after this list)
- Real-time percentage display
- Seamless switching between input and output modes
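
The InputLevelMeter component itself is unchanged by this commit; a
hypothetical sketch of the threshold mapping described above (the function
name and color strings are illustrative, only the 70%/90% breakpoints come
from this commit):

```ts
// Illustrative only: map a 0..1 level to the meter color bands above.
function meterColor(level: number): "green" | "yellow" | "red" {
  if (level >= 0.9) return "red";
  if (level >= 0.7) return "yellow";
  return "green";
}
```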

This completes Phase 8 (Recording) with full bidirectional level monitoring!

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
4 changed files with 82 additions and 8 deletions


@@ -71,6 +71,7 @@ export function AudioEditor() {
isPlaying,
currentTime,
duration,
+trackLevels,
play,
pause,
stop,
@@ -638,6 +639,7 @@ export function AudioEditor() {
onToggleRecordEnable={handleToggleRecordEnable}
recordingTrackId={recordingTrackId}
recordingLevel={recordingState.inputLevel}
+trackLevels={trackLevels}
/>
</div>


@@ -35,6 +35,7 @@ export interface TrackProps {
onToggleRecordEnable?: () => void;
isRecording?: boolean;
recordingLevel?: number;
+playbackLevel?: number;
}
export function Track({
@@ -61,6 +62,7 @@ export function Track({
onToggleRecordEnable,
isRecording = false,
recordingLevel = 0,
+playbackLevel = 0,
}: TrackProps) {
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const containerRef = React.useRef<HTMLDivElement>(null);
@@ -564,12 +566,12 @@ export function Track({
</label>
<div className="flex-1">
<InputLevelMeter
-level={track.recordEnabled || isRecording ? recordingLevel : 0}
+level={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
orientation="horizontal"
/>
</div>
<span className="text-xs text-muted-foreground w-10 text-right flex-shrink-0">
-{Math.round((track.recordEnabled || isRecording ? recordingLevel : 0) * 100)}%
+{Math.round((track.recordEnabled || isRecording ? recordingLevel : playbackLevel) * 100)}%
</span>
</div>
</>


@@ -24,6 +24,7 @@ export interface TrackListProps {
onToggleRecordEnable?: (trackId: string) => void;
recordingTrackId?: string | null;
recordingLevel?: number;
+trackLevels?: Record<string, number>;
}
export function TrackList({
@@ -42,6 +43,7 @@ export function TrackList({
onToggleRecordEnable,
recordingTrackId,
recordingLevel = 0,
+trackLevels = {},
}: TrackListProps) {
const [importDialogOpen, setImportDialogOpen] = React.useState(false);
@@ -164,6 +166,7 @@ export function TrackList({
}
isRecording={recordingTrackId === track.id}
recordingLevel={recordingTrackId === track.id ? recordingLevel : 0}
+playbackLevel={trackLevels[track.id] || 0}
/>
))}
</div>


@@ -10,20 +10,28 @@ export interface MultiTrackPlayerState {
duration: number;
}
+export interface TrackLevel {
+  trackId: string;
+  level: number;
+}
export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const [isPlaying, setIsPlaying] = useState(false);
const [currentTime, setCurrentTime] = useState(0);
const [duration, setDuration] = useState(0);
+const [trackLevels, setTrackLevels] = useState<Record<string, number>>({});
const audioContextRef = useRef<AudioContext | null>(null);
const sourceNodesRef = useRef<AudioBufferSourceNode[]>([]);
const gainNodesRef = useRef<GainNode[]>([]);
const panNodesRef = useRef<StereoPannerNode[]>([]);
+const analyserNodesRef = useRef<AnalyserNode[]>([]);
const effectNodesRef = useRef<EffectNodeInfo[][]>([]); // Effect nodes per track
const masterGainNodeRef = useRef<GainNode | null>(null);
const startTimeRef = useRef<number>(0);
const pausedAtRef = useRef<number>(0);
const animationFrameRef = useRef<number | null>(null);
+const levelMonitorFrameRef = useRef<number | null>(null);
const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
// Keep tracksRef in sync with tracks prop
@@ -42,6 +50,36 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
setDuration(maxDuration);
}, [tracks]);
+// Monitor playback levels for all tracks
+const monitorPlaybackLevels = useCallback(() => {
+  if (!isPlaying || analyserNodesRef.current.length === 0) return;
+  const levels: Record<string, number> = {};
+  analyserNodesRef.current.forEach((analyser, index) => {
+    const track = tracksRef.current[index];
+    if (!track) return;
+    const dataArray = new Uint8Array(analyser.frequencyBinCount);
+    analyser.getByteTimeDomainData(dataArray);
+    // Calculate RMS level
+    let sum = 0;
+    for (let i = 0; i < dataArray.length; i++) {
+      const normalized = (dataArray[i] - 128) / 128;
+      sum += normalized * normalized;
+    }
+    const rms = Math.sqrt(sum / dataArray.length);
+    levels[track.id] = rms;
+  });
+  setTrackLevels(levels);
+  if (isPlaying) {
+    levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
+  }
+}, [isPlaying]);
const updatePlaybackPosition = useCallback(() => {
if (!audioContextRef.current) return;
@@ -87,6 +125,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
sourceNodesRef.current = [];
gainNodesRef.current = [];
panNodesRef.current = [];
+analyserNodesRef.current = [];
effectNodesRef.current = [];
// Create master gain node
@@ -104,6 +143,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const gainNode = audioContext.createGain();
const panNode = audioContext.createStereoPanner();
+const analyserNode = audioContext.createAnalyser();
+analyserNode.fftSize = 256;
+analyserNode.smoothingTimeConstant = 0.3;
// Set gain based on track volume and solo/mute state
const trackGain = getTrackGain(track, tracks);
@@ -112,7 +154,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
// Set pan
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
-// Connect: source -> gain -> pan -> effects -> master gain -> destination
+// Connect: source -> gain -> pan -> effects -> analyser -> master gain -> destination
source.connect(gainNode);
gainNode.connect(panNode);
@@ -123,7 +165,10 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
console.log('[MultiTrackPlayer] Number of effects:', track.effectChain.effects.length);
console.log('[MultiTrackPlayer] Effects:', track.effectChain.effects);
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
-outputNode.connect(masterGain);
+// Insert analyser after effects, before master gain
+outputNode.connect(analyserNode);
+analyserNode.connect(masterGain);
console.log('[MultiTrackPlayer] Effect output connected with', effectNodes.length, 'effect nodes');
// Start playback from current position
@@ -133,6 +178,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
sourceNodesRef.current.push(source);
gainNodesRef.current.push(gainNode);
panNodesRef.current.push(panNode);
+analyserNodesRef.current.push(analyserNode);
effectNodesRef.current.push(effectNodes);
// Handle ended event
@@ -148,7 +194,8 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
startTimeRef.current = audioContext.currentTime;
setIsPlaying(true);
updatePlaybackPosition();
-}, [tracks, duration, masterVolume, updatePlaybackPosition]);
+monitorPlaybackLevels();
+}, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels]);
const pause = useCallback(() => {
if (!audioContextRef.current || !isPlaying) return;
@@ -174,6 +221,14 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
cancelAnimationFrame(animationFrameRef.current);
animationFrameRef.current = null;
}
+if (levelMonitorFrameRef.current) {
+  cancelAnimationFrame(levelMonitorFrameRef.current);
+  levelMonitorFrameRef.current = null;
+}
+// Clear track levels
+setTrackLevels({});
}, [isPlaying, duration]);
const stop = useCallback(() => {
@@ -314,6 +369,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
sourceNodesRef.current = [];
gainNodesRef.current = [];
panNodesRef.current = [];
+analyserNodesRef.current = [];
effectNodesRef.current = [];
// Create master gain node
@@ -331,6 +387,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
const gainNode = audioContext.createGain();
const panNode = audioContext.createStereoPanner();
+const analyserNode = audioContext.createAnalyser();
+analyserNode.fftSize = 256;
+analyserNode.smoothingTimeConstant = 0.3;
// Set gain based on track volume and solo/mute state
const trackGain = getTrackGain(track, latestTracks);
@@ -339,13 +398,14 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
// Set pan
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
-// Connect: source -> gain -> pan -> effects -> master gain -> destination
+// Connect: source -> gain -> pan -> effects -> analyser -> master gain -> destination
source.connect(gainNode);
gainNode.connect(panNode);
// Apply effect chain
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
-outputNode.connect(masterGain);
+outputNode.connect(analyserNode);
+analyserNode.connect(masterGain);
// Start playback from current position
source.start(0, pausedAtRef.current);
@@ -354,6 +414,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
sourceNodesRef.current.push(source);
gainNodesRef.current.push(gainNode);
panNodesRef.current.push(panNode);
+analyserNodesRef.current.push(analyserNode);
effectNodesRef.current.push(effectNodes);
// Handle ended event
@@ -391,11 +452,12 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
animationFrameRef.current = requestAnimationFrame(updatePosition);
};
updatePosition();
+monitorPlaybackLevels();
}, 10);
}
previousEffectStructureRef.current = currentStructure;
-}, [tracks, isPlaying, duration, masterVolume]);
+}, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels]);
// Stop playback when all tracks are deleted
useEffect(() => {
@@ -454,6 +516,9 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
}
+if (levelMonitorFrameRef.current) {
+  cancelAnimationFrame(levelMonitorFrameRef.current);
+}
sourceNodesRef.current.forEach(node => {
try {
node.stop();
@@ -464,6 +529,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
});
gainNodesRef.current.forEach(node => node.disconnect());
panNodesRef.current.forEach(node => node.disconnect());
+analyserNodesRef.current.forEach(node => node.disconnect());
if (masterGainNodeRef.current) {
masterGainNodeRef.current.disconnect();
}
@@ -474,6 +540,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
isPlaying,
currentTime,
duration,
+trackLevels,
play,
pause,
stop,