diff --git a/components/analysis/AudioStatistics.tsx b/components/analysis/AudioStatistics.tsx
new file mode 100644
index 0000000..df33b96
--- /dev/null
+++ b/components/analysis/AudioStatistics.tsx
@@ -0,0 +1,159 @@
+'use client';
+
+import * as React from 'react';
+import { cn } from '@/lib/utils/cn';
+import type { Track } from '@/types/track';
+
+export interface AudioStatisticsProps {
+ tracks: Track[];
+ className?: string;
+}
+
+/**
+ * AudioStatistics — read-only panel summarizing project-level audio metrics
+ * (track count, duration, sample rate, channel count, peak/RMS level,
+ * dynamic range, headroom) computed from the decoded AudioBuffers of the
+ * given tracks.
+ *
+ * NOTE(review): the JSX in the return statement below is garbled in this
+ * view (element tags appear stripped); only the surviving expressions are
+ * documented.
+ */
+export function AudioStatistics({ tracks, className }: AudioStatisticsProps) {
+ // Recompute only when the track list changes: the scan below touches every
+ // sample of every buffer (O(total samples)), so memoization matters.
+ const stats = React.useMemo(() => {
+ if (tracks.length === 0) {
+ // Empty-project defaults. bitDepth stays 32 because Web Audio buffers
+ // are always 32-bit float (see matching comment on the non-empty path).
+ return {
+ totalDuration: 0,
+ longestTrack: 0,
+ sampleRate: 0,
+ channels: 0,
+ bitDepth: 32,
+ peakAmplitude: 0,
+ rmsLevel: 0,
+ dynamicRange: 0,
+ trackCount: 0,
+ };
+ }
+
+ let maxDuration = 0;
+ let maxPeak = 0;
+ let sumRms = 0;
+ // NOTE(review): minPeak is updated in the loop below but never read in the
+ // returned stats — looks like dead state; confirm before removing.
+ let minPeak = 1;
+ let sampleRate = 0;
+ let channels = 0;
+
+ tracks.forEach(track => {
+ // Tracks that have not been decoded yet contribute nothing.
+ if (!track.buffer) return;
+
+ const duration = track.buffer.duration;
+ maxDuration = Math.max(maxDuration, duration);
+
+ // Get sample rate and channels from first track
+ if (sampleRate === 0) {
+ sampleRate = track.buffer.sampleRate;
+ channels = track.buffer.numberOfChannels;
+ }
+
+ // Calculate peak and RMS from buffer
+ for (let ch = 0; ch < track.buffer.numberOfChannels; ch++) {
+ const channelData = track.buffer.getChannelData(ch);
+ let chPeak = 0;
+ let chRmsSum = 0;
+
+ for (let i = 0; i < channelData.length; i++) {
+ const abs = Math.abs(channelData[i]);
+ chPeak = Math.max(chPeak, abs);
+ chRmsSum += channelData[i] * channelData[i];
+ }
+
+ maxPeak = Math.max(maxPeak, chPeak);
+ minPeak = Math.min(minPeak, chPeak);
+ // Accumulate one RMS value per channel; averaged below.
+ sumRms += Math.sqrt(chRmsSum / channelData.length);
+ }
+ });
+
+ // NOTE(review): the denominator uses tracks.length even though tracks
+ // without a buffer were skipped above, and per-track channel counts may
+ // differ from the first track's `channels` — in either case this average
+ // is biased low. Confirm intent before relying on rmsLevel.
+ const avgRms = sumRms / (tracks.length * Math.max(1, channels));
+ const peakDb = maxPeak > 0 ? 20 * Math.log10(maxPeak) : -Infinity;
+ const rmsDb = avgRms > 0 ? 20 * Math.log10(avgRms) : -Infinity;
+ // Crest-factor style estimate: peak minus average RMS, in dB.
+ const dynamicRange = peakDb - rmsDb;
+
+ return {
+ // NOTE(review): totalDuration and longestTrack are both maxDuration —
+ // presumably tracks play in parallel so project length == longest track;
+ // confirm that totalDuration is not meant to be a sum.
+ totalDuration: maxDuration,
+ longestTrack: maxDuration,
+ sampleRate,
+ channels,
+ bitDepth: 32, // Web Audio API uses 32-bit float
+ peakAmplitude: maxPeak,
+ rmsLevel: avgRms,
+ dynamicRange: dynamicRange > 0 ? dynamicRange : 0,
+ trackCount: tracks.length,
+ };
+ }, [tracks]);
+
+ // Format a duration in seconds as "m:ss.mmm" for display.
+ const formatDuration = (seconds: number) => {
+ const mins = Math.floor(seconds / 60);
+ const secs = Math.floor(seconds % 60);
+ const ms = Math.floor((seconds % 1) * 1000);
+ return `${mins}:${secs.toString().padStart(2, '0')}.${ms.toString().padStart(3, '0')}`;
+ };
+
+ // Convert a linear amplitude to a dBFS string; anything at or below
+ // -60 dB (and exact silence) renders as "-∞ dB".
+ const formatDb = (linear: number) => {
+ if (linear === 0) return '-∞ dB';
+ const db = 20 * Math.log10(linear);
+ return db > -60 ? `${db.toFixed(1)} dB` : '-∞ dB';
+ };
+
+ // NOTE(review): markup below is garbled in this view — element tags were
+ // stripped. Surviving expressions show: project info rows (track count,
+ // duration, sample rate, mono/stereo label, bit depth), then level rows
+ // (peak with red highlight above 0.99, RMS, dynamic range, and headroom
+ // colored red/yellow/green by proximity to full scale).
+ return (
+
+
+ Audio Statistics
+
+
+ {/* File Info */}
+
+
Project Info
+
+
Tracks:
+
{stats.trackCount}
+
+
Duration:
+
{formatDuration(stats.totalDuration)}
+
+
Sample Rate:
+
{stats.sampleRate > 0 ? `${(stats.sampleRate / 1000).toFixed(1)} kHz` : 'N/A'}
+
+
Channels:
+
{stats.channels > 0 ? (stats.channels === 1 ? 'Mono' : 'Stereo') : 'N/A'}
+
+
Bit Depth:
+
{stats.bitDepth}-bit float
+
+
+
+ {/* Divider */}
+
+
+ {/* Audio Levels */}
+
+
Levels
+
+
Peak:
+
0.99 ? 'text-red-500 font-bold' : ''
+ )}>
+ {formatDb(stats.peakAmplitude)}
+
+
+
RMS:
+
{formatDb(stats.rmsLevel)}
+
+
Dynamic Range:
+
+ {stats.dynamicRange > 0 ? `${stats.dynamicRange.toFixed(1)} dB` : 'N/A'}
+
+
+
Headroom:
+
0.99 ? 'text-red-500' :
+ stats.peakAmplitude > 0.9 ? 'text-yellow-500' : 'text-green-500'
+ )}>
+ {stats.peakAmplitude > 0 ? `${(20 * Math.log10(1 / stats.peakAmplitude)).toFixed(1)} dB` : 'N/A'}
+
+
+
+
+
+ );
+}
diff --git a/components/analysis/LUFSMeter.tsx b/components/analysis/LUFSMeter.tsx
new file mode 100644
index 0000000..e64855a
--- /dev/null
+++ b/components/analysis/LUFSMeter.tsx
@@ -0,0 +1,167 @@
+'use client';
+
+import * as React from 'react';
+import { cn } from '@/lib/utils/cn';
+
+export interface LUFSMeterProps {
+ analyserNode: AnalyserNode | null;
+ className?: string;
+}
+
+/**
+ * LUFSMeter — canvas meter showing approximate momentary (M), short-term (S)
+ * and integrated (I) loudness derived from an AnalyserNode's frequency data.
+ *
+ * The estimate is computed from byte frequency-bin magnitudes, NOT a true
+ * ITU-R BS.1770 K-weighted measurement (the inline comments acknowledge
+ * this), so the displayed values are indicative only.
+ *
+ * NOTE(review): the JSX in the return statement is garbled in this view
+ * (element tags appear stripped).
+ */
+export function LUFSMeter({ analyserNode, className }: LUFSMeterProps) {
+ const canvasRef = React.useRef(null);
+ // Holds the current requestAnimationFrame id so cleanup can cancel it.
+ const animationFrameRef = React.useRef(undefined);
+ // -23 LUFS (broadcast target) is used as the initial/quiet baseline.
+ const [lufs, setLufs] = React.useState({ integrated: -23, shortTerm: -23, momentary: -23 });
+ // Rolling window of per-frame loudness estimates (ref, not state, so
+ // pushing per frame does not itself trigger re-renders).
+ const lufsHistoryRef = React.useRef([]);
+
+ React.useEffect(() => {
+ if (!analyserNode || !canvasRef.current) return;
+
+ const canvas = canvasRef.current;
+ const ctx = canvas.getContext('2d');
+ if (!ctx) return;
+
+ // Set canvas size
+ // Sized once per effect run from the element's CSS box, scaled for
+ // devicePixelRatio. NOTE(review): no resize handling — a window resize
+ // after mount leaves the bitmap at the old size; confirm acceptable.
+ const dpr = window.devicePixelRatio || 1;
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * dpr;
+ canvas.height = rect.height * dpr;
+ ctx.scale(dpr, dpr);
+
+ const bufferLength = analyserNode.frequencyBinCount;
+ const dataArray = new Uint8Array(bufferLength);
+
+ const draw = () => {
+ // Schedule the next frame first so the loop survives a thrown error
+ // in this frame's body.
+ animationFrameRef.current = requestAnimationFrame(draw);
+
+ analyserNode.getByteFrequencyData(dataArray);
+
+ // Calculate RMS from frequency data
+ let sum = 0;
+ for (let i = 0; i < bufferLength; i++) {
+ const normalized = dataArray[i] / 255;
+ sum += normalized * normalized;
+ }
+ const rms = Math.sqrt(sum / bufferLength);
+
+ // Convert to LUFS approximation (simplified K-weighting)
+ // Real LUFS requires proper K-weighting filter, this is an approximation
+ let lufsValue = -23; // Silence baseline
+ if (rms > 0.0001) {
+ lufsValue = 20 * Math.log10(rms) - 0.691; // Simplified LUFS estimation
+ lufsValue = Math.max(-70, Math.min(0, lufsValue));
+ }
+
+ // Store history for integrated measurement
+ lufsHistoryRef.current.push(lufsValue);
+ // NOTE(review): requestAnimationFrame typically fires at ~60fps, so 300
+ // frames is ~5s (and slice(-90) below ~1.5s), not the 10s/3s the
+ // comments claim — confirm the intended window sizes.
+ if (lufsHistoryRef.current.length > 300) { // Keep last 10 seconds at 30fps
+ lufsHistoryRef.current.shift();
+ }
+
+ // Calculate measurements
+ const momentary = lufsValue; // Current value
+ const shortTerm = lufsHistoryRef.current.slice(-90).reduce((a, b) => a + b, 0) / Math.min(90, lufsHistoryRef.current.length); // Last 3 seconds
+ const integrated = lufsHistoryRef.current.reduce((a, b) => a + b, 0) / lufsHistoryRef.current.length; // All time
+
+ // NOTE(review): setState on every animation frame re-renders this
+ // component at display refresh rate; consider throttling the readout.
+ setLufs({ integrated, shortTerm, momentary });
+
+ // Clear canvas
+ // NOTE(review): getComputedStyle per frame forces a style recalc;
+ // could be hoisted out of the loop.
+ const bgColor = getComputedStyle(canvas.parentElement!).backgroundColor;
+ ctx.fillStyle = bgColor;
+ ctx.fillRect(0, 0, rect.width, rect.height);
+
+ // Draw LUFS scale (-70 to 0)
+ // Maps a LUFS value to a y pixel: 0 LUFS at the top, -70 at the bottom.
+ const lufsToY = (lufs: number) => {
+ return ((0 - lufs) / 70) * rect.height;
+ };
+
+ // Draw reference lines
+ ctx.strokeStyle = 'rgba(128, 128, 128, 0.2)';
+ ctx.lineWidth = 1;
+ [-23, -16, -9, -3].forEach(db => {
+ const y = lufsToY(db);
+ ctx.beginPath();
+ ctx.moveTo(0, y);
+ ctx.lineTo(rect.width, y);
+ ctx.stroke();
+
+ // Labels
+ ctx.fillStyle = 'rgba(255, 255, 255, 0.4)';
+ ctx.font = '9px monospace';
+ ctx.textAlign = 'right';
+ ctx.fillText(`${db}`, rect.width - 2, y - 2);
+ });
+
+ // Draw -23 LUFS broadcast standard line
+ ctx.strokeStyle = 'rgba(59, 130, 246, 0.5)';
+ ctx.lineWidth = 2;
+ const standardY = lufsToY(-23);
+ ctx.beginPath();
+ ctx.moveTo(0, standardY);
+ ctx.lineTo(rect.width, standardY);
+ ctx.stroke();
+
+ // Draw bars
+ const barWidth = rect.width / 4;
+ // Draws one vertical loudness bar plus its one-letter label at the bottom.
+ const drawBar = (value: number, x: number, color: string, label: string) => {
+ const y = lufsToY(value);
+ const height = rect.height - y;
+
+ ctx.fillStyle = color;
+ ctx.fillRect(x, y, barWidth - 4, height);
+
+ // Label
+ ctx.fillStyle = 'rgba(255, 255, 255, 0.7)';
+ ctx.font = 'bold 9px monospace';
+ ctx.textAlign = 'center';
+ ctx.fillText(label, x + barWidth / 2 - 2, rect.height - 2);
+ };
+
+ drawBar(momentary, 0, 'rgba(239, 68, 68, 0.7)', 'M');
+ drawBar(shortTerm, barWidth, 'rgba(251, 146, 60, 0.7)', 'S');
+ drawBar(integrated, barWidth * 2, 'rgba(34, 197, 94, 0.7)', 'I');
+ };
+
+ draw();
+
+ // Cleanup: stop the animation loop when the node changes or on unmount.
+ return () => {
+ if (animationFrameRef.current) {
+ cancelAnimationFrame(animationFrameRef.current);
+ }
+ };
+ }, [analyserNode]);
+
+ // NOTE(review): markup below is garbled in this view — element tags were
+ // stripped. Surviving expressions show a canvas plus three numeric
+ // readouts (Momentary red above -9, Short-term orange above -16,
+ // Integrated), each rendering '-∞' at or below -70.
+ return (
+
+
+ LUFS Loudness
+
+
+
+
+
+
Momentary
+
-9 ? 'text-red-500' : 'text-foreground')}>
+ {lufs.momentary > -70 ? lufs.momentary.toFixed(1) : '-∞'}
+
+
+
+
Short-term
+
-16 ? 'text-orange-500' : 'text-foreground')}>
+ {lufs.shortTerm > -70 ? lufs.shortTerm.toFixed(1) : '-∞'}
+
+
+
+
Integrated
+
+ {lufs.integrated > -70 ? lufs.integrated.toFixed(1) : '-∞'}
+
+
+
+
+
+ );
+}
diff --git a/components/analysis/PhaseCorrelationMeter.tsx b/components/analysis/PhaseCorrelationMeter.tsx
new file mode 100644
index 0000000..ad0baa4
--- /dev/null
+++ b/components/analysis/PhaseCorrelationMeter.tsx
@@ -0,0 +1,181 @@
+'use client';
+
+import * as React from 'react';
+import { cn } from '@/lib/utils/cn';
+
+export interface PhaseCorrelationMeterProps {
+ analyserNode: AnalyserNode | null;
+ className?: string;
+}
+
+/**
+ * PhaseCorrelationMeter — computes and displays the Pearson correlation
+ * coefficient between the left and right time-domain signals tapped from an
+ * AnalyserNode, clamped to [-1, +1]:
+ *   +1 ≈ mono-compatible, ~0 ≈ wide/decorrelated stereo, -1 ≈ out of phase.
+ *
+ * NOTE(review): the JSX in the return statement is garbled in this view
+ * (element tags appear stripped).
+ */
+export function PhaseCorrelationMeter({ analyserNode, className }: PhaseCorrelationMeterProps) {
+ const canvasRef = React.useRef(null);
+ // Holds the current requestAnimationFrame id so cleanup can cancel it.
+ const animationFrameRef = React.useRef(undefined);
+ const [correlation, setCorrelation] = React.useState(0);
+
+ React.useEffect(() => {
+ if (!analyserNode || !canvasRef.current) return;
+
+ const canvas = canvasRef.current;
+ const ctx = canvas.getContext('2d');
+ if (!ctx) return;
+
+ // Set canvas size
+ // Sized once per effect run from the CSS box, scaled for devicePixelRatio;
+ // no resize handling after mount.
+ const dpr = window.devicePixelRatio || 1;
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * dpr;
+ canvas.height = rect.height * dpr;
+ ctx.scale(dpr, dpr);
+
+ const audioContext = analyserNode.context as AudioContext;
+ const bufferLength = analyserNode.fftSize;
+ const dataArrayL = new Float32Array(bufferLength);
+ const dataArrayR = new Float32Array(bufferLength);
+
+ // Create a splitter to get L/R channels
+ // Tap chain: analyserNode -> splitter -> analyserL (ch 0) / analyserR (ch 1).
+ // NOTE(review): assumes the AnalyserNode passes a stereo signal through to
+ // its output — confirm the upstream channel count.
+ const splitter = audioContext.createChannelSplitter(2);
+ const analyserL = audioContext.createAnalyser();
+ const analyserR = audioContext.createAnalyser();
+
+ analyserL.fftSize = bufferLength;
+ analyserR.fftSize = bufferLength;
+
+ // Try to connect to the analyser node's source
+ // Note: This is a simplified approach - ideally we'd get the source node
+ try {
+ analyserNode.connect(splitter);
+ splitter.connect(analyserL, 0);
+ splitter.connect(analyserR, 1);
+ } catch (e) {
+ // If connection fails, just show static display
+ // NOTE(review): failure is fully silent; consider a one-time dev log.
+ }
+
+ const draw = () => {
+ // Schedule next frame first so a thrown error doesn't kill the loop.
+ animationFrameRef.current = requestAnimationFrame(draw);
+
+ try {
+ analyserL.getFloatTimeDomainData(dataArrayL);
+ analyserR.getFloatTimeDomainData(dataArrayR);
+
+ // Calculate phase correlation (Pearson correlation coefficient)
+ // Single pass accumulating the sums needed for mean/variance/covariance.
+ let sumL = 0, sumR = 0, sumLR = 0, sumL2 = 0, sumR2 = 0;
+ const n = bufferLength;
+
+ for (let i = 0; i < n; i++) {
+ sumL += dataArrayL[i];
+ sumR += dataArrayR[i];
+ sumLR += dataArrayL[i] * dataArrayR[i];
+ sumL2 += dataArrayL[i] * dataArrayL[i];
+ sumR2 += dataArrayR[i] * dataArrayR[i];
+ }
+
+ const meanL = sumL / n;
+ const meanR = sumR / n;
+ const covLR = (sumLR / n) - (meanL * meanR);
+ const varL = (sumL2 / n) - (meanL * meanL);
+ const varR = (sumR2 / n) - (meanR * meanR);
+
+ // r defaults to 0 when either channel is constant (zero variance),
+ // e.g. silence — avoids division by zero.
+ let r = 0;
+ if (varL > 0 && varR > 0) {
+ r = covLR / Math.sqrt(varL * varR);
+ r = Math.max(-1, Math.min(1, r)); // Clamp to [-1, 1]
+ }
+
+ // NOTE(review): setState on every animation frame re-renders this
+ // component at display refresh rate; consider throttling.
+ setCorrelation(r);
+
+ // Clear canvas
+ const bgColor = getComputedStyle(canvas.parentElement!).backgroundColor;
+ ctx.fillStyle = bgColor;
+ ctx.fillRect(0, 0, rect.width, rect.height);
+
+ // Draw scale background
+ const centerY = rect.height / 2;
+ const barHeight = 20;
+
+ // Draw scale markers
+ ctx.fillStyle = 'rgba(128, 128, 128, 0.2)';
+ ctx.fillRect(0, centerY - barHeight / 2, rect.width, barHeight);
+
+ // Draw center line (0)
+ ctx.strokeStyle = 'rgba(128, 128, 128, 0.5)';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.moveTo(rect.width / 2, centerY - barHeight / 2 - 5);
+ ctx.lineTo(rect.width / 2, centerY + barHeight / 2 + 5);
+ ctx.stroke();
+
+ // Draw correlation indicator
+ // Map r in [-1, +1] linearly onto [0, width].
+ const x = ((r + 1) / 2) * rect.width;
+
+ // Color based on correlation value
+ let color;
+ if (r > 0.9) {
+ color = '#10b981'; // Green - good correlation (mono-ish)
+ } else if (r > 0.5) {
+ color = '#84cc16'; // Lime - moderate correlation
+ } else if (r > -0.5) {
+ color = '#eab308'; // Yellow - decorrelated (good stereo)
+ } else if (r > -0.9) {
+ color = '#f97316'; // Orange - negative correlation
+ } else {
+ color = '#ef4444'; // Red - phase issues
+ }
+
+ ctx.fillStyle = color;
+ ctx.fillRect(x - 2, centerY - barHeight / 2, 4, barHeight);
+
+ // Draw labels
+ ctx.fillStyle = 'rgba(255, 255, 255, 0.7)';
+ ctx.font = '9px monospace';
+ ctx.textAlign = 'left';
+ ctx.fillText('-1', 2, centerY - barHeight / 2 - 8);
+ ctx.textAlign = 'center';
+ ctx.fillText('0', rect.width / 2, centerY - barHeight / 2 - 8);
+ ctx.textAlign = 'right';
+ ctx.fillText('+1', rect.width - 2, centerY - barHeight / 2 - 8);
+
+ // Draw correlation value
+ ctx.textAlign = 'center';
+ ctx.font = 'bold 11px monospace';
+ ctx.fillText(r.toFixed(3), rect.width / 2, centerY + barHeight / 2 + 15);
+ } catch (e) {
+ // Silently handle errors
+ // NOTE(review): swallows everything, including programming errors;
+ // consider logging at least once in development.
+ }
+ };
+
+ draw();
+
+ return () => {
+ if (animationFrameRef.current) {
+ cancelAnimationFrame(animationFrameRef.current);
+ }
+ // NOTE(review): this never calls analyserNode.disconnect(splitter), so
+ // the upstream connection made above leaks across effect re-runs (each
+ // new analyserNode value adds a dangling splitter fan-out). Bug — fix by
+ // disconnecting analyserNode from splitter here.
+ try {
+ splitter.disconnect();
+ analyserL.disconnect();
+ analyserR.disconnect();
+ } catch (e) {
+ // Ignore disconnection errors
+ }
+ };
+ }, [analyserNode]);
+
+ // NOTE(review): markup below is garbled in this view — element tags were
+ // stripped. Surviving expressions show a canvas plus a text label mapping
+ // the correlation value to 'Mono-like' / 'Good Stereo' / 'Wide Stereo' /
+ // 'Phase Issues' (thresholds match the drawing colors above).
+ return (
+
+
+ Phase Correlation
+
+
+
+
+ {correlation > 0.9 ? 'Mono-like' :
+ correlation > 0.5 ? 'Good Stereo' :
+ correlation > -0.5 ? 'Wide Stereo' :
+ 'Phase Issues'}
+
+
+
+ );
+}
diff --git a/components/editor/AudioEditor.tsx b/components/editor/AudioEditor.tsx
index e988a02..3efa5e4 100644
--- a/components/editor/AudioEditor.tsx
+++ b/components/editor/AudioEditor.tsx
@@ -6,6 +6,9 @@ import { PlaybackControls } from './PlaybackControls';
import { MasterControls } from '@/components/controls/MasterControls';
import { FrequencyAnalyzer } from '@/components/analysis/FrequencyAnalyzer';
import { Spectrogram } from '@/components/analysis/Spectrogram';
+import { PhaseCorrelationMeter } from '@/components/analysis/PhaseCorrelationMeter';
+import { LUFSMeter } from '@/components/analysis/LUFSMeter';
+import { AudioStatistics } from '@/components/analysis/AudioStatistics';
import { ThemeToggle } from '@/components/layout/ThemeToggle';
import { CommandPalette } from '@/components/ui/CommandPalette';
import { GlobalSettingsDialog } from '@/components/settings/GlobalSettingsDialog';
@@ -49,7 +52,7 @@ export function AudioEditor() {
const [settingsDialogOpen, setSettingsDialogOpen] = React.useState(false);
const [exportDialogOpen, setExportDialogOpen] = React.useState(false);
const [isExporting, setIsExporting] = React.useState(false);
- const [analyzerView, setAnalyzerView] = React.useState<'frequency' | 'spectrogram'>('frequency');
+ const [analyzerView, setAnalyzerView] = React.useState<'frequency' | 'spectrogram' | 'phase' | 'lufs' | 'stats'>('frequency');
const { addToast } = useToast();
@@ -1067,37 +1070,72 @@ export function AudioEditor() {
{/* Analyzer Toggle */}
-
+
+
+
+
{/* Analyzer Display */}
- {analyzerView === 'frequency' ? (
-
- ) : (
-
- )}
+ {analyzerView === 'frequency' &&
}
+ {analyzerView === 'spectrogram' &&
}
+ {analyzerView === 'phase' &&
}
+ {analyzerView === 'lufs' &&
}
+ {analyzerView === 'stats' &&
}