Implemented Phase 11.1 export format support:

- Added MP3 export using the lamejs library (see the sketch below)
- Added FLAC export using fflate DEFLATE compression
- Updated ExportDialog with a format selector and format-specific options:
  - MP3: bitrate selector (128/192/256/320 kbps)
  - FLAC: compression quality slider (0-9)
  - WAV: bit depth selector (16/24/32-bit)
- Updated AudioEditor to route export based on the selected format
- Created TypeScript declarations for lamejs
- Fixed AudioStatistics to use audioBuffer instead of the buffer property

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
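For context, a minimal sketch of what the lamejs MP3 path can look like when encoding a Web Audio API `AudioBuffer`. The helper name `exportMp3` and its signature are assumptions for illustration, not the repository's actual export API; lamejs itself takes 16-bit PCM samples per channel in fixed-size blocks and is flushed at the end:

```ts
import lamejs from 'lamejs';

// Hypothetical helper: encode an AudioBuffer to an MP3 Blob.
// The real ExportDialog/AudioEditor wiring in this repo may differ.
export function exportMp3(buffer: AudioBuffer, bitrateKbps: number = 192): Blob {
  const channels = Math.min(buffer.numberOfChannels, 2);
  const encoder = new lamejs.Mp3Encoder(channels, buffer.sampleRate, bitrateKbps);

  // lamejs expects 16-bit PCM, so convert the 32-bit float channel data.
  const toInt16 = (data: Float32Array): Int16Array => {
    const out = new Int16Array(data.length);
    for (let i = 0; i < data.length; i++) {
      const s = Math.max(-1, Math.min(1, data[i]));
      out[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
    return out;
  };

  const left = toInt16(buffer.getChannelData(0));
  const right = channels === 2 ? toInt16(buffer.getChannelData(1)) : undefined;

  const chunks: Int8Array[] = [];
  const blockSize = 1152; // one MPEG audio frame's worth of samples per call
  for (let i = 0; i < left.length; i += blockSize) {
    const l = left.subarray(i, i + blockSize);
    const r = right?.subarray(i, i + blockSize);
    const mp3buf = channels === 2 && r ? encoder.encodeBuffer(l, r) : encoder.encodeBuffer(l);
    if (mp3buf.length > 0) chunks.push(mp3buf);
  }

  const tail = encoder.flush();
  if (tail.length > 0) chunks.push(tail);

  return new Blob(chunks, { type: 'audio/mpeg' });
}
```

The WAV and FLAC paths would plug into the same place: the ExportDialog's format selector decides which encoder AudioEditor hands the mixed-down buffer to.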
160 lines
5.5 KiB
TypeScript
'use client';

import * as React from 'react';
import { cn } from '@/lib/utils/cn';
import type { Track } from '@/types/track';

export interface AudioStatisticsProps {
  tracks: Track[];
  className?: string;
}

export function AudioStatistics({ tracks, className }: AudioStatisticsProps) {
  const stats = React.useMemo(() => {
    if (tracks.length === 0) {
      return {
        totalDuration: 0,
        longestTrack: 0,
        sampleRate: 0,
        channels: 0,
        bitDepth: 32,
        peakAmplitude: 0,
        rmsLevel: 0,
        dynamicRange: 0,
        trackCount: 0,
      };
    }

    let maxDuration = 0;
    let maxPeak = 0;
    let sumRms = 0;
    let minPeak = 1;
    let sampleRate = 0;
    let channels = 0;

    tracks.forEach(track => {
      if (!track.audioBuffer) return;

      const duration = track.audioBuffer.duration;
      maxDuration = Math.max(maxDuration, duration);

      // Get sample rate and channels from first track
      if (sampleRate === 0) {
        sampleRate = track.audioBuffer.sampleRate;
        channels = track.audioBuffer.numberOfChannels;
      }

      // Calculate peak and RMS from buffer
      for (let ch = 0; ch < track.audioBuffer.numberOfChannels; ch++) {
        const channelData = track.audioBuffer.getChannelData(ch);
        let chPeak = 0;
        let chRmsSum = 0;

        for (let i = 0; i < channelData.length; i++) {
          const abs = Math.abs(channelData[i]);
          chPeak = Math.max(chPeak, abs);
          chRmsSum += channelData[i] * channelData[i];
        }

        maxPeak = Math.max(maxPeak, chPeak);
        minPeak = Math.min(minPeak, chPeak);
        sumRms += Math.sqrt(chRmsSum / channelData.length);
      }
    });
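
    // Peak and RMS are linear amplitudes in [0, 1]; 20 * log10(x) converts them
    // to dBFS, and dynamic range is reported as peak dB minus average RMS dB.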
    const avgRms = sumRms / (tracks.length * Math.max(1, channels));
    const peakDb = maxPeak > 0 ? 20 * Math.log10(maxPeak) : -Infinity;
    const rmsDb = avgRms > 0 ? 20 * Math.log10(avgRms) : -Infinity;
    const dynamicRange = peakDb - rmsDb;

    return {
      totalDuration: maxDuration,
      longestTrack: maxDuration,
      sampleRate,
      channels,
      bitDepth: 32, // Web Audio API uses 32-bit float
      peakAmplitude: maxPeak,
      rmsLevel: avgRms,
      dynamicRange: dynamicRange > 0 ? dynamicRange : 0,
      trackCount: tracks.length,
    };
  }, [tracks]);

  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = Math.floor(seconds % 60);
    const ms = Math.floor((seconds % 1) * 1000);
    return `${mins}:${secs.toString().padStart(2, '0')}.${ms.toString().padStart(3, '0')}`;
  };
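
  // Format a linear amplitude as a dB string for display; zero or anything
  // below -60 dB is shown as silence ("-∞ dB").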
  const formatDb = (linear: number) => {
    if (linear === 0) return '-∞ dB';
    const db = 20 * Math.log10(linear);
    return db > -60 ? `${db.toFixed(1)} dB` : '-∞ dB';
  };

  return (
    <div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-3', className)}>
      <div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-3">
        Audio Statistics
      </div>
      <div className="space-y-2 text-[10px]">
        {/* File Info */}
        <div className="space-y-1">
          <div className="text-[9px] text-muted-foreground uppercase tracking-wide">Project Info</div>
          <div className="grid grid-cols-2 gap-x-2 gap-y-1">
            <div className="text-muted-foreground">Tracks:</div>
            <div className="font-mono text-right">{stats.trackCount}</div>

            <div className="text-muted-foreground">Duration:</div>
            <div className="font-mono text-right">{formatDuration(stats.totalDuration)}</div>

            <div className="text-muted-foreground">Sample Rate:</div>
            <div className="font-mono text-right">{stats.sampleRate > 0 ? `${(stats.sampleRate / 1000).toFixed(1)} kHz` : 'N/A'}</div>

            <div className="text-muted-foreground">Channels:</div>
            <div className="font-mono text-right">{stats.channels > 0 ? (stats.channels === 1 ? 'Mono' : 'Stereo') : 'N/A'}</div>

            <div className="text-muted-foreground">Bit Depth:</div>
            <div className="font-mono text-right">{stats.bitDepth}-bit float</div>
          </div>
        </div>

        {/* Divider */}
        <div className="border-t border-border/30" />

        {/* Audio Levels */}
        <div className="space-y-1">
          <div className="text-[9px] text-muted-foreground uppercase tracking-wide">Levels</div>
          <div className="grid grid-cols-2 gap-x-2 gap-y-1">
            <div className="text-muted-foreground">Peak:</div>
            <div className={cn(
              'font-mono text-right',
              stats.peakAmplitude > 0.99 ? 'text-red-500 font-bold' : ''
            )}>
              {formatDb(stats.peakAmplitude)}
            </div>

            <div className="text-muted-foreground">RMS:</div>
            <div className="font-mono text-right">{formatDb(stats.rmsLevel)}</div>

            <div className="text-muted-foreground">Dynamic Range:</div>
            <div className="font-mono text-right">
              {stats.dynamicRange > 0 ? `${stats.dynamicRange.toFixed(1)} dB` : 'N/A'}
            </div>
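
            {/* Headroom: how far the measured peak sits below 0 dBFS (full scale) */}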
            <div className="text-muted-foreground">Headroom:</div>
            <div className={cn(
              'font-mono text-right',
              stats.peakAmplitude > 0.99 ? 'text-red-500' :
              stats.peakAmplitude > 0.9 ? 'text-yellow-500' : 'text-green-500'
            )}>
              {stats.peakAmplitude > 0 ? `${(20 * Math.log10(1 / stats.peakAmplitude)).toFixed(1)} dB` : 'N/A'}
            </div>
          </div>
        </div>
      </div>
    </div>
  );
}