Phase 2 Complete

Features:
- Web Audio API context management with browser compatibility
- Audio file upload with drag-and-drop support
- Audio decoding for multiple formats (WAV, MP3, OGG, FLAC, AAC, M4A)
- AudioPlayer class with full playback control
- Waveform visualization using Canvas API
- Real-time waveform rendering with progress indicator
- Playback controls (play, pause, stop, seek)
- Volume control with mute/unmute
- Timeline scrubbing
- Audio file information display

Components:
- AudioEditor: Main editor container
- FileUpload: Drag-and-drop file upload component
- AudioInfo: Display audio file metadata
- Waveform: Canvas-based waveform visualization
- PlaybackControls: Transport controls with volume slider

Audio Engine:
- lib/audio/context.ts: AudioContext management
- lib/audio/decoder.ts: Audio file decoding utilities
- lib/audio/player.ts: AudioPlayer class for playback
- lib/waveform/peaks.ts: Waveform peak generation

Hooks:
- useAudioPlayer: Complete audio player state management

Types:
- types/audio.ts: TypeScript definitions for audio types

Features Working:
✓ Load audio files via drag-and-drop or file picker
✓ Display waveform with real-time progress
✓ Play/pause/stop controls
✓ Seek by clicking on waveform or using timeline slider
✓ Volume control with visual feedback
✓ Audio file metadata display (duration, sample rate, channels)
✓ Toast notifications for user feedback
✓ SSR-safe audio context initialization
✓ Dark/light theme support

Tech Stack:
- Web Audio API for playback
- Canvas API for waveform rendering
- React 19 hooks for state management
- TypeScript for type safety

Build verified and working ✓

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
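The Audio Engine list above names lib/waveform/peaks.ts for waveform peak generation, but the commit does not include that file's source. Below is a minimal sketch of how such a module is commonly written for Canvas rendering; the Peak interface, the generatePeaks name, and the bucketCount parameter are assumptions for illustration, not the repository's actual API.

// Hypothetical sketch of waveform peak generation (shape of lib/waveform/peaks.ts is assumed).
// Given a decoded AudioBuffer, each window of samples is reduced to a min/max pair so the
// Canvas renderer only needs to draw bucketCount vertical bars.

export interface Peak {
  min: number;
  max: number;
}

export function generatePeaks(buffer: AudioBuffer, bucketCount: number): Peak[] {
  const samples = buffer.getChannelData(0); // use the first channel for the overview
  const samplesPerBucket = Math.floor(samples.length / bucketCount) || 1;
  const peaks: Peak[] = [];

  for (let i = 0; i < bucketCount; i++) {
    const start = i * samplesPerBucket;
    const end = Math.min(start + samplesPerBucket, samples.length);
    let min = 1;
    let max = -1;

    for (let j = start; j < end; j++) {
      const value = samples[j];
      if (value < min) min = value;
      if (value > max) max = value;
    }

    peaks.push({ min, max });
  }

  return peaks;
}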
lib/audio/context.ts · 68 lines · 1.6 KiB · TypeScript
/**
 * Web Audio API context management
 */

let audioContext: AudioContext | null = null;

/**
 * Get or create the global AudioContext
 */
export function getAudioContext(): AudioContext {
  if (typeof window === 'undefined') {
    throw new Error('AudioContext is only available in the browser');
  }

  if (!audioContext) {
    // Create AudioContext with fallback for older browsers
    const AudioContextClass = window.AudioContext || (window as any).webkitAudioContext;

    if (!AudioContextClass) {
      throw new Error('Web Audio API is not supported in this browser');
    }

    audioContext = new AudioContextClass();
  }

  // Resume context if it's suspended (required by browser autoplay policies)
  if (audioContext.state === 'suspended') {
    audioContext.resume();
  }

  return audioContext;
}

/**
 * Close the AudioContext
 */
export async function closeAudioContext(): Promise<void> {
  if (audioContext) {
    await audioContext.close();
    audioContext = null;
  }
}

/**
 * Get the current AudioContext state
 */
export function getAudioContextState(): AudioContextState | null {
  return audioContext?.state ?? null;
}

/**
 * Resume the AudioContext (required after user interaction in some browsers)
 */
export async function resumeAudioContext(): Promise<void> {
  if (audioContext && audioContext.state === 'suspended') {
    await audioContext.resume();
  }
}

/**
 * Suspend the AudioContext
 */
export async function suspendAudioContext(): Promise<void> {
  if (audioContext && audioContext.state === 'running') {
    await audioContext.suspend();
  }
}
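A short usage sketch of the module above: a component obtains the shared context after a user gesture and decodes an uploaded file with the browser's built-in decodeAudioData. The decodeFile helper name and the '@/lib/audio/context' import alias are assumptions; the repository routes decoding through lib/audio/decoder.ts, whose API is not shown here.

// Assumed usage sketch, not the repository's actual decoder implementation.
import { getAudioContext, resumeAudioContext } from '@/lib/audio/context';

export async function decodeFile(file: File): Promise<AudioBuffer> {
  const context = getAudioContext();  // lazily creates the singleton context
  await resumeAudioContext();         // satisfy autoplay policies after a click
  const arrayBuffer = await file.arrayBuffer();
  return context.decodeAudioData(arrayBuffer);
}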