/**
 * Web Audio API context management
 */

let audioContext: AudioContext | null = null;

/**
 * Get or create the global AudioContext
 */
export function getAudioContext(): AudioContext {
  if (typeof window === 'undefined') {
    throw new Error('AudioContext is only available in the browser');
  }

  if (!audioContext) {
    // Create AudioContext with a fallback for older WebKit browsers
    const AudioContextClass = window.AudioContext || (window as any).webkitAudioContext;
    if (!AudioContextClass) {
      throw new Error('Web Audio API is not supported in this browser');
    }
    audioContext = new AudioContextClass();
  }

  // Resume the context if it is suspended (required by browser autoplay policies).
  // Fire-and-forget here; callers that need to wait can use resumeAudioContext().
  if (audioContext.state === 'suspended') {
    void audioContext.resume();
  }

  return audioContext;
}

/**
 * Close the AudioContext and release its resources
 */
export async function closeAudioContext(): Promise<void> {
  if (audioContext) {
    await audioContext.close();
    audioContext = null;
  }
}

/**
 * Get the current AudioContext state, or null if no context has been created
 */
export function getAudioContextState(): AudioContextState | null {
  return audioContext?.state ?? null;
}

/**
 * Resume the AudioContext (required after a user interaction in some browsers)
 */
export async function resumeAudioContext(): Promise<void> {
  if (audioContext && audioContext.state === 'suspended') {
    await audioContext.resume();
  }
}

/**
 * Suspend the AudioContext to reduce CPU/battery usage
 */
export async function suspendAudioContext(): Promise<void> {
  if (audioContext && audioContext.state === 'running') {
    await audioContext.suspend();
  }
}
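
// Usage sketch (not part of the module): the import path './audio-context' below is an
// assumption about where this file lives. Browsers generally block audio until a user
// gesture, so a click handler is the usual place to create and resume the context.
//
//   import { getAudioContext, resumeAudioContext } from './audio-context';
//
//   document.querySelector('#play')?.addEventListener('click', async () => {
//     const ctx = getAudioContext();       // created lazily on first call
//     await resumeAudioContext();          // satisfies autoplay policies
//     const osc = ctx.createOscillator();  // short test tone
//     osc.connect(ctx.destination);
//     osc.start();
//     osc.stop(ctx.currentTime + 0.25);
//   });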