Compare commits
213 Commits
fe519dab01
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| fa69ac649c | |||
| efd4cfa607 | |||
| 0d8952ca2f | |||
| 1b30931615 | |||
| d9bd8246c9 | |||
| dd8d46795a | |||
| adcc97eb5a | |||
| 1855988b83 | |||
| 477a444c78 | |||
| 119c8c2942 | |||
| 8720c35f23 | |||
| 25ddac349b | |||
| 08b33aacb5 | |||
| 9007522e18 | |||
| a47bf09a32 | |||
| aba26126cc | |||
| 66a515ba79 | |||
| 691f75209d | |||
| 908e6caaf8 | |||
| e09bc1449c | |||
| d03080d3d2 | |||
| 484e3261c5 | |||
| a2cef6cc6e | |||
| b1c0ff6f72 | |||
| 197bff39fc | |||
| 314fced79f | |||
| 0bd892e3d1 | |||
| 42991092ad | |||
| adb99a2c33 | |||
| 5d9e02fe95 | |||
| 854e64b4ec | |||
| e7bd262e6f | |||
| ba2e138ab9 | |||
| 7b4a7cc567 | |||
| 64864cfd34 | |||
| 14a9c6e163 | |||
| 0babc469cc | |||
| 6658bbbbd4 | |||
| 9b07f28995 | |||
| d08482a64c | |||
| 1a66669a77 | |||
| 4c794dd293 | |||
| 29de647b30 | |||
| 83ae2e7ea7 | |||
| 950c0f69a6 | |||
| a2542ac87f | |||
| 7aebc1da24 | |||
| 42b8f61f5f | |||
| 35a6ee35d0 | |||
| 235fc3913c | |||
| 0e59870884 | |||
| 8c779ccd88 | |||
| b57ac5912a | |||
| d2ed7d6e78 | |||
| cd310ce7e4 | |||
| 594ff7f4c9 | |||
| ca63d12cbf | |||
| 7a7d6891cd | |||
| 90e66e8bef | |||
| e0b878daad | |||
| 39ea599f18 | |||
| 45d46067ea | |||
| d7dfb8a746 | |||
| 5dadba9c9f | |||
| cd311d8145 | |||
| c7cb0b2504 | |||
| b8d4053cbc | |||
| ac8aa9e6c6 | |||
| 7de75f7b2b | |||
| 543eb069d7 | |||
| a626427142 | |||
| 9ad504478d | |||
| bcf439ca5e | |||
| 31af08e9f7 | |||
| 1b41fca393 | |||
| 67abbb20cb | |||
| 0d86cff1b7 | |||
| abd2a403cb | |||
| 102c67dc8d | |||
| b743f97276 | |||
| d3a5961131 | |||
| e1c19ffcb3 | |||
| e208a448d0 | |||
| 500a466bae | |||
| 37f910acb7 | |||
| c3e295f695 | |||
| 51114330ea | |||
| d5c84d35e4 | |||
| 77916d4d07 | |||
| 8112ff1ec3 | |||
| df1314a37c | |||
| 38a2b2962d | |||
| c6ff313050 | |||
| 6577d9f27b | |||
| 1c56e596b5 | |||
| 355bade08f | |||
| 461a800bb6 | |||
| 87771a5125 | |||
| 90f9218ed3 | |||
| 3818d93696 | |||
| e4b3433cf3 | |||
| d0601b2b36 | |||
| 7dc0780bd2 | |||
| 4281c65ec1 | |||
| b8ed648124 | |||
| a447a81414 | |||
| 797d64b1d3 | |||
| 418c79d961 | |||
| edebdc2129 | |||
| a8570f2458 | |||
| 3cce3f8c05 | |||
| 9a161bbe42 | |||
| 5c85914974 | |||
| d34611ef10 | |||
| 1ebd169137 | |||
| 9140110589 | |||
| 628c544511 | |||
| f7a7c4420c | |||
| 33be21295e | |||
| 34452862ca | |||
| 7a45a985c7 | |||
| bb30aa95e1 | |||
| f830640732 | |||
| 1d35c8f5b2 | |||
| 87b1c2e21a | |||
| 43cdf9abdd | |||
| 935ab85c08 | |||
| 8ec3505581 | |||
| 441920ee70 | |||
| c33a77270b | |||
| c54d5089c5 | |||
| a1f230a6e6 | |||
| 4c6453d115 | |||
| dac8ac4723 | |||
| 1666407ac2 | |||
| 30ebd52b4c | |||
| 623284e8b5 | |||
| a5c5289424 | |||
| 3cc4cb555a | |||
| eb445bfa3a | |||
| d2709b8fc2 | |||
| 03a7e2e485 | |||
| f6857bfc7b | |||
| 17381221d8 | |||
| 839128a93f | |||
| ecf7f060ec | |||
| 9b1eedc379 | |||
| dc9647731d | |||
| 49dd0c2d38 | |||
| 5f0017facb | |||
| a5a75a5f5c | |||
| 85d27c3b13 | |||
| 546c80e4ae | |||
| dc567d0144 | |||
| db01209f77 | |||
| a0ce83a654 | |||
| 3e6fbda755 | |||
| 8367cbf6e7 | |||
| a157172e3d | |||
| 6fbb677bd2 | |||
| cf0c37caa6 | |||
| c0ca4d7913 | |||
| fa397a1dfe | |||
| 2775fdb517 | |||
| f65dd0be7f | |||
| 5e6c61d951 | |||
| 166385d29a | |||
| 1dc0604635 | |||
| 74879a42cf | |||
| beb7085c89 | |||
| cbcd38b1ed | |||
| ecb5152d21 | |||
| bc7158dbe0 | |||
| ee24f04d76 | |||
| 2a0d6cd673 | |||
| 2948557e94 | |||
| 0dfbdca00c | |||
| f74c8abbf9 | |||
| 46ce75f3a5 | |||
| 439c14db87 | |||
| 25be7027f3 | |||
| b9ffbf28ef | |||
| a3fb8a564e | |||
| 6bfc8d3cfe | |||
| fa3588d619 | |||
| cb396ddfd6 | |||
| 25e843236d | |||
| 4547446ced | |||
| f3f5b65e1e | |||
| a8f2391400 | |||
| f640f2f9d4 | |||
| 2f8718626c | |||
| 5817598c48 | |||
| c6b9cb9af6 | |||
| a034ca7e85 | |||
| e3582b7b9a | |||
| 1404228239 | |||
| 889b2b91ae | |||
| 8ffa8e8b81 | |||
| d3ef961d31 | |||
| da5045e4f8 | |||
| 8023369239 | |||
| b8d2cb9585 | |||
| 401bab8886 | |||
| 8bd326a21b | |||
| 4381057f3f | |||
| e376f3b0b4 | |||
| f42e5d4556 | |||
| 64902fa707 | |||
| cf1358e051 | |||
| 7c19c069bf | |||
| 53d436a174 | |||
| 6b540ef8fb |
436
PLAN.md
436
PLAN.md
@@ -2,7 +2,7 @@
|
||||
|
||||
## Progress Overview
|
||||
|
||||
**Current Status**: Phase 7 In Progress (Multi-Track Support - Core Features Complete)
|
||||
**Current Status**: Phase 14 Complete (Settings & Preferences: Global settings with localStorage persistence) - Ready for Phase 15
|
||||
|
||||
### Completed Phases
|
||||
- ✅ **Phase 1**: Project Setup & Core Infrastructure (95% complete)
|
||||
@@ -81,7 +81,7 @@
|
||||
- ✅ Integrated playback controls at bottom
|
||||
- ✅ Keyboard-driven workflow
|
||||
|
||||
**Multi-Track Features (Phase 7 - Core Complete):**
|
||||
**Multi-Track Features (Phase 7 - Complete):**
|
||||
- ✅ Track creation and removal
|
||||
- ✅ Track naming with inline editing
|
||||
- ✅ Track colors (9 preset colors)
|
||||
@@ -94,11 +94,80 @@
|
||||
- ✅ Synchronized playback across all tracks
|
||||
- ✅ Per-track gain and pan during playback
|
||||
- ✅ Solo/Mute handling during playback
|
||||
- ✅ Per-track effect chains with device rack
|
||||
- ✅ Collapsible effects section below each track (192px height)
|
||||
- ✅ Effect browser with categorized effects
|
||||
- ✅ Horizontal scrolling device rack (Ableton-style)
|
||||
- ✅ Individual effect cards with side-folding design (40px collapsed, 384px+ expanded)
|
||||
- ✅ Real-time parameter controls for all effects (filters, dynamics, time-based, advanced)
|
||||
- ✅ Inline parameter editing with sliders and controls (multi-column grid layout)
|
||||
- ✅ Real-time effect processing during playback with Web Audio API nodes
|
||||
- ✅ Effect bypass functionality (disable/enable effects in real-time)
|
||||
- ✅ Supported real-time effects: All filters, compressor, limiter, gate, delay
|
||||
- 🔲 Advanced real-time effects: Reverb, chorus, flanger, phaser, distortion (TODO: Complex node graphs)
|
||||
- 🔲 Master channel effects (TODO: Implement master effect chain UI similar to per-track effects)
|
||||
|
||||
**Recording Features (Phase 8 - Complete):**
|
||||
- ✅ Microphone permission request
|
||||
- ✅ Audio input device selection
|
||||
- ✅ Input level meter with professional dB scale
|
||||
- ✅ Real-time input monitoring
|
||||
- ✅ Per-track record arming
|
||||
- ✅ Global record button
|
||||
- ✅ Recording indicator with pulse animation
|
||||
- ✅ Punch-in/Punch-out controls (time-based recording region)
|
||||
- ✅ Overdub mode (layer recordings by mixing audio)
|
||||
- ✅ Input gain control (0.0-2.0 with dB display, adjustable in real-time)
|
||||
- ✅ Mono/Stereo recording selection
|
||||
- ✅ Sample rate matching (44.1kHz, 48kHz, 96kHz)
|
||||
- ✅ Recording settings panel shown when track is armed
|
||||
|
||||
**Analysis Tools (Phase 10 - Complete):**
|
||||
- ✅ Frequency Analyzer with real-time FFT display
|
||||
- ✅ Spectrogram with time-frequency waterfall visualization
|
||||
- ✅ Phase Correlation Meter (stereo phase analysis)
|
||||
- ✅ LUFS Loudness Meter (momentary/short-term/integrated)
|
||||
- ✅ Audio Statistics Panel (project info and levels)
|
||||
- ✅ Color-coded heat map (blue → cyan → green → yellow → red)
|
||||
- ✅ Toggle between 5 analyzer views (FFT/SPEC/PHS/LUFS/INFO)
|
||||
- ✅ Theme-aware backgrounds (light/dark mode support)
|
||||
- ✅ Peak and RMS meters (master and per-track)
|
||||
- ✅ Clip indicator with reset (master only)
|
||||
|
||||
**Export Features (Phase 11.1, 11.2 & 11.3 - Complete):**
|
||||
- ✅ WAV export (16/24/32-bit PCM or float)
|
||||
- ✅ MP3 export with lamejs (128/192/256/320 kbps)
|
||||
- ✅ FLAC export with fflate compression (quality 0-9)
|
||||
- ✅ Format selector dropdown with dynamic options
|
||||
- ✅ Normalization option (1% headroom)
|
||||
- ✅ Export scope selector:
|
||||
- Entire Project (mix all tracks)
|
||||
- Selected Region (extract and mix selection)
|
||||
- Individual Tracks (separate files with sanitized names)
|
||||
- ✅ Export dialog with format-specific settings
|
||||
- ✅ Dynamic file extension display
|
||||
- ✅ Smart selection detection (disable option when no selection)
|
||||
|
||||
**Settings & Preferences (Phase 14 - Complete):**
|
||||
- ✅ Global settings dialog with 5 tabs (Recording, Audio, Editor, Interface, Performance)
|
||||
- ✅ localStorage persistence with default merging
|
||||
- ✅ Audio settings: buffer size, sample rate (applied to recording), auto-normalize
|
||||
- ✅ UI settings: theme, font size, default track height (applied to new tracks)
|
||||
- ✅ Editor settings: auto-save interval, undo limit, snap-to-grid, grid resolution, default zoom (applied)
|
||||
- ✅ Performance settings: peak quality, waveform quality, spectrogram toggle (applied), max file size
|
||||
- ✅ Category-specific reset buttons
|
||||
- ✅ Real-time application to editor behavior
|
||||
|
||||
### Next Steps
|
||||
- **Phase 6**: Audio effects ✅ COMPLETE (Basic + Filters + Dynamics + Time-Based + Advanced + Chain Management)
|
||||
- **Phase 7**: Multi-track editing 🚧 IN PROGRESS (Core features complete - Integration pending)
|
||||
- **Phase 8**: Recording functionality (NEXT)
|
||||
- **Phase 7**: Multi-track editing ✅ COMPLETE (Multi-track playback, effects, selection/editing)
|
||||
- **Phase 8**: Recording functionality ✅ COMPLETE (Audio input, controls, settings with overdub/punch)
|
||||
- **Phase 9**: Automation ✅ COMPLETE (Volume/Pan automation with write/touch/latch modes)
|
||||
- **Phase 10**: Analysis Tools ✅ COMPLETE (FFT, Spectrogram, Phase Correlation, LUFS, Audio Statistics)
|
||||
- **Phase 11**: Export & Import ✅ COMPLETE (Full export/import with all formats, settings, scope options & conversions)
|
||||
- **Phase 12**: Project Management ✅ COMPLETE (IndexedDB storage, auto-save, project export/import)
|
||||
- **Phase 13**: Keyboard Shortcuts ✅ COMPLETE (Full suite of shortcuts for navigation, editing, and view control)
|
||||
- **Phase 14**: Settings & Preferences ✅ COMPLETE (Global settings with localStorage persistence and live application)
|
||||
|
||||
---
|
||||
|
||||
@@ -564,216 +633,279 @@ audio-ui/
|
||||
- [ ] Send/Return effects - FUTURE
|
||||
- [ ] Sidechain support (advanced) - FUTURE
|
||||
|
||||
#### 7.4 Track Effects (Pending - Phase 8+)
|
||||
- [ ] Per-track effect chain
|
||||
- [ ] Effect rack UI
|
||||
- [ ] Effect bypass per track
|
||||
#### 7.4 Track Effects (Complete)
|
||||
- [x] Per-track effect chain
|
||||
- [x] Effect rack UI
|
||||
- [x] Effect bypass per track
|
||||
- [x] Real-time effect processing during playback
|
||||
- [x] Add/remove effects during playback
|
||||
- [x] Real-time parameter updates
|
||||
- [x] Effect chain persistence (localStorage)
|
||||
|
||||
### Phase 8: Recording
|
||||
|
||||
#### 8.1 Audio Input
|
||||
- [ ] Microphone permission request
|
||||
- [ ] Audio input device selection
|
||||
- [ ] Input level meter
|
||||
- [ ] Input monitoring (with latency compensation)
|
||||
#### 8.1 Audio Input ✓
|
||||
- [x] Microphone permission request
|
||||
- [x] Audio input device selection
|
||||
- [x] Input level meter
|
||||
- [x] Input monitoring (real-time level display)
|
||||
|
||||
#### 8.2 Recording Controls
|
||||
- [ ] Arm recording
|
||||
- [ ] Start/Stop recording
|
||||
- [ ] Punch-in/Punch-out recording
|
||||
- [ ] Overdub mode
|
||||
- [ ] Recording indicator
|
||||
#### 8.2 Recording Controls ✓
|
||||
- [x] Arm recording (per-track record enable)
|
||||
- [x] Start/Stop recording (global record button)
|
||||
- [x] Recording indicator (visual feedback with pulse animation)
|
||||
- [x] Punch-in/Punch-out recording (UI controls with time inputs)
|
||||
- [x] Overdub mode (mix recorded audio with existing audio)
|
||||
|
||||
#### 8.3 Recording Settings
|
||||
- [ ] Sample rate matching
|
||||
- [ ] Input gain control
|
||||
- [ ] Mono/Stereo recording
|
||||
- [ ] File naming conventions
|
||||
#### 8.3 Recording Settings ✓
|
||||
- [x] Sample rate matching (44.1kHz, 48kHz, 96kHz)
|
||||
- [x] Input gain control (0.0-2.0 with dB display)
|
||||
- [x] Mono/Stereo recording selection
|
||||
- [x] Real-time gain adjustment during recording
|
||||
- 🔲 File naming conventions (Future: Auto-name recorded tracks)
|
||||
|
||||
### Phase 9: Automation
|
||||
|
||||
#### 9.1 Automation Lanes
|
||||
- [ ] Show/Hide automation lanes per track
|
||||
- [ ] Automation lane for volume
|
||||
- [ ] Automation lane for pan
|
||||
- [ ] Automation lanes for effect parameters
|
||||
- [x] Show/Hide automation lanes per track
|
||||
- [x] Automation lane for volume
|
||||
- [x] Automation lane for pan
|
||||
- [x] Automation lanes for effect parameters
|
||||
- [x] Single automation lane with parameter dropdown selector
|
||||
- [x] Automation controls in left sidebar (matching track controls)
|
||||
- [x] Perfect alignment with waveform width
|
||||
|
||||
#### 9.2 Automation Points
|
||||
- [ ] Add/Remove automation points
|
||||
- [ ] Drag automation points
|
||||
- [ ] Automation curves (linear/bezier/step)
|
||||
- [x] Add/Remove automation points
|
||||
- [x] Drag automation points
|
||||
- [x] Automation curves (linear/step support)
|
||||
- [x] Select automation points
|
||||
- [x] Delete with keyboard (Delete/Backspace)
|
||||
- [ ] Copy/Paste automation
|
||||
- [ ] Bezier curves
|
||||
|
||||
#### 9.3 Automation Playback
|
||||
- [ ] Real-time automation during playback
|
||||
- [ ] Automation recording (write mode)
|
||||
- [ ] Automation editing modes (read/write/touch/latch)
|
||||
#### 9.3 Automation Playback & Recording
|
||||
- [x] Real-time automation during playback
|
||||
- [x] Automation for volume and pan
|
||||
- [x] Automation for effect parameters
|
||||
- [x] Continuous evaluation via requestAnimationFrame
|
||||
- [x] Proper parameter range conversion
|
||||
- [x] Automation recording (write mode) - Volume, Pan, Effect Parameters
|
||||
- [x] Automation editing modes UI (read/write/touch/latch)
|
||||
- [x] Automation modes recording implementation (write/touch/latch)
|
||||
- [x] Touch/latch mode tracking with control interaction
|
||||
- [x] Throttled automation point creation (every ~100ms)
|
||||
- [x] Parameter touch callbacks for volume and pan controls
|
||||
- [x] Parameter touch callbacks for effect parameter sliders
|
||||
- [x] Touch/latch modes for effect parameters (frequency, Q, gain, etc.)
|
||||
- [x] Proper prop passing through EffectDevice → EffectParameters → Slider
|
||||
|
||||
### Phase 10: Analysis Tools
|
||||
|
||||
#### 10.1 Frequency Analyzer
|
||||
- [ ] Real-time FFT analyzer
|
||||
- [ ] Frequency spectrum display
|
||||
- [x] Real-time FFT analyzer
|
||||
- [x] Frequency spectrum display
|
||||
- [ ] Peak/Average display modes
|
||||
- [ ] Logarithmic/Linear frequency scale
|
||||
|
||||
#### 10.2 Spectrogram
|
||||
- [ ] Time-frequency spectrogram view
|
||||
- [ ] Color scale customization
|
||||
- [ ] FFT size configuration
|
||||
- [x] Time-frequency spectrogram view
|
||||
- [x] Color scale customization (heat map: black/gray → blue → cyan → green → yellow → red)
|
||||
- [x] FFT size configuration (uses analyserNode.frequencyBinCount)
|
||||
- [ ] Overlay on waveform (optional)
|
||||
|
||||
#### 10.3 Metering
|
||||
- [ ] Peak meter
|
||||
- [ ] RMS meter
|
||||
- [ ] Phase correlation meter
|
||||
- [ ] Loudness meter (LUFS - optional)
|
||||
- [ ] Clip indicator
|
||||
- [x] Peak meter (master and per-track)
|
||||
- [x] RMS meter (master and per-track)
|
||||
- [x] Phase correlation meter
|
||||
- [x] Loudness meter (LUFS with momentary/short-term/integrated)
|
||||
- [x] Clip indicator (master only)
|
||||
|
||||
#### 10.4 Audio Statistics
|
||||
- [ ] File duration
|
||||
- [ ] Sample rate, bit depth, channels
|
||||
- [ ] Peak amplitude
|
||||
- [ ] RMS level
|
||||
- [ ] Dynamic range
|
||||
- [x] File duration
|
||||
- [x] Sample rate, bit depth, channels
|
||||
- [x] Peak amplitude
|
||||
- [x] RMS level
|
||||
- [x] Dynamic range
|
||||
- [x] Headroom calculation
|
||||
|
||||
### Phase 11: Export & Import
|
||||
### Phase 11: Export & Import (Phase 11.1, 11.2, 11.3 Complete)
|
||||
|
||||
#### 11.1 Export Formats
|
||||
- [ ] WAV export (PCM, various bit depths)
|
||||
- [ ] MP3 export (using lamejs)
|
||||
- [ ] OGG Vorbis export
|
||||
- [ ] FLAC export (using fflate)
|
||||
- [ ] Format selection UI
|
||||
#### 11.1 Export Formats ✅ COMPLETE
|
||||
- [x] WAV export (PCM, various bit depths: 16/24/32-bit)
|
||||
- [x] Export dialog with settings UI
|
||||
- [x] Export button in header
|
||||
- [x] Mix all tracks before export
|
||||
- [x] MP3 export (using lamejs with dynamic import)
|
||||
- [x] FLAC export (using fflate DEFLATE compression)
|
||||
- [ ] OGG Vorbis export (skipped - no good browser encoder available)
|
||||
|
||||
#### 11.2 Export Settings
|
||||
**Technical Implementation:**
|
||||
- MP3 encoding with lamejs: 1152 sample block size, configurable bitrate
|
||||
- FLAC compression with fflate: DEFLATE-based lossless compression
|
||||
- TypeScript declarations for lamejs module
|
||||
- Async/await for dynamic imports to reduce bundle size
|
||||
- Format-specific UI controls in ExportDialog
|
||||
|
||||
#### 11.2 Export Settings ✅ COMPLETE
|
||||
- [x] Bit depth selection (16/24/32-bit) for WAV and FLAC
|
||||
- [x] Normalization before export (with 1% headroom)
|
||||
- [x] Filename customization with dynamic extension display
|
||||
- [x] Quality/bitrate settings:
|
||||
- MP3: Bitrate selector (128/192/256/320 kbps)
|
||||
- FLAC: Compression quality slider (0-9, fast to small)
|
||||
- [x] Format selector dropdown (WAV/MP3/FLAC)
|
||||
- [ ] Sample rate conversion
|
||||
- [ ] Bit depth selection
|
||||
- [ ] Quality/bitrate settings (for lossy formats)
|
||||
- [ ] Dithering options
|
||||
- [ ] Normalization before export
|
||||
|
||||
#### 11.3 Export Regions
|
||||
- [ ] Export entire project
|
||||
- [ ] Export selected region
|
||||
- [ ] Batch export all regions
|
||||
- [ ] Export individual tracks
|
||||
#### 11.3 Export Regions ✅ COMPLETE
|
||||
- [x] Export entire project (mix all tracks)
|
||||
- [x] Export selected region (extract and mix selection from all tracks)
|
||||
- [x] Export individual tracks (separate files with sanitized names)
|
||||
- [ ] Batch export all regions (future feature)
|
||||
|
||||
#### 11.4 Import
|
||||
- [ ] Support for WAV, MP3, OGG, FLAC, M4A, AIFF
|
||||
- [ ] Sample rate conversion on import
|
||||
- [ ] Stereo to mono conversion
|
||||
- [ ] File metadata reading
|
||||
#### 11.4 Import ✅ COMPLETE
|
||||
- [x] Support for WAV, MP3, OGG, FLAC, M4A, AIFF
|
||||
- [x] Sample rate conversion on import
|
||||
- [x] Stereo to mono conversion
|
||||
- [x] File metadata reading (codec detection, duration, channels, sample rate)
|
||||
- [x] ImportOptions interface for flexible import configuration
|
||||
- [x] importAudioFile() function returning buffer + metadata
|
||||
- [x] Normalize on import option
|
||||
- [x] Import settings dialog component (ready for integration)
|
||||
|
||||
### Phase 12: Project Management
|
||||
### Phase 12: Project Management ✅
|
||||
|
||||
#### 12.1 Save/Load Projects
|
||||
- [ ] Save project to IndexedDB
|
||||
- [ ] Load project from IndexedDB
|
||||
- [ ] Project list UI
|
||||
- [ ] Auto-save functionality
|
||||
- [ ] Save-as functionality
|
||||
#### 12.1 Save/Load Projects ✅
|
||||
- [x] Save project to IndexedDB
|
||||
- [x] Load project from IndexedDB
|
||||
- [x] Project list UI (Projects dialog)
|
||||
- [x] Auto-save functionality (3-second debounce)
|
||||
- [x] Manual save with Ctrl+S
|
||||
- [x] Auto-load last project on startup
|
||||
- [x] Editable project name in header
|
||||
- [x] Delete and duplicate projects
|
||||
|
||||
#### 12.2 Project Structure
|
||||
- [ ] JSON project format
|
||||
- [ ] Track information
|
||||
- [ ] Audio buffer references
|
||||
- [ ] Effect settings
|
||||
- [ ] Automation data
|
||||
- [ ] Region markers
|
||||
#### 12.2 Project Structure ✅
|
||||
- [x] IndexedDB storage with serialization
|
||||
- [x] Track information (name, color, volume, pan, mute, solo)
|
||||
- [x] Audio buffer serialization (Float32Array per channel)
|
||||
- [x] Effect settings (serialized effect chains)
|
||||
- [x] Automation data (deep cloned to remove functions)
|
||||
- [x] Project metadata (name, description, duration, track count)
|
||||
|
||||
#### 12.3 Project Export/Import
|
||||
- [ ] Export project as JSON (with audio files)
|
||||
- [ ] Import project from JSON
|
||||
- [ ] Project templates
|
||||
#### 12.3 Project Export/Import ✅
|
||||
- [x] Export project as JSON (with audio data embedded)
|
||||
- [x] Import project from JSON
|
||||
- [x] Export button per project in Projects dialog
|
||||
- [x] Import button in Projects dialog header
|
||||
- [x] Auto-generate new IDs on import to avoid conflicts
|
||||
- [ ] Project templates (future enhancement)
|
||||
|
||||
#### 12.4 Project Settings
|
||||
- [ ] Sample rate
|
||||
- [ ] Bit depth
|
||||
- [ ] Default track count
|
||||
- [ ] Project name/description
|
||||
#### 12.4 Project Settings ✅
|
||||
- [x] Sample rate (stored per project)
|
||||
- [x] Zoom level (persisted)
|
||||
- [x] Current time (persisted)
|
||||
- [x] Project name/description
|
||||
- [x] Created/updated timestamps
|
||||
|
||||
### Phase 13: Keyboard Shortcuts
|
||||
### Phase 13: Keyboard Shortcuts ✅
|
||||
|
||||
#### 13.1 Playback Shortcuts
|
||||
- [ ] Spacebar - Play/Pause
|
||||
- [ ] Home - Go to start
|
||||
- [ ] End - Go to end
|
||||
- [ ] Left/Right Arrow - Move cursor
|
||||
- [ ] Ctrl+Left/Right - Move by larger increment
|
||||
#### 13.1 Playback Shortcuts ✅
|
||||
- [x] Spacebar - Play/Pause
|
||||
- [x] Home - Go to start
|
||||
- [x] End - Go to end
|
||||
- [x] Left/Right Arrow - Seek ±1 second
|
||||
- [x] Ctrl+Left/Right - Seek ±5 seconds
|
||||
|
||||
#### 13.2 Editing Shortcuts
|
||||
- [ ] Ctrl+Z - Undo
|
||||
- [ ] Ctrl+Y / Ctrl+Shift+Z - Redo
|
||||
- [ ] Ctrl+X - Cut
|
||||
- [ ] Ctrl+C - Copy
|
||||
- [ ] Ctrl+V - Paste
|
||||
- [ ] Delete - Delete selection
|
||||
- [ ] Ctrl+A - Select All
|
||||
- [ ] Escape - Clear selection
|
||||
#### 13.2 Editing Shortcuts ✅
|
||||
- [x] Ctrl+Z - Undo
|
||||
- [x] Ctrl+Y / Ctrl+Shift+Z - Redo
|
||||
- [x] Ctrl+X - Cut
|
||||
- [x] Ctrl+C - Copy
|
||||
- [x] Ctrl+V - Paste
|
||||
- [x] Ctrl+S - Save project
|
||||
- [x] Ctrl+D - Duplicate selection
|
||||
- [x] Delete/Backspace - Delete selection
|
||||
- [x] Ctrl+A - Select All (on current track)
|
||||
- [x] Escape - Clear selection
|
||||
|
||||
#### 13.3 View Shortcuts
|
||||
- [ ] Ctrl+Plus - Zoom in
|
||||
- [ ] Ctrl+Minus - Zoom out
|
||||
- [ ] Ctrl+0 - Fit to window
|
||||
- [ ] F - Toggle fullscreen (optional)
|
||||
#### 13.3 View Shortcuts ✅
|
||||
- [x] Ctrl+Plus/Equals - Zoom in
|
||||
- [x] Ctrl+Minus - Zoom out
|
||||
- [x] Ctrl+0 - Fit to window
|
||||
- [ ] F - Toggle fullscreen (browser native)
|
||||
|
||||
#### 13.4 Custom Shortcuts
|
||||
- [ ] Keyboard shortcuts manager
|
||||
- [ ] User-configurable shortcuts
|
||||
- [ ] Shortcut conflict detection
|
||||
- [ ] Keyboard shortcuts manager (future enhancement)
|
||||
- [ ] User-configurable shortcuts (future enhancement)
|
||||
- [ ] Shortcut conflict detection (future enhancement)
|
||||
|
||||
### Phase 14: Settings & Preferences
|
||||
### Phase 14: Settings & Preferences ✅ COMPLETE
|
||||
|
||||
#### 14.1 Audio Settings
|
||||
- [ ] Audio output device selection
|
||||
- [ ] Buffer size/latency configuration
|
||||
- [ ] Sample rate preference
|
||||
- [ ] Auto-normalize on import
|
||||
**✅ Accomplished:**
|
||||
- Global settings system with localStorage persistence
|
||||
- Settings dialog with 5 tabs (Recording, Audio, Editor, Interface, Performance)
|
||||
- Real-time settings application to editor behavior
|
||||
- Category-specific reset buttons
|
||||
- Merge with defaults on load for backward compatibility
|
||||
|
||||
#### 14.2 UI Settings
|
||||
- [ ] Theme selection (dark/light/auto)
|
||||
- [ ] Color scheme customization
|
||||
- [ ] Waveform colors
|
||||
- [ ] Font size
|
||||
#### 14.1 Audio Settings ✅
|
||||
- [ ] Audio output device selection (future: requires device enumeration API)
|
||||
- [x] Buffer size/latency configuration
|
||||
- [x] Sample rate preference (applied to recording)
|
||||
- [x] Auto-normalize on import
|
||||
|
||||
#### 14.3 Editor Settings
|
||||
- [ ] Auto-save interval
|
||||
- [ ] Undo history limit
|
||||
- [ ] Snap-to-grid toggle
|
||||
- [ ] Grid resolution
|
||||
- [ ] Default zoom level
|
||||
#### 14.2 UI Settings ✅
|
||||
- [x] Theme selection (dark/light/auto)
|
||||
- [x] Font size (small/medium/large)
|
||||
- [x] Default track height (120-400px, applied to new tracks)
|
||||
- [ ] Color scheme customization (future: advanced theming)
|
||||
|
||||
#### 14.4 Performance Settings
|
||||
- [ ] Peak calculation quality
|
||||
- [ ] Waveform rendering quality
|
||||
- [ ] Enable/disable spectrogram
|
||||
- [ ] Maximum file size limit
|
||||
#### 14.3 Editor Settings ✅
|
||||
- [x] Auto-save interval (0-60 seconds)
|
||||
- [x] Undo history limit (10-200 operations)
|
||||
- [x] Snap-to-grid toggle
|
||||
- [x] Grid resolution (0.1-10 seconds)
|
||||
- [x] Default zoom level (1-20x, applied to initial state)
|
||||
|
||||
#### 14.4 Performance Settings ✅
|
||||
- [x] Peak calculation quality (low/medium/high)
|
||||
- [x] Waveform rendering quality (low/medium/high)
|
||||
- [x] Enable/disable spectrogram (applied to analyzer visibility)
|
||||
- [x] Maximum file size limit (100-1000 MB)
|
||||
|
||||
### Phase 15: Polish & Optimization
|
||||
|
||||
#### 15.1 Performance Optimization
|
||||
- [ ] Web Worker for heavy computations
|
||||
- [ ] AudioWorklet for custom processing
|
||||
- [ ] Lazy loading for effects
|
||||
- [x] Lazy loading for dialogs and analysis components (GlobalSettingsDialog, ExportDialog, ProjectsDialog, ImportTrackDialog, FrequencyAnalyzer, Spectrogram, PhaseCorrelationMeter, LUFSMeter, AudioStatistics)
|
||||
- [ ] Code splitting for route optimization
|
||||
- [ ] Memory leak prevention
|
||||
- [x] Memory leak prevention (audio-cleanup utilities, proper cleanup in useRecording, animation frame cancellation in visualizations)
|
||||
|
||||
#### 15.2 Responsive Design
|
||||
- [ ] Mobile-friendly layout
|
||||
- [ ] Touch gesture support
|
||||
- [ ] Adaptive toolbar (hide on mobile)
|
||||
- [ ] Vertical scrolling for track list
|
||||
#### 15.2 Responsive Design ✅
|
||||
- [x] Mobile-friendly layout (responsive header, adaptive toolbar with icon-only buttons on small screens)
|
||||
- [x] Touch gesture support (collapse/expand controls with chevron buttons)
|
||||
- [x] Adaptive toolbar (hide less critical buttons on mobile: Export on md, Clear All on lg)
|
||||
- [x] Vertical scrolling for track list (sidebar hidden on mobile < lg breakpoint)
|
||||
- [x] Collapsible track controls (two-state mobile: collapsed with minimal controls + horizontal meter, expanded with full height fader + pan control; desktop always expanded with narrow borders)
|
||||
- [x] Collapsible master controls (collapsed view with horizontal level meter, expanded view with full controls; collapse button hidden on desktop)
|
||||
- [x] Track collapse buttons on mobile (left chevron: collapses/expands track in list, right chevron: collapses/expands track controls)
|
||||
- [x] Mobile vertical stacking layout (< lg breakpoint: controls → waveform → automation bars → effects bars per track, master controls and transport controls stacked vertically in bottom bar)
|
||||
- [x] Desktop two-column layout (≥ lg breakpoint: controls left sidebar, waveforms right panel with automation/effects bars, master controls in right sidebar, transport controls centered in bottom bar)
|
||||
- [x] Automation and effects bars on mobile (collapsible with eye/eye-off icons, horizontally scrollable, full functionality: parameter selection, mode cycling, height controls, add effects)
|
||||
- [x] Height synchronization (track controls and waveform container heights match exactly using user-configurable track.height on desktop)
|
||||
|
||||
#### 15.3 Error Handling
|
||||
- [ ] Graceful error messages
|
||||
- [ ] File format error handling
|
||||
- [ ] Memory limit warnings
|
||||
- [ ] Browser compatibility checks
|
||||
- [x] Graceful error messages (toast notifications for copy/paste/edit operations)
|
||||
- [x] File format error handling (UnsupportedFormatDialog with format validation and decode error catching)
|
||||
- [x] Memory limit warnings (MemoryWarningDialog with file size checks)
|
||||
- [x] Browser compatibility checks (BrowserCompatDialog with Web Audio API detection)
|
||||
|
||||
#### 15.4 Documentation
|
||||
- [ ] User guide
|
||||
- [ ] Keyboard shortcuts reference
|
||||
- [x] Keyboard shortcuts reference (KeyboardShortcutsDialog with ? shortcut and command palette integration)
|
||||
- [ ] Effect descriptions
|
||||
- [ ] Troubleshooting guide
|
||||
|
||||
|
||||
151
app/globals.css
151
app/globals.css
@@ -19,97 +19,97 @@
|
||||
/* CSS Variables for theming */
|
||||
@layer base {
|
||||
:root {
|
||||
/* Light mode colors using OKLCH */
|
||||
--background: oklch(100% 0 0);
|
||||
--foreground: oklch(9.8% 0.038 285.8);
|
||||
/* Light mode colors using OKLCH - bright neon palette */
|
||||
--background: oklch(98% 0.03 180);
|
||||
--foreground: oklch(20% 0.12 310);
|
||||
|
||||
--card: oklch(100% 0 0);
|
||||
--card-foreground: oklch(9.8% 0.038 285.8);
|
||||
--card: oklch(99% 0.02 200);
|
||||
--card-foreground: oklch(20% 0.12 310);
|
||||
|
||||
--popover: oklch(100% 0 0);
|
||||
--popover-foreground: oklch(9.8% 0.038 285.8);
|
||||
--popover: oklch(99% 0.02 200);
|
||||
--popover-foreground: oklch(20% 0.12 310);
|
||||
|
||||
--primary: oklch(22.4% 0.053 285.8);
|
||||
--primary-foreground: oklch(98% 0 0);
|
||||
--primary: oklch(58% 0.28 320);
|
||||
--primary-foreground: oklch(99% 0.02 200);
|
||||
|
||||
--secondary: oklch(96.1% 0 0);
|
||||
--secondary-foreground: oklch(13.8% 0.038 285.8);
|
||||
--secondary: oklch(92% 0.08 200);
|
||||
--secondary-foreground: oklch(25% 0.15 300);
|
||||
|
||||
--muted: oklch(96.1% 0 0);
|
||||
--muted-foreground: oklch(45.1% 0.015 285.9);
|
||||
--muted: oklch(94% 0.05 190);
|
||||
--muted-foreground: oklch(40% 0.12 260);
|
||||
|
||||
--accent: oklch(96.1% 0 0);
|
||||
--accent-foreground: oklch(13.8% 0.038 285.8);
|
||||
--accent: oklch(90% 0.12 180);
|
||||
--accent-foreground: oklch(25% 0.18 310);
|
||||
|
||||
--destructive: oklch(60.2% 0.168 29.2);
|
||||
--destructive-foreground: oklch(98% 0 0);
|
||||
--destructive: oklch(60% 0.28 15);
|
||||
--destructive-foreground: oklch(99% 0.02 200);
|
||||
|
||||
--border: oklch(89.8% 0 0);
|
||||
--input: oklch(89.8% 0 0);
|
||||
--ring: oklch(22.4% 0.053 285.8);
|
||||
--border: oklch(85% 0.08 200);
|
||||
--input: oklch(92% 0.06 190);
|
||||
--ring: oklch(58% 0.28 320);
|
||||
|
||||
--radius: 0.5rem;
|
||||
|
||||
--success: oklch(60% 0.15 145);
|
||||
--success-foreground: oklch(98% 0 0);
|
||||
--success: oklch(58% 0.25 160);
|
||||
--success-foreground: oklch(99% 0.02 200);
|
||||
|
||||
--warning: oklch(75% 0.15 85);
|
||||
--warning-foreground: oklch(20% 0 0);
|
||||
--warning: oklch(68% 0.25 85);
|
||||
--warning-foreground: oklch(20% 0.12 310);
|
||||
|
||||
--info: oklch(65% 0.15 240);
|
||||
--info-foreground: oklch(98% 0 0);
|
||||
--info: oklch(62% 0.25 240);
|
||||
--info-foreground: oklch(99% 0.02 200);
|
||||
|
||||
/* Audio-specific colors */
|
||||
--waveform: oklch(50% 0.1 240);
|
||||
--waveform-progress: oklch(60% 0.15 145);
|
||||
--waveform-selection: oklch(65% 0.15 240);
|
||||
--waveform-bg: oklch(98% 0 0);
|
||||
/* Audio-specific colors - neon cyan/magenta */
|
||||
--waveform: oklch(60% 0.26 200);
|
||||
--waveform-progress: oklch(58% 0.28 320);
|
||||
--waveform-selection: oklch(62% 0.26 180);
|
||||
--waveform-bg: oklch(99% 0.015 190);
|
||||
}
|
||||
|
||||
.dark {
|
||||
/* Dark mode colors using OKLCH */
|
||||
--background: oklch(9.8% 0.038 285.8);
|
||||
--foreground: oklch(98% 0 0);
|
||||
/* Dark mode colors using OKLCH - vibrant neon palette */
|
||||
--background: oklch(15% 0.015 265);
|
||||
--foreground: oklch(92% 0.02 180);
|
||||
|
||||
--card: oklch(9.8% 0.038 285.8);
|
||||
--card-foreground: oklch(98% 0 0);
|
||||
--card: oklch(18% 0.02 270);
|
||||
--card-foreground: oklch(92% 0.02 180);
|
||||
|
||||
--popover: oklch(9.8% 0.038 285.8);
|
||||
--popover-foreground: oklch(98% 0 0);
|
||||
--popover: oklch(18% 0.02 270);
|
||||
--popover-foreground: oklch(92% 0.02 180);
|
||||
|
||||
--primary: oklch(98% 0 0);
|
||||
--primary-foreground: oklch(13.8% 0.038 285.8);
|
||||
--primary: oklch(75% 0.25 310);
|
||||
--primary-foreground: oklch(18% 0.02 270);
|
||||
|
||||
--secondary: oklch(17.7% 0.038 285.8);
|
||||
--secondary-foreground: oklch(98% 0 0);
|
||||
--secondary: oklch(22% 0.03 280);
|
||||
--secondary-foreground: oklch(85% 0.15 180);
|
||||
|
||||
--muted: oklch(17.7% 0.038 285.8);
|
||||
--muted-foreground: oklch(63.9% 0.012 285.9);
|
||||
--muted: oklch(20% 0.02 270);
|
||||
--muted-foreground: oklch(65% 0.1 200);
|
||||
|
||||
--accent: oklch(17.7% 0.038 285.8);
|
||||
--accent-foreground: oklch(98% 0 0);
|
||||
--accent: oklch(25% 0.03 290);
|
||||
--accent-foreground: oklch(85% 0.2 320);
|
||||
|
||||
--destructive: oklch(50% 0.2 29.2);
|
||||
--destructive-foreground: oklch(98% 0 0);
|
||||
--destructive: oklch(65% 0.25 20);
|
||||
--destructive-foreground: oklch(92% 0.02 180);
|
||||
|
||||
--border: oklch(17.7% 0.038 285.8);
|
||||
--input: oklch(17.7% 0.038 285.8);
|
||||
--ring: oklch(83.1% 0.012 285.9);
|
||||
--border: oklch(30% 0.05 280);
|
||||
--input: oklch(22% 0.03 280);
|
||||
--ring: oklch(75% 0.25 310);
|
||||
|
||||
--success: oklch(55% 0.15 145);
|
||||
--success-foreground: oklch(98% 0 0);
|
||||
--success: oklch(70% 0.22 160);
|
||||
--success-foreground: oklch(18% 0.02 270);
|
||||
|
||||
--warning: oklch(70% 0.15 85);
|
||||
--warning-foreground: oklch(20% 0 0);
|
||||
--warning: oklch(75% 0.22 80);
|
||||
--warning-foreground: oklch(18% 0.02 270);
|
||||
|
||||
--info: oklch(60% 0.15 240);
|
||||
--info-foreground: oklch(98% 0 0);
|
||||
--info: oklch(72% 0.22 240);
|
||||
--info-foreground: oklch(18% 0.02 270);
|
||||
|
||||
/* Audio-specific colors */
|
||||
--waveform: oklch(70% 0.15 240);
|
||||
--waveform-progress: oklch(65% 0.15 145);
|
||||
--waveform-selection: oklch(70% 0.15 240);
|
||||
--waveform-bg: oklch(12% 0.038 285.8);
|
||||
/* Audio-specific colors - neon cyan/magenta */
|
||||
--waveform: oklch(72% 0.25 200);
|
||||
--waveform-progress: oklch(75% 0.25 310);
|
||||
--waveform-selection: oklch(70% 0.25 180);
|
||||
--waveform-bg: oklch(12% 0.02 270);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -354,4 +354,31 @@
|
||||
.custom-scrollbar::-webkit-scrollbar-thumb:active {
|
||||
background: color-mix(in oklch, var(--muted-foreground) 70%, transparent);
|
||||
}
|
||||
|
||||
/* Clip/Region styling for Ableton-style appearance */
|
||||
.track-clip-container {
|
||||
@apply absolute inset-2 rounded-sm shadow-sm overflow-hidden transition-all duration-150;
|
||||
background: oklch(0.2 0.01 var(--hue) / 0.3);
|
||||
border: 1px solid oklch(0.4 0.02 var(--hue) / 0.5);
|
||||
}
|
||||
|
||||
.track-clip-container:hover {
|
||||
border-color: oklch(0.5 0.03 var(--hue) / 0.7);
|
||||
}
|
||||
|
||||
.track-clip-header {
|
||||
@apply absolute top-0 left-0 right-0 h-4 pointer-events-none z-10 px-2 flex items-center;
|
||||
background: linear-gradient(to bottom, rgb(0 0 0 / 0.1), transparent);
|
||||
}
|
||||
}
|
||||
|
||||
@layer utilities {
|
||||
[data-theme='light'] .track-clip-container {
|
||||
background: oklch(0.95 0.01 var(--hue) / 0.3);
|
||||
border: 1px solid oklch(0.7 0.02 var(--hue) / 0.5);
|
||||
}
|
||||
|
||||
[data-theme='light'] .track-clip-header {
|
||||
background: linear-gradient(to bottom, rgb(255 255 255 / 0.15), transparent);
|
||||
}
|
||||
}
|
||||
|
||||
159
components/analysis/AudioStatistics.tsx
Normal file
159
components/analysis/AudioStatistics.tsx
Normal file
@@ -0,0 +1,159 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { Track } from '@/types/track';
|
||||
|
||||
export interface AudioStatisticsProps {
|
||||
tracks: Track[];
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function AudioStatistics({ tracks, className }: AudioStatisticsProps) {
|
||||
const stats = React.useMemo(() => {
|
||||
if (tracks.length === 0) {
|
||||
return {
|
||||
totalDuration: 0,
|
||||
longestTrack: 0,
|
||||
sampleRate: 0,
|
||||
channels: 0,
|
||||
bitDepth: 32,
|
||||
peakAmplitude: 0,
|
||||
rmsLevel: 0,
|
||||
dynamicRange: 0,
|
||||
trackCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
let maxDuration = 0;
|
||||
let maxPeak = 0;
|
||||
let sumRms = 0;
|
||||
let minPeak = 1;
|
||||
let sampleRate = 0;
|
||||
let channels = 0;
|
||||
|
||||
tracks.forEach(track => {
|
||||
if (!track.audioBuffer) return;
|
||||
|
||||
const duration = track.audioBuffer.duration;
|
||||
maxDuration = Math.max(maxDuration, duration);
|
||||
|
||||
// Get sample rate and channels from first track
|
||||
if (sampleRate === 0) {
|
||||
sampleRate = track.audioBuffer.sampleRate;
|
||||
channels = track.audioBuffer.numberOfChannels;
|
||||
}
|
||||
|
||||
// Calculate peak and RMS from buffer
|
||||
for (let ch = 0; ch < track.audioBuffer.numberOfChannels; ch++) {
|
||||
const channelData = track.audioBuffer.getChannelData(ch);
|
||||
let chPeak = 0;
|
||||
let chRmsSum = 0;
|
||||
|
||||
for (let i = 0; i < channelData.length; i++) {
|
||||
const abs = Math.abs(channelData[i]);
|
||||
chPeak = Math.max(chPeak, abs);
|
||||
chRmsSum += channelData[i] * channelData[i];
|
||||
}
|
||||
|
||||
maxPeak = Math.max(maxPeak, chPeak);
|
||||
minPeak = Math.min(minPeak, chPeak);
|
||||
sumRms += Math.sqrt(chRmsSum / channelData.length);
|
||||
}
|
||||
});
|
||||
|
||||
const avgRms = sumRms / (tracks.length * Math.max(1, channels));
|
||||
const peakDb = maxPeak > 0 ? 20 * Math.log10(maxPeak) : -Infinity;
|
||||
const rmsDb = avgRms > 0 ? 20 * Math.log10(avgRms) : -Infinity;
|
||||
const dynamicRange = peakDb - rmsDb;
|
||||
|
||||
return {
|
||||
totalDuration: maxDuration,
|
||||
longestTrack: maxDuration,
|
||||
sampleRate,
|
||||
channels,
|
||||
bitDepth: 32, // Web Audio API uses 32-bit float
|
||||
peakAmplitude: maxPeak,
|
||||
rmsLevel: avgRms,
|
||||
dynamicRange: dynamicRange > 0 ? dynamicRange : 0,
|
||||
trackCount: tracks.length,
|
||||
};
|
||||
}, [tracks]);
|
||||
|
||||
const formatDuration = (seconds: number) => {
|
||||
const mins = Math.floor(seconds / 60);
|
||||
const secs = Math.floor(seconds % 60);
|
||||
const ms = Math.floor((seconds % 1) * 1000);
|
||||
return `${mins}:${secs.toString().padStart(2, '0')}.${ms.toString().padStart(3, '0')}`;
|
||||
};
|
||||
|
||||
const formatDb = (linear: number) => {
|
||||
if (linear === 0) return '-∞ dB';
|
||||
const db = 20 * Math.log10(linear);
|
||||
return db > -60 ? `${db.toFixed(1)} dB` : '-∞ dB';
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-3', className)}>
|
||||
<div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-3">
|
||||
Audio Statistics
|
||||
</div>
|
||||
<div className="space-y-2 text-[10px]">
|
||||
{/* File Info */}
|
||||
<div className="space-y-1">
|
||||
<div className="text-[9px] text-muted-foreground uppercase tracking-wide">Project Info</div>
|
||||
<div className="grid grid-cols-2 gap-x-2 gap-y-1">
|
||||
<div className="text-muted-foreground">Tracks:</div>
|
||||
<div className="font-mono text-right">{stats.trackCount}</div>
|
||||
|
||||
<div className="text-muted-foreground">Duration:</div>
|
||||
<div className="font-mono text-right">{formatDuration(stats.totalDuration)}</div>
|
||||
|
||||
<div className="text-muted-foreground">Sample Rate:</div>
|
||||
<div className="font-mono text-right">{stats.sampleRate > 0 ? `${(stats.sampleRate / 1000).toFixed(1)} kHz` : 'N/A'}</div>
|
||||
|
||||
<div className="text-muted-foreground">Channels:</div>
|
||||
<div className="font-mono text-right">{stats.channels > 0 ? (stats.channels === 1 ? 'Mono' : 'Stereo') : 'N/A'}</div>
|
||||
|
||||
<div className="text-muted-foreground">Bit Depth:</div>
|
||||
<div className="font-mono text-right">{stats.bitDepth}-bit float</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Divider */}
|
||||
<div className="border-t border-border/30" />
|
||||
|
||||
{/* Audio Levels */}
|
||||
<div className="space-y-1">
|
||||
<div className="text-[9px] text-muted-foreground uppercase tracking-wide">Levels</div>
|
||||
<div className="grid grid-cols-2 gap-x-2 gap-y-1">
|
||||
<div className="text-muted-foreground">Peak:</div>
|
||||
<div className={cn(
|
||||
'font-mono text-right',
|
||||
stats.peakAmplitude > 0.99 ? 'text-red-500 font-bold' : ''
|
||||
)}>
|
||||
{formatDb(stats.peakAmplitude)}
|
||||
</div>
|
||||
|
||||
<div className="text-muted-foreground">RMS:</div>
|
||||
<div className="font-mono text-right">{formatDb(stats.rmsLevel)}</div>
|
||||
|
||||
<div className="text-muted-foreground">Dynamic Range:</div>
|
||||
<div className="font-mono text-right">
|
||||
{stats.dynamicRange > 0 ? `${stats.dynamicRange.toFixed(1)} dB` : 'N/A'}
|
||||
</div>
|
||||
|
||||
<div className="text-muted-foreground">Headroom:</div>
|
||||
<div className={cn(
|
||||
'font-mono text-right',
|
||||
stats.peakAmplitude > 0.99 ? 'text-red-500' :
|
||||
stats.peakAmplitude > 0.9 ? 'text-yellow-500' : 'text-green-500'
|
||||
)}>
|
||||
{stats.peakAmplitude > 0 ? `${(20 * Math.log10(1 / stats.peakAmplitude)).toFixed(1)} dB` : 'N/A'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
91
components/analysis/FrequencyAnalyzer.tsx
Normal file
91
components/analysis/FrequencyAnalyzer.tsx
Normal file
@@ -0,0 +1,91 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface FrequencyAnalyzerProps {
|
||||
analyserNode: AnalyserNode | null;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function FrequencyAnalyzer({ analyserNode, className }: FrequencyAnalyzerProps) {
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const animationFrameRef = React.useRef<number | undefined>(undefined);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!analyserNode || !canvasRef.current) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) return;
|
||||
|
||||
// Set canvas size
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
const rect = canvas.getBoundingClientRect();
|
||||
canvas.width = rect.width * dpr;
|
||||
canvas.height = rect.height * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
const bufferLength = analyserNode.frequencyBinCount;
|
||||
const dataArray = new Uint8Array(bufferLength);
|
||||
|
||||
// Get background color from computed styles
|
||||
const bgColor = getComputedStyle(canvas.parentElement!).backgroundColor;
|
||||
|
||||
const draw = () => {
|
||||
animationFrameRef.current = requestAnimationFrame(draw);
|
||||
|
||||
analyserNode.getByteFrequencyData(dataArray);
|
||||
|
||||
// Clear canvas with parent background color
|
||||
ctx.fillStyle = bgColor;
|
||||
ctx.fillRect(0, 0, rect.width, rect.height);
|
||||
|
||||
const barWidth = rect.width / bufferLength;
|
||||
let x = 0;
|
||||
|
||||
for (let i = 0; i < bufferLength; i++) {
|
||||
const barHeight = (dataArray[i] / 255) * rect.height;
|
||||
|
||||
// Color gradient based on frequency
|
||||
const hue = (i / bufferLength) * 120; // 0 (red) to 120 (green)
|
||||
ctx.fillStyle = `hsl(${180 + hue}, 70%, 50%)`;
|
||||
|
||||
ctx.fillRect(x, rect.height - barHeight, barWidth, barHeight);
|
||||
x += barWidth;
|
||||
}
|
||||
|
||||
// Draw frequency labels
|
||||
ctx.fillStyle = 'rgba(255, 255, 255, 0.5)';
|
||||
ctx.font = '10px monospace';
|
||||
ctx.textAlign = 'left';
|
||||
ctx.fillText('20Hz', 5, rect.height - 5);
|
||||
ctx.textAlign = 'center';
|
||||
ctx.fillText('1kHz', rect.width / 2, rect.height - 5);
|
||||
ctx.textAlign = 'right';
|
||||
ctx.fillText('20kHz', rect.width - 5, rect.height - 5);
|
||||
};
|
||||
|
||||
draw();
|
||||
|
||||
return () => {
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
};
|
||||
}, [analyserNode]);
|
||||
|
||||
return (
|
||||
<div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-2', className)}>
|
||||
<div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-2">
|
||||
Frequency Analyzer
|
||||
</div>
|
||||
<div className="w-full h-[calc(100%-24px)] rounded bg-muted/30">
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="w-full h-full rounded"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
167
components/analysis/LUFSMeter.tsx
Normal file
167
components/analysis/LUFSMeter.tsx
Normal file
@@ -0,0 +1,167 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface LUFSMeterProps {
|
||||
analyserNode: AnalyserNode | null;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function LUFSMeter({ analyserNode, className }: LUFSMeterProps) {
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const animationFrameRef = React.useRef<number | undefined>(undefined);
|
||||
const [lufs, setLufs] = React.useState({ integrated: -23, shortTerm: -23, momentary: -23 });
|
||||
const lufsHistoryRef = React.useRef<number[]>([]);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!analyserNode || !canvasRef.current) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) return;
|
||||
|
||||
// Set canvas size
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
const rect = canvas.getBoundingClientRect();
|
||||
canvas.width = rect.width * dpr;
|
||||
canvas.height = rect.height * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
const bufferLength = analyserNode.frequencyBinCount;
|
||||
const dataArray = new Uint8Array(bufferLength);
|
||||
|
||||
const draw = () => {
|
||||
animationFrameRef.current = requestAnimationFrame(draw);
|
||||
|
||||
analyserNode.getByteFrequencyData(dataArray);
|
||||
|
||||
// Calculate RMS from frequency data
|
||||
let sum = 0;
|
||||
for (let i = 0; i < bufferLength; i++) {
|
||||
const normalized = dataArray[i] / 255;
|
||||
sum += normalized * normalized;
|
||||
}
|
||||
const rms = Math.sqrt(sum / bufferLength);
|
||||
|
||||
// Convert to LUFS approximation (simplified K-weighting)
|
||||
// Real LUFS requires proper K-weighting filter, this is an approximation
|
||||
let lufsValue = -23; // Silence baseline
|
||||
if (rms > 0.0001) {
|
||||
lufsValue = 20 * Math.log10(rms) - 0.691; // Simplified LUFS estimation
|
||||
lufsValue = Math.max(-70, Math.min(0, lufsValue));
|
||||
}
|
||||
|
||||
// Store history for integrated measurement
|
||||
lufsHistoryRef.current.push(lufsValue);
|
||||
if (lufsHistoryRef.current.length > 300) { // Keep last 10 seconds at 30fps
|
||||
lufsHistoryRef.current.shift();
|
||||
}
|
||||
|
||||
// Calculate measurements
|
||||
const momentary = lufsValue; // Current value
|
||||
const shortTerm = lufsHistoryRef.current.slice(-90).reduce((a, b) => a + b, 0) / Math.min(90, lufsHistoryRef.current.length); // Last 3 seconds
|
||||
const integrated = lufsHistoryRef.current.reduce((a, b) => a + b, 0) / lufsHistoryRef.current.length; // All time
|
||||
|
||||
setLufs({ integrated, shortTerm, momentary });
|
||||
|
||||
// Clear canvas
|
||||
const bgColor = getComputedStyle(canvas.parentElement!).backgroundColor;
|
||||
ctx.fillStyle = bgColor;
|
||||
ctx.fillRect(0, 0, rect.width, rect.height);
|
||||
|
||||
// Draw LUFS scale (-70 to 0)
|
||||
const lufsToY = (lufs: number) => {
|
||||
return ((0 - lufs) / 70) * rect.height;
|
||||
};
|
||||
|
||||
// Draw reference lines
|
||||
ctx.strokeStyle = 'rgba(128, 128, 128, 0.2)';
|
||||
ctx.lineWidth = 1;
|
||||
[-23, -16, -9, -3].forEach(db => {
|
||||
const y = lufsToY(db);
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, y);
|
||||
ctx.lineTo(rect.width, y);
|
||||
ctx.stroke();
|
||||
|
||||
// Labels
|
||||
ctx.fillStyle = 'rgba(255, 255, 255, 0.4)';
|
||||
ctx.font = '9px monospace';
|
||||
ctx.textAlign = 'right';
|
||||
ctx.fillText(`${db}`, rect.width - 2, y - 2);
|
||||
});
|
||||
|
||||
// Draw -23 LUFS broadcast standard line
|
||||
ctx.strokeStyle = 'rgba(59, 130, 246, 0.5)';
|
||||
ctx.lineWidth = 2;
|
||||
const standardY = lufsToY(-23);
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, standardY);
|
||||
ctx.lineTo(rect.width, standardY);
|
||||
ctx.stroke();
|
||||
|
||||
// Draw bars
|
||||
const barWidth = rect.width / 4;
|
||||
const drawBar = (value: number, x: number, color: string, label: string) => {
|
||||
const y = lufsToY(value);
|
||||
const height = rect.height - y;
|
||||
|
||||
ctx.fillStyle = color;
|
||||
ctx.fillRect(x, y, barWidth - 4, height);
|
||||
|
||||
// Label
|
||||
ctx.fillStyle = 'rgba(255, 255, 255, 0.7)';
|
||||
ctx.font = 'bold 9px monospace';
|
||||
ctx.textAlign = 'center';
|
||||
ctx.fillText(label, x + barWidth / 2 - 2, rect.height - 2);
|
||||
};
|
||||
|
||||
drawBar(momentary, 0, 'rgba(239, 68, 68, 0.7)', 'M');
|
||||
drawBar(shortTerm, barWidth, 'rgba(251, 146, 60, 0.7)', 'S');
|
||||
drawBar(integrated, barWidth * 2, 'rgba(34, 197, 94, 0.7)', 'I');
|
||||
};
|
||||
|
||||
draw();
|
||||
|
||||
return () => {
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
};
|
||||
}, [analyserNode]);
|
||||
|
||||
return (
|
||||
<div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-2', className)}>
|
||||
<div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-2">
|
||||
LUFS Loudness
|
||||
</div>
|
||||
<div className="w-full h-[calc(100%-24px)] rounded bg-muted/30 flex flex-col">
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="w-full flex-1 rounded"
|
||||
/>
|
||||
<div className="grid grid-cols-3 gap-1 mt-2 text-[9px] font-mono text-center">
|
||||
<div>
|
||||
<div className="text-muted-foreground">Momentary</div>
|
||||
<div className={cn('font-bold', lufs.momentary > -9 ? 'text-red-500' : 'text-foreground')}>
|
||||
{lufs.momentary > -70 ? lufs.momentary.toFixed(1) : '-∞'}
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-muted-foreground">Short-term</div>
|
||||
<div className={cn('font-bold', lufs.shortTerm > -16 ? 'text-orange-500' : 'text-foreground')}>
|
||||
{lufs.shortTerm > -70 ? lufs.shortTerm.toFixed(1) : '-∞'}
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div className="text-muted-foreground">Integrated</div>
|
||||
<div className={cn('font-bold', Math.abs(lufs.integrated + 23) < 2 ? 'text-green-500' : 'text-foreground')}>
|
||||
{lufs.integrated > -70 ? lufs.integrated.toFixed(1) : '-∞'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
181
components/analysis/PhaseCorrelationMeter.tsx
Normal file
181
components/analysis/PhaseCorrelationMeter.tsx
Normal file
@@ -0,0 +1,181 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface PhaseCorrelationMeterProps {
|
||||
analyserNode: AnalyserNode | null;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function PhaseCorrelationMeter({ analyserNode, className }: PhaseCorrelationMeterProps) {
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const animationFrameRef = React.useRef<number | undefined>(undefined);
|
||||
const [correlation, setCorrelation] = React.useState(0);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!analyserNode || !canvasRef.current) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) return;
|
||||
|
||||
// Set canvas size
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
const rect = canvas.getBoundingClientRect();
|
||||
canvas.width = rect.width * dpr;
|
||||
canvas.height = rect.height * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
const audioContext = analyserNode.context as AudioContext;
|
||||
const bufferLength = analyserNode.fftSize;
|
||||
const dataArrayL = new Float32Array(bufferLength);
|
||||
const dataArrayR = new Float32Array(bufferLength);
|
||||
|
||||
// Create a splitter to get L/R channels
|
||||
const splitter = audioContext.createChannelSplitter(2);
|
||||
const analyserL = audioContext.createAnalyser();
|
||||
const analyserR = audioContext.createAnalyser();
|
||||
|
||||
analyserL.fftSize = bufferLength;
|
||||
analyserR.fftSize = bufferLength;
|
||||
|
||||
// Try to connect to the analyser node's source
|
||||
// Note: This is a simplified approach - ideally we'd get the source node
|
||||
try {
      analyserNode.connect(splitter);
      splitter.connect(analyserL, 0);
      splitter.connect(analyserR, 1);
    } catch (e) {
      // If connection fails, just show static display
    }

    const draw = () => {
      animationFrameRef.current = requestAnimationFrame(draw);

      try {
        analyserL.getFloatTimeDomainData(dataArrayL);
        analyserR.getFloatTimeDomainData(dataArrayR);

        // Calculate phase correlation (Pearson correlation coefficient)
        let sumL = 0, sumR = 0, sumLR = 0, sumL2 = 0, sumR2 = 0;
        const n = bufferLength;

        for (let i = 0; i < n; i++) {
          sumL += dataArrayL[i];
          sumR += dataArrayR[i];
          sumLR += dataArrayL[i] * dataArrayR[i];
          sumL2 += dataArrayL[i] * dataArrayL[i];
          sumR2 += dataArrayR[i] * dataArrayR[i];
        }

        const meanL = sumL / n;
        const meanR = sumR / n;
        const covLR = (sumLR / n) - (meanL * meanR);
        const varL = (sumL2 / n) - (meanL * meanL);
        const varR = (sumR2 / n) - (meanR * meanR);

        let r = 0;
        if (varL > 0 && varR > 0) {
          r = covLR / Math.sqrt(varL * varR);
          r = Math.max(-1, Math.min(1, r)); // Clamp to [-1, 1]
        }

        setCorrelation(r);

        // Clear canvas
        const bgColor = getComputedStyle(canvas.parentElement!).backgroundColor;
        ctx.fillStyle = bgColor;
        ctx.fillRect(0, 0, rect.width, rect.height);

        // Draw scale background
        const centerY = rect.height / 2;
        const barHeight = 20;

        // Draw scale markers
        ctx.fillStyle = 'rgba(128, 128, 128, 0.2)';
        ctx.fillRect(0, centerY - barHeight / 2, rect.width, barHeight);

        // Draw center line (0)
        ctx.strokeStyle = 'rgba(128, 128, 128, 0.5)';
        ctx.lineWidth = 1;
        ctx.beginPath();
        ctx.moveTo(rect.width / 2, centerY - barHeight / 2 - 5);
        ctx.lineTo(rect.width / 2, centerY + barHeight / 2 + 5);
        ctx.stroke();

        // Draw correlation indicator
        const x = ((r + 1) / 2) * rect.width;

        // Color based on correlation value
        let color;
        if (r > 0.9) {
          color = '#10b981'; // Green - good correlation (mono-ish)
        } else if (r > 0.5) {
          color = '#84cc16'; // Lime - moderate correlation
        } else if (r > -0.5) {
          color = '#eab308'; // Yellow - decorrelated (good stereo)
        } else if (r > -0.9) {
          color = '#f97316'; // Orange - negative correlation
        } else {
          color = '#ef4444'; // Red - phase issues
        }

        ctx.fillStyle = color;
        ctx.fillRect(x - 2, centerY - barHeight / 2, 4, barHeight);

        // Draw labels
        ctx.fillStyle = 'rgba(255, 255, 255, 0.7)';
        ctx.font = '9px monospace';
        ctx.textAlign = 'left';
        ctx.fillText('-1', 2, centerY - barHeight / 2 - 8);
        ctx.textAlign = 'center';
        ctx.fillText('0', rect.width / 2, centerY - barHeight / 2 - 8);
        ctx.textAlign = 'right';
        ctx.fillText('+1', rect.width - 2, centerY - barHeight / 2 - 8);

        // Draw correlation value
        ctx.textAlign = 'center';
        ctx.font = 'bold 11px monospace';
        ctx.fillText(r.toFixed(3), rect.width / 2, centerY + barHeight / 2 + 15);
      } catch (e) {
        // Silently handle errors
      }
    };

    draw();

    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      try {
        splitter.disconnect();
        analyserL.disconnect();
        analyserR.disconnect();
      } catch (e) {
        // Ignore disconnection errors
      }
    };
  }, [analyserNode]);

  return (
    <div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-2', className)}>
      <div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-2">
        Phase Correlation
      </div>
      <div className="w-full h-[calc(100%-24px)] rounded bg-muted/30 flex flex-col items-center justify-center">
        <canvas
          ref={canvasRef}
          className="w-full h-16 rounded"
        />
        <div className="text-[9px] text-muted-foreground mt-2 text-center px-2">
          {correlation > 0.9 ? 'Mono-like' :
            correlation > 0.5 ? 'Good Stereo' :
            correlation > -0.5 ? 'Wide Stereo' :
            'Phase Issues'}
        </div>
      </div>
    </div>
  );
}
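For reference, the value plotted above is the Pearson correlation coefficient of the left/right sample buffers, r = cov(L, R) / sqrt(var(L) · var(R)), clamped to [-1, 1]: readings near +1 indicate near-mono material, readings around 0 indicate wide or decorrelated stereo, and strongly negative readings indicate out-of-phase content that will cancel when the mix is summed to mono.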
components/analysis/Spectrogram.tsx (new file, 134 lines)
@@ -0,0 +1,134 @@
'use client';

import * as React from 'react';
import { cn } from '@/lib/utils/cn';

export interface SpectrogramProps {
  analyserNode: AnalyserNode | null;
  className?: string;
}

export function Spectrogram({ analyserNode, className }: SpectrogramProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const animationFrameRef = React.useRef<number | undefined>(undefined);
  const spectrogramDataRef = React.useRef<ImageData | null>(null);

  React.useEffect(() => {
    if (!analyserNode || !canvasRef.current) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Set canvas size
    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);

    const bufferLength = analyserNode.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);

    // Initialize spectrogram data
    spectrogramDataRef.current = ctx.createImageData(rect.width, rect.height);

    const draw = () => {
      animationFrameRef.current = requestAnimationFrame(draw);

      analyserNode.getByteFrequencyData(dataArray);

      if (!spectrogramDataRef.current) return;

      const imageData = spectrogramDataRef.current;

      // Shift existing data to the left by 1 pixel
      for (let y = 0; y < rect.height; y++) {
        for (let x = 0; x < rect.width - 1; x++) {
          const sourceIndex = ((y * rect.width) + x + 1) * 4;
          const targetIndex = ((y * rect.width) + x) * 4;
          imageData.data[targetIndex] = imageData.data[sourceIndex];
          imageData.data[targetIndex + 1] = imageData.data[sourceIndex + 1];
          imageData.data[targetIndex + 2] = imageData.data[sourceIndex + 2];
          imageData.data[targetIndex + 3] = imageData.data[sourceIndex + 3];
        }
      }

      // Add new column on the right
      const x = rect.width - 1;
      for (let y = 0; y < rect.height; y++) {
        // Map frequency bins to canvas height (inverted); clamp so the top row
        // does not index one past the last bin
        const freqIndex = Math.min(bufferLength - 1, Math.floor((1 - y / rect.height) * bufferLength));
        const value = dataArray[freqIndex];

        // Color mapping with transparency: transparent (0) -> blue -> cyan -> green -> yellow (255)
        let r, g, b, a;
        if (value < 64) {
          // Transparent to blue
          const t = value / 64;
          r = 0;
          g = 0;
          b = Math.round(255 * t);
          a = Math.round(255 * t);
        } else if (value < 128) {
          // Blue to cyan
          r = 0;
          g = (value - 64) * 4;
          b = 255;
          a = 255;
        } else if (value < 192) {
          // Cyan to green
          r = 0;
          g = 255;
          b = 255 - (value - 128) * 4;
          a = 255;
        } else {
          // Green to yellow
          r = (value - 192) * 4;
          g = 255;
          b = 0;
          a = 255;
        }

        const index = ((y * rect.width) + x) * 4;
        imageData.data[index] = r;
        imageData.data[index + 1] = g;
        imageData.data[index + 2] = b;
        imageData.data[index + 3] = a;
      }

      // Draw the spectrogram
      ctx.putImageData(imageData, 0, 0);

      // Draw frequency labels
      ctx.fillStyle = 'rgba(255, 255, 255, 0.8)';
      ctx.font = '10px monospace';
      ctx.textAlign = 'left';
      ctx.fillText('20kHz', 5, 12);
      ctx.fillText('1kHz', 5, rect.height / 2);
      ctx.fillText('20Hz', 5, rect.height - 5);
    };

    draw();

    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, [analyserNode]);

  return (
    <div className={cn('w-full h-full bg-card/50 border-2 border-accent/50 rounded-lg p-2', className)}>
      <div className="text-[10px] font-bold text-accent uppercase tracking-wider mb-2">
        Spectrogram
      </div>
      <div className="w-full h-[calc(100%-24px)] rounded bg-muted/30">
        <canvas
          ref={canvasRef}
          className="w-full h-full rounded"
        />
      </div>
    </div>
  );
}
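Note that the row-to-bin mapping above is linear in FFT bin index, so the '1kHz' label at mid-height is only approximate (mid-height corresponds to roughly half the Nyquist frequency). If a log-frequency axis is wanted to match the labels, a mapping along these lines could be used; this is a sketch only, and the 48 kHz default sample rate and the 20 Hz–20 kHz bounds are assumptions, not values taken from this diff:

// Sketch: map a canvas row to an analyser bin on a log-frequency axis
// (20 Hz at the bottom, 20 kHz at the top; bounds and default sampleRate assumed).
function rowToBinLog(y: number, height: number, bufferLength: number, sampleRate = 48000): number {
  const fMin = 20;
  const fMax = 20000;
  const t = 1 - y / height;                      // 0 at the bottom row, 1 at the top row
  const freq = fMin * Math.pow(fMax / fMin, t);  // logarithmic interpolation between fMin and fMax
  const binHz = (sampleRate / 2) / bufferLength; // width of one FFT bin in Hz
  return Math.min(bufferLength - 1, Math.round(freq / binHz));
}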
components/automation/AutomationHeader.tsx (new file, 197 lines)
@@ -0,0 +1,197 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Eye, EyeOff, ChevronDown, ChevronUp, Copy, Clipboard } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { AutomationMode } from '@/types/automation';
|
||||
|
||||
export interface AutomationHeaderProps {
|
||||
parameterName: string;
|
||||
currentValue?: number;
|
||||
visible: boolean;
|
||||
mode: AutomationMode;
|
||||
color?: string;
|
||||
onToggleVisible?: () => void;
|
||||
onModeChange?: (mode: AutomationMode) => void;
|
||||
onHeightChange?: (delta: number) => void;
|
||||
className?: string;
|
||||
formatter?: (value: number) => string;
|
||||
// Parameter selection
|
||||
availableParameters?: Array<{ id: string; name: string }>;
|
||||
selectedParameterId?: string;
|
||||
onParameterChange?: (parameterId: string) => void;
|
||||
// Copy/Paste automation
|
||||
onCopyAutomation?: () => void;
|
||||
onPasteAutomation?: () => void;
|
||||
}
|
||||
|
||||
const MODE_LABELS: Record<AutomationMode, string> = {
|
||||
read: 'R',
|
||||
write: 'W',
|
||||
touch: 'T',
|
||||
latch: 'L',
|
||||
};
|
||||
|
||||
const MODE_COLORS: Record<AutomationMode, string> = {
|
||||
read: 'text-muted-foreground',
|
||||
write: 'text-red-500',
|
||||
touch: 'text-yellow-500',
|
||||
latch: 'text-orange-500',
|
||||
};
|
||||
|
||||
export function AutomationHeader({
|
||||
parameterName,
|
||||
currentValue,
|
||||
visible,
|
||||
mode,
|
||||
color,
|
||||
onToggleVisible,
|
||||
onModeChange,
|
||||
onHeightChange,
|
||||
className,
|
||||
formatter,
|
||||
availableParameters,
|
||||
selectedParameterId,
|
||||
onParameterChange,
|
||||
onCopyAutomation,
|
||||
onPasteAutomation,
|
||||
}: AutomationHeaderProps) {
|
||||
const modes: AutomationMode[] = ['read', 'write', 'touch', 'latch'];
|
||||
const currentModeIndex = modes.indexOf(mode);
|
||||
|
||||
const handleCycleModeClick = () => {
|
||||
if (!onModeChange) return;
|
||||
const nextIndex = (currentModeIndex + 1) % modes.length;
|
||||
onModeChange(modes[nextIndex]);
|
||||
};
|
||||
|
||||
const formatValue = (value: number) => {
|
||||
if (formatter) return formatter(value);
|
||||
return value.toFixed(2);
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center gap-2 px-3 py-1.5 bg-muted border-t border-b border-border/30 flex-shrink-0',
|
||||
className
|
||||
)}
|
||||
>
|
||||
{/* Automation label - always visible */}
|
||||
<span className="text-xs font-medium flex-shrink-0">Automation</span>
|
||||
|
||||
{/* Color indicator */}
|
||||
{color && (
|
||||
<div
|
||||
className="w-1 h-4 rounded-full flex-shrink-0"
|
||||
style={{ backgroundColor: color }}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Parameter name / selector */}
|
||||
{availableParameters && availableParameters.length > 1 ? (
|
||||
<select
|
||||
value={selectedParameterId}
|
||||
onChange={(e) => onParameterChange?.(e.target.value)}
|
||||
className="text-xs font-medium text-foreground w-auto min-w-[120px] max-w-[200px] bg-background/50 border border-border/30 rounded px-1.5 py-0.5 hover:bg-background/80 focus:outline-none focus:ring-1 focus:ring-primary"
|
||||
>
|
||||
{availableParameters.map((param) => (
|
||||
<option key={param.id} value={param.id}>
|
||||
{param.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
) : (
|
||||
<span className="text-xs font-medium text-foreground truncate">
|
||||
{parameterName}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{/* Current value display */}
|
||||
{currentValue !== undefined && (
|
||||
<span className="text-[10px] font-mono text-muted-foreground px-1.5 py-0.5 bg-background/50 rounded">
|
||||
{formatValue(currentValue)}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{/* Automation mode button */}
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={handleCycleModeClick}
|
||||
title={`Automation mode: ${mode} (click to cycle)`}
|
||||
className={cn('h-5 w-5 text-[10px] font-bold flex-shrink-0', MODE_COLORS[mode])}
|
||||
>
|
||||
{MODE_LABELS[mode]}
|
||||
</Button>
|
||||
|
||||
{/* Height controls */}
|
||||
{onHeightChange && (
|
||||
<div className="flex flex-col gap-0 flex-shrink-0">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => onHeightChange(20)}
|
||||
title="Increase lane height"
|
||||
className="h-3 w-4 p-0"
|
||||
>
|
||||
<ChevronUp className="h-2.5 w-2.5" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => onHeightChange(-20)}
|
||||
title="Decrease lane height"
|
||||
className="h-3 w-4 p-0"
|
||||
>
|
||||
<ChevronDown className="h-2.5 w-2.5" />
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Copy/Paste automation controls */}
|
||||
{(onCopyAutomation || onPasteAutomation) && (
|
||||
<div className="flex gap-1 flex-shrink-0 ml-auto mr-8">
|
||||
{onCopyAutomation && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onCopyAutomation}
|
||||
title="Copy automation data"
|
||||
className="h-5 w-5"
|
||||
>
|
||||
<Copy className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
{onPasteAutomation && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onPasteAutomation}
|
||||
title="Paste automation data"
|
||||
className="h-5 w-5"
|
||||
>
|
||||
<Clipboard className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Show/hide toggle - Positioned absolutely on the right */}
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onToggleVisible}
|
||||
title={visible ? 'Hide automation' : 'Show automation'}
|
||||
className="absolute right-2 h-5 w-5 flex-shrink-0"
|
||||
>
|
||||
{visible ? (
|
||||
<Eye className="h-3 w-3" />
|
||||
) : (
|
||||
<EyeOff className="h-3 w-3 text-muted-foreground" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
components/automation/AutomationLane.tsx (new file, 317 lines)
@@ -0,0 +1,317 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { AutomationLane as AutomationLaneType, AutomationPoint as AutomationPointType } from '@/types/automation';
|
||||
import { AutomationPoint } from './AutomationPoint';
|
||||
|
||||
export interface AutomationLaneProps {
|
||||
lane: AutomationLaneType;
|
||||
duration: number; // Total timeline duration in seconds
|
||||
zoom: number; // Zoom factor
|
||||
currentTime?: number; // Playhead position
|
||||
onUpdateLane?: (updates: Partial<AutomationLaneType>) => void;
|
||||
onAddPoint?: (time: number, value: number) => void;
|
||||
onUpdatePoint?: (pointId: string, updates: Partial<AutomationPointType>) => void;
|
||||
onRemovePoint?: (pointId: string) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function AutomationLane({
|
||||
lane,
|
||||
duration,
|
||||
zoom,
|
||||
currentTime = 0,
|
||||
onUpdateLane,
|
||||
onAddPoint,
|
||||
onUpdatePoint,
|
||||
onRemovePoint,
|
||||
className,
|
||||
}: AutomationLaneProps) {
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
const [selectedPointId, setSelectedPointId] = React.useState<string | null>(null);
|
||||
const [isDraggingPoint, setIsDraggingPoint] = React.useState(false);
|
||||
|
||||
// Convert time to X pixel position
|
||||
const timeToX = React.useCallback(
|
||||
(time: number): number => {
|
||||
if (!containerRef.current) return 0;
|
||||
const width = containerRef.current.clientWidth;
|
||||
return (time / duration) * width;
|
||||
},
|
||||
[duration]
|
||||
);
|
||||
|
||||
// Convert value (0-1) to Y pixel position (inverted: 0 at bottom, 1 at top)
|
||||
const valueToY = React.useCallback(
|
||||
(value: number): number => {
|
||||
if (!containerRef.current) return 0;
|
||||
const height = lane.height;
|
||||
return height * (1 - value);
|
||||
},
|
||||
[lane.height]
|
||||
);
|
||||
|
||||
// Convert X pixel position to time
|
||||
const xToTime = React.useCallback(
|
||||
(x: number): number => {
|
||||
if (!containerRef.current) return 0;
|
||||
const width = containerRef.current.clientWidth;
|
||||
return (x / width) * duration;
|
||||
},
|
||||
[duration]
|
||||
);
|
||||
|
||||
// Convert Y pixel position to value (0-1)
|
||||
const yToValue = React.useCallback(
|
||||
(y: number): number => {
|
||||
const height = lane.height;
|
||||
return Math.max(0, Math.min(1, 1 - y / height));
|
||||
},
|
||||
[lane.height]
|
||||
);
|
||||
|
||||
// Draw automation curve
|
||||
React.useEffect(() => {
|
||||
if (!canvasRef.current || !lane.visible) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) return;
|
||||
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
const rect = canvas.getBoundingClientRect();
|
||||
|
||||
canvas.width = rect.width * dpr;
|
||||
canvas.height = rect.height * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
const width = rect.width;
|
||||
const height = rect.height;
|
||||
|
||||
// Clear canvas
|
||||
ctx.clearRect(0, 0, width, height);
|
||||
|
||||
// Background
|
||||
ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-background') || 'rgb(15, 23, 42)';
|
||||
ctx.fillRect(0, 0, width, height);
|
||||
|
||||
// Grid lines (horizontal value guides)
|
||||
ctx.strokeStyle = 'rgba(148, 163, 184, 0.1)';
|
||||
ctx.lineWidth = 1;
|
||||
for (let i = 0; i <= 4; i++) {
|
||||
const y = (height / 4) * i;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, y);
|
||||
ctx.lineTo(width, y);
|
||||
ctx.stroke();
|
||||
}
|
||||
|
||||
// Draw automation curve
|
||||
if (lane.points.length > 0) {
|
||||
const color = lane.color || 'rgb(59, 130, 246)';
|
||||
ctx.strokeStyle = color;
|
||||
ctx.lineWidth = 2;
|
||||
ctx.beginPath();
|
||||
|
||||
// Sort points by time
|
||||
const sortedPoints = [...lane.points].sort((a, b) => a.time - b.time);
|
||||
|
||||
// Draw lines between points
|
||||
for (let i = 0; i < sortedPoints.length; i++) {
|
||||
const point = sortedPoints[i];
|
||||
const x = timeToX(point.time);
|
||||
const y = valueToY(point.value);
|
||||
|
||||
if (i === 0) {
|
||||
// Start from left edge at first point's value
|
||||
ctx.moveTo(0, y);
|
||||
ctx.lineTo(x, y);
|
||||
} else {
|
||||
const prevPoint = sortedPoints[i - 1];
|
||||
const prevX = timeToX(prevPoint.time);
|
||||
const prevY = valueToY(prevPoint.value);
|
||||
|
||||
if (point.curve === 'step') {
|
||||
// Step curve: horizontal then vertical
|
||||
ctx.lineTo(x, prevY);
|
||||
ctx.lineTo(x, y);
|
||||
} else {
|
||||
// Linear curve (bezier not implemented yet)
|
||||
ctx.lineTo(x, y);
|
||||
}
|
||||
}
|
||||
|
||||
// Extend to right edge from last point
|
||||
if (i === sortedPoints.length - 1) {
|
||||
ctx.lineTo(width, y);
|
||||
}
|
||||
}
|
||||
|
||||
ctx.stroke();
|
||||
|
||||
// Fill area under curve
|
||||
ctx.globalAlpha = 0.2;
|
||||
ctx.fillStyle = color;
|
||||
ctx.lineTo(width, height);
|
||||
ctx.lineTo(0, height);
|
||||
ctx.closePath();
|
||||
ctx.fill();
|
||||
ctx.globalAlpha = 1.0;
|
||||
}
|
||||
|
||||
// Draw playhead
|
||||
if (currentTime >= 0 && duration > 0) {
|
||||
const playheadX = timeToX(currentTime);
|
||||
if (playheadX >= 0 && playheadX <= width) {
|
||||
ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
|
||||
ctx.lineWidth = 2;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(playheadX, 0);
|
||||
ctx.lineTo(playheadX, height);
|
||||
ctx.stroke();
|
||||
}
|
||||
}
|
||||
}, [lane, duration, zoom, currentTime, timeToX, valueToY]);
|
||||
|
||||
// Handle canvas click to add point
|
||||
const handleCanvasClick = React.useCallback(
|
||||
(e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (isDraggingPoint || !onAddPoint) return;
|
||||
|
||||
const rect = e.currentTarget.getBoundingClientRect();
|
||||
const x = e.clientX - rect.left;
|
||||
const y = e.clientY - rect.top;
|
||||
|
||||
const time = xToTime(x);
|
||||
const value = yToValue(y);
|
||||
|
||||
onAddPoint(time, value);
|
||||
},
|
||||
[isDraggingPoint, onAddPoint, xToTime, yToValue]
|
||||
);
|
||||
|
||||
// Handle point drag
|
||||
const handlePointDragStart = React.useCallback((pointId: string) => {
|
||||
setIsDraggingPoint(true);
|
||||
setSelectedPointId(pointId);
|
||||
}, []);
|
||||
|
||||
const handlePointDrag = React.useCallback(
|
||||
(pointId: string, deltaX: number, deltaY: number) => {
|
||||
if (!containerRef.current || !onUpdatePoint) return;
|
||||
|
||||
const point = lane.points.find((p) => p.id === pointId);
|
||||
if (!point) return;
|
||||
|
||||
const rect = containerRef.current.getBoundingClientRect();
|
||||
const width = rect.width;
|
||||
|
||||
// Calculate new time and value
|
||||
const timePerPixel = duration / width;
|
||||
const valuePerPixel = 1 / lane.height;
|
||||
|
||||
const newTime = Math.max(0, Math.min(duration, point.time + deltaX * timePerPixel));
|
||||
const newValue = Math.max(0, Math.min(1, point.value - deltaY * valuePerPixel));
|
||||
|
||||
onUpdatePoint(pointId, { time: newTime, value: newValue });
|
||||
},
|
||||
[lane.points, lane.height, duration, onUpdatePoint]
|
||||
);
|
||||
|
||||
const handlePointDragEnd = React.useCallback(() => {
|
||||
setIsDraggingPoint(false);
|
||||
}, []);
|
||||
|
||||
// Handle point click (select)
|
||||
const handlePointClick = React.useCallback((pointId: string, event: React.MouseEvent) => {
|
||||
event.stopPropagation();
|
||||
setSelectedPointId(pointId);
|
||||
}, []);
|
||||
|
||||
// Handle point double-click (delete)
|
||||
const handlePointDoubleClick = React.useCallback(
|
||||
(pointId: string) => {
|
||||
if (onRemovePoint) {
|
||||
onRemovePoint(pointId);
|
||||
}
|
||||
},
|
||||
[onRemovePoint]
|
||||
);
|
||||
|
||||
// Handle keyboard delete
|
||||
React.useEffect(() => {
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
if ((e.key === 'Delete' || e.key === 'Backspace') && selectedPointId && onRemovePoint) {
|
||||
e.preventDefault();
|
||||
onRemovePoint(selectedPointId);
|
||||
setSelectedPointId(null);
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('keydown', handleKeyDown);
|
||||
return () => window.removeEventListener('keydown', handleKeyDown);
|
||||
}, [selectedPointId, onRemovePoint]);
|
||||
|
||||
// Get current value at playhead (interpolated)
|
||||
const getCurrentValue = React.useCallback((): number | undefined => {
|
||||
if (lane.points.length === 0) return undefined;
|
||||
|
||||
const sortedPoints = [...lane.points].sort((a, b) => a.time - b.time);
|
||||
|
||||
// Find surrounding points
|
||||
let prevPoint = sortedPoints[0];
|
||||
let nextPoint = sortedPoints[sortedPoints.length - 1];
|
||||
|
||||
for (let i = 0; i < sortedPoints.length - 1; i++) {
|
||||
if (sortedPoints[i].time <= currentTime && sortedPoints[i + 1].time >= currentTime) {
|
||||
prevPoint = sortedPoints[i];
|
||||
nextPoint = sortedPoints[i + 1];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Interpolate
|
||||
if (currentTime <= prevPoint.time) return prevPoint.value;
|
||||
if (currentTime >= nextPoint.time) return nextPoint.value;
|
||||
|
||||
const timeDelta = nextPoint.time - prevPoint.time;
|
||||
const valueDelta = nextPoint.value - prevPoint.value;
|
||||
const progress = (currentTime - prevPoint.time) / timeDelta;
|
||||
|
||||
return prevPoint.value + valueDelta * progress;
|
||||
}, [lane.points, currentTime]);
|
||||
|
||||
if (!lane.visible) return null;
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn('relative bg-background/30 overflow-hidden cursor-crosshair', className)}
|
||||
style={{ height: lane.height }}
|
||||
>
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute inset-0 w-full h-full"
|
||||
onClick={handleCanvasClick}
|
||||
/>
|
||||
|
||||
{/* Automation points */}
|
||||
{lane.points.map((point) => (
|
||||
<AutomationPoint
|
||||
key={point.id}
|
||||
point={point}
|
||||
x={timeToX(point.time)}
|
||||
y={valueToY(point.value)}
|
||||
isSelected={selectedPointId === point.id}
|
||||
onDragStart={handlePointDragStart}
|
||||
onDrag={handlePointDrag}
|
||||
onDragEnd={handlePointDragEnd}
|
||||
onClick={handlePointClick}
|
||||
onDoubleClick={handlePointDoubleClick}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
components/automation/AutomationPoint.tsx (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { AutomationPoint as AutomationPointType } from '@/types/automation';
|
||||
|
||||
export interface AutomationPointProps {
|
||||
point: AutomationPointType;
|
||||
x: number; // Pixel position
|
||||
y: number; // Pixel position
|
||||
isSelected?: boolean;
|
||||
onDragStart?: (pointId: string, startX: number, startY: number) => void;
|
||||
onDrag?: (pointId: string, deltaX: number, deltaY: number) => void;
|
||||
onDragEnd?: (pointId: string) => void;
|
||||
onClick?: (pointId: string, event: React.MouseEvent) => void;
|
||||
onDoubleClick?: (pointId: string) => void;
|
||||
}
|
||||
|
||||
export function AutomationPoint({
|
||||
point,
|
||||
x,
|
||||
y,
|
||||
isSelected = false,
|
||||
onDragStart,
|
||||
onDrag,
|
||||
onDragEnd,
|
||||
onClick,
|
||||
onDoubleClick,
|
||||
}: AutomationPointProps) {
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
const dragStartRef = React.useRef({ x: 0, y: 0 });
|
||||
|
||||
const handleMouseDown = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (e.button !== 0) return; // Only left click
|
||||
|
||||
e.stopPropagation();
|
||||
setIsDragging(true);
|
||||
dragStartRef.current = { x: e.clientX, y: e.clientY };
|
||||
|
||||
if (onDragStart) {
|
||||
onDragStart(point.id, e.clientX, e.clientY);
|
||||
}
|
||||
},
|
||||
[point.id, onDragStart]
|
||||
);
|
||||
|
||||
const handleClick = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (!isDragging && onClick) {
|
||||
onClick(point.id, e);
|
||||
}
|
||||
},
|
||||
[isDragging, point.id, onClick]
|
||||
);
|
||||
|
||||
const handleDoubleClick = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
if (onDoubleClick) {
|
||||
onDoubleClick(point.id);
|
||||
}
|
||||
},
|
||||
[point.id, onDoubleClick]
|
||||
);
|
||||
|
||||
// Global mouse handlers
|
||||
React.useEffect(() => {
|
||||
if (!isDragging) return;
|
||||
|
||||
const handleMouseMove = (e: MouseEvent) => {
|
||||
if (!isDragging) return;
|
||||
|
||||
const deltaX = e.clientX - dragStartRef.current.x;
|
||||
const deltaY = e.clientY - dragStartRef.current.y;
|
||||
|
||||
if (onDrag) {
|
||||
onDrag(point.id, deltaX, deltaY);
|
||||
}
|
||||
|
||||
// Update drag start position for next delta calculation
|
||||
dragStartRef.current = { x: e.clientX, y: e.clientY };
|
||||
};
|
||||
|
||||
const handleMouseUp = () => {
|
||||
if (isDragging) {
|
||||
setIsDragging(false);
|
||||
if (onDragEnd) {
|
||||
onDragEnd(point.id);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
};
|
||||
}, [isDragging, point.id, onDrag, onDragEnd]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'absolute rounded-full cursor-pointer transition-all select-none',
|
||||
'hover:scale-125',
|
||||
isDragging ? 'scale-125 z-10' : 'z-0',
|
||||
isSelected
|
||||
? 'w-3 h-3 bg-primary border-2 border-background shadow-lg'
|
||||
: 'w-2.5 h-2.5 bg-primary/80 border border-background shadow-md'
|
||||
)}
|
||||
style={{
|
||||
left: x - (isSelected || isDragging ? 6 : 5),
|
||||
top: y - (isSelected || isDragging ? 6 : 5),
|
||||
}}
|
||||
onMouseDown={handleMouseDown}
|
||||
onClick={handleClick}
|
||||
onDoubleClick={handleDoubleClick}
|
||||
title={`Time: ${point.time.toFixed(3)}s, Value: ${point.value.toFixed(3)}`}
|
||||
/>
|
||||
);
|
||||
}
|
||||
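Note on the drag handling above: dragStartRef is reset to the current pointer position on every mousemove, so onDrag reports incremental deltas per event rather than offsets from the original mousedown. AutomationLane's handlePointDrag depends on this, since it adds each delta to the point's current time and value.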
components/controls/MasterControls.tsx (new file, 150 lines)
@@ -0,0 +1,150 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import { CircularKnob } from '@/components/ui/CircularKnob';
|
||||
import { MasterFader } from './MasterFader';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface MasterControlsProps {
|
||||
volume: number;
|
||||
pan: number;
|
||||
peakLevel: number;
|
||||
rmsLevel: number;
|
||||
isClipping: boolean;
|
||||
isMuted?: boolean;
|
||||
collapsed?: boolean; // For collapsible on mobile/small screens
|
||||
onVolumeChange: (volume: number) => void;
|
||||
onPanChange: (pan: number) => void;
|
||||
onMuteToggle: () => void;
|
||||
onResetClip?: () => void;
|
||||
onToggleCollapse?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function MasterControls({
|
||||
volume,
|
||||
pan,
|
||||
peakLevel,
|
||||
rmsLevel,
|
||||
isClipping,
|
||||
isMuted = false,
|
||||
collapsed = false,
|
||||
onVolumeChange,
|
||||
onPanChange,
|
||||
onMuteToggle,
|
||||
onResetClip,
|
||||
onToggleCollapse,
|
||||
className,
|
||||
}: MasterControlsProps) {
|
||||
// Collapsed view - minimal controls
|
||||
if (collapsed) {
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex flex-col items-center gap-2 px-3 py-2 bg-card/50 border border-accent/50 rounded-lg w-full',
|
||||
className
|
||||
)}>
|
||||
<div className="flex items-center justify-between w-full">
|
||||
<div className="text-xs font-bold text-accent uppercase tracking-wider">
|
||||
Master
|
||||
</div>
|
||||
{onToggleCollapse && (
|
||||
<button
|
||||
onClick={onToggleCollapse}
|
||||
className="p-1 hover:bg-accent/20 rounded transition-colors"
|
||||
title="Expand master controls"
|
||||
>
|
||||
<ChevronDown className="h-3 w-3 text-muted-foreground" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-2 w-full justify-center">
|
||||
<button
|
||||
onClick={onMuteToggle}
|
||||
className={cn(
|
||||
'h-7 w-7 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
isMuted
|
||||
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isMuted ? 'Unmute' : 'Mute'}
|
||||
>
|
||||
M
|
||||
</button>
|
||||
<div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
|
||||
<div
|
||||
className={cn(
|
||||
'h-full transition-all',
|
||||
peakLevel > 0.95 ? 'bg-red-500' : peakLevel > 0.8 ? 'bg-yellow-500' : 'bg-green-500'
|
||||
)}
|
||||
style={{ width: `${peakLevel * 100}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex flex-col items-center gap-3 px-4 py-3 bg-card/50 border-2 border-accent/50 rounded-lg',
|
||||
className
|
||||
)}>
|
||||
{/* Master Label with collapse button */}
|
||||
<div className="flex items-center justify-between w-full">
|
||||
<div className="text-[10px] font-bold text-accent uppercase tracking-wider flex-1 text-center">
|
||||
Master
|
||||
</div>
|
||||
{onToggleCollapse && (
|
||||
<button
|
||||
onClick={onToggleCollapse}
|
||||
className="p-0.5 hover:bg-accent/20 rounded transition-colors flex-shrink-0 lg:hidden"
|
||||
title="Collapse master controls"
|
||||
>
|
||||
<ChevronUp className="h-3 w-3 text-muted-foreground" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Pan Control */}
|
||||
<CircularKnob
|
||||
value={pan}
|
||||
onChange={onPanChange}
|
||||
min={-1}
|
||||
max={1}
|
||||
step={0.01}
|
||||
label="PAN"
|
||||
size={48}
|
||||
formatValue={(value: number) => {
|
||||
if (Math.abs(value) < 0.01) return 'C';
|
||||
if (value < 0) return `${Math.abs(value * 100).toFixed(0)}L`;
|
||||
return `${(value * 100).toFixed(0)}R`;
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Master Fader with Integrated Meters */}
|
||||
<MasterFader
|
||||
value={volume}
|
||||
peakLevel={peakLevel}
|
||||
rmsLevel={rmsLevel}
|
||||
isClipping={isClipping}
|
||||
onChange={onVolumeChange}
|
||||
onResetClip={onResetClip}
|
||||
/>
|
||||
|
||||
{/* Mute Button */}
|
||||
<button
|
||||
onClick={onMuteToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-[11px] font-bold',
|
||||
isMuted
|
||||
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isMuted ? 'Unmute' : 'Mute'}
|
||||
>
|
||||
M
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
components/controls/MasterFader.tsx (new file, 261 lines)
@@ -0,0 +1,261 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface MasterFaderProps {
|
||||
value: number;
|
||||
peakLevel: number;
|
||||
rmsLevel: number;
|
||||
isClipping: boolean;
|
||||
onChange: (value: number) => void;
|
||||
onResetClip?: () => void;
|
||||
onTouchStart?: () => void;
|
||||
onTouchEnd?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function MasterFader({
|
||||
value,
|
||||
peakLevel,
|
||||
rmsLevel,
|
||||
isClipping,
|
||||
onChange,
|
||||
onResetClip,
|
||||
onTouchStart,
|
||||
onTouchEnd,
|
||||
className,
|
||||
}: MasterFaderProps) {
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
|
||||
// Convert linear 0-1 to dB scale for display
|
||||
const linearToDb = (linear: number): number => {
|
||||
if (linear === 0) return -60;
|
||||
const db = 20 * Math.log10(linear);
|
||||
return Math.max(-60, Math.min(0, db));
|
||||
};
|
||||
|
||||
const valueDb = linearToDb(value);
|
||||
const peakDb = linearToDb(peakLevel);
|
||||
const rmsDb = linearToDb(rmsLevel);
|
||||
|
||||
// Calculate bar widths (0-100%)
|
||||
const peakWidth = ((peakDb + 60) / 60) * 100;
|
||||
const rmsWidth = ((rmsDb + 60) / 60) * 100;
|
||||
|
||||
const handleMouseDown = (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
setIsDragging(true);
|
||||
onTouchStart?.();
|
||||
updateValue(e.clientY);
|
||||
};
|
||||
|
||||
const handleMouseMove = React.useCallback(
|
||||
(e: MouseEvent) => {
|
||||
if (!isDragging) return;
|
||||
updateValue(e.clientY);
|
||||
},
|
||||
[isDragging]
|
||||
);
|
||||
|
||||
const handleMouseUp = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
const handleTouchStart = (e: React.TouchEvent) => {
|
||||
e.preventDefault();
|
||||
const touch = e.touches[0];
|
||||
setIsDragging(true);
|
||||
onTouchStart?.();
|
||||
updateValue(touch.clientY);
|
||||
};
|
||||
|
||||
const handleTouchMove = React.useCallback(
|
||||
(e: TouchEvent) => {
|
||||
if (!isDragging || e.touches.length === 0) return;
|
||||
const touch = e.touches[0];
|
||||
updateValue(touch.clientY);
|
||||
},
|
||||
[isDragging]
|
||||
);
|
||||
|
||||
const handleTouchEnd = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
const updateValue = (clientY: number) => {
|
||||
if (!containerRef.current) return;
|
||||
|
||||
const rect = containerRef.current.getBoundingClientRect();
|
||||
const y = clientY - rect.top;
|
||||
|
||||
// Track has 32px (2rem) padding on top and bottom (top-8 bottom-8)
|
||||
const trackPadding = 32;
|
||||
const trackHeight = rect.height - (trackPadding * 2);
|
||||
|
||||
// Clamp y to track bounds
|
||||
const clampedY = Math.max(trackPadding, Math.min(rect.height - trackPadding, y));
|
||||
|
||||
// Inverted: top = max (1), bottom = min (0)
|
||||
// Map clampedY from [trackPadding, height-trackPadding] to [1, 0]
|
||||
const percentage = 1 - ((clampedY - trackPadding) / trackHeight);
|
||||
onChange(Math.max(0, Math.min(1, percentage)));
|
||||
};
|
||||
|
||||
React.useEffect(() => {
|
||||
if (isDragging) {
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
window.addEventListener('touchmove', handleTouchMove);
|
||||
window.addEventListener('touchend', handleTouchEnd);
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
window.removeEventListener('touchmove', handleTouchMove);
|
||||
window.removeEventListener('touchend', handleTouchEnd);
|
||||
};
|
||||
}
|
||||
}, [isDragging, handleMouseMove, handleMouseUp, handleTouchMove, handleTouchEnd]);
|
||||
|
||||
return (
|
||||
<div className={cn('flex gap-3', className)} style={{ marginLeft: '16px' }}>
|
||||
{/* dB Labels (Left) */}
|
||||
<div className="flex flex-col justify-between text-[10px] font-mono text-muted-foreground py-1">
|
||||
<span>0</span>
|
||||
<span>-12</span>
|
||||
<span>-24</span>
|
||||
<span>-60</span>
|
||||
</div>
|
||||
|
||||
{/* Fader Container */}
|
||||
<div
|
||||
ref={containerRef}
|
||||
className="relative w-12 h-40 bg-background/50 rounded-md border border-border/50 cursor-pointer"
|
||||
onMouseDown={handleMouseDown}
|
||||
onTouchStart={handleTouchStart}
|
||||
>
|
||||
{/* Peak Meter (Horizontal Bar - Top) */}
|
||||
<div className="absolute inset-x-2 top-2 h-3 bg-background/80 rounded-sm overflow-hidden border border-border/30">
|
||||
<div
|
||||
className="absolute left-0 top-0 bottom-0 transition-all duration-75 ease-out"
|
||||
style={{ width: `${Math.max(0, Math.min(100, peakWidth))}%` }}
|
||||
>
|
||||
<div className={cn(
|
||||
'w-full h-full',
|
||||
peakDb > -3 ? 'bg-red-500' :
|
||||
peakDb > -6 ? 'bg-yellow-500' :
|
||||
'bg-green-500'
|
||||
)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* RMS Meter (Horizontal Bar - Bottom) */}
|
||||
<div className="absolute inset-x-2 bottom-2 h-3 bg-background/80 rounded-sm overflow-hidden border border-border/30">
|
||||
<div
|
||||
className="absolute left-0 top-0 bottom-0 transition-all duration-150 ease-out"
|
||||
style={{ width: `${Math.max(0, Math.min(100, rmsWidth))}%` }}
|
||||
>
|
||||
<div className={cn(
|
||||
'w-full h-full',
|
||||
rmsDb > -3 ? 'bg-red-500' :
|
||||
rmsDb > -6 ? 'bg-yellow-500' :
|
||||
'bg-green-500'
|
||||
)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Fader Track */}
|
||||
<div className="absolute top-8 bottom-8 left-1/2 -translate-x-1/2 w-1.5 bg-muted/50 rounded-full" />
|
||||
|
||||
{/* Fader Handle */}
|
||||
<div
|
||||
className="absolute left-1/2 -translate-x-1/2 w-10 h-4 bg-primary/80 border-2 border-primary rounded-md shadow-lg cursor-grab active:cursor-grabbing pointer-events-none transition-all"
|
||||
style={{
|
||||
// Inverted: value 1 = top of track (20%), value 0 = bottom of track (80%)
|
||||
// Track has top-8 bottom-8 padding (20% and 80% of h-40 container)
|
||||
// Handle moves within 60% range (from 20% to 80%)
|
||||
top: `calc(${20 + (1 - value) * 60}% - 0.5rem)`,
|
||||
}}
|
||||
>
|
||||
{/* Handle grip lines */}
|
||||
<div className="absolute inset-0 flex items-center justify-center gap-0.5">
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Clip Indicator */}
|
||||
{isClipping && (
|
||||
<button
|
||||
onClick={onResetClip}
|
||||
className="absolute top-0 left-0 right-0 px-1 py-0.5 text-[9px] font-bold text-white bg-red-500 border-b border-red-600 rounded-t-md shadow-lg shadow-red-500/50 animate-pulse"
|
||||
title="Click to reset clip indicator"
|
||||
>
|
||||
CLIP
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* dB Scale Markers */}
|
||||
<div className="absolute inset-0 px-2 py-8 pointer-events-none">
|
||||
<div className="relative h-full">
|
||||
{/* -12 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-border/20" style={{ top: '50%' }} />
|
||||
{/* -6 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-yellow-500/20" style={{ top: '20%' }} />
|
||||
{/* -3 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-red-500/30" style={{ top: '10%' }} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Value and Level Display (Right) */}
|
||||
<div className="flex flex-col justify-between items-start text-[9px] font-mono py-1 w-[36px]">
|
||||
{/* Current dB Value */}
|
||||
<div className={cn(
|
||||
'font-bold text-[11px]',
|
||||
valueDb > -3 ? 'text-red-500' :
|
||||
valueDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{valueDb > -60 ? `${valueDb.toFixed(1)}` : '-∞'}
|
||||
</div>
|
||||
|
||||
{/* Spacer */}
|
||||
<div className="flex-1" />
|
||||
|
||||
{/* Peak Level */}
|
||||
<div className="flex flex-col items-start">
|
||||
<span className="text-muted-foreground/60">PK</span>
|
||||
<span className={cn(
|
||||
'font-mono text-[10px]',
|
||||
peakDb > -3 ? 'text-red-500' :
|
||||
peakDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{peakDb > -60 ? `${peakDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* RMS Level */}
|
||||
<div className="flex flex-col items-start">
|
||||
<span className="text-muted-foreground/60">RM</span>
|
||||
<span className={cn(
|
||||
'font-mono text-[10px]',
|
||||
rmsDb > -3 ? 'text-red-500' :
|
||||
rmsDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{rmsDb > -60 ? `${rmsDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* dB Label */}
|
||||
<span className="text-muted-foreground/60 text-[8px]">dB</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
components/controls/MasterMeter.tsx (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
|
||||
export interface MasterMeterProps {
|
||||
/** Peak level (0-1) */
|
||||
peakLevel: number;
|
||||
/** RMS level (0-1) */
|
||||
rmsLevel: number;
|
||||
/** Whether clipping has occurred */
|
||||
isClipping: boolean;
|
||||
/** Callback to reset clip indicator */
|
||||
onResetClip?: () => void;
|
||||
}
|
||||
|
||||
export function MasterMeter({
|
||||
peakLevel,
|
||||
rmsLevel,
|
||||
isClipping,
|
||||
onResetClip,
|
||||
}: MasterMeterProps) {
|
||||
// Convert linear 0-1 to dB scale for display
|
||||
const linearToDb = (linear: number): number => {
|
||||
if (linear === 0) return -60;
|
||||
const db = 20 * Math.log10(linear);
|
||||
return Math.max(-60, Math.min(0, db));
|
||||
};
|
||||
|
||||
const peakDb = linearToDb(peakLevel);
|
||||
const rmsDb = linearToDb(rmsLevel);
|
||||
|
||||
// Calculate bar heights (0-100%)
|
||||
const peakHeight = ((peakDb + 60) / 60) * 100;
|
||||
const rmsHeight = ((rmsDb + 60) / 60) * 100;
|
||||
|
||||
return (
|
||||
<div className="flex items-center gap-2 px-3 py-2 bg-muted/30 rounded-md border border-border/50">
|
||||
{/* Clip Indicator */}
|
||||
<button
|
||||
onClick={onResetClip}
|
||||
className={`w-6 h-6 rounded-sm border transition-colors ${
|
||||
isClipping
|
||||
? 'bg-red-500 border-red-600 shadow-lg shadow-red-500/50'
|
||||
: 'bg-muted border-border/50'
|
||||
}`}
|
||||
title={isClipping ? 'Click to reset clip indicator' : 'No clipping'}
|
||||
>
|
||||
<span className="text-[10px] font-bold text-white">C</span>
|
||||
</button>
|
||||
|
||||
{/* Meters */}
|
||||
<div className="flex gap-1">
|
||||
{/* Peak Meter (Left) */}
|
||||
<div className="w-6 h-24 bg-background/50 rounded-sm relative overflow-hidden border border-border/50">
|
||||
<div className="absolute bottom-0 left-0 right-0 transition-all duration-75 ease-out"
|
||||
style={{ height: `${Math.max(0, Math.min(100, peakHeight))}%` }}>
|
||||
<div className={`w-full h-full ${
|
||||
peakDb > -3 ? 'bg-red-500' :
|
||||
peakDb > -6 ? 'bg-yellow-500' :
|
||||
'bg-green-500'
|
||||
}`} />
|
||||
</div>
|
||||
{/* dB markers */}
|
||||
<div className="absolute inset-0 pointer-events-none">
|
||||
<div className="absolute top-0 left-0 right-0 h-px bg-red-500/30" title="0 dB" />
|
||||
<div className="absolute top-[5%] left-0 right-0 h-px bg-yellow-500/20" title="-3 dB" />
|
||||
<div className="absolute top-[10%] left-0 right-0 h-px bg-border/20" title="-6 dB" />
|
||||
<div className="absolute top-[25%] left-0 right-0 h-px bg-border/20" title="-12 dB" />
|
||||
<div className="absolute top-[50%] left-0 right-0 h-px bg-border/20" title="-18 dB" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* RMS Meter (Right) */}
|
||||
<div className="w-6 h-24 bg-background/50 rounded-sm relative overflow-hidden border border-border/50">
|
||||
<div className="absolute bottom-0 left-0 right-0 transition-all duration-150 ease-out"
|
||||
style={{ height: `${Math.max(0, Math.min(100, rmsHeight))}%` }}>
|
||||
<div className={`w-full h-full ${
|
||||
rmsDb > -3 ? 'bg-red-400' :
|
||||
rmsDb > -6 ? 'bg-yellow-400' :
|
||||
'bg-green-400'
|
||||
}`} />
|
||||
</div>
|
||||
{/* dB markers */}
|
||||
<div className="absolute inset-0 pointer-events-none">
|
||||
<div className="absolute top-0 left-0 right-0 h-px bg-red-500/30" />
|
||||
<div className="absolute top-[5%] left-0 right-0 h-px bg-yellow-500/20" />
|
||||
<div className="absolute top-[10%] left-0 right-0 h-px bg-border/20" />
|
||||
<div className="absolute top-[25%] left-0 right-0 h-px bg-border/20" />
|
||||
<div className="absolute top-[50%] left-0 right-0 h-px bg-border/20" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Labels and Values */}
|
||||
<div className="flex flex-col text-[10px] font-mono">
|
||||
<div className="flex items-center gap-1">
|
||||
<span className="text-muted-foreground w-6">PK:</span>
|
||||
<span className={`w-12 text-right ${
|
||||
peakDb > -3 ? 'text-red-500' :
|
||||
peakDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
}`}>
|
||||
{peakDb > -60 ? `${peakDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<span className="text-muted-foreground w-6">RM:</span>
|
||||
<span className={`w-12 text-right ${
|
||||
rmsDb > -3 ? 'text-red-400' :
|
||||
rmsDb > -6 ? 'text-yellow-400' :
|
||||
'text-green-400'
|
||||
}`}>
|
||||
{rmsDb > -60 ? `${rmsDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="text-muted-foreground text-center mt-0.5">dB</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
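MasterFader and MasterMeter above define identical linearToDb helpers (20·log10 of the linear level, clamped to a -60 dB floor). A shared utility could remove the duplication; the sketch below assumes a lib/utils/audio.ts module, which is a hypothetical location, not a file in this diff:

// Hypothetical shared helper (path assumed): lib/utils/audio.ts
// Converts a linear level (0-1) to dB, clamped to the meters' floor.
export function linearToDb(linear: number, floorDb = -60): number {
  if (linear <= 0) return floorDb;
  return Math.max(floorDb, Math.min(0, 20 * Math.log10(linear)));
}
// Example: linearToDb(0.5) ≈ -6.0 dB, linearToDb(0.1) = -20.0 dB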
components/dialogs/BrowserCompatDialog.tsx (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { AlertTriangle, XCircle, Info, X } from 'lucide-react';
|
||||
import { Modal } from '@/components/ui/Modal';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { getBrowserInfo } from '@/lib/utils/browser-compat';
|
||||
|
||||
interface BrowserCompatDialogProps {
|
||||
open: boolean;
|
||||
missingFeatures: string[];
|
||||
warnings: string[];
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
export function BrowserCompatDialog({
|
||||
open,
|
||||
missingFeatures,
|
||||
warnings,
|
||||
onClose,
|
||||
}: BrowserCompatDialogProps) {
|
||||
const [browserInfo, setBrowserInfo] = React.useState({ name: 'Unknown', version: 'Unknown' });
|
||||
const hasErrors = missingFeatures.length > 0;
|
||||
|
||||
// Get browser info only on client side
|
||||
React.useEffect(() => {
|
||||
setBrowserInfo(getBrowserInfo());
|
||||
}, []);
|
||||
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<Modal open={open} onClose={onClose} title="">
|
||||
<div className="p-6 max-w-md">
|
||||
{/* Header */}
|
||||
<div className="flex items-start justify-between mb-4">
|
||||
<div className="flex items-center gap-2">
|
||||
{hasErrors ? (
|
||||
<>
|
||||
<XCircle className="h-5 w-5 text-destructive" />
|
||||
<h2 className="text-lg font-semibold">Browser Not Supported</h2>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<AlertTriangle className="h-5 w-5 text-yellow-500" />
|
||||
<h2 className="text-lg font-semibold">Browser Warnings</h2>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
<button onClick={onClose} className="text-muted-foreground hover:text-foreground">
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<p className="text-sm text-muted-foreground mb-4">
|
||||
{hasErrors ? (
|
||||
<>Your browser is missing required features to run this audio editor.</>
|
||||
) : (
|
||||
<>Some features may not work as expected in your browser.</>
|
||||
)}
|
||||
</p>
|
||||
|
||||
<div className="space-y-4">
|
||||
{/* Browser Info */}
|
||||
<div className="flex items-center gap-2 text-sm text-muted-foreground">
|
||||
<Info className="h-4 w-4" />
|
||||
<span>
|
||||
{browserInfo.name} {browserInfo.version}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Missing Features */}
|
||||
{missingFeatures.length > 0 && (
|
||||
<div className="space-y-2">
|
||||
<h3 className="text-sm font-semibold text-destructive flex items-center gap-2">
|
||||
<XCircle className="h-4 w-4" />
|
||||
Missing Required Features:
|
||||
</h3>
|
||||
<ul className="list-disc list-inside space-y-1 text-sm text-muted-foreground">
|
||||
{missingFeatures.map((feature) => (
|
||||
<li key={feature}>{feature}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Warnings */}
|
||||
{warnings.length > 0 && (
|
||||
<div className="space-y-2">
|
||||
<h3 className="text-sm font-semibold text-yellow-600 dark:text-yellow-500 flex items-center gap-2">
|
||||
<AlertTriangle className="h-4 w-4" />
|
||||
Warnings:
|
||||
</h3>
|
||||
<ul className="list-disc list-inside space-y-1 text-sm text-muted-foreground">
|
||||
{warnings.map((warning) => (
|
||||
<li key={warning}>{warning}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Recommendations */}
|
||||
{hasErrors && (
|
||||
<div className="bg-muted/50 border border-border rounded-md p-3 space-y-2">
|
||||
<h3 className="text-sm font-semibold">Recommended Browsers:</h3>
|
||||
<ul className="text-sm text-muted-foreground space-y-1">
|
||||
<li>• Chrome 90+ or Edge 90+</li>
|
||||
<li>• Firefox 88+</li>
|
||||
<li>• Safari 14+</li>
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex justify-end gap-2">
|
||||
{hasErrors ? (
|
||||
<Button onClick={onClose} variant="destructive">
|
||||
Close
|
||||
</Button>
|
||||
) : (
|
||||
<Button onClick={onClose}>
|
||||
Continue Anyway
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
}
|
||||
components/dialogs/ExportDialog.tsx (new file, 210 lines)
@@ -0,0 +1,210 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { X, Download } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface ExportSettings {
|
||||
format: 'wav' | 'mp3';
|
||||
scope: 'project' | 'selection' | 'tracks'; // Export scope
|
||||
bitDepth: 16 | 24 | 32;
|
||||
bitrate: number; // For MP3: 128, 192, 256, 320 kbps
|
||||
normalize: boolean;
|
||||
filename: string;
|
||||
}
|
||||
|
||||
export interface ExportDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
onExport: (settings: ExportSettings) => void;
|
||||
isExporting?: boolean;
|
||||
hasSelection?: boolean; // Whether any track has a selection
|
||||
}
|
||||
|
||||
export function ExportDialog({ open, onClose, onExport, isExporting, hasSelection }: ExportDialogProps) {
|
||||
const [settings, setSettings] = React.useState<ExportSettings>({
|
||||
format: 'wav',
|
||||
scope: 'project',
|
||||
bitDepth: 16,
|
||||
bitrate: 192, // Default MP3 bitrate
|
||||
normalize: true,
|
||||
filename: 'mix',
|
||||
});
|
||||
|
||||
const handleExport = () => {
|
||||
onExport(settings);
|
||||
};
|
||||
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
|
||||
<div className="bg-card border border-border rounded-lg shadow-xl w-full max-w-md p-6">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<h2 className="text-lg font-semibold text-foreground">Export Audio</h2>
|
||||
<button
|
||||
onClick={onClose}
|
||||
className="text-muted-foreground hover:text-foreground transition-colors"
|
||||
disabled={isExporting}
|
||||
>
|
||||
<X className="h-5 w-5" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Settings */}
|
||||
<div className="space-y-4">
|
||||
{/* Filename */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-foreground mb-2">
|
||||
Filename
|
||||
</label>
|
||||
<input
|
||||
type="text"
|
||||
value={settings.filename}
|
||||
onChange={(e) => setSettings({ ...settings, filename: e.target.value })}
|
||||
className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
|
||||
disabled={isExporting}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
.{settings.format} will be added automatically
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Format */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-foreground mb-2">
|
||||
Format
|
||||
</label>
|
||||
<select
|
||||
value={settings.format}
|
||||
onChange={(e) => setSettings({ ...settings, format: e.target.value as 'wav' | 'mp3' })}
|
||||
className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
|
||||
disabled={isExporting}
|
||||
>
|
||||
<option value="wav">WAV (Lossless, Uncompressed)</option>
|
||||
<option value="mp3">MP3 (Lossy, Compressed)</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{/* Export Scope */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-foreground mb-2">
|
||||
Export Scope
|
||||
</label>
|
||||
<select
|
||||
value={settings.scope}
|
||||
onChange={(e) => setSettings({ ...settings, scope: e.target.value as 'project' | 'selection' | 'tracks' })}
|
||||
className="w-full px-3 py-2 bg-background border border-border rounded text-foreground focus:outline-none focus:ring-2 focus:ring-primary"
|
||||
disabled={isExporting}
|
||||
>
|
||||
<option value="project">Entire Project (Mix All Tracks)</option>
|
||||
<option value="selection" disabled={!hasSelection}>
|
||||
Selected Region {!hasSelection && '(No selection)'}
|
||||
</option>
|
||||
<option value="tracks">Individual Tracks (Separate Files)</option>
|
||||
</select>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
{settings.scope === 'project' && 'Mix all tracks into a single file'}
|
||||
{settings.scope === 'selection' && 'Export only the selected region'}
|
||||
{settings.scope === 'tracks' && 'Export each track as a separate file'}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Bit Depth (WAV only) */}
|
||||
{settings.format === 'wav' && (
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-foreground mb-2">
|
||||
Bit Depth
|
||||
</label>
|
||||
<div className="flex gap-2">
|
||||
{[16, 24, 32].map((depth) => (
|
||||
<button
|
||||
key={depth}
|
||||
onClick={() => setSettings({ ...settings, bitDepth: depth as 16 | 24 | 32 })}
|
||||
className={cn(
|
||||
'flex-1 px-3 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.bitDepth === depth
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-background border border-border text-foreground hover:bg-accent'
|
||||
)}
|
||||
disabled={isExporting}
|
||||
>
|
||||
{depth}-bit {depth === 32 && '(Float)'}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* MP3 Bitrate */}
|
||||
{settings.format === 'mp3' && (
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-foreground mb-2">
|
||||
Bitrate
|
||||
</label>
|
||||
<div className="flex gap-2">
|
||||
{[128, 192, 256, 320].map((rate) => (
|
||||
<button
|
||||
key={rate}
|
||||
onClick={() => setSettings({ ...settings, bitrate: rate })}
|
||||
className={cn(
|
||||
'flex-1 px-3 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.bitrate === rate
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-background border border-border text-foreground hover:bg-accent'
|
||||
)}
|
||||
disabled={isExporting}
|
||||
>
|
||||
{rate} kbps
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
{/* Normalize */}
|
||||
<div>
|
||||
<label className="flex items-center gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={settings.normalize}
|
||||
onChange={(e) => setSettings({ ...settings, normalize: e.target.checked })}
|
||||
className="w-4 h-4 rounded border-border text-primary focus:ring-primary"
|
||||
disabled={isExporting}
|
||||
/>
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
Normalize audio
|
||||
</span>
|
||||
</label>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">
|
||||
Prevents clipping by adjusting peak levels
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex gap-3 mt-6">
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={onClose}
|
||||
className="flex-1"
|
||||
disabled={isExporting}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleExport}
|
||||
className="flex-1"
|
||||
disabled={isExporting || !settings.filename.trim()}
|
||||
>
|
||||
<Download className="h-4 w-4 mr-2" />
|
||||
{isExporting ? 'Exporting...' : 'Export'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
components/dialogs/ImportDialog.tsx (new file, 156 lines)
@@ -0,0 +1,156 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
import { ImportOptions } from '@/lib/audio/decoder';
|
||||
|
||||
export interface ImportDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
onImport: (options: ImportOptions) => void;
|
||||
fileName?: string;
|
||||
sampleRate?: number;
|
||||
channels?: number;
|
||||
}
|
||||
|
||||
export function ImportDialog({
|
||||
open,
|
||||
onClose,
|
||||
onImport,
|
||||
fileName,
|
||||
sampleRate: originalSampleRate,
|
||||
channels: originalChannels,
|
||||
}: ImportDialogProps) {
|
||||
const [options, setOptions] = useState<ImportOptions>({
convertToMono: false,
targetSampleRate: undefined,
normalizeOnImport: false,
});

// Don't render if not open (checked after the hook so useState runs on every render, per the Rules of Hooks)
if (!open) return null;
|
||||
|
||||
const handleImport = () => {
|
||||
onImport(options);
|
||||
};
|
||||
|
||||
const sampleRateOptions = [44100, 48000, 88200, 96000, 176400, 192000];
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 bg-black/50 flex items-center justify-center z-50">
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg p-6 w-full max-w-md shadow-xl">
|
||||
<h2 className="text-xl font-bold mb-4 text-gray-900 dark:text-white">
|
||||
Import Audio File
|
||||
</h2>
|
||||
|
||||
<div className="mb-4">
|
||||
<div className="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
||||
<strong>File:</strong> {fileName}
|
||||
</div>
|
||||
{originalSampleRate && (
|
||||
<div className="text-sm text-gray-600 dark:text-gray-400 mb-1">
|
||||
<strong>Sample Rate:</strong> {originalSampleRate} Hz
|
||||
</div>
|
||||
)}
|
||||
{originalChannels && (
|
||||
<div className="text-sm text-gray-600 dark:text-gray-400 mb-3">
|
||||
<strong>Channels:</strong> {originalChannels === 1 ? 'Mono' : originalChannels === 2 ? 'Stereo' : `${originalChannels} channels`}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
{/* Convert to Mono */}
|
||||
{originalChannels && originalChannels > 1 && (
|
||||
<div>
|
||||
<label className="flex items-center space-x-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={options.convertToMono}
|
||||
onChange={(e) => setOptions({ ...options, convertToMono: e.target.checked })}
|
||||
className="rounded border-gray-300 text-blue-600 focus:ring-blue-500"
|
||||
/>
|
||||
<span className="text-sm text-gray-700 dark:text-gray-300">
|
||||
Convert to Mono
|
||||
</span>
|
||||
</label>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1 ml-6">
|
||||
Mix all channels equally into a single mono channel
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Resample */}
|
||||
<div>
|
||||
<label className="flex items-center space-x-2 mb-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={options.targetSampleRate !== undefined}
|
||||
onChange={(e) => setOptions({
|
||||
...options,
|
||||
targetSampleRate: e.target.checked ? 48000 : undefined
|
||||
})}
|
||||
className="rounded border-gray-300 text-blue-600 focus:ring-blue-500"
|
||||
/>
|
||||
<span className="text-sm text-gray-700 dark:text-gray-300">
|
||||
Resample Audio
|
||||
</span>
|
||||
</label>
|
||||
|
||||
{options.targetSampleRate !== undefined && (
|
||||
<select
|
||||
value={options.targetSampleRate}
|
||||
onChange={(e) => setOptions({
|
||||
...options,
|
||||
targetSampleRate: parseInt(e.target.value)
|
||||
})}
|
||||
className="ml-6 w-full max-w-xs px-3 py-1.5 text-sm border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||
>
|
||||
{sampleRateOptions.map((rate) => (
|
||||
<option key={rate} value={rate}>
|
||||
{rate} Hz {rate === originalSampleRate ? '(original)' : ''}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
)}
|
||||
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1 ml-6">
|
||||
Convert to a different sample rate (may affect quality)
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Normalize */}
|
||||
<div>
|
||||
<label className="flex items-center space-x-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={options.normalizeOnImport}
|
||||
onChange={(e) => setOptions({ ...options, normalizeOnImport: e.target.checked })}
|
||||
className="rounded border-gray-300 text-blue-600 focus:ring-blue-500"
|
||||
/>
|
||||
<span className="text-sm text-gray-700 dark:text-gray-300">
|
||||
Normalize on Import
|
||||
</span>
|
||||
</label>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1 ml-6">
|
||||
Adjust peak amplitude to 99% (1% headroom)
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex justify-end space-x-3 mt-6">
|
||||
<button
|
||||
onClick={onClose}
|
||||
className="px-4 py-2 text-sm font-medium text-gray-700 dark:text-gray-300 bg-gray-100 dark:bg-gray-700 hover:bg-gray-200 dark:hover:bg-gray-600 rounded transition-colors"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
onClick={handleImport}
|
||||
className="px-4 py-2 text-sm font-medium text-white bg-blue-600 hover:bg-blue-700 rounded transition-colors"
|
||||
>
|
||||
Import
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
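The ImportDialog above exposes a convertToMono option described as mixing all channels equally into a single mono channel. Below is a minimal sketch of such an equal-weight downmix, assuming plain Float32Array channel data; the helper name and averaging strategy are illustrative, and the real @/lib/audio/decoder implementation may differ.

// Illustrative sketch (assumption, not the repository's code): average all
// channels sample-by-sample into one mono channel with equal weighting.
function downmixToMono(channels: Float32Array[]): Float32Array {
  const length = channels[0]?.length ?? 0;
  const mono = new Float32Array(length);
  if (channels.length === 0) return mono;
  for (const channel of channels) {
    for (let i = 0; i < length; i++) {
      mono[i] += channel[i];
    }
  }
  const scale = 1 / channels.length;
  for (let i = 0; i < length; i++) {
    mono[i] *= scale;
  }
  return mono;
}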
140
components/dialogs/KeyboardShortcutsDialog.tsx
Normal file
@@ -0,0 +1,140 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Keyboard, X } from 'lucide-react';
|
||||
import { Modal } from '@/components/ui/Modal';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface KeyboardShortcutsDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
interface ShortcutCategory {
|
||||
name: string;
|
||||
shortcuts: Array<{
|
||||
keys: string[];
|
||||
description: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
const SHORTCUTS: ShortcutCategory[] = [
|
||||
{
|
||||
name: 'Playback',
|
||||
shortcuts: [
|
||||
{ keys: ['Space'], description: 'Play / Pause' },
|
||||
{ keys: ['Home'], description: 'Go to Start' },
|
||||
{ keys: ['End'], description: 'Go to End' },
|
||||
{ keys: ['←'], description: 'Seek Backward' },
|
||||
{ keys: ['→'], description: 'Seek Forward' },
|
||||
{ keys: ['Ctrl', '←'], description: 'Seek Backward 5s' },
|
||||
{ keys: ['Ctrl', '→'], description: 'Seek Forward 5s' },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Edit',
|
||||
shortcuts: [
|
||||
{ keys: ['Ctrl', 'Z'], description: 'Undo' },
|
||||
{ keys: ['Ctrl', 'Shift', 'Z'], description: 'Redo' },
|
||||
{ keys: ['Ctrl', 'X'], description: 'Cut' },
|
||||
{ keys: ['Ctrl', 'C'], description: 'Copy' },
|
||||
{ keys: ['Ctrl', 'V'], description: 'Paste' },
|
||||
{ keys: ['Delete'], description: 'Delete Selection' },
|
||||
{ keys: ['Ctrl', 'D'], description: 'Duplicate' },
|
||||
{ keys: ['Ctrl', 'A'], description: 'Select All' },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'View',
|
||||
shortcuts: [
|
||||
{ keys: ['Ctrl', '+'], description: 'Zoom In' },
|
||||
{ keys: ['Ctrl', '-'], description: 'Zoom Out' },
|
||||
{ keys: ['Ctrl', '0'], description: 'Fit to View' },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'File',
|
||||
shortcuts: [
|
||||
{ keys: ['Ctrl', 'S'], description: 'Save Project' },
|
||||
{ keys: ['Ctrl', 'K'], description: 'Open Command Palette' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
function KeyboardKey({ keyName }: { keyName: string }) {
|
||||
return (
|
||||
<kbd className="px-2 py-1 text-xs font-semibold bg-muted border border-border rounded shadow-sm min-w-[2rem] text-center inline-block">
|
||||
{keyName}
|
||||
</kbd>
|
||||
);
|
||||
}
|
||||
|
||||
export function KeyboardShortcutsDialog({ open, onClose }: KeyboardShortcutsDialogProps) {
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<Modal open={open} onClose={onClose} title="">
|
||||
<div className="p-6 max-w-2xl">
|
||||
{/* Header */}
|
||||
<div className="flex items-start justify-between mb-6">
|
||||
<div className="flex items-center gap-3">
|
||||
<Keyboard className="h-6 w-6 text-primary" />
|
||||
<h2 className="text-xl font-semibold">Keyboard Shortcuts</h2>
|
||||
</div>
|
||||
<button onClick={onClose} className="text-muted-foreground hover:text-foreground">
|
||||
<X className="h-5 w-5" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Shortcuts Grid */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
{SHORTCUTS.map((category) => (
|
||||
<div key={category.name} className="space-y-3">
|
||||
<h3 className="text-sm font-semibold text-primary border-b border-border pb-2">
|
||||
{category.name}
|
||||
</h3>
|
||||
<div className="space-y-2">
|
||||
{category.shortcuts.map((shortcut, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className="flex items-center justify-between gap-4 py-1.5"
|
||||
>
|
||||
<span className="text-sm text-foreground flex-1">
|
||||
{shortcut.description}
|
||||
</span>
|
||||
<div className="flex items-center gap-1 flex-shrink-0">
|
||||
{shortcut.keys.map((key, keyIndex) => (
|
||||
<React.Fragment key={keyIndex}>
|
||||
{keyIndex > 0 && (
|
||||
<span className="text-muted-foreground text-xs mx-0.5">+</span>
|
||||
)}
|
||||
<KeyboardKey keyName={key} />
|
||||
</React.Fragment>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="mt-6 pt-4 border-t border-border">
|
||||
<p className="text-xs text-muted-foreground text-center">
|
||||
Press <KeyboardKey keyName="Ctrl" /> + <KeyboardKey keyName="K" /> to open the
|
||||
command palette and search for more actions
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Close Button */}
|
||||
<div className="mt-6 flex justify-end">
|
||||
<Button onClick={onClose} variant="default">
|
||||
Close
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
}
|
||||
101
components/dialogs/MemoryWarningDialog.tsx
Normal file
@@ -0,0 +1,101 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { AlertTriangle, Info, X } from 'lucide-react';
|
||||
import { Modal } from '@/components/ui/Modal';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { formatMemorySize } from '@/lib/utils/memory-limits';
|
||||
|
||||
interface MemoryWarningDialogProps {
|
||||
open: boolean;
|
||||
estimatedMemoryMB: number;
|
||||
availableMemoryMB?: number;
|
||||
warning: string;
|
||||
fileName?: string;
|
||||
onContinue: () => void;
|
||||
onCancel: () => void;
|
||||
}
|
||||
|
||||
export function MemoryWarningDialog({
|
||||
open,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
warning,
|
||||
fileName,
|
||||
onContinue,
|
||||
onCancel,
|
||||
}: MemoryWarningDialogProps) {
|
||||
if (!open) return null;
|
||||
|
||||
const estimatedBytes = estimatedMemoryMB * 1024 * 1024;
|
||||
const availableBytes = availableMemoryMB ? availableMemoryMB * 1024 * 1024 : undefined;
|
||||
|
||||
return (
|
||||
<Modal open={open} onClose={onCancel} title="">
|
||||
<div className="p-6 max-w-md">
|
||||
{/* Header */}
|
||||
<div className="flex items-start justify-between mb-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<AlertTriangle className="h-5 w-5 text-yellow-500" />
|
||||
<h2 className="text-lg font-semibold">Memory Warning</h2>
|
||||
</div>
|
||||
<button onClick={onCancel} className="text-muted-foreground hover:text-foreground">
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<p className="text-sm text-muted-foreground mb-4">
|
||||
{warning}
|
||||
</p>
|
||||
|
||||
<div className="space-y-4">
|
||||
{/* File Info */}
|
||||
{fileName && (
|
||||
<div className="flex items-center gap-2 text-sm">
|
||||
<Info className="h-4 w-4 text-muted-foreground" />
|
||||
<span className="font-medium">File:</span>
|
||||
<span className="text-muted-foreground truncate">{fileName}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Memory Details */}
|
||||
<div className="bg-muted/50 border border-border rounded-md p-3 space-y-2">
|
||||
<div className="flex justify-between text-sm">
|
||||
<span className="text-muted-foreground">Estimated Memory:</span>
|
||||
<span className="font-medium">{formatMemorySize(estimatedBytes)}</span>
|
||||
</div>
|
||||
{availableBytes && (
|
||||
<div className="flex justify-between text-sm">
|
||||
<span className="text-muted-foreground">Available Memory:</span>
|
||||
<span className="font-medium">{formatMemorySize(availableBytes)}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Warning Message */}
|
||||
<div className="bg-yellow-500/10 border border-yellow-500/20 rounded-md p-3">
|
||||
<p className="text-sm text-yellow-700 dark:text-yellow-400">
|
||||
<strong>Note:</strong> Loading large files may cause performance issues or browser crashes,
|
||||
especially on devices with limited memory. Consider:
|
||||
</p>
|
||||
<ul className="mt-2 text-sm text-yellow-700 dark:text-yellow-400 space-y-1 list-disc list-inside">
|
||||
<li>Closing other browser tabs</li>
|
||||
<li>Using a shorter audio file</li>
|
||||
<li>Splitting large files into smaller segments</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex justify-end gap-2">
|
||||
<Button onClick={onCancel} variant="outline">
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={onContinue} variant="default">
|
||||
Continue Anyway
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
}
|
||||
162
components/dialogs/ProjectsDialog.tsx
Normal file
@@ -0,0 +1,162 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { X, Plus, Trash2, Copy, FolderOpen, Download, Upload } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import type { ProjectMetadata } from '@/lib/storage/db';
|
||||
import { formatDuration } from '@/lib/audio/decoder';
|
||||
|
||||
export interface ProjectsDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
projects: ProjectMetadata[];
|
||||
onNewProject: () => void;
|
||||
onLoadProject: (projectId: string) => void;
|
||||
onDeleteProject: (projectId: string) => void;
|
||||
onDuplicateProject: (projectId: string) => void;
|
||||
onExportProject: (projectId: string) => void;
|
||||
onImportProject: () => void;
|
||||
}
|
||||
|
||||
export function ProjectsDialog({
|
||||
open,
|
||||
onClose,
|
||||
projects,
|
||||
onNewProject,
|
||||
onLoadProject,
|
||||
onDeleteProject,
|
||||
onDuplicateProject,
|
||||
onExportProject,
|
||||
onImportProject,
|
||||
}: ProjectsDialogProps) {
|
||||
if (!open) return null;
|
||||
|
||||
const formatDate = (timestamp: number) => {
|
||||
return new Date(timestamp).toLocaleString(undefined, {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
|
||||
<div className="bg-card border border-border rounded-lg shadow-xl w-full max-w-3xl max-h-[80vh] flex flex-col">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between p-6 border-b border-border">
|
||||
<h2 className="text-lg font-semibold text-foreground">Projects</h2>
|
||||
<div className="flex items-center gap-2">
|
||||
<Button
|
||||
onClick={onImportProject}
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="gap-2"
|
||||
>
|
||||
<Upload className="h-4 w-4" />
|
||||
Import
|
||||
</Button>
|
||||
<Button
|
||||
onClick={onNewProject}
|
||||
variant="default"
|
||||
size="sm"
|
||||
className="gap-2"
|
||||
>
|
||||
<Plus className="h-4 w-4" />
|
||||
New Project
|
||||
</Button>
|
||||
<button
|
||||
onClick={onClose}
|
||||
className="text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
<X className="h-5 w-5" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Projects List */}
|
||||
<div className="flex-1 overflow-y-auto custom-scrollbar p-6">
|
||||
{projects.length === 0 ? (
|
||||
<div className="flex flex-col items-center justify-center py-12 text-center">
|
||||
<FolderOpen className="h-16 w-16 text-muted-foreground mb-4" />
|
||||
<h3 className="text-lg font-medium text-foreground mb-2">
|
||||
No projects yet
|
||||
</h3>
|
||||
<p className="text-sm text-muted-foreground mb-4">
|
||||
Create your first project to get started
|
||||
</p>
|
||||
<Button onClick={onNewProject} variant="default">
|
||||
<Plus className="h-4 w-4 mr-2" />
|
||||
Create Project
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="grid gap-4">
|
||||
{projects.map((project) => (
|
||||
<div
|
||||
key={project.id}
|
||||
className="border border-border rounded-lg p-4 hover:bg-accent/50 transition-colors"
|
||||
>
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1">
|
||||
<h3 className="font-medium text-foreground mb-1">
|
||||
{project.name}
|
||||
</h3>
|
||||
{project.description && (
|
||||
<p className="text-sm text-muted-foreground mb-2">
|
||||
{project.description}
|
||||
</p>
|
||||
)}
|
||||
<div className="flex flex-wrap gap-4 text-xs text-muted-foreground">
|
||||
<span>{project.trackCount} tracks</span>
|
||||
<span>{formatDuration(project.duration)}</span>
|
||||
<span>{project.sampleRate / 1000}kHz</span>
|
||||
<span>Updated {formatDate(project.updatedAt)}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2 ml-4">
|
||||
<button
|
||||
onClick={() => onLoadProject(project.id)}
|
||||
className="px-3 py-1.5 text-sm font-medium text-primary hover:bg-primary/10 rounded transition-colors"
|
||||
title="Open project"
|
||||
>
|
||||
Open
|
||||
</button>
|
||||
<button
|
||||
onClick={() => onExportProject(project.id)}
|
||||
className="p-1.5 text-muted-foreground hover:text-foreground hover:bg-accent rounded transition-colors"
|
||||
title="Export project"
|
||||
>
|
||||
<Download className="h-4 w-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={() => onDuplicateProject(project.id)}
|
||||
className="p-1.5 text-muted-foreground hover:text-foreground hover:bg-accent rounded transition-colors"
|
||||
title="Duplicate project"
|
||||
>
|
||||
<Copy className="h-4 w-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={() => {
|
||||
if (confirm(`Delete "${project.name}"? This cannot be undone.`)) {
|
||||
onDeleteProject(project.id);
|
||||
}
|
||||
}}
|
||||
className="p-1.5 text-muted-foreground hover:text-destructive hover:bg-destructive/10 rounded transition-colors"
|
||||
title="Delete project"
|
||||
>
|
||||
<Trash2 className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
106
components/dialogs/UnsupportedFormatDialog.tsx
Normal file
@@ -0,0 +1,106 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { AlertCircle, FileQuestion, X } from 'lucide-react';
|
||||
import { Modal } from '@/components/ui/Modal';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
|
||||
export interface UnsupportedFormatDialogProps {
|
||||
open: boolean;
|
||||
fileName: string;
|
||||
fileType: string;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const SUPPORTED_FORMATS = [
|
||||
{ extension: 'WAV', mimeType: 'audio/wav', description: 'Lossless, widely supported' },
|
||||
{ extension: 'MP3', mimeType: 'audio/mpeg', description: 'Compressed, universal support' },
|
||||
{ extension: 'OGG', mimeType: 'audio/ogg', description: 'Free, open format' },
|
||||
{ extension: 'FLAC', mimeType: 'audio/flac', description: 'Lossless compression' },
|
||||
{ extension: 'M4A/AAC', mimeType: 'audio/aac', description: 'Apple audio format' },
|
||||
{ extension: 'AIFF', mimeType: 'audio/aiff', description: 'Apple lossless format' },
|
||||
{ extension: 'WebM', mimeType: 'audio/webm', description: 'Modern web format' },
|
||||
];
|
||||
|
||||
export function UnsupportedFormatDialog({
|
||||
open,
|
||||
fileName,
|
||||
fileType,
|
||||
onClose,
|
||||
}: UnsupportedFormatDialogProps) {
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<Modal open={open} onClose={onClose} title="">
|
||||
<div className="p-6 max-w-lg">
|
||||
{/* Header */}
|
||||
<div className="flex items-start justify-between mb-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<FileQuestion className="h-6 w-6 text-yellow-500" />
|
||||
<h2 className="text-lg font-semibold">Unsupported File Format</h2>
|
||||
</div>
|
||||
<button onClick={onClose} className="text-muted-foreground hover:text-foreground">
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Error Message */}
|
||||
<div className="bg-yellow-500/10 border border-yellow-500/20 rounded-md p-4 mb-4">
|
||||
<div className="flex items-start gap-2">
|
||||
<AlertCircle className="h-4 w-4 text-yellow-600 dark:text-yellow-400 mt-0.5 flex-shrink-0" />
|
||||
<div className="flex-1">
|
||||
<p className="text-sm text-yellow-800 dark:text-yellow-200 font-medium mb-1">
|
||||
Cannot open this file
|
||||
</p>
|
||||
<p className="text-sm text-yellow-700 dark:text-yellow-300">
|
||||
<strong>{fileName}</strong>
|
||||
{fileType && (
|
||||
<span className="text-muted-foreground"> ({fileType})</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Supported Formats */}
|
||||
<div className="mb-6">
|
||||
<h3 className="text-sm font-semibold mb-3">Supported Audio Formats:</h3>
|
||||
<div className="grid grid-cols-1 gap-2">
|
||||
{SUPPORTED_FORMATS.map((format) => (
|
||||
<div
|
||||
key={format.extension}
|
||||
className="flex items-center justify-between gap-4 p-2 rounded bg-muted/30 border border-border/50"
|
||||
>
|
||||
<div className="flex items-center gap-3">
|
||||
<span className="text-sm font-mono font-semibold text-primary min-w-[80px]">
|
||||
{format.extension}
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{format.description}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Recommendations */}
|
||||
<div className="bg-muted/50 border border-border rounded-md p-4 mb-4">
|
||||
<h4 className="text-sm font-semibold mb-2">How to fix this:</h4>
|
||||
<ul className="text-sm text-muted-foreground space-y-2 list-disc list-inside">
|
||||
<li>Convert your audio file to a supported format (WAV or MP3 recommended)</li>
|
||||
<li>Use a free audio converter such as Audacity, FFmpeg, or an online conversion tool</li>
|
||||
<li>Check that the file isn't corrupted or incomplete</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
{/* Close Button */}
|
||||
<div className="flex justify-end">
|
||||
<Button onClick={onClose} variant="default">
|
||||
Got it
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -84,7 +84,7 @@ export function FileUpload({ onFileSelect, className }: FileUploadProps) {
|
||||
Click to browse or drag and drop
|
||||
</p>
|
||||
<p className="text-xs text-muted-foreground mt-2">
|
||||
Supported formats: WAV, MP3, OGG, FLAC, AAC, M4A
|
||||
Supported formats: WAV, MP3, OGG, FLAC, AAC, M4A, AIFF
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Play, Pause, Square, SkipBack, Volume2, VolumeX } from 'lucide-react';
|
||||
import { Play, Pause, Square, SkipBack, Circle, AlignVerticalJustifyStart, AlignVerticalJustifyEnd, Layers, Repeat } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { Slider } from '@/components/ui/Slider';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface PlaybackControlsProps {
|
||||
@@ -21,6 +20,24 @@ export interface PlaybackControlsProps {
|
||||
className?: string;
|
||||
currentTimeFormatted?: string;
|
||||
durationFormatted?: string;
|
||||
isRecording?: boolean;
|
||||
onStartRecording?: () => void;
|
||||
onStopRecording?: () => void;
|
||||
punchInEnabled?: boolean;
|
||||
punchInTime?: number;
|
||||
punchOutTime?: number;
|
||||
onPunchInEnabledChange?: (enabled: boolean) => void;
|
||||
onPunchInTimeChange?: (time: number) => void;
|
||||
onPunchOutTimeChange?: (time: number) => void;
|
||||
overdubEnabled?: boolean;
|
||||
onOverdubEnabledChange?: (enabled: boolean) => void;
|
||||
loopEnabled?: boolean;
|
||||
loopStart?: number;
|
||||
loopEnd?: number;
|
||||
onToggleLoop?: () => void;
|
||||
onSetLoopPoints?: (start: number, end: number) => void;
|
||||
playbackRate?: number;
|
||||
onPlaybackRateChange?: (rate: number) => void;
|
||||
}
|
||||
|
||||
export function PlaybackControls({
|
||||
@@ -38,10 +55,25 @@ export function PlaybackControls({
|
||||
className,
|
||||
currentTimeFormatted,
|
||||
durationFormatted,
|
||||
isRecording = false,
|
||||
onStartRecording,
|
||||
onStopRecording,
|
||||
punchInEnabled = false,
|
||||
punchInTime = 0,
|
||||
punchOutTime = 0,
|
||||
onPunchInEnabledChange,
|
||||
onPunchInTimeChange,
|
||||
onPunchOutTimeChange,
|
||||
overdubEnabled = false,
|
||||
onOverdubEnabledChange,
|
||||
loopEnabled = false,
|
||||
loopStart = 0,
|
||||
loopEnd = 0,
|
||||
onToggleLoop,
|
||||
onSetLoopPoints,
|
||||
playbackRate = 1.0,
|
||||
onPlaybackRateChange,
|
||||
}: PlaybackControlsProps) {
|
||||
const [isMuted, setIsMuted] = React.useState(false);
|
||||
const [previousVolume, setPreviousVolume] = React.useState(volume);
|
||||
|
||||
const handlePlayPause = () => {
|
||||
if (isPlaying) {
|
||||
onPause();
|
||||
@@ -50,30 +82,10 @@ export function PlaybackControls({
|
||||
}
|
||||
};
|
||||
|
||||
const handleMuteToggle = () => {
|
||||
if (isMuted) {
|
||||
onVolumeChange(previousVolume);
|
||||
setIsMuted(false);
|
||||
} else {
|
||||
setPreviousVolume(volume);
|
||||
onVolumeChange(0);
|
||||
setIsMuted(true);
|
||||
}
|
||||
};
|
||||
|
||||
const handleVolumeChange = (newVolume: number) => {
|
||||
onVolumeChange(newVolume);
|
||||
if (newVolume === 0) {
|
||||
setIsMuted(true);
|
||||
} else {
|
||||
setIsMuted(false);
|
||||
}
|
||||
};
|
||||
|
||||
const progress = duration > 0 ? (currentTime / duration) * 100 : 0;
|
||||
|
||||
return (
|
||||
<div className={cn('space-y-4', className)}>
|
||||
<div className={cn('space-y-4 w-full max-w-2xl', className)}>
|
||||
{/* Timeline Slider */}
|
||||
<div className="space-y-2">
|
||||
<input
|
||||
@@ -107,8 +119,63 @@ export function PlaybackControls({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Punch In/Out Times - Show when enabled */}
|
||||
{punchInEnabled && onPunchInTimeChange && onPunchOutTimeChange && (
|
||||
<div className="flex items-center gap-3 text-xs bg-muted/50 rounded px-3 py-2">
|
||||
<div className="flex items-center gap-2 flex-1">
|
||||
<label className="text-muted-foreground flex items-center gap-1 flex-shrink-0">
|
||||
<AlignVerticalJustifyStart className="h-3 w-3" />
|
||||
Punch In
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
min={0}
|
||||
max={punchOutTime || duration}
|
||||
step={0.1}
|
||||
value={punchInTime.toFixed(2)}
|
||||
onChange={(e) => onPunchInTimeChange(parseFloat(e.target.value))}
|
||||
className="flex-1 px-2 py-1 bg-background border border-border rounded text-xs font-mono"
|
||||
/>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onPunchInTimeChange(currentTime)}
|
||||
title="Set punch-in to current time"
|
||||
className="h-6 px-2 text-xs"
|
||||
>
|
||||
Set
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2 flex-1">
|
||||
<label className="text-muted-foreground flex items-center gap-1 flex-shrink-0">
|
||||
<AlignVerticalJustifyEnd className="h-3 w-3" />
|
||||
Punch Out
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
min={punchInTime}
|
||||
max={duration}
|
||||
step={0.1}
|
||||
value={punchOutTime.toFixed(2)}
|
||||
onChange={(e) => onPunchOutTimeChange(parseFloat(e.target.value))}
|
||||
className="flex-1 px-2 py-1 bg-background border border-border rounded text-xs font-mono"
|
||||
/>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onPunchOutTimeChange(currentTime)}
|
||||
title="Set punch-out to current time"
|
||||
className="h-6 px-2 text-xs"
|
||||
>
|
||||
Set
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Transport Controls */}
|
||||
<div className="flex items-center justify-between gap-4">
|
||||
<div className="flex items-center justify-center gap-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<Button
|
||||
variant="outline"
|
||||
@@ -144,33 +211,152 @@ export function PlaybackControls({
|
||||
>
|
||||
<Square className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Volume Control */}
|
||||
<div className="flex items-center gap-3 min-w-[200px]">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={handleMuteToggle}
|
||||
title={isMuted ? 'Unmute' : 'Mute'}
|
||||
>
|
||||
{isMuted || volume === 0 ? (
|
||||
<VolumeX className="h-5 w-5" />
|
||||
) : (
|
||||
<Volume2 className="h-5 w-5" />
|
||||
)}
|
||||
</Button>
|
||||
{/* Record Button */}
|
||||
{(onStartRecording || onStopRecording) && (
|
||||
<>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
onClick={isRecording ? onStopRecording : onStartRecording}
|
||||
disabled={disabled}
|
||||
title={isRecording ? 'Stop Recording' : 'Start Recording'}
|
||||
className={cn(
|
||||
isRecording && 'bg-red-500/20 hover:bg-red-500/30 border-red-500/50',
|
||||
isRecording && 'animate-pulse'
|
||||
)}
|
||||
>
|
||||
<Circle className={cn('h-4 w-4', isRecording && 'text-red-500 fill-red-500')} />
|
||||
</Button>
|
||||
|
||||
<Slider
|
||||
value={volume}
|
||||
onChange={handleVolumeChange}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
className="flex-1"
|
||||
/>
|
||||
{/* Recording Options */}
|
||||
<div className="flex items-center gap-1 border-l border-border pl-2 ml-1">
|
||||
{/* Punch In/Out Toggle */}
|
||||
{onPunchInEnabledChange && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => onPunchInEnabledChange(!punchInEnabled)}
|
||||
title="Toggle Punch In/Out Recording"
|
||||
className={cn(
|
||||
punchInEnabled && 'bg-primary/20 hover:bg-primary/30'
|
||||
)}
|
||||
>
|
||||
<AlignVerticalJustifyStart className={cn('h-3.5 w-3.5', punchInEnabled && 'text-primary')} />
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{/* Overdub Mode Toggle */}
|
||||
{onOverdubEnabledChange && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => onOverdubEnabledChange(!overdubEnabled)}
|
||||
title="Toggle Overdub Mode (layer recordings)"
|
||||
className={cn(
|
||||
overdubEnabled && 'bg-primary/20 hover:bg-primary/30'
|
||||
)}
|
||||
>
|
||||
<Layers className={cn('h-3.5 w-3.5', overdubEnabled && 'text-primary')} />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Loop Toggle */}
|
||||
{onToggleLoop && (
|
||||
<div className="flex items-center gap-1 border-l border-border pl-2 ml-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onToggleLoop}
|
||||
title="Toggle Loop Playback"
|
||||
className={cn(
|
||||
loopEnabled && 'bg-primary/20 hover:bg-primary/30'
|
||||
)}
|
||||
>
|
||||
<Repeat className={cn('h-3.5 w-3.5', loopEnabled && 'text-primary')} />
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Playback Speed Control */}
|
||||
{onPlaybackRateChange && (
|
||||
<div className="flex items-center gap-1 border-l border-border pl-2 ml-1">
|
||||
<select
|
||||
value={playbackRate}
|
||||
onChange={(e) => onPlaybackRateChange(parseFloat(e.target.value))}
|
||||
className="h-7 px-2 py-0 bg-background border border-border rounded text-xs cursor-pointer hover:bg-muted/50 focus:outline-none focus:ring-2 focus:ring-ring"
|
||||
title="Playback Speed"
|
||||
>
|
||||
<option value={0.25}>0.25x</option>
|
||||
<option value={0.5}>0.5x</option>
|
||||
<option value={0.75}>0.75x</option>
|
||||
<option value={1.0}>1x</option>
|
||||
<option value={1.25}>1.25x</option>
|
||||
<option value={1.5}>1.5x</option>
|
||||
<option value={2.0}>2x</option>
|
||||
</select>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Loop Points - Show when enabled */}
|
||||
{loopEnabled && onSetLoopPoints && (
|
||||
<div className="flex items-center gap-3 text-xs bg-muted/50 rounded px-3 py-2">
|
||||
<div className="flex items-center gap-2 flex-1">
|
||||
<label className="text-muted-foreground flex items-center gap-1 flex-shrink-0">
|
||||
<AlignVerticalJustifyStart className="h-3 w-3" />
|
||||
Loop Start
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
min={0}
|
||||
max={loopEnd || duration}
|
||||
step={0.1}
|
||||
value={loopStart.toFixed(2)}
|
||||
onChange={(e) => onSetLoopPoints(parseFloat(e.target.value), loopEnd)}
|
||||
className="flex-1 px-2 py-1 bg-background border border-border rounded text-xs font-mono"
|
||||
/>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onSetLoopPoints(currentTime, loopEnd)}
|
||||
title="Set loop start to current time"
|
||||
className="h-6 px-2 text-xs"
|
||||
>
|
||||
Set
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2 flex-1">
|
||||
<label className="text-muted-foreground flex items-center gap-1 flex-shrink-0">
|
||||
<AlignVerticalJustifyEnd className="h-3 w-3" />
|
||||
Loop End
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
min={loopStart}
|
||||
max={duration}
|
||||
step={0.1}
|
||||
value={loopEnd.toFixed(2)}
|
||||
onChange={(e) => onSetLoopPoints(loopStart, parseFloat(e.target.value))}
|
||||
className="flex-1 px-2 py-1 bg-background border border-border rounded text-xs font-mono"
|
||||
/>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onSetLoopPoints(loopStart, currentTime)}
|
||||
title="Set loop end to current time"
|
||||
className="h-6 px-2 text-xs"
|
||||
>
|
||||
Set
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
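The PlaybackControls diff above adds loop start/end points and a loop toggle. One common way to honour those points is to wrap the playhead back to the loop start when it passes the loop end, as in this hedged sketch; the function name and the detail of preserving the overshoot are assumptions, not the project's confirmed playback logic.

// Illustrative sketch (assumption): advance the playhead by deltaSeconds and
// wrap it inside [loopStart, loopEnd) when looping is enabled.
function nextPlayheadTime(
  currentTime: number,
  deltaSeconds: number,
  loopEnabled: boolean,
  loopStart: number,
  loopEnd: number
): number {
  let t = currentTime + deltaSeconds;
  if (loopEnabled && loopEnd > loopStart && t >= loopEnd) {
    // Keep the overshoot so timing stays continuous across the wrap point.
    t = loopStart + ((t - loopStart) % (loopEnd - loopStart));
  }
  return t;
}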
@@ -2,7 +2,7 @@
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import { generateMinMaxPeaks } from '@/lib/waveform/peaks';
|
||||
import { useAudioWorker } from '@/lib/hooks/useAudioWorker';
|
||||
import type { Selection } from '@/types/selection';
|
||||
|
||||
export interface WaveformProps {
|
||||
@@ -39,6 +39,16 @@ export function Waveform({
|
||||
const [isSelecting, setIsSelecting] = React.useState(false);
|
||||
const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
|
||||
|
||||
// Worker for peak generation
|
||||
const worker = useAudioWorker();
|
||||
|
||||
// Cache peaks to avoid regenerating on every render
|
||||
const [peaksCache, setPeaksCache] = React.useState<{
|
||||
width: number;
|
||||
min: Float32Array;
|
||||
max: Float32Array;
|
||||
} | null>(null);
|
||||
|
||||
// Handle resize
|
||||
React.useEffect(() => {
|
||||
const handleResize = () => {
|
||||
@@ -52,10 +62,35 @@ export function Waveform({
|
||||
return () => window.removeEventListener('resize', handleResize);
|
||||
}, []);
|
||||
|
||||
// Generate peaks in worker when audioBuffer or zoom changes
|
||||
React.useEffect(() => {
|
||||
if (!audioBuffer) {
|
||||
setPeaksCache(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const visibleWidth = Math.floor(width * zoom);
|
||||
|
||||
// Check if we already have peaks for this width
|
||||
if (peaksCache && peaksCache.width === visibleWidth) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Generate peaks in worker
|
||||
const channelData = audioBuffer.getChannelData(0);
|
||||
worker.generateMinMaxPeaks(channelData, visibleWidth).then((peaks) => {
|
||||
setPeaksCache({
|
||||
width: visibleWidth,
|
||||
min: peaks.min,
|
||||
max: peaks.max,
|
||||
});
|
||||
});
|
||||
}, [audioBuffer, width, zoom, worker, peaksCache]);
|
||||
|
||||
// Draw waveform
|
||||
React.useEffect(() => {
|
||||
const canvas = canvasRef.current;
|
||||
if (!canvas || !audioBuffer) return;
|
||||
if (!canvas || !audioBuffer || !peaksCache) return;
|
||||
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (!ctx) return;
|
||||
@@ -75,8 +110,8 @@ export function Waveform({
|
||||
// Calculate visible width based on zoom
|
||||
const visibleWidth = Math.floor(width * zoom);
|
||||
|
||||
// Generate peaks for visible portion
|
||||
const { min, max } = generateMinMaxPeaks(audioBuffer, visibleWidth, 0);
|
||||
// Use cached peaks
|
||||
const { min, max } = peaksCache;
|
||||
|
||||
// Draw waveform
|
||||
const middle = height / 2;
|
||||
@@ -176,7 +211,7 @@ export function Waveform({
|
||||
ctx.lineTo(progressX, height);
|
||||
ctx.stroke();
|
||||
}
|
||||
}, [audioBuffer, width, height, currentTime, duration, zoom, scrollOffset, amplitudeScale, selection]);
|
||||
}, [audioBuffer, width, height, currentTime, duration, zoom, scrollOffset, amplitudeScale, selection, peaksCache]);
|
||||
|
||||
const handleClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (!onSeek || !duration || isDragging) return;
|
||||
|
||||
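The Waveform diff above offloads peak generation to a worker via worker.generateMinMaxPeaks(channelData, visibleWidth) and caches the resulting { min, max } arrays. A typical min/max reduction computes one pair per output column, roughly as in this sketch; the bucketing-by-floor-division detail is an assumption, and the project's @/lib/waveform/peaks implementation may differ.

// Illustrative sketch (assumption): reduce raw samples to one min/max pair per
// output column, matching the { min, max } shape the Waveform component draws.
function generateMinMaxPeaksSketch(samples: Float32Array, columns: number) {
  const min = new Float32Array(columns).fill(1);
  const max = new Float32Array(columns).fill(-1);
  const samplesPerColumn = Math.max(1, Math.floor(samples.length / columns));
  for (let col = 0; col < columns; col++) {
    const start = col * samplesPerColumn;
    const end = Math.min(start + samplesPerColumn, samples.length);
    for (let i = start; i < end; i++) {
      const s = samples[i];
      if (s < min[col]) min[col] = s;
      if (s > max[col]) max[col] = s;
    }
    if (end <= start) { // column past the end of the buffer: draw silence
      min[col] = 0;
      max[col] = 0;
    }
  }
  return { min, max };
}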
144
components/effects/EffectBrowser.tsx
Normal file
@@ -0,0 +1,144 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { X, Search } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { EffectType } from '@/lib/audio/effects/chain';
|
||||
import { EFFECT_NAMES } from '@/lib/audio/effects/chain';
|
||||
|
||||
export interface EffectBrowserProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
onSelectEffect: (effectType: EffectType) => void;
|
||||
}
|
||||
|
||||
const EFFECT_CATEGORIES = {
|
||||
'Dynamics': ['compressor', 'limiter', 'gate'] as EffectType[],
|
||||
'Filters': ['lowpass', 'highpass', 'bandpass', 'notch', 'lowshelf', 'highshelf', 'peaking'] as EffectType[],
|
||||
'Time-Based': ['delay', 'reverb', 'chorus', 'flanger', 'phaser'] as EffectType[],
|
||||
'Distortion': ['distortion', 'bitcrusher'] as EffectType[],
|
||||
'Pitch & Time': ['pitch', 'timestretch'] as EffectType[],
|
||||
};
|
||||
|
||||
const EFFECT_DESCRIPTIONS: Record<EffectType, string> = {
|
||||
'compressor': 'Reduce dynamic range and control peaks',
|
||||
'limiter': 'Prevent audio from exceeding a maximum level',
|
||||
'gate': 'Reduce noise by cutting low-level signals',
|
||||
'lowpass': 'Allow frequencies below cutoff to pass',
|
||||
'highpass': 'Allow frequencies above cutoff to pass',
|
||||
'bandpass': 'Allow frequencies within a range to pass',
|
||||
'notch': 'Remove a specific frequency range',
|
||||
'lowshelf': 'Boost or cut low frequencies',
|
||||
'highshelf': 'Boost or cut high frequencies',
|
||||
'peaking': 'Boost or cut a specific frequency band',
|
||||
'delay': 'Create echoes and rhythmic repeats',
|
||||
'reverb': 'Simulate acoustic space and ambience',
|
||||
'chorus': 'Thicken sound with subtle pitch variations',
|
||||
'flanger': 'Create sweeping comb filter effects',
|
||||
'phaser': 'Create phase-shifted modulation effects',
|
||||
'distortion': 'Add harmonic saturation and grit',
|
||||
'bitcrusher': 'Reduce bit depth for lo-fi effects',
|
||||
'pitch': 'Shift pitch without changing tempo',
|
||||
'timestretch': 'Change tempo without affecting pitch',
|
||||
};
|
||||
|
||||
export function EffectBrowser({ open, onClose, onSelectEffect }: EffectBrowserProps) {
|
||||
const [search, setSearch] = React.useState('');
|
||||
const [selectedCategory, setSelectedCategory] = React.useState<string | null>(null);
|
||||
|
||||
const handleSelectEffect = (effectType: EffectType) => {
|
||||
onSelectEffect(effectType);
|
||||
onClose();
|
||||
setSearch('');
|
||||
setSelectedCategory(null);
|
||||
};
|
||||
|
||||
const filteredCategories = React.useMemo(() => {
|
||||
if (!search) return EFFECT_CATEGORIES;
|
||||
|
||||
const searchLower = search.toLowerCase();
|
||||
const filtered: Record<string, EffectType[]> = {};
|
||||
|
||||
Object.entries(EFFECT_CATEGORIES).forEach(([category, effects]) => {
|
||||
const matchingEffects = effects.filter((effect) =>
|
||||
EFFECT_NAMES[effect].toLowerCase().includes(searchLower) ||
|
||||
EFFECT_DESCRIPTIONS[effect].toLowerCase().includes(searchLower)
|
||||
);
|
||||
if (matchingEffects.length > 0) {
|
||||
filtered[category] = matchingEffects;
|
||||
}
|
||||
});
|
||||
|
||||
return filtered;
|
||||
}, [search]);
|
||||
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50" onClick={onClose}>
|
||||
<div
|
||||
className="bg-card border border-border rounded-lg shadow-lg w-full max-w-2xl max-h-[80vh] flex flex-col"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between p-4 border-b border-border">
|
||||
<h2 className="text-lg font-semibold text-foreground">Add Effect</h2>
|
||||
<Button variant="ghost" size="icon-sm" onClick={onClose}>
|
||||
<X className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Search */}
|
||||
<div className="p-4 border-b border-border">
|
||||
<div className="relative">
|
||||
<Search className="absolute left-3 top-1/2 -translate-y-1/2 h-4 w-4 text-muted-foreground" />
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search effects..."
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.target.value)}
|
||||
className="w-full pl-10 pr-4 py-2 bg-background border border-border rounded-md text-sm text-foreground placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-primary"
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Content */}
|
||||
<div className="flex-1 overflow-y-auto custom-scrollbar p-4">
|
||||
<div className="space-y-6">
|
||||
{Object.entries(filteredCategories).map(([category, effects]) => (
|
||||
<div key={category}>
|
||||
<h3 className="text-sm font-semibold text-muted-foreground uppercase mb-2">
|
||||
{category}
|
||||
</h3>
|
||||
<div className="grid grid-cols-2 gap-2">
|
||||
{effects.map((effect) => (
|
||||
<button
|
||||
key={effect}
|
||||
onClick={() => handleSelectEffect(effect)}
|
||||
className={cn(
|
||||
'px-4 py-3 text-left rounded-md border transition-colors',
|
||||
'hover:bg-accent hover:border-primary',
|
||||
'border-border bg-card text-foreground'
|
||||
)}
|
||||
>
|
||||
<div className="font-medium text-sm">{EFFECT_NAMES[effect]}</div>
|
||||
<div className="text-xs text-muted-foreground">{EFFECT_DESCRIPTIONS[effect]}</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{Object.keys(filteredCategories).length === 0 && (
|
||||
<div className="text-center py-8 text-muted-foreground">
|
||||
No effects found matching "{search}"
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
132
components/effects/EffectDevice.tsx
Normal file
@@ -0,0 +1,132 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { ChevronLeft, ChevronRight, Power, X } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { ChainEffect } from '@/lib/audio/effects/chain';
|
||||
import { EffectParameters } from './EffectParameters';
|
||||
|
||||
export interface EffectDeviceProps {
|
||||
effect: ChainEffect;
|
||||
onToggleEnabled?: () => void;
|
||||
onRemove?: () => void;
|
||||
onUpdateParameters?: (parameters: any) => void;
|
||||
onToggleExpanded?: () => void;
|
||||
trackId?: string;
|
||||
isPlaying?: boolean;
|
||||
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
|
||||
automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
|
||||
}
|
||||
|
||||
export function EffectDevice({
|
||||
effect,
|
||||
onToggleEnabled,
|
||||
onRemove,
|
||||
onUpdateParameters,
|
||||
onToggleExpanded,
|
||||
trackId,
|
||||
isPlaying,
|
||||
onParameterTouched,
|
||||
automationLanes,
|
||||
}: EffectDeviceProps) {
|
||||
const isExpanded = effect.expanded || false;
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'flex-shrink-0 flex flex-col h-full transition-all duration-200 overflow-hidden rounded-md',
|
||||
effect.enabled
|
||||
? 'bg-card border-l border-r border-b border-border'
|
||||
: 'bg-card/40 border-l border-r border-b border-border/50 opacity-60 hover:opacity-80',
|
||||
isExpanded ? 'min-w-96' : 'w-10'
|
||||
)}
|
||||
>
|
||||
{!isExpanded ? (
|
||||
/* Collapsed State */
|
||||
<>
|
||||
{/* Colored top indicator */}
|
||||
<div className={cn('h-0.5 w-full', effect.enabled ? 'bg-primary' : 'bg-muted-foreground/20')} />
|
||||
|
||||
<button
|
||||
onClick={onToggleExpanded}
|
||||
className="w-full h-full flex flex-col items-center justify-between py-1 hover:bg-primary/10 transition-colors group"
|
||||
title={`Expand ${effect.name}`}
|
||||
>
|
||||
<ChevronRight className="h-3 w-3 flex-shrink-0 text-muted-foreground group-hover:text-primary transition-colors" />
|
||||
<span
|
||||
className="flex-1 text-xs font-medium whitespace-nowrap text-muted-foreground group-hover:text-primary transition-colors"
|
||||
style={{
|
||||
writingMode: 'vertical-rl',
|
||||
textOrientation: 'mixed',
|
||||
}}
|
||||
>
|
||||
{effect.name}
|
||||
</span>
|
||||
<div
|
||||
className={cn(
|
||||
'w-1.5 h-1.5 rounded-full flex-shrink-0 mb-1',
|
||||
effect.enabled ? 'bg-primary shadow-sm shadow-primary/50' : 'bg-muted-foreground/30'
|
||||
)}
|
||||
title={effect.enabled ? 'Enabled' : 'Disabled'}
|
||||
/>
|
||||
</button>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{/* Colored top indicator */}
|
||||
<div className={cn('h-0.5 w-full', effect.enabled ? 'bg-primary' : 'bg-muted-foreground/20')} />
|
||||
|
||||
{/* Full-Width Header Row */}
|
||||
<div className="flex items-center gap-1 px-2 py-1.5 border-b border-border/50 bg-muted/30 flex-shrink-0">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onToggleExpanded}
|
||||
title="Collapse device"
|
||||
className="h-5 w-5 flex-shrink-0"
|
||||
>
|
||||
<ChevronLeft className="h-3 w-3" />
|
||||
</Button>
|
||||
<span className="text-xs font-semibold flex-1 min-w-0 truncate">{effect.name}</span>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onToggleEnabled}
|
||||
title={effect.enabled ? 'Disable effect' : 'Enable effect'}
|
||||
className="h-5 w-5 flex-shrink-0"
|
||||
>
|
||||
<Power
|
||||
className={cn(
|
||||
'h-3 w-3',
|
||||
effect.enabled ? 'text-primary' : 'text-muted-foreground'
|
||||
)}
|
||||
/>
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onRemove}
|
||||
title="Remove effect"
|
||||
className="h-5 w-5 flex-shrink-0"
|
||||
>
|
||||
<X className="h-3 w-3 text-destructive" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Device Body */}
|
||||
<div className="flex-1 min-h-0 overflow-y-auto custom-scrollbar p-3 bg-card/50">
|
||||
<EffectParameters
|
||||
effect={effect}
|
||||
onUpdateParameters={onUpdateParameters}
|
||||
trackId={trackId}
|
||||
isPlaying={isPlaying}
|
||||
onParameterTouched={onParameterTouched}
|
||||
automationLanes={automationLanes}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
777
components/effects/EffectParameters.tsx
Normal file
@@ -0,0 +1,777 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { Slider } from '@/components/ui/Slider';
|
||||
import type { ChainEffect, EffectType } from '@/lib/audio/effects/chain';
|
||||
import type {
|
||||
PitchShifterParameters,
|
||||
TimeStretchParameters,
|
||||
DistortionParameters,
|
||||
BitcrusherParameters,
|
||||
} from '@/lib/audio/effects/advanced';
|
||||
import type {
|
||||
CompressorParameters,
|
||||
LimiterParameters,
|
||||
GateParameters,
|
||||
} from '@/lib/audio/effects/dynamics';
|
||||
import type {
|
||||
DelayParameters,
|
||||
ReverbParameters,
|
||||
ChorusParameters,
|
||||
FlangerParameters,
|
||||
PhaserParameters,
|
||||
} from '@/lib/audio/effects/time-based';
|
||||
import type { FilterOptions } from '@/lib/audio/effects/filters';
|
||||
|
||||
export interface EffectParametersProps {
|
||||
effect: ChainEffect;
|
||||
onUpdateParameters?: (parameters: any) => void;
|
||||
trackId?: string;
|
||||
isPlaying?: boolean;
|
||||
onParameterTouched?: (trackId: string, laneId: string, touched: boolean) => void;
|
||||
automationLanes?: Array<{ id: string; parameterId: string; mode: string }>;
|
||||
}
|
||||
|
||||
export function EffectParameters({
|
||||
effect,
|
||||
onUpdateParameters,
|
||||
trackId,
|
||||
isPlaying,
|
||||
onParameterTouched,
|
||||
automationLanes = []
|
||||
}: EffectParametersProps) {
|
||||
const params = effect.parameters || {};
|
||||
|
||||
const updateParam = (key: string, value: any) => {
|
||||
if (onUpdateParameters) {
|
||||
onUpdateParameters({ ...params, [key]: value });
|
||||
}
|
||||
};
|
||||
|
||||
// Memoize touch handlers for all parameters
|
||||
const touchHandlers = React.useMemo(() => {
|
||||
if (!trackId || !isPlaying || !onParameterTouched || !automationLanes) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const handlers: Record<string, { onTouchStart: () => void; onTouchEnd: () => void }> = {};
|
||||
|
||||
automationLanes.forEach(lane => {
|
||||
if (!lane.parameterId.startsWith(`effect.${effect.id}.`)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// For effect parameters, write mode works like touch mode
|
||||
if (lane.mode !== 'touch' && lane.mode !== 'latch' && lane.mode !== 'write') {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract parameter name from parameterId (effect.{effectId}.{paramName})
|
||||
const parts = lane.parameterId.split('.');
|
||||
if (parts.length !== 3) return;
|
||||
const paramName = parts[2];
|
||||
|
||||
handlers[paramName] = {
|
||||
onTouchStart: () => {
|
||||
queueMicrotask(() => onParameterTouched(trackId, lane.id, true));
|
||||
},
|
||||
onTouchEnd: () => {
|
||||
queueMicrotask(() => onParameterTouched(trackId, lane.id, false));
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
return handlers;
|
||||
}, [trackId, isPlaying, onParameterTouched, effect.id, automationLanes]);
|
||||
|
||||
// Helper to get touch handlers for a parameter
|
||||
const getTouchHandlers = (paramName: string) => {
|
||||
return touchHandlers[paramName] || {};
|
||||
};
|
||||
|
||||
// Filter effects
|
||||
if (['lowpass', 'highpass', 'bandpass', 'notch', 'lowshelf', 'highshelf', 'peaking'].includes(effect.type)) {
|
||||
const filterParams = params as FilterOptions;
|
||||
return (
|
||||
<div className="grid grid-cols-2 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Frequency: {Math.round(filterParams.frequency || 1000)} Hz
|
||||
</label>
|
||||
<Slider
|
||||
value={[filterParams.frequency || 1000]}
|
||||
onValueChange={([value]) => updateParam('frequency', value)}
|
||||
min={20}
|
||||
max={20000}
|
||||
step={1}
|
||||
{...getTouchHandlers('frequency')}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Q: {(filterParams.Q || 1).toFixed(2)}
|
||||
</label>
|
||||
<Slider
|
||||
value={[filterParams.Q || 1]}
|
||||
onValueChange={([value]) => updateParam('Q', value)}
|
||||
min={0.1}
|
||||
max={20}
|
||||
step={0.1}
|
||||
{...getTouchHandlers('Q')}
|
||||
/>
|
||||
</div>
|
||||
{['lowshelf', 'highshelf', 'peaking'].includes(effect.type) && (
|
||||
<div className="space-y-1 col-span-2">
|
||||
<label className="text-xs font-medium">
|
||||
Gain: {(filterParams.gain || 0).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[filterParams.gain || 0]}
|
||||
onValueChange={([value]) => updateParam('gain', value)}
|
||||
min={-40}
|
||||
max={40}
|
||||
step={0.5}
|
||||
{...getTouchHandlers('gain')}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Compressor
|
||||
if (effect.type === 'compressor') {
|
||||
const compParams = params as CompressorParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Threshold: {(compParams.threshold || -24).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[compParams.threshold || -24]}
|
||||
onValueChange={([value]) => updateParam('threshold', value)}
|
||||
min={-60}
|
||||
max={0}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Ratio: {(compParams.ratio || 4).toFixed(1)}:1
|
||||
</label>
|
||||
<Slider
|
||||
value={[compParams.ratio || 4]}
|
||||
onValueChange={([value]) => updateParam('ratio', value)}
|
||||
min={1}
|
||||
max={20}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Knee: {(compParams.knee || 30).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[compParams.knee || 30]}
|
||||
onValueChange={([value]) => updateParam('knee', value)}
|
||||
min={0}
|
||||
max={40}
|
||||
step={1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Attack: {(compParams.attack || 0.003).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[compParams.attack || 0.003]}
|
||||
onValueChange={([value]) => updateParam('attack', value)}
|
||||
min={0.001}
|
||||
max={1}
|
||||
step={0.001}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Release: {(compParams.release || 0.25).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[compParams.release || 0.25]}
|
||||
onValueChange={([value]) => updateParam('release', value)}
|
||||
min={0.01}
|
||||
max={3}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Limiter
|
||||
if (effect.type === 'limiter') {
|
||||
const limParams = params as LimiterParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Threshold: {(limParams.threshold || -3).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[limParams.threshold || -3]}
|
||||
onValueChange={([value]) => updateParam('threshold', value)}
|
||||
min={-30}
|
||||
max={0}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Release: {(limParams.release || 0.05).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[limParams.release || 0.05]}
|
||||
onValueChange={([value]) => updateParam('release', value)}
|
||||
min={0.01}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Makeup: {(limParams.makeupGain || 0).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[limParams.makeupGain || 0]}
|
||||
onValueChange={([value]) => updateParam('makeupGain', value)}
|
||||
min={0}
|
||||
max={20}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Gate
|
||||
if (effect.type === 'gate') {
|
||||
const gateParams = params as GateParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-2 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Threshold: {(gateParams.threshold || -40).toFixed(1)} dB
|
||||
</label>
|
||||
<Slider
|
||||
value={[gateParams.threshold || -40]}
|
||||
onValueChange={([value]) => updateParam('threshold', value)}
|
||||
min={-80}
|
||||
max={0}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Ratio: {(gateParams.ratio || 10).toFixed(1)}:1
|
||||
</label>
|
||||
<Slider
|
||||
value={[gateParams.ratio || 10]}
|
||||
onValueChange={([value]) => updateParam('ratio', value)}
|
||||
min={1}
|
||||
max={20}
|
||||
step={0.5}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Attack: {(gateParams.attack || 0.001).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[gateParams.attack || 0.001]}
|
||||
onValueChange={([value]) => updateParam('attack', value)}
|
||||
min={0.0001}
|
||||
max={0.5}
|
||||
step={0.0001}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Release: {(gateParams.release || 0.1).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[gateParams.release || 0.1]}
|
||||
onValueChange={([value]) => updateParam('release', value)}
|
||||
min={0.01}
|
||||
max={3}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Delay
|
||||
if (effect.type === 'delay') {
|
||||
const delayParams = params as DelayParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Time: {(delayParams.time || 0.5).toFixed(3)} s
|
||||
</label>
|
||||
<Slider
|
||||
value={[delayParams.time || 0.5]}
|
||||
onValueChange={([value]) => updateParam('time', value)}
|
||||
min={0.001}
|
||||
max={2}
|
||||
step={0.001}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Feedback: {((delayParams.feedback || 0.3) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[delayParams.feedback || 0.3]}
|
||||
onValueChange={([value]) => updateParam('feedback', value)}
|
||||
min={0}
|
||||
max={0.9}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((delayParams.mix || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[delayParams.mix || 0.5]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Reverb
|
||||
if (effect.type === 'reverb') {
|
||||
const reverbParams = params as ReverbParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Room Size: {((reverbParams.roomSize || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[reverbParams.roomSize || 0.5]}
|
||||
onValueChange={([value]) => updateParam('roomSize', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Damping: {((reverbParams.damping || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[reverbParams.damping || 0.5]}
|
||||
onValueChange={([value]) => updateParam('damping', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((reverbParams.mix || 0.3) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[reverbParams.mix || 0.3]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Chorus
|
||||
if (effect.type === 'chorus') {
|
||||
const chorusParams = params as ChorusParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Rate: {(chorusParams.rate || 1.5).toFixed(2)} Hz
|
||||
</label>
|
||||
<Slider
|
||||
value={[chorusParams.rate || 1.5]}
|
||||
onValueChange={([value]) => updateParam('rate', value)}
|
||||
min={0.1}
|
||||
max={10}
|
||||
step={0.1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Depth: {((chorusParams.depth || 0.002) * 1000).toFixed(2)} ms
|
||||
</label>
|
||||
<Slider
|
||||
value={[chorusParams.depth || 0.002]}
|
||||
onValueChange={([value]) => updateParam('depth', value)}
|
||||
min={0.0001}
|
||||
max={0.01}
|
||||
step={0.0001}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((chorusParams.mix || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[chorusParams.mix || 0.5]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Flanger
|
||||
if (effect.type === 'flanger') {
|
||||
const flangerParams = params as FlangerParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Rate: {(flangerParams.rate || 0.5).toFixed(2)} Hz
|
||||
</label>
|
||||
<Slider
|
||||
value={[flangerParams.rate || 0.5]}
|
||||
onValueChange={([value]) => updateParam('rate', value)}
|
||||
min={0.1}
|
||||
max={10}
|
||||
step={0.1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Depth: {((flangerParams.depth || 0.002) * 1000).toFixed(2)} ms
|
||||
</label>
|
||||
<Slider
|
||||
value={[flangerParams.depth || 0.002]}
|
||||
onValueChange={([value]) => updateParam('depth', value)}
|
||||
min={0.0001}
|
||||
max={0.01}
|
||||
step={0.0001}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Feedback: {((flangerParams.feedback || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[flangerParams.feedback || 0.5]}
|
||||
onValueChange={([value]) => updateParam('feedback', value)}
|
||||
min={0}
|
||||
max={0.95}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((flangerParams.mix || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[flangerParams.mix || 0.5]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Phaser
|
||||
if (effect.type === 'phaser') {
|
||||
const phaserParams = params as PhaserParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Rate: {(phaserParams.rate || 0.5).toFixed(2)} Hz
|
||||
</label>
|
||||
<Slider
|
||||
value={[phaserParams.rate || 0.5]}
|
||||
onValueChange={([value]) => updateParam('rate', value)}
|
||||
min={0.1}
|
||||
max={10}
|
||||
step={0.1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Depth: {((phaserParams.depth || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[phaserParams.depth || 0.5]}
|
||||
onValueChange={([value]) => updateParam('depth', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Stages: {phaserParams.stages || 4}
|
||||
</label>
|
||||
<Slider
|
||||
value={[phaserParams.stages || 4]}
|
||||
onValueChange={([value]) => updateParam('stages', Math.round(value))}
|
||||
min={2}
|
||||
max={12}
|
||||
step={1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((phaserParams.mix || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[phaserParams.mix || 0.5]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Pitch Shifter
|
||||
if (effect.type === 'pitch') {
|
||||
const pitchParams = params as PitchShifterParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Semitones: {pitchParams.semitones || 0}
|
||||
</label>
|
||||
<Slider
|
||||
value={[pitchParams.semitones || 0]}
|
||||
onValueChange={([value]) => updateParam('semitones', Math.round(value))}
|
||||
min={-12}
|
||||
max={12}
|
||||
step={1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Cents: {pitchParams.cents || 0}
|
||||
</label>
|
||||
<Slider
|
||||
value={[pitchParams.cents || 0]}
|
||||
onValueChange={([value]) => updateParam('cents', Math.round(value))}
|
||||
min={-100}
|
||||
max={100}
|
||||
step={1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((pitchParams.mix || 1) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[pitchParams.mix || 1]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Time Stretch
|
||||
if (effect.type === 'timestretch') {
|
||||
const stretchParams = params as TimeStretchParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Rate: {(stretchParams.rate || 1).toFixed(2)}x
|
||||
</label>
|
||||
<Slider
|
||||
value={[stretchParams.rate || 1]}
|
||||
onValueChange={([value]) => updateParam('rate', value)}
|
||||
min={0.5}
|
||||
max={2}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex items-center gap-2 py-1 px-2 border-b border-border/30">
|
||||
<input
|
||||
type="checkbox"
|
||||
id={`preserve-pitch-${effect.id}`}
|
||||
checked={stretchParams.preservePitch ?? true}
|
||||
onChange={(e) => updateParam('preservePitch', e.target.checked)}
|
||||
className="h-3 w-3 rounded border-border"
|
||||
/>
|
||||
<label htmlFor={`preserve-pitch-${effect.id}`} className="text-xs">
|
||||
Preserve Pitch
|
||||
</label>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((stretchParams.mix || 1) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[stretchParams.mix || 1]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Distortion
|
||||
if (effect.type === 'distortion') {
|
||||
const distParams = params as DistortionParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">Type</label>
|
||||
<div className="grid grid-cols-3 gap-1">
|
||||
{(['soft', 'hard', 'tube'] as const).map((type) => (
|
||||
<Button
|
||||
key={type}
|
||||
variant={(distParams.type || 'soft') === type ? 'secondary' : 'outline'}
|
||||
size="sm"
|
||||
onClick={() => updateParam('type', type)}
|
||||
className="text-xs py-1 h-auto"
|
||||
>
|
||||
{type}
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Drive: {((distParams.drive || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[distParams.drive || 0.5]}
|
||||
onValueChange={([value]) => updateParam('drive', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Tone: {((distParams.tone || 0.5) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[distParams.tone || 0.5]}
|
||||
onValueChange={([value]) => updateParam('tone', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Output: {((distParams.output || 0.7) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[distParams.output || 0.7]}
|
||||
onValueChange={([value]) => updateParam('output', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((distParams.mix || 1) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[distParams.mix || 1]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Bitcrusher
|
||||
if (effect.type === 'bitcrusher') {
|
||||
const crushParams = params as BitcrusherParameters;
|
||||
return (
|
||||
<div className="grid grid-cols-3 gap-x-4 gap-y-2">
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Bit Depth: {crushParams.bitDepth || 8} bits
|
||||
</label>
|
||||
<Slider
|
||||
value={[crushParams.bitDepth || 8]}
|
||||
onValueChange={([value]) => updateParam('bitDepth', Math.round(value))}
|
||||
min={1}
|
||||
max={16}
|
||||
step={1}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Sample Rate: {crushParams.sampleRate || 8000} Hz
|
||||
</label>
|
||||
<Slider
|
||||
value={[crushParams.sampleRate || 8000]}
|
||||
onValueChange={([value]) => updateParam('sampleRate', Math.round(value))}
|
||||
min={100}
|
||||
max={48000}
|
||||
step={100}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
<label className="text-xs font-medium">
|
||||
Mix: {((crushParams.mix || 1) * 100).toFixed(0)}%
|
||||
</label>
|
||||
<Slider
|
||||
value={[crushParams.mix || 1]}
|
||||
onValueChange={([value]) => updateParam('mix', value)}
|
||||
min={0}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Fallback for unknown effects
return (
<div className="text-xs text-muted-foreground/70 italic text-center py-4">
No parameters available
</div>
);
}
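Each effect block above repeats the same label-plus-Slider markup and falls back to its default with `||`, which also overrides a legitimate value of 0 (a delay feedback of 0 would display as 30%). A minimal consolidation sketch, assuming the same `Slider` import and `updateParam(key, value)` callback used above; the `ParamSlider` name, its props, and the `??` fallback are illustrative and not part of this diff:

```tsx
// Hypothetical helper consolidating the repeated label + Slider markup above.
// `??` keeps an explicit zero instead of replacing it with the default.
interface ParamSliderProps {
  label: string;
  value: number | undefined;
  defaultValue: number;
  min: number;
  max: number;
  step: number;
  format: (v: number) => string;        // e.g. (v) => `${v.toFixed(1)} dB`
  onChange: (value: number) => void;    // e.g. (v) => updateParam('threshold', v)
}

function ParamSlider({ label, value, defaultValue, min, max, step, format, onChange }: ParamSliderProps) {
  const current = value ?? defaultValue;
  return (
    <div className="space-y-1">
      <label className="text-xs font-medium">
        {label}: {format(current)}
      </label>
      <Slider
        value={[current]}
        onValueChange={([v]) => onChange(v)}
        min={min}
        max={max}
        step={step}
      />
    </div>
  );
}
```

Usage would then look like, for example, `<ParamSlider label="Threshold" value={limParams.threshold} defaultValue={-3} min={-30} max={0} step={0.5} format={(v) => `${v.toFixed(1)} dB`} onChange={(v) => updateParam('threshold', v)} />`.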
components/effects/EffectsPanel.tsx (Normal file, 202 lines)
@@ -0,0 +1,202 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { ChevronDown, ChevronUp, Plus } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { EffectDevice } from './EffectDevice';
|
||||
import { EffectBrowser } from './EffectBrowser';
|
||||
import type { Track } from '@/types/track';
|
||||
import type { EffectType } from '@/lib/audio/effects/chain';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface EffectsPanelProps {
|
||||
track: Track | null; // Selected track
|
||||
visible: boolean;
|
||||
height: number;
|
||||
onToggleVisible: () => void;
|
||||
onResizeHeight: (height: number) => void;
|
||||
onAddEffect?: (effectType: EffectType) => void;
|
||||
onToggleEffect?: (effectId: string) => void;
|
||||
onRemoveEffect?: (effectId: string) => void;
|
||||
onUpdateEffect?: (effectId: string, parameters: any) => void;
|
||||
onToggleEffectExpanded?: (effectId: string) => void;
|
||||
}
|
||||
|
||||
export function EffectsPanel({
|
||||
track,
|
||||
visible,
|
||||
height,
|
||||
onToggleVisible,
|
||||
onResizeHeight,
|
||||
onAddEffect,
|
||||
onToggleEffect,
|
||||
onRemoveEffect,
|
||||
onUpdateEffect,
|
||||
onToggleEffectExpanded,
|
||||
}: EffectsPanelProps) {
|
||||
const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
|
||||
const [isResizing, setIsResizing] = React.useState(false);
|
||||
const resizeStartRef = React.useRef({ y: 0, height: 0 });
|
||||
|
||||
// Resize handler
|
||||
const handleResizeStart = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
setIsResizing(true);
|
||||
resizeStartRef.current = { y: e.clientY, height };
|
||||
},
|
||||
[height]
|
||||
);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!isResizing) return;
|
||||
|
||||
const handleMouseMove = (e: MouseEvent) => {
|
||||
const delta = resizeStartRef.current.y - e.clientY;
|
||||
const newHeight = Math.max(200, Math.min(600, resizeStartRef.current.height + delta));
|
||||
onResizeHeight(newHeight);
|
||||
};
|
||||
|
||||
const handleMouseUp = () => {
|
||||
setIsResizing(false);
|
||||
};
|
||||
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
};
|
||||
}, [isResizing, onResizeHeight]);
|
||||
|
||||
if (!visible) {
|
||||
// Collapsed state - just show header bar
|
||||
return (
|
||||
<div className="h-8 bg-card border-t border-border flex items-center px-3 gap-2 flex-shrink-0">
|
||||
<button
|
||||
onClick={onToggleVisible}
|
||||
className="flex items-center gap-2 flex-1 hover:text-primary transition-colors text-sm font-medium"
|
||||
>
|
||||
<ChevronUp className="h-4 w-4" />
|
||||
<span>Device View</span>
|
||||
{track && (
|
||||
<span className="text-muted-foreground">- {track.name}</span>
|
||||
)}
|
||||
</button>
|
||||
{track && (
|
||||
<div className="flex items-center gap-1">
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{track.effectChain.effects.length} device(s)
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className="bg-card border-t border-border flex flex-col flex-shrink-0 transition-all duration-300 ease-in-out"
|
||||
style={{ height }}
|
||||
>
|
||||
{/* Resize handle */}
|
||||
<div
|
||||
className={cn(
|
||||
'h-1 cursor-ns-resize hover:bg-primary/50 transition-colors group flex items-center justify-center',
|
||||
isResizing && 'bg-primary/50'
|
||||
)}
|
||||
onMouseDown={handleResizeStart}
|
||||
title="Drag to resize panel"
|
||||
>
|
||||
<div className="h-px w-16 bg-border group-hover:bg-primary transition-colors" />
|
||||
</div>
|
||||
|
||||
{/* Header */}
|
||||
<div className="h-10 flex-shrink-0 border-b border-border flex items-center px-3 gap-2 bg-muted/30">
|
||||
<button
|
||||
onClick={onToggleVisible}
|
||||
className="flex items-center gap-2 flex-1 hover:text-primary transition-colors"
|
||||
>
|
||||
<ChevronDown className="h-4 w-4" />
|
||||
<span className="text-sm font-medium">Device View</span>
|
||||
{track && (
|
||||
<>
|
||||
<span className="text-sm text-muted-foreground">-</span>
|
||||
<div
|
||||
className="w-0.5 h-4 rounded-full"
|
||||
style={{ backgroundColor: track.color }}
|
||||
/>
|
||||
<span className="text-sm font-semibold text-foreground">{track.name}</span>
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
|
||||
{track && (
|
||||
<>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{track.effectChain.effects.length} device(s)
|
||||
</span>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => setEffectBrowserOpen(true)}
|
||||
title="Add effect"
|
||||
className="h-7 w-7"
|
||||
>
|
||||
<Plus className="h-4 w-4" />
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Device Rack */}
|
||||
<div className="flex-1 overflow-x-auto overflow-y-hidden custom-scrollbar bg-background/50 p-3">
|
||||
{!track ? (
|
||||
<div className="h-full flex items-center justify-center text-sm text-muted-foreground">
|
||||
Select a track to view its devices
|
||||
</div>
|
||||
) : track.effectChain.effects.length === 0 ? (
|
||||
<div className="h-full flex flex-col items-center justify-center text-sm text-muted-foreground gap-2">
|
||||
<p>No devices on this track</p>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => setEffectBrowserOpen(true)}
|
||||
>
|
||||
<Plus className="h-4 w-4 mr-1" />
|
||||
Add Device
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex h-full gap-3">
|
||||
{track.effectChain.effects.map((effect) => (
|
||||
<EffectDevice
|
||||
key={effect.id}
|
||||
effect={effect}
|
||||
onToggleEnabled={() => onToggleEffect?.(effect.id)}
|
||||
onRemove={() => onRemoveEffect?.(effect.id)}
|
||||
onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
|
||||
onToggleExpanded={() => onToggleEffectExpanded?.(effect.id)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Effect Browser Dialog */}
|
||||
{track && (
|
||||
<EffectBrowser
|
||||
open={effectBrowserOpen}
|
||||
onClose={() => setEffectBrowserOpen(false)}
|
||||
onSelectEffect={(effectType) => {
|
||||
if (onAddEffect) {
|
||||
onAddEffect(effectType);
|
||||
}
|
||||
setEffectBrowserOpen(false);
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
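For orientation, a sketch of how a host view might wire EffectsPanel up; the `DeviceViewHost` component, its state, and the 320 px default are assumptions, only the prop names come from the component above (which itself clamps resize to 200-600 px):

```tsx
// Hypothetical host component; state shape and defaults are illustrative.
// Assumes the same React, Track, and EffectsPanel imports as the file above.
function DeviceViewHost({ selectedTrack }: { selectedTrack: Track | null }) {
  const [visible, setVisible] = React.useState(true);
  const [height, setHeight] = React.useState(320); // within the panel's 200-600 px clamp

  return (
    <EffectsPanel
      track={selectedTrack}
      visible={visible}
      height={height}
      onToggleVisible={() => setVisible((v) => !v)}
      onResizeHeight={setHeight}                    // panel clamps while dragging
      onAddEffect={(type) => console.log('add', type)}
    />
  );
}
```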
@@ -9,13 +9,10 @@ import {
|
||||
Trash2,
|
||||
Link2,
|
||||
FolderOpen,
|
||||
Volume2,
|
||||
Music2,
|
||||
} from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { Slider } from '@/components/ui/Slider';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import { formatDuration } from '@/lib/audio/decoder';
|
||||
import type { Track } from '@/types/track';
|
||||
import type { EffectChain, EffectPreset } from '@/lib/audio/effects/chain';
|
||||
import { EffectRack } from '@/components/effects/EffectRack';
|
||||
@@ -31,16 +28,23 @@ export interface SidePanelProps {
|
||||
onRemoveTrack: (trackId: string) => void;
|
||||
onClearTracks: () => void;
|
||||
|
||||
// Effect chain
|
||||
effectChain: EffectChain;
|
||||
effectPresets: EffectPreset[];
|
||||
onToggleEffect: (effectId: string) => void;
|
||||
onRemoveEffect: (effectId: string) => void;
|
||||
onReorderEffects: (fromIndex: number, toIndex: number) => void;
|
||||
onSavePreset: (preset: EffectPreset) => void;
|
||||
onLoadPreset: (preset: EffectPreset) => void;
|
||||
onDeletePreset: (presetId: string) => void;
|
||||
onClearChain: () => void;
|
||||
// Track effect chain (for selected track)
|
||||
trackEffectChain: EffectChain | null;
|
||||
onToggleTrackEffect: (effectId: string) => void;
|
||||
onRemoveTrackEffect: (effectId: string) => void;
|
||||
onReorderTrackEffects: (fromIndex: number, toIndex: number) => void;
|
||||
onClearTrackChain: () => void;
|
||||
|
||||
// Master effect chain
|
||||
masterEffectChain: EffectChain;
|
||||
masterEffectPresets: EffectPreset[];
|
||||
onToggleMasterEffect: (effectId: string) => void;
|
||||
onRemoveMasterEffect: (effectId: string) => void;
|
||||
onReorderMasterEffects: (fromIndex: number, toIndex: number) => void;
|
||||
onSaveMasterPreset: (preset: EffectPreset) => void;
|
||||
onLoadMasterPreset: (preset: EffectPreset) => void;
|
||||
onDeleteMasterPreset: (presetId: string) => void;
|
||||
onClearMasterChain: () => void;
|
||||
|
||||
className?: string;
|
||||
}
|
||||
@@ -54,19 +58,24 @@ export function SidePanel({
|
||||
onUpdateTrack,
|
||||
onRemoveTrack,
|
||||
onClearTracks,
|
||||
effectChain,
|
||||
effectPresets,
|
||||
onToggleEffect,
|
||||
onRemoveEffect,
|
||||
onReorderEffects,
|
||||
onSavePreset,
|
||||
onLoadPreset,
|
||||
onDeletePreset,
|
||||
onClearChain,
|
||||
trackEffectChain,
|
||||
onToggleTrackEffect,
|
||||
onRemoveTrackEffect,
|
||||
onReorderTrackEffects,
|
||||
onClearTrackChain,
|
||||
masterEffectChain,
|
||||
masterEffectPresets,
|
||||
onToggleMasterEffect,
|
||||
onRemoveMasterEffect,
|
||||
onReorderMasterEffects,
|
||||
onSaveMasterPreset,
|
||||
onLoadMasterPreset,
|
||||
onDeleteMasterPreset,
|
||||
onClearMasterChain,
|
||||
className,
|
||||
}: SidePanelProps) {
|
||||
const [isCollapsed, setIsCollapsed] = React.useState(false);
|
||||
const [activeTab, setActiveTab] = React.useState<'tracks' | 'chain'>('tracks');
|
||||
const [activeTab, setActiveTab] = React.useState<'tracks' | 'master'>('tracks');
|
||||
const [presetDialogOpen, setPresetDialogOpen] = React.useState(false);
|
||||
|
||||
const selectedTrack = tracks.find((t) => t.id === selectedTrackId);
|
||||
@@ -98,19 +107,21 @@ export function SidePanel({
|
||||
<div className="flex items-center gap-1">
|
||||
<Button
|
||||
variant={activeTab === 'tracks' ? 'secondary' : 'ghost'}
|
||||
size="icon-sm"
|
||||
size="sm"
|
||||
onClick={() => setActiveTab('tracks')}
|
||||
title="Tracks"
|
||||
>
|
||||
<Music2 className="h-4 w-4" />
|
||||
<Music2 className="h-4 w-4 mr-1.5" />
|
||||
Tracks
|
||||
</Button>
|
||||
<Button
|
||||
variant={activeTab === 'chain' ? 'secondary' : 'ghost'}
|
||||
size="icon-sm"
|
||||
onClick={() => setActiveTab('chain')}
|
||||
title="Effect Chain"
|
||||
variant={activeTab === 'master' ? 'secondary' : 'ghost'}
|
||||
size="sm"
|
||||
onClick={() => setActiveTab('master')}
|
||||
title="Master"
|
||||
>
|
||||
<Link2 className="h-4 w-4" />
|
||||
<Link2 className="h-4 w-4 mr-1.5 text-primary" />
|
||||
Master
|
||||
</Button>
|
||||
</div>
|
||||
<Button
|
||||
@@ -130,7 +141,7 @@ export function SidePanel({
|
||||
{/* Track Actions */}
|
||||
<div className="space-y-2">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Multi-Track Editor
|
||||
Track Management
|
||||
</h3>
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
@@ -165,122 +176,25 @@ export function SidePanel({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Track List */}
|
||||
{/* Track List Summary */}
|
||||
{tracks.length > 0 ? (
|
||||
<div className="space-y-2">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Tracks ({tracks.length})
|
||||
</h3>
|
||||
<div className="space-y-2">
|
||||
{tracks.map((track) => {
|
||||
const isSelected = selectedTrackId === track.id;
|
||||
return (
|
||||
<div
|
||||
key={track.id}
|
||||
className={cn(
|
||||
'p-3 rounded-lg border transition-colors cursor-pointer',
|
||||
isSelected
|
||||
? 'bg-primary/10 border-primary'
|
||||
: 'bg-secondary/30 border-border hover:border-primary/50'
|
||||
)}
|
||||
onClick={() => onSelectTrack(isSelected ? null : track.id)}
|
||||
>
|
||||
<div className="flex items-center justify-between mb-2">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="text-sm font-medium text-foreground truncate">
|
||||
{String(track.name || 'Untitled Track')}
|
||||
</div>
|
||||
{track.audioBuffer && (
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{formatDuration(track.audioBuffer.duration)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onRemoveTrack(track.id);
|
||||
}}
|
||||
title="Remove track"
|
||||
>
|
||||
<Trash2 className="h-3.5 w-3.5 text-destructive" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Track Controls - Always visible */}
|
||||
<div className="space-y-2" onClick={(e) => e.stopPropagation()}>
|
||||
{/* Volume */}
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-xs text-muted-foreground flex items-center gap-1">
|
||||
<Volume2 className="h-3 w-3" />
|
||||
Volume
|
||||
</label>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{Math.round(track.volume * 100)}%
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={track.volume}
|
||||
onChange={(value) => onUpdateTrack(track.id, { volume: value })}
|
||||
min={0}
|
||||
max={2}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Pan */}
|
||||
<div className="space-y-1">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-xs text-muted-foreground">Pan</label>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{track.pan === 0
|
||||
? 'C'
|
||||
: track.pan < 0
|
||||
? `L${Math.round(Math.abs(track.pan) * 100)}`
|
||||
: `R${Math.round(track.pan * 100)}`}
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={track.pan}
|
||||
onChange={(value) => onUpdateTrack(track.id, { pan: value })}
|
||||
min={-1}
|
||||
max={1}
|
||||
step={0.01}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Solo / Mute */}
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
variant={track.solo ? 'default' : 'outline'}
|
||||
size="sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onUpdateTrack(track.id, { solo: !track.solo });
|
||||
}}
|
||||
className="flex-1 text-xs"
|
||||
>
|
||||
Solo
|
||||
</Button>
|
||||
<Button
|
||||
variant={track.mute ? 'default' : 'outline'}
|
||||
size="sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onUpdateTrack(track.id, { mute: !track.mute });
|
||||
}}
|
||||
className="flex-1 text-xs"
|
||||
>
|
||||
Mute
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Tracks ({tracks.length})
|
||||
</h3>
|
||||
{selectedTrack && (
|
||||
<span className="text-xs text-primary">
|
||||
{String(selectedTrack.name || 'Untitled Track')} selected
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<div className="text-xs text-muted-foreground">
|
||||
<p>
|
||||
{selectedTrack
|
||||
? 'Track controls are on the left of each track. Effects for the selected track are shown below.'
|
||||
: 'Click a track\'s waveform to select it and edit its effects below.'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
@@ -291,69 +205,97 @@ export function SidePanel({
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Selected Track Effects */}
|
||||
{selectedTrack && (
|
||||
<div className="space-y-2 pt-3 border-t border-border">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Track Effects
|
||||
</h3>
|
||||
{trackEffectChain && trackEffectChain.effects.length > 0 && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onClearTrackChain}
|
||||
title="Clear all effects"
|
||||
>
|
||||
<Trash2 className="h-4 w-4 text-destructive" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<EffectRack
|
||||
chain={trackEffectChain!}
|
||||
onToggleEffect={onToggleTrackEffect}
|
||||
onRemoveEffect={onRemoveTrackEffect}
|
||||
onReorderEffects={onReorderTrackEffects}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{activeTab === 'chain' && (
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
{activeTab === 'master' && (
|
||||
<>
|
||||
{/* Master Channel Info */}
|
||||
<div className="space-y-2">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Effect Chain
|
||||
{selectedTrack && (
|
||||
<span className="text-primary ml-2">({selectedTrack.name})</span>
|
||||
)}
|
||||
Master Channel
|
||||
</h3>
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => setPresetDialogOpen(true)}
|
||||
title="Manage presets"
|
||||
>
|
||||
<FolderOpen className="h-4 w-4" />
|
||||
</Button>
|
||||
{effectChain.effects.length > 0 && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onClearChain}
|
||||
title="Clear all effects"
|
||||
>
|
||||
<Trash2 className="h-4 w-4 text-destructive" />
|
||||
</Button>
|
||||
)}
|
||||
<div className="text-xs text-muted-foreground">
|
||||
<p>
|
||||
Master effects are applied to the final mix of all tracks.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{!selectedTrack ? (
|
||||
<div className="text-center py-8">
|
||||
<Link2 className="h-12 w-12 mx-auto text-muted-foreground/50 mb-2" />
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Select a track to apply effects
|
||||
</p>
|
||||
{/* Master Effects */}
|
||||
<div className="space-y-2 pt-3 border-t border-border">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-xs font-semibold text-muted-foreground uppercase">
|
||||
Master Effects
|
||||
</h3>
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => setPresetDialogOpen(true)}
|
||||
title="Manage presets"
|
||||
>
|
||||
<FolderOpen className="h-4 w-4" />
|
||||
</Button>
|
||||
{masterEffectChain.effects.length > 0 && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onClearMasterChain}
|
||||
title="Clear all effects"
|
||||
>
|
||||
<Trash2 className="h-4 w-4 text-destructive" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<EffectRack
|
||||
chain={effectChain}
|
||||
onToggleEffect={onToggleEffect}
|
||||
onRemoveEffect={onRemoveEffect}
|
||||
onReorderEffects={onReorderEffects}
|
||||
/>
|
||||
<PresetManager
|
||||
open={presetDialogOpen}
|
||||
onClose={() => setPresetDialogOpen(false)}
|
||||
currentChain={effectChain}
|
||||
presets={effectPresets}
|
||||
onSavePreset={onSavePreset}
|
||||
onLoadPreset={onLoadPreset}
|
||||
onDeletePreset={onDeletePreset}
|
||||
onExportPreset={() => {}}
|
||||
onImportPreset={(preset) => onSavePreset(preset)}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<EffectRack
|
||||
chain={masterEffectChain}
|
||||
onToggleEffect={onToggleMasterEffect}
|
||||
onRemoveEffect={onRemoveMasterEffect}
|
||||
onReorderEffects={onReorderMasterEffects}
|
||||
/>
|
||||
<PresetManager
|
||||
open={presetDialogOpen}
|
||||
onClose={() => setPresetDialogOpen(false)}
|
||||
currentChain={masterEffectChain}
|
||||
presets={masterEffectPresets}
|
||||
onSavePreset={onSaveMasterPreset}
|
||||
onLoadPreset={onLoadMasterPreset}
|
||||
onDeletePreset={onDeleteMasterPreset}
|
||||
onExportPreset={() => {}}
|
||||
onImportPreset={(preset) => onSaveMasterPreset(preset)}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
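One detail in the SidePanel hunk above worth noting: `chain={trackEffectChain!}` asserts non-null even though the prop is typed `EffectChain | null`. If the caller does not guarantee a chain whenever a track is selected, a guard avoids a runtime surprise; a possible alternative, not part of the diff:

```tsx
{selectedTrack && trackEffectChain && (
  <EffectRack
    chain={trackEffectChain}
    onToggleEffect={onToggleTrackEffect}
    onRemoveEffect={onRemoveTrackEffect}
    onReorderEffects={onReorderTrackEffects}
  />
)}
```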
components/markers/MarkerDialog.tsx (Normal file, 188 lines)
@@ -0,0 +1,188 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Modal } from '@/components/ui/Modal';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import type { Marker, MarkerType } from '@/types/marker';
|
||||
|
||||
export interface MarkerDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
onSave: (marker: Partial<Marker>) => void;
|
||||
marker?: Marker; // If editing existing marker
|
||||
defaultTime?: number; // Default time for new markers
|
||||
defaultType?: MarkerType;
|
||||
}
|
||||
|
||||
const MARKER_COLORS = [
|
||||
'#ef4444', // red
|
||||
'#f97316', // orange
|
||||
'#eab308', // yellow
|
||||
'#22c55e', // green
|
||||
'#3b82f6', // blue
|
||||
'#a855f7', // purple
|
||||
'#ec4899', // pink
|
||||
];
|
||||
|
||||
export function MarkerDialog({
|
||||
open,
|
||||
onClose,
|
||||
onSave,
|
||||
marker,
|
||||
defaultTime = 0,
|
||||
defaultType = 'point',
|
||||
}: MarkerDialogProps) {
|
||||
const [name, setName] = React.useState(marker?.name || '');
|
||||
const [type, setType] = React.useState<MarkerType>(marker?.type || defaultType);
|
||||
const [time, setTime] = React.useState(marker?.time || defaultTime);
|
||||
const [endTime, setEndTime] = React.useState(marker?.endTime || defaultTime + 1);
|
||||
const [color, setColor] = React.useState(marker?.color || MARKER_COLORS[0]);
|
||||
const [description, setDescription] = React.useState(marker?.description || '');
|
||||
|
||||
// Reset form when marker changes or dialog opens
|
||||
React.useEffect(() => {
|
||||
if (open) {
|
||||
setName(marker?.name || '');
|
||||
setType(marker?.type || defaultType);
|
||||
setTime(marker?.time || defaultTime);
|
||||
setEndTime(marker?.endTime || defaultTime + 1);
|
||||
setColor(marker?.color || MARKER_COLORS[0]);
|
||||
setDescription(marker?.description || '');
|
||||
}
|
||||
}, [open, marker, defaultTime, defaultType]);
|
||||
|
||||
const handleSave = () => {
|
||||
const markerData: Partial<Marker> = {
|
||||
...(marker?.id && { id: marker.id }),
|
||||
name: name || 'Untitled Marker',
|
||||
type,
|
||||
time,
|
||||
...(type === 'region' && { endTime }),
|
||||
color,
|
||||
description,
|
||||
};
|
||||
onSave(markerData);
|
||||
onClose();
|
||||
};
|
||||
|
||||
return (
|
||||
<Modal
|
||||
open={open}
|
||||
onClose={onClose}
|
||||
title={marker ? 'Edit Marker' : 'Add Marker'}
|
||||
description={marker ? 'Edit marker properties' : 'Add a new marker or region to the timeline'}
|
||||
size="md"
|
||||
footer={
|
||||
<>
|
||||
<Button variant="outline" onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={handleSave}>{marker ? 'Save' : 'Add'}</Button>
|
||||
</>
|
||||
}
|
||||
>
|
||||
<div className="space-y-4">
|
||||
{/* Name */}
|
||||
<div className="space-y-2">
|
||||
<label htmlFor="name" className="text-sm font-medium text-foreground">
|
||||
Name
|
||||
</label>
|
||||
<input
|
||||
id="name"
|
||||
type="text"
|
||||
value={name}
|
||||
onChange={(e) => setName(e.target.value)}
|
||||
placeholder="Marker name"
|
||||
className="flex h-10 w-full rounded-md border border-border bg-background px-3 py-2 text-sm text-foreground placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-primary focus:ring-offset-2 focus:ring-offset-background"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Type */}
|
||||
<div className="space-y-2">
|
||||
<label htmlFor="type" className="text-sm font-medium text-foreground">
|
||||
Type
|
||||
</label>
|
||||
<select
|
||||
id="type"
|
||||
value={type}
|
||||
onChange={(e) => setType(e.target.value as MarkerType)}
|
||||
className="flex h-10 w-full rounded-md border border-border bg-background px-3 py-2 text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary focus:ring-offset-2 focus:ring-offset-background"
|
||||
>
|
||||
<option value="point">Point Marker</option>
|
||||
<option value="region">Region</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{/* Time */}
|
||||
<div className="space-y-2">
|
||||
<label htmlFor="time" className="text-sm font-medium text-foreground">
|
||||
{type === 'region' ? 'Start Time' : 'Time'} (seconds)
|
||||
</label>
|
||||
<input
|
||||
id="time"
|
||||
type="number"
|
||||
step="0.1"
|
||||
min="0"
|
||||
value={time}
|
||||
onChange={(e) => setTime(parseFloat(e.target.value))}
|
||||
className="flex h-10 w-full rounded-md border border-border bg-background px-3 py-2 text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary focus:ring-offset-2 focus:ring-offset-background"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* End Time (for regions) */}
|
||||
{type === 'region' && (
|
||||
<div className="space-y-2">
|
||||
<label htmlFor="endTime" className="text-sm font-medium text-foreground">
|
||||
End Time (seconds)
|
||||
</label>
|
||||
<input
|
||||
id="endTime"
|
||||
type="number"
|
||||
step="0.1"
|
||||
min={time}
|
||||
value={endTime}
|
||||
onChange={(e) => setEndTime(parseFloat(e.target.value))}
|
||||
className="flex h-10 w-full rounded-md border border-border bg-background px-3 py-2 text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary focus:ring-offset-2 focus:ring-offset-background"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Color */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium text-foreground">
|
||||
Color
|
||||
</label>
|
||||
<div className="flex gap-2">
|
||||
{MARKER_COLORS.map((c) => (
|
||||
<button
|
||||
key={c}
|
||||
type="button"
|
||||
className="w-8 h-8 rounded border-2 transition-all hover:scale-110"
|
||||
style={{
|
||||
backgroundColor: c,
|
||||
borderColor: color === c ? 'white' : 'transparent',
|
||||
}}
|
||||
onClick={() => setColor(c)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Description */}
|
||||
<div className="space-y-2">
|
||||
<label htmlFor="description" className="text-sm font-medium text-foreground">
|
||||
Description (optional)
|
||||
</label>
|
||||
<input
|
||||
id="description"
|
||||
type="text"
|
||||
value={description}
|
||||
onChange={(e) => setDescription(e.target.value)}
|
||||
placeholder="Optional description"
|
||||
className="flex h-10 w-full rounded-md border border-border bg-background px-3 py-2 text-sm text-foreground placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-primary focus:ring-offset-2 focus:ring-offset-background"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
}
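A minimal sketch of consuming MarkerDialog's `onSave` payload; `addMarker` and `updateMarker` are hypothetical store helpers, and the id check mirrors the dialog only spreading `id` into the payload when editing an existing marker:

```tsx
// Hypothetical handlers; only the Partial<Marker> shape comes from the dialog above.
function handleSaveMarker(data: Partial<Marker>) {
  if (data.id) {
    updateMarker(data.id, data);                               // editing: dialog passed the existing id through
  } else {
    addMarker({ id: crypto.randomUUID(), ...data } as Marker); // new point or region marker
  }
}

// <MarkerDialog open={dialogOpen} onClose={() => setDialogOpen(false)}
//               onSave={handleSaveMarker} defaultTime={currentTime} />
```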
components/markers/MarkerTimeline.tsx (Normal file, 216 lines)
@@ -0,0 +1,216 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { Marker } from '@/types/marker';
|
||||
import { Flag, Edit2, Trash2 } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
|
||||
export interface MarkerTimelineProps {
|
||||
markers: Marker[];
|
||||
duration: number;
|
||||
currentTime: number;
|
||||
onMarkerClick?: (marker: Marker) => void;
|
||||
onMarkerEdit?: (marker: Marker) => void;
|
||||
onMarkerDelete?: (markerId: string) => void;
|
||||
onSeek?: (time: number) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function MarkerTimeline({
|
||||
markers,
|
||||
duration,
|
||||
currentTime,
|
||||
onMarkerClick,
|
||||
onMarkerEdit,
|
||||
onMarkerDelete,
|
||||
onSeek,
|
||||
className,
|
||||
}: MarkerTimelineProps) {
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
const [hoveredMarkerId, setHoveredMarkerId] = React.useState<string | null>(null);
|
||||
|
||||
const timeToX = React.useCallback(
|
||||
(time: number): number => {
|
||||
if (!containerRef.current) return 0;
|
||||
const width = containerRef.current.clientWidth;
|
||||
return (time / duration) * width;
|
||||
},
|
||||
[duration]
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn(
|
||||
'relative w-full h-8 bg-muted/30 border-b border-border',
|
||||
className
|
||||
)}
|
||||
>
|
||||
{/* Markers */}
|
||||
{markers.map((marker) => {
|
||||
const x = timeToX(marker.time);
|
||||
const isHovered = hoveredMarkerId === marker.id;
|
||||
|
||||
if (marker.type === 'point') {
|
||||
return (
|
||||
<div
|
||||
key={marker.id}
|
||||
className="absolute top-0 bottom-0 group cursor-pointer"
|
||||
style={{ left: `${x}px` }}
|
||||
onMouseEnter={() => setHoveredMarkerId(marker.id)}
|
||||
onMouseLeave={() => setHoveredMarkerId(null)}
|
||||
onClick={() => {
|
||||
onMarkerClick?.(marker);
|
||||
onSeek?.(marker.time);
|
||||
}}
|
||||
>
|
||||
{/* Marker line */}
|
||||
<div
|
||||
className={cn(
|
||||
'absolute top-0 bottom-0 w-0.5 transition-colors',
|
||||
isHovered ? 'bg-primary' : 'bg-primary/60'
|
||||
)}
|
||||
style={{ backgroundColor: marker.color }}
|
||||
/>
|
||||
|
||||
{/* Marker flag */}
|
||||
<Flag
|
||||
className={cn(
|
||||
'absolute top-0.5 -left-2 h-4 w-4 transition-colors',
|
||||
isHovered ? 'text-primary' : 'text-primary/60'
|
||||
)}
|
||||
style={{ color: marker.color }}
|
||||
/>
|
||||
|
||||
{/* Hover tooltip with actions */}
|
||||
{isHovered && (
|
||||
<div className="absolute top-full left-0 mt-1 z-10 bg-popover border border-border rounded shadow-lg p-2 min-w-[200px]">
|
||||
<div className="text-xs font-medium mb-1">{marker.name}</div>
|
||||
{marker.description && (
|
||||
<div className="text-xs text-muted-foreground mb-2">{marker.description}</div>
|
||||
)}
|
||||
<div className="flex gap-1">
|
||||
{onMarkerEdit && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onMarkerEdit(marker);
|
||||
}}
|
||||
title="Edit marker"
|
||||
className="h-6 w-6"
|
||||
>
|
||||
<Edit2 className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
{onMarkerDelete && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onMarkerDelete(marker.id);
|
||||
}}
|
||||
title="Delete marker"
|
||||
className="h-6 w-6 text-destructive hover:text-destructive"
|
||||
>
|
||||
<Trash2 className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
} else {
|
||||
// Region marker
|
||||
const endX = timeToX(marker.endTime || marker.time);
|
||||
const width = endX - x;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={marker.id}
|
||||
className="absolute top-0 bottom-0 group cursor-pointer"
|
||||
style={{ left: `${x}px`, width: `${width}px` }}
|
||||
onMouseEnter={() => setHoveredMarkerId(marker.id)}
|
||||
onMouseLeave={() => setHoveredMarkerId(null)}
|
||||
onClick={() => {
|
||||
onMarkerClick?.(marker);
|
||||
onSeek?.(marker.time);
|
||||
}}
|
||||
>
|
||||
{/* Region background */}
|
||||
<div
|
||||
className={cn(
|
||||
'absolute inset-0 transition-opacity',
|
||||
isHovered ? 'opacity-30' : 'opacity-20'
|
||||
)}
|
||||
style={{ backgroundColor: marker.color || 'var(--color-primary)' }}
|
||||
/>
|
||||
|
||||
{/* Region borders */}
|
||||
<div
|
||||
className="absolute top-0 bottom-0 left-0 w-0.5"
|
||||
style={{ backgroundColor: marker.color || 'var(--color-primary)' }}
|
||||
/>
|
||||
<div
|
||||
className="absolute top-0 bottom-0 right-0 w-0.5"
|
||||
style={{ backgroundColor: marker.color || 'var(--color-primary)' }}
|
||||
/>
|
||||
|
||||
{/* Region label */}
|
||||
<div
|
||||
className="absolute top-0.5 left-1 text-[10px] font-medium truncate pr-1"
|
||||
style={{ color: marker.color || 'var(--color-primary)', maxWidth: `${width - 8}px` }}
|
||||
>
|
||||
{marker.name}
|
||||
</div>
|
||||
|
||||
{/* Hover tooltip with actions */}
|
||||
{isHovered && (
|
||||
<div className="absolute top-full left-0 mt-1 z-10 bg-popover border border-border rounded shadow-lg p-2 min-w-[200px]">
|
||||
<div className="text-xs font-medium mb-1">{marker.name}</div>
|
||||
{marker.description && (
|
||||
<div className="text-xs text-muted-foreground mb-2">{marker.description}</div>
|
||||
)}
|
||||
<div className="flex gap-1">
|
||||
{onMarkerEdit && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onMarkerEdit(marker);
|
||||
}}
|
||||
title="Edit marker"
|
||||
className="h-6 w-6"
|
||||
>
|
||||
<Edit2 className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
{onMarkerDelete && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onMarkerDelete(marker.id);
|
||||
}}
|
||||
title="Delete marker"
|
||||
className="h-6 w-6 text-destructive hover:text-destructive"
|
||||
>
|
||||
<Trash2 className="h-3 w-3" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
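MarkerTimeline positions markers with a linear mapping, `x = (time / duration) * width`. If click-to-seek on empty ruler space were added, the inverse mapping would look like the sketch below; `xToTime` is illustrative and not part of the diff:

```ts
// Hypothetical inverse of timeToX: convert a click position back to a time in seconds.
function xToTime(clientX: number, container: HTMLDivElement, duration: number): number {
  const rect = container.getBoundingClientRect();
  const ratio = (clientX - rect.left) / rect.width;   // 0 at the left edge, 1 at the right
  return Math.max(0, Math.min(duration, ratio * duration));
}

// e.g. a 120 s file in a 600 px ruler: a click 150 px from the left seeks to 30 s.
```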
components/recording/InputLevelMeter.tsx (Normal file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface InputLevelMeterProps {
|
||||
level: number; // 0.0 to 1.0 (normalized dB scale)
|
||||
orientation?: 'horizontal' | 'vertical';
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function InputLevelMeter({
|
||||
level,
|
||||
orientation = 'horizontal',
|
||||
className,
|
||||
}: InputLevelMeterProps) {
|
||||
// Clamp level between 0 and 1
|
||||
const clampedLevel = Math.max(0, Math.min(1, level));
|
||||
|
||||
const isHorizontal = orientation === 'horizontal';
|
||||
|
||||
// Professional audio meter gradient:
|
||||
// Green (0-70% = -60dB to -18dB)
|
||||
// Yellow (70-90% = -18dB to -6dB)
|
||||
// Red (90-100% = -6dB to 0dB)
|
||||
const gradient = isHorizontal
|
||||
? 'linear-gradient(to right, rgb(34, 197, 94) 0%, rgb(34, 197, 94) 70%, rgb(234, 179, 8) 85%, rgb(239, 68, 68) 100%)'
|
||||
: 'linear-gradient(to top, rgb(34, 197, 94) 0%, rgb(34, 197, 94) 70%, rgb(234, 179, 8) 85%, rgb(239, 68, 68) 100%)';
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'relative bg-muted rounded-sm overflow-hidden',
|
||||
isHorizontal ? 'h-4 w-full' : 'w-4 h-full',
|
||||
className
|
||||
)}
|
||||
>
|
||||
{/* Level bar with gradient */}
|
||||
<div
|
||||
className={cn(
|
||||
'absolute transition-all duration-75 ease-out',
|
||||
isHorizontal ? 'h-full left-0 top-0' : 'w-full bottom-0 left-0'
|
||||
)}
|
||||
style={{
|
||||
[isHorizontal ? 'width' : 'height']: `${clampedLevel * 100}%`,
|
||||
background: gradient,
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Clip indicator (at 90%) */}
|
||||
{clampedLevel > 0.9 && (
|
||||
<div
|
||||
className={cn(
|
||||
'absolute bg-red-600 animate-pulse',
|
||||
isHorizontal
|
||||
? 'right-0 top-0 w-1 h-full'
|
||||
: 'bottom-0 left-0 h-1 w-full'
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Tick marks */}
|
||||
<div
|
||||
className={cn(
|
||||
'absolute inset-0 flex',
|
||||
isHorizontal ? 'flex-row' : 'flex-col-reverse'
|
||||
)}
|
||||
>
|
||||
{[0.25, 0.5, 0.75].map((tick) => (
|
||||
<div
|
||||
key={tick}
|
||||
className={cn(
|
||||
'absolute bg-background/30',
|
||||
isHorizontal
|
||||
? 'h-full w-px top-0'
|
||||
: 'w-full h-px left-0'
|
||||
)}
|
||||
style={{
|
||||
[isHorizontal ? 'left' : 'bottom']: `${tick * 100}%`,
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
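InputLevelMeter expects `level` already normalized to 0-1; the gradient comments treat 0.7 as -18 dB and 0.9 as -6 dB, which matches a linear mapping over a -60 dB to 0 dB window. A sketch of that conversion under that assumption (the helper name is illustrative):

```ts
// Hypothetical helper: map a dBFS reading onto the meter's 0..1 scale,
// assuming the -60 dB .. 0 dB window implied by the gradient comments above.
export function dbToMeterLevel(db: number, floorDb = -60): number {
  if (!Number.isFinite(db)) return 0;                  // silence reports -Infinity dB
  const normalized = (db - floorDb) / (0 - floorDb);
  return Math.max(0, Math.min(1, normalized));
}

// e.g. -18 dB -> 0.7 (top of the green zone), -6 dB -> 0.9 (start of the red zone)
```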
components/recording/RecordingSettings.tsx (Normal file, 106 lines)
@@ -0,0 +1,106 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Volume2, Radio } from 'lucide-react';
|
||||
import { Slider } from '@/components/ui/Slider';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { RecordingSettings as RecordingSettingsType } from '@/lib/hooks/useRecording';
|
||||
|
||||
export interface RecordingSettingsProps {
|
||||
settings: RecordingSettingsType;
|
||||
onInputGainChange: (gain: number) => void;
|
||||
onRecordMonoChange: (mono: boolean) => void;
|
||||
onSampleRateChange: (sampleRate: number) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
const SAMPLE_RATES = [44100, 48000, 96000];
|
||||
|
||||
export function RecordingSettings({
|
||||
settings,
|
||||
onInputGainChange,
|
||||
onRecordMonoChange,
|
||||
onSampleRateChange,
|
||||
className,
|
||||
}: RecordingSettingsProps) {
|
||||
return (
|
||||
<div className={cn('space-y-2 p-3 bg-muted/50 rounded border border-border', className)}>
|
||||
<div className="text-xs font-medium text-muted-foreground mb-2">Recording Settings</div>
|
||||
|
||||
{/* Input Gain */}
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-xs text-muted-foreground flex items-center gap-1 w-24 flex-shrink-0">
|
||||
<Volume2 className="h-3.5 w-3.5" />
|
||||
Input Gain
|
||||
</label>
|
||||
<div className="flex-1">
|
||||
<Slider
|
||||
value={settings.inputGain}
|
||||
onChange={onInputGainChange}
|
||||
min={0}
|
||||
max={2}
|
||||
step={0.1}
|
||||
/>
|
||||
</div>
|
||||
<span className="text-xs text-muted-foreground w-12 text-right flex-shrink-0">
|
||||
{settings.inputGain === 1 ? '0 dB' : `${(20 * Math.log10(settings.inputGain)).toFixed(1)} dB`}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Mono/Stereo Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-xs text-muted-foreground flex items-center gap-1 w-24 flex-shrink-0">
|
||||
<Radio className="h-3.5 w-3.5" />
|
||||
Channels
|
||||
</label>
|
||||
<div className="flex gap-1 flex-1">
|
||||
<button
|
||||
onClick={() => onRecordMonoChange(true)}
|
||||
className={cn(
|
||||
'flex-1 px-2 py-1 text-xs rounded transition-colors',
|
||||
settings.recordMono
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80 text-muted-foreground'
|
||||
)}
|
||||
>
|
||||
Mono
|
||||
</button>
|
||||
<button
|
||||
onClick={() => onRecordMonoChange(false)}
|
||||
className={cn(
|
||||
'flex-1 px-2 py-1 text-xs rounded transition-colors',
|
||||
!settings.recordMono
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80 text-muted-foreground'
|
||||
)}
|
||||
>
|
||||
Stereo
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Sample Rate Selection */}
|
||||
<div className="flex items-center gap-2">
|
||||
<label className="text-xs text-muted-foreground w-24 flex-shrink-0">
|
||||
Sample Rate
|
||||
</label>
|
||||
<div className="flex gap-1 flex-1">
|
||||
{SAMPLE_RATES.map((rate) => (
|
||||
<button
|
||||
key={rate}
|
||||
onClick={() => onSampleRateChange(rate)}
|
||||
className={cn(
|
||||
'flex-1 px-2 py-1 text-xs rounded transition-colors font-mono',
|
||||
settings.sampleRate === rate
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80 text-muted-foreground'
|
||||
)}
|
||||
>
|
||||
{rate / 1000}k
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
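The Input Gain readout above uses the standard amplitude-to-decibel relation dB = 20 * log10(gain); note that a gain of 0 yields -Infinity, which the component sidesteps only for the exact value 1. Both directions with a couple of worked values, helper names illustrative:

```ts
// dB = 20 * log10(gain); gain = 10^(dB / 20)
function gainToDb(gain: number): number {
  return 20 * Math.log10(gain);   // gain 2 ~ +6.0 dB, gain 0.5 ~ -6.0 dB, gain 0 -> -Infinity
}

function dbToGain(db: number): number {
  return Math.pow(10, db / 20);   // +6 dB ~ 2.0, -6 dB ~ 0.5
}
```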
components/settings/GlobalSettingsDialog.tsx (Normal file, 532 lines)
@@ -0,0 +1,532 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { X, RotateCcw } from 'lucide-react';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { Slider } from '@/components/ui/Slider';
|
||||
import { RecordingSettings } from '@/components/recording/RecordingSettings';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import type { RecordingSettings as RecordingSettingsType } from '@/lib/hooks/useRecording';
|
||||
import type {
|
||||
Settings,
|
||||
AudioSettings,
|
||||
UISettings,
|
||||
EditorSettings,
|
||||
PerformanceSettings,
|
||||
} from '@/lib/hooks/useSettings';
|
||||
|
||||
export interface GlobalSettingsDialogProps {
|
||||
open: boolean;
|
||||
onClose: () => void;
|
||||
recordingSettings: RecordingSettingsType;
|
||||
onInputGainChange: (gain: number) => void;
|
||||
onRecordMonoChange: (mono: boolean) => void;
|
||||
onSampleRateChange: (sampleRate: number) => void;
|
||||
settings: Settings;
|
||||
onAudioSettingsChange: (updates: Partial<AudioSettings>) => void;
|
||||
onUISettingsChange: (updates: Partial<UISettings>) => void;
|
||||
onEditorSettingsChange: (updates: Partial<EditorSettings>) => void;
|
||||
onPerformanceSettingsChange: (updates: Partial<PerformanceSettings>) => void;
|
||||
onResetCategory: (category: 'audio' | 'ui' | 'editor' | 'performance') => void;
|
||||
}
|
||||
|
||||
type TabType = 'recording' | 'audio' | 'editor' | 'interface' | 'performance';
|
||||
|
||||
export function GlobalSettingsDialog({
|
||||
open,
|
||||
onClose,
|
||||
recordingSettings,
|
||||
onInputGainChange,
|
||||
onRecordMonoChange,
|
||||
onSampleRateChange,
|
||||
settings,
|
||||
onAudioSettingsChange,
|
||||
onUISettingsChange,
|
||||
onEditorSettingsChange,
|
||||
onPerformanceSettingsChange,
|
||||
onResetCategory,
|
||||
}: GlobalSettingsDialogProps) {
|
||||
const [activeTab, setActiveTab] = React.useState<TabType>('recording');
|
||||
|
||||
if (!open) return null;
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Backdrop */}
|
||||
<div
|
||||
className="fixed inset-0 bg-background/80 backdrop-blur-sm z-40"
|
||||
onClick={onClose}
|
||||
/>
|
||||
|
||||
{/* Dialog */}
|
||||
<div className="fixed left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2 w-full max-w-3xl z-50">
|
||||
<div className="bg-card border border-border rounded-lg shadow-2xl overflow-hidden">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-6 py-4 border-b border-border">
|
||||
<h2 className="text-lg font-semibold">Settings</h2>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={onClose}
|
||||
title="Close"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Tabs */}
|
||||
<div className="flex border-b border-border bg-muted/30 overflow-x-auto">
|
||||
{[
|
||||
{ id: 'recording', label: 'Recording' },
|
||||
{ id: 'audio', label: 'Audio' },
|
||||
{ id: 'editor', label: 'Editor' },
|
||||
{ id: 'interface', label: 'Interface' },
|
||||
{ id: 'performance', label: 'Performance' },
|
||||
].map((tab) => (
|
||||
<button
|
||||
key={tab.id}
|
||||
onClick={() => setActiveTab(tab.id as TabType)}
|
||||
className={cn(
|
||||
'px-6 py-3 text-sm font-medium transition-colors relative flex-shrink-0',
|
||||
activeTab === tab.id
|
||||
? 'text-foreground bg-card'
|
||||
: 'text-muted-foreground hover:text-foreground hover:bg-muted/50'
|
||||
)}
|
||||
>
|
||||
{tab.label}
|
||||
{activeTab === tab.id && (
|
||||
<div className="absolute bottom-0 left-0 right-0 h-0.5 bg-primary" />
|
||||
)}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Content */}
|
||||
<div className="p-6 max-h-[60vh] overflow-y-auto custom-scrollbar">
|
||||
{/* Recording Tab */}
|
||||
{activeTab === 'recording' && (
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-sm font-medium">Recording Settings</h3>
|
||||
</div>
|
||||
<RecordingSettings
|
||||
settings={recordingSettings}
|
||||
onInputGainChange={onInputGainChange}
|
||||
onRecordMonoChange={onRecordMonoChange}
|
||||
onSampleRateChange={onSampleRateChange}
|
||||
className="border-0 bg-transparent p-0"
|
||||
/>
|
||||
|
||||
<div className="pt-4 border-t border-border">
|
||||
<h3 className="text-sm font-medium mb-2">Note</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
These settings apply globally to all recordings. Arm a track (red button)
|
||||
to enable recording on that specific track.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Audio Tab */}
|
||||
{activeTab === 'audio' && (
|
||||
<div className="space-y-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-sm font-medium">Audio Settings</h3>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onResetCategory('audio')}
|
||||
className="h-7 text-xs"
|
||||
>
|
||||
<RotateCcw className="h-3 w-3 mr-1" />
|
||||
Reset
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Buffer Size */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Buffer Size</label>
|
||||
<select
|
||||
value={settings.audio.bufferSize}
|
||||
onChange={(e) =>
|
||||
onAudioSettingsChange({ bufferSize: Number(e.target.value) })
|
||||
}
|
||||
className="w-full px-3 py-2 bg-background border border-border rounded text-sm"
|
||||
>
|
||||
<option value={256}>256 samples (Low latency, higher CPU)</option>
|
||||
<option value={512}>512 samples</option>
|
||||
<option value={1024}>1024 samples</option>
|
||||
<option value={2048}>2048 samples (Recommended)</option>
|
||||
<option value={4096}>4096 samples (Low CPU)</option>
|
||||
</select>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Smaller buffer = lower latency but higher CPU usage. Requires reload.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Sample Rate */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Default Sample Rate</label>
|
||||
<select
|
||||
value={settings.audio.sampleRate}
|
||||
onChange={(e) =>
|
||||
onAudioSettingsChange({ sampleRate: Number(e.target.value) })
|
||||
}
|
||||
className="w-full px-3 py-2 bg-background border border-border rounded text-sm"
|
||||
>
|
||||
<option value={44100}>44.1 kHz (CD Quality)</option>
|
||||
<option value={48000}>48 kHz (Professional)</option>
|
||||
<option value={96000}>96 kHz (Hi-Res Audio)</option>
|
||||
</select>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Higher sample rate = better quality but larger file sizes.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Auto Normalize */}
|
||||
<div className="flex items-center justify-between p-3 bg-muted/50 rounded">
|
||||
<div>
|
||||
<div className="text-sm font-medium">Auto-Normalize on Import</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Automatically normalize audio when importing files
|
||||
</p>
|
||||
</div>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={settings.audio.autoNormalizeOnImport}
|
||||
onChange={(e) =>
|
||||
onAudioSettingsChange({ autoNormalizeOnImport: e.target.checked })
|
||||
}
|
||||
className="h-4 w-4"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Editor Tab */}
|
||||
{activeTab === 'editor' && (
|
||||
<div className="space-y-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-sm font-medium">Editor Settings</h3>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onResetCategory('editor')}
|
||||
className="h-7 text-xs"
|
||||
>
|
||||
<RotateCcw className="h-3 w-3 mr-1" />
|
||||
Reset
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Auto-Save Interval */}
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium">Auto-Save Interval</label>
|
||||
<span className="text-xs font-mono text-muted-foreground">
|
||||
{settings.editor.autoSaveInterval === 0
|
||||
? 'Disabled'
|
||||
: `${settings.editor.autoSaveInterval}s`}
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[settings.editor.autoSaveInterval]}
|
||||
onValueChange={([value]) =>
|
||||
onEditorSettingsChange({ autoSaveInterval: value })
|
||||
}
|
||||
min={0}
|
||||
max={30}
|
||||
step={1}
|
||||
className="w-full"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Set to 0 to disable auto-save. Default: 3 seconds.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Undo History Limit */}
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium">Undo History Limit</label>
|
||||
<span className="text-xs font-mono text-muted-foreground">
|
||||
{settings.editor.undoHistoryLimit} operations
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[settings.editor.undoHistoryLimit]}
|
||||
onValueChange={([value]) =>
|
||||
onEditorSettingsChange({ undoHistoryLimit: value })
|
||||
}
|
||||
min={10}
|
||||
max={200}
|
||||
step={10}
|
||||
className="w-full"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Higher values use more memory. Default: 50.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Snap to Grid */}
|
||||
<div className="flex items-center justify-between p-3 bg-muted/50 rounded">
|
||||
<div>
|
||||
<div className="text-sm font-medium">Snap to Grid</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Snap playhead and selections to grid lines
|
||||
</p>
|
||||
</div>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={settings.editor.snapToGrid}
|
||||
onChange={(e) =>
|
||||
onEditorSettingsChange({ snapToGrid: e.target.checked })
|
||||
}
|
||||
className="h-4 w-4"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Grid Resolution */}
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium">Grid Resolution</label>
|
||||
<span className="text-xs font-mono text-muted-foreground">
|
||||
{settings.editor.gridResolution}s
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[settings.editor.gridResolution]}
|
||||
onValueChange={([value]) =>
|
||||
onEditorSettingsChange({ gridResolution: value })
|
||||
}
|
||||
min={0.1}
|
||||
max={5}
|
||||
step={0.1}
|
||||
className="w-full"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Grid spacing in seconds. Default: 1.0s.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Default Zoom */}
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium">Default Zoom Level</label>
|
||||
<span className="text-xs font-mono text-muted-foreground">
|
||||
{settings.editor.defaultZoom}x
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[settings.editor.defaultZoom]}
|
||||
onValueChange={([value]) =>
|
||||
onEditorSettingsChange({ defaultZoom: value })
|
||||
}
|
||||
min={1}
|
||||
max={20}
|
||||
step={1}
|
||||
className="w-full"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Initial zoom level when opening projects. Default: 1x.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Interface Tab */}
|
||||
{activeTab === 'interface' && (
|
||||
<div className="space-y-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-sm font-medium">Interface Settings</h3>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onResetCategory('ui')}
|
||||
className="h-7 text-xs"
|
||||
>
|
||||
<RotateCcw className="h-3 w-3 mr-1" />
|
||||
Reset
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Theme */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Theme</label>
|
||||
<div className="flex gap-2">
|
||||
{['dark', 'light', 'auto'].map((theme) => (
|
||||
<button
|
||||
key={theme}
|
||||
onClick={() =>
|
||||
onUISettingsChange({ theme: theme as 'dark' | 'light' | 'auto' })
|
||||
}
|
||||
className={cn(
|
||||
'flex-1 px-4 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.ui.theme === theme
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80'
|
||||
)}
|
||||
>
|
||||
{theme.charAt(0).toUpperCase() + theme.slice(1)}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Use the theme toggle in header for quick switching.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Font Size */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Font Size</label>
|
||||
<div className="flex gap-2">
|
||||
{['small', 'medium', 'large'].map((size) => (
|
||||
<button
|
||||
key={size}
|
||||
onClick={() =>
|
||||
onUISettingsChange({ fontSize: size as 'small' | 'medium' | 'large' })
|
||||
}
|
||||
className={cn(
|
||||
'flex-1 px-4 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.ui.fontSize === size
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80'
|
||||
)}
|
||||
>
|
||||
{size.charAt(0).toUpperCase() + size.slice(1)}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Adjust the UI font size. Requires reload.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Performance Tab */}
|
||||
{activeTab === 'performance' && (
|
||||
<div className="space-y-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-sm font-medium">Performance Settings</h3>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => onResetCategory('performance')}
|
||||
className="h-7 text-xs"
|
||||
>
|
||||
<RotateCcw className="h-3 w-3 mr-1" />
|
||||
Reset
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Peak Calculation Quality */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Peak Calculation Quality</label>
|
||||
<div className="flex gap-2">
|
||||
{['low', 'medium', 'high'].map((quality) => (
|
||||
<button
|
||||
key={quality}
|
||||
onClick={() =>
|
||||
onPerformanceSettingsChange({
|
||||
peakCalculationQuality: quality as 'low' | 'medium' | 'high',
|
||||
})
|
||||
}
|
||||
className={cn(
|
||||
'flex-1 px-4 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.performance.peakCalculationQuality === quality
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80'
|
||||
)}
|
||||
>
|
||||
{quality.charAt(0).toUpperCase() + quality.slice(1)}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Higher quality = more accurate waveforms, slower processing.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Waveform Rendering Quality */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium">Waveform Rendering Quality</label>
|
||||
<div className="flex gap-2">
|
||||
{['low', 'medium', 'high'].map((quality) => (
|
||||
<button
|
||||
key={quality}
|
||||
onClick={() =>
|
||||
onPerformanceSettingsChange({
|
||||
waveformRenderingQuality: quality as 'low' | 'medium' | 'high',
|
||||
})
|
||||
}
|
||||
className={cn(
|
||||
'flex-1 px-4 py-2 rounded text-sm font-medium transition-colors',
|
||||
settings.performance.waveformRenderingQuality === quality
|
||||
? 'bg-primary text-primary-foreground'
|
||||
: 'bg-muted hover:bg-muted/80'
|
||||
)}
|
||||
>
|
||||
{quality.charAt(0).toUpperCase() + quality.slice(1)}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Lower quality = better performance on slower devices.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Enable Spectrogram */}
|
||||
<div className="flex items-center justify-between p-3 bg-muted/50 rounded">
|
||||
<div>
|
||||
<div className="text-sm font-medium">Enable Spectrogram</div>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Show spectrogram in analysis tools (requires more CPU)
|
||||
</p>
|
||||
</div>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={settings.performance.enableSpectrogram}
|
||||
onChange={(e) =>
|
||||
onPerformanceSettingsChange({ enableSpectrogram: e.target.checked })
|
||||
}
|
||||
className="h-4 w-4"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Max File Size */}
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium">Maximum File Size</label>
|
||||
<span className="text-xs font-mono text-muted-foreground">
|
||||
{settings.performance.maxFileSizeMB} MB
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[settings.performance.maxFileSizeMB]}
|
||||
onValueChange={([value]) =>
|
||||
onPerformanceSettingsChange({ maxFileSizeMB: value })
|
||||
}
|
||||
min={100}
|
||||
max={1000}
|
||||
step={50}
|
||||
className="w-full"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Warn when importing files larger than this. Default: 500 MB.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="flex items-center justify-end gap-2 px-6 py-4 border-t border-border bg-muted/30">
|
||||
<Button variant="default" onClick={onClose}>
|
||||
Done
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
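
The dialog above only renders the current values and forwards partial updates; persistence lives in the useSettings hook it imports, which is not part of this compare view. Below is a minimal sketch of the shape the dialog appears to assume (the hook name, the update helpers, and any defaults not stated in the dialog copy are assumptions, not code from the repository):

// Sketch only - the real hook in @/lib/hooks/useSettings is not shown in this diff.
// Field names follow the props used by GlobalSettingsDialog; defaults marked below are assumptions.
import * as React from 'react';

export interface AudioSettings { bufferSize: number; sampleRate: number; autoNormalizeOnImport: boolean; }
export interface UISettings { theme: 'dark' | 'light' | 'auto'; fontSize: 'small' | 'medium' | 'large'; }
export interface EditorSettings { autoSaveInterval: number; undoHistoryLimit: number; snapToGrid: boolean; gridResolution: number; defaultZoom: number; }
export interface PerformanceSettings { peakCalculationQuality: 'low' | 'medium' | 'high'; waveformRenderingQuality: 'low' | 'medium' | 'high'; enableSpectrogram: boolean; maxFileSizeMB: number; }
export interface Settings { audio: AudioSettings; ui: UISettings; editor: EditorSettings; performance: PerformanceSettings; }

const DEFAULTS: Settings = {
  // bufferSize 2048, autoSave 3 s, undo 50, grid 1.0 s, zoom 1x, 500 MB come from the dialog's help text;
  // the remaining values are assumptions.
  audio: { bufferSize: 2048, sampleRate: 48000, autoNormalizeOnImport: false },
  ui: { theme: 'dark', fontSize: 'medium' },
  editor: { autoSaveInterval: 3, undoHistoryLimit: 50, snapToGrid: true, gridResolution: 1, defaultZoom: 1 },
  performance: { peakCalculationQuality: 'medium', waveformRenderingQuality: 'medium', enableSpectrogram: false, maxFileSizeMB: 500 },
};

export function useSettingsSketch() {
  const [settings, setSettings] = React.useState<Settings>(DEFAULTS);

  // Merge a partial update into one category, e.g. updateCategory('audio', { bufferSize: 1024 })
  const updateCategory = <K extends keyof Settings>(category: K, updates: Partial<Settings[K]>) =>
    setSettings((prev) => ({ ...prev, [category]: { ...prev[category], ...updates } }));

  // Restore one category to its defaults (what the per-tab Reset buttons would call)
  const resetCategory = (category: keyof Settings) =>
    setSettings((prev) => ({ ...prev, [category]: DEFAULTS[category] }));

  return { settings, updateCategory, resetCategory };
}

With this shape, the Buffer Size select above corresponds to updateCategory('audio', { bufferSize: Number(e.target.value) }) and each tab's Reset button to resetCategory of that tab's category.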
265
components/timeline/TimeScale.tsx
Normal file
@@ -0,0 +1,265 @@
'use client';

import * as React from 'react';
import { cn } from '@/lib/utils/cn';
import {
  timeToPixel,
  pixelToTime,
  calculateTickInterval,
  formatTimeLabel,
  getVisibleTimeRange,
} from '@/lib/utils/timeline';

export interface TimeScaleProps {
  duration: number;
  zoom: number;
  currentTime: number;
  onSeek?: (time: number) => void;
  className?: string;
  height?: number;
  controlsWidth?: number;
  scrollRef?: React.MutableRefObject<HTMLDivElement | null>;
  onScroll?: () => void;
}

export function TimeScale({
  duration,
  zoom,
  currentTime,
  onSeek,
  className,
  height = 40,
  controlsWidth = 240,
  scrollRef: externalScrollRef,
  onScroll,
}: TimeScaleProps) {
  const localScrollRef = React.useRef<HTMLDivElement>(null);
  const scrollRef = externalScrollRef || localScrollRef;
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const [viewportWidth, setViewportWidth] = React.useState(800);
  const [scrollLeft, setScrollLeft] = React.useState(0);
  const [hoverTime, setHoverTime] = React.useState<number | null>(null);

  // Calculate total timeline width (match waveform calculation)
  // Uses 5 pixels per second as base scale, multiplied by zoom
  // Always ensure minimum width is at least viewport width for full coverage
  const PIXELS_PER_SECOND_BASE = 5;
  const totalWidth = React.useMemo(() => {
    if (zoom >= 1) {
      const calculatedWidth = duration * zoom * PIXELS_PER_SECOND_BASE;
      // Ensure it's at least viewport width so timeline always fills
      return Math.max(calculatedWidth, viewportWidth);
    }
    return viewportWidth;
  }, [duration, zoom, viewportWidth]);

  // Update viewport width on resize
  React.useEffect(() => {
    const scroller = scrollRef.current;
    if (!scroller) return;

    const updateWidth = () => {
      setViewportWidth(scroller.clientWidth);
    };

    updateWidth();

    const resizeObserver = new ResizeObserver(updateWidth);
    resizeObserver.observe(scroller);

    return () => resizeObserver.disconnect();
  }, [scrollRef]);

  // Handle scroll - update scrollLeft and trigger onScroll callback
  const handleScroll = React.useCallback(() => {
    if (scrollRef.current) {
      setScrollLeft(scrollRef.current.scrollLeft);
    }
    if (onScroll) {
      onScroll();
    }
  }, [onScroll, scrollRef]);

  // Draw time scale - redraws on scroll and zoom
  React.useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas || duration === 0) return;

    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Set canvas size to viewport width
    const dpr = window.devicePixelRatio || 1;
    canvas.width = viewportWidth * dpr;
    canvas.height = height * dpr;
    canvas.style.width = `${viewportWidth}px`;
    canvas.style.height = `${height}px`;
    ctx.scale(dpr, dpr);

    // Clear canvas
    ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-background') || '#ffffff';
    ctx.fillRect(0, 0, viewportWidth, height);

    // Calculate visible time range
    const visibleRange = getVisibleTimeRange(scrollLeft, viewportWidth, duration, zoom);
    const visibleDuration = visibleRange.end - visibleRange.start;

    // Calculate tick intervals based on visible duration
    const { major, minor } = calculateTickInterval(visibleDuration);

    // Calculate which ticks to draw (only visible ones)
    const startTick = Math.floor(visibleRange.start / minor) * minor;
    const endTick = Math.ceil(visibleRange.end / minor) * minor;

    // Set up text style for labels
    ctx.font = '12px -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif';
    ctx.textAlign = 'center';
    ctx.textBaseline = 'top';

    // Draw ticks and labels
    for (let time = startTick; time <= endTick; time += minor) {
      if (time < 0 || time > duration) continue;

      // Calculate x position using the actual totalWidth (not timeToPixel which recalculates)
      const x = (time / duration) * totalWidth - scrollLeft;
      if (x < 0 || x > viewportWidth) continue;

      const isMajor = Math.abs(time % major) < 0.001;

      if (isMajor) {
        // Major ticks - tall and prominent
        ctx.strokeStyle = getComputedStyle(canvas).getPropertyValue('--color-foreground') || '#000000';
        ctx.lineWidth = 2;
        ctx.beginPath();
        ctx.moveTo(x, height - 20);
        ctx.lineTo(x, height);
        ctx.stroke();

        // Major tick label
        ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-foreground') || '#000000';
        const label = formatTimeLabel(time, visibleDuration < 10);
        ctx.fillText(label, x, 6);
      } else {
        // Minor ticks - shorter and lighter
        ctx.strokeStyle = getComputedStyle(canvas).getPropertyValue('--color-muted-foreground') || '#9ca3af';
        ctx.lineWidth = 1;
        ctx.beginPath();
        ctx.moveTo(x, height - 10);
        ctx.lineTo(x, height);
        ctx.stroke();

        // Minor tick label (smaller and lighter)
        if (x > 20 && x < viewportWidth - 20) {
          ctx.fillStyle = getComputedStyle(canvas).getPropertyValue('--color-muted-foreground') || '#9ca3af';
          ctx.font = '10px -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif';
          const label = formatTimeLabel(time, visibleDuration < 10);
          ctx.fillText(label, x, 8);
          ctx.font = '12px -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif';
        }
      }
    }

    // Draw playhead indicator
    const playheadX = (currentTime / duration) * totalWidth - scrollLeft;
    if (playheadX >= 0 && playheadX <= viewportWidth) {
      ctx.strokeStyle = '#ef4444';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }

    // Draw hover indicator
    if (hoverTime !== null) {
      const hoverX = (hoverTime / duration) * totalWidth - scrollLeft;
      if (hoverX >= 0 && hoverX <= viewportWidth) {
        ctx.strokeStyle = 'rgba(59, 130, 246, 0.5)';
        ctx.lineWidth = 1;
        ctx.setLineDash([3, 3]);
        ctx.beginPath();
        ctx.moveTo(hoverX, 0);
        ctx.lineTo(hoverX, height);
        ctx.stroke();
        ctx.setLineDash([]);
      }
    }
  }, [duration, zoom, currentTime, viewportWidth, scrollLeft, height, hoverTime, totalWidth]);

  // Handle click to seek
  const handleClick = React.useCallback(
    (e: React.MouseEvent<HTMLCanvasElement>) => {
      if (!onSeek) return;

      const rect = e.currentTarget.getBoundingClientRect();
      const x = e.clientX - rect.left;
      const pixelPos = x + scrollLeft;
      const time = (pixelPos / totalWidth) * duration;

      onSeek(Math.max(0, Math.min(duration, time)));
    },
    [onSeek, duration, totalWidth, scrollLeft]
  );

  // Handle mouse move for hover
  const handleMouseMove = React.useCallback(
    (e: React.MouseEvent<HTMLCanvasElement>) => {
      const rect = e.currentTarget.getBoundingClientRect();
      const x = e.clientX - rect.left;
      const pixelPos = x + scrollLeft;
      const time = (pixelPos / totalWidth) * duration;

      setHoverTime(Math.max(0, Math.min(duration, time)));
    },
    [duration, totalWidth, scrollLeft]
  );

  const handleMouseLeave = React.useCallback(() => {
    setHoverTime(null);
  }, []);

  return (
    <div className={cn('relative bg-background', className)} style={{ paddingLeft: '240px', paddingRight: '250px' }}>
      <div
        ref={scrollRef}
        className="w-full bg-background overflow-x-auto overflow-y-hidden custom-scrollbar"
        style={{
          height: `${height}px`,
        }}
        onScroll={handleScroll}
      >
        {/* Spacer to create scrollable width */}
        <div style={{ width: `${totalWidth}px`, height: `${height}px`, position: 'relative' }}>
          <canvas
            ref={canvasRef}
            onClick={handleClick}
            onMouseMove={handleMouseMove}
            onMouseLeave={handleMouseLeave}
            className="cursor-pointer"
            style={{
              position: 'sticky',
              left: 0,
              width: `${viewportWidth}px`,
              height: `${height}px`,
            }}
          />
        </div>
      </div>

      {/* Hover tooltip */}
      {hoverTime !== null && (
        <div
          className="absolute top-full mt-1 px-2 py-1 bg-popover border border-border rounded shadow-lg text-xs font-mono pointer-events-none z-10"
          style={{
            left: `${Math.min(
              viewportWidth - 60 + controlsWidth,
              Math.max(controlsWidth, (hoverTime / duration) * totalWidth - scrollLeft - 30 + controlsWidth)
            )}px`,
          }}
        >
          {formatTimeLabel(hoverTime, true)}
        </div>
      )}
    </div>
  );
}
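
TimeScale imports calculateTickInterval, formatTimeLabel, and getVisibleTimeRange from @/lib/utils/timeline, none of which appear in this compare view. For orientation: with zoom = 2 and a 120 s project, totalWidth = 120 * 2 * 5 = 1200 px, so a tick at t = 30 s is drawn at (30 / 120) * 1200 - scrollLeft. The following is only a sketch of helper implementations consistent with how the component calls them; the tick steps and label format are assumptions, not the repository's code:

// Sketch only - the real helpers live in @/lib/utils/timeline and are not part of this diff.

// Pick "round" tick spacings so roughly 8 major ticks are visible.
export function calculateTickInterval(visibleDuration: number): { major: number; minor: number } {
  const steps = [0.1, 0.25, 0.5, 1, 2, 5, 10, 15, 30, 60, 120, 300, 600];
  const target = visibleDuration / 8;
  const major = steps.find((s) => s >= target) ?? steps[steps.length - 1];
  return { major, minor: major / 5 };
}

// Format seconds as m:ss, with tenths when the visible range is short (zoomed in).
export function formatTimeLabel(time: number, showDecimals: boolean): string {
  const minutes = Math.floor(time / 60);
  const seconds = time - minutes * 60;
  return showDecimals
    ? `${minutes}:${seconds.toFixed(1).padStart(4, '0')}`
    : `${minutes}:${Math.floor(seconds).toString().padStart(2, '0')}`;
}

// Map the scrolled viewport back to the [start, end] time window it shows.
export function getVisibleTimeRange(scrollLeft: number, viewportWidth: number, duration: number, zoom: number) {
  const totalWidth = Math.max(duration * zoom * 5, viewportWidth); // same 5 px/s base as the component
  const start = (scrollLeft / totalWidth) * duration;
  const end = ((scrollLeft + viewportWidth) / totalWidth) * duration;
  return { start: Math.max(0, start), end: Math.min(duration, end) };
}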
@@ -24,28 +24,42 @@ export function ImportTrackDialog({
const handleFiles = async (files: FileList) => {
setIsLoading(true);

// Convert FileList to Array to prevent any weird behavior
const fileArray = Array.from(files);
console.log(`[ImportTrackDialog] Processing ${fileArray.length} files`, fileArray);

try {
// Process files sequentially
for (let i = 0; i < files.length; i++) {
const file = files[i];
for (let i = 0; i < fileArray.length; i++) {
console.log(`[ImportTrackDialog] Loop iteration ${i}, fileArray.length: ${fileArray.length}`);
const file = fileArray[i];
console.log(`[ImportTrackDialog] Processing file ${i + 1}/${fileArray.length}: ${file.name}, type: ${file.type}`);

if (!file.type.startsWith('audio/')) {
console.warn(`Skipping non-audio file: ${file.name}`);
console.warn(`Skipping non-audio file: ${file.name} (type: ${file.type})`);
continue;
}

try {
console.log(`[ImportTrackDialog] Decoding file ${i + 1}/${files.length}: ${file.name}`);
const buffer = await decodeAudioFile(file);
const trackName = file.name.replace(/\.[^/.]+$/, ''); // Remove extension
console.log(`[ImportTrackDialog] Importing track: ${trackName}`);
onImportTrack(buffer, trackName);
console.log(`[ImportTrackDialog] Track imported: ${trackName}`);
} catch (error) {
console.error(`Failed to import ${file.name}:`, error);
}
console.log(`[ImportTrackDialog] Finished processing file ${i + 1}`);
}

onClose();
console.log('[ImportTrackDialog] Loop completed, all files processed');
} catch (error) {
console.error('[ImportTrackDialog] Error in handleFiles:', error);
} finally {
setIsLoading(false);
console.log('[ImportTrackDialog] Closing dialog');
onClose();
}
};

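
The hunk above relies on decodeAudioFile from the dialog's imports, which is not included in this compare view. A minimal sketch of such a helper using the Web Audio API is shown below; the real implementation may differ, for example by reusing a shared AudioContext:

// Sketch only - the real decodeAudioFile used by ImportTrackDialog is not shown in this diff.
export async function decodeAudioFile(file: File): Promise<AudioBuffer> {
  // Read the whole file into memory, then let the browser decode it to PCM.
  const arrayBuffer = await file.arrayBuffer();
  const ctx = new AudioContext();
  try {
    return await ctx.decodeAudioData(arrayBuffer);
  } finally {
    // Avoid leaking one AudioContext per imported file (browsers cap live contexts).
    await ctx.close();
  }
}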
@@ -1,9 +1,44 @@
|
||||
'use client';
|
||||
"use client";
|
||||
|
||||
import * as React from 'react';
|
||||
import type { Track as TrackType } from '@/types/track';
|
||||
import { TrackHeader } from './TrackHeader';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import * as React from "react";
|
||||
import {
|
||||
Volume2,
|
||||
VolumeX,
|
||||
Headphones,
|
||||
Trash2,
|
||||
ChevronDown,
|
||||
ChevronRight,
|
||||
ChevronUp,
|
||||
UnfoldHorizontal,
|
||||
Upload,
|
||||
Mic,
|
||||
Gauge,
|
||||
Circle,
|
||||
Sparkles,
|
||||
} from "lucide-react";
|
||||
import type { Track as TrackType } from "@/types/track";
|
||||
import {
|
||||
COLLAPSED_TRACK_HEIGHT,
|
||||
MIN_TRACK_HEIGHT,
|
||||
MAX_TRACK_HEIGHT,
|
||||
DEFAULT_TRACK_HEIGHT,
|
||||
} from "@/types/track";
|
||||
import { Button } from "@/components/ui/Button";
|
||||
import { Slider } from "@/components/ui/Slider";
|
||||
import { cn } from "@/lib/utils/cn";
|
||||
import type { EffectType } from "@/lib/audio/effects/chain";
|
||||
import { TrackControls } from "./TrackControls";
|
||||
import { AutomationLane } from "@/components/automation/AutomationLane";
|
||||
import type {
|
||||
AutomationLane as AutomationLaneType,
|
||||
AutomationPoint as AutomationPointType,
|
||||
} from "@/types/automation";
|
||||
import { createAutomationPoint } from "@/lib/audio/automation/utils";
|
||||
import { createAutomationLane } from "@/lib/audio/automation-utils";
|
||||
import { EffectDevice } from "@/components/effects/EffectDevice";
|
||||
import { EffectBrowser } from "@/components/effects/EffectBrowser";
|
||||
import { ImportDialog } from "@/components/dialogs/ImportDialog";
|
||||
import { importAudioFile, type ImportOptions } from "@/lib/audio/decoder";
|
||||
|
||||
export interface TrackProps {
|
||||
track: TrackType;
|
||||
@@ -19,7 +54,28 @@ export interface TrackProps {
|
||||
onPanChange: (pan: number) => void;
|
||||
onRemove: () => void;
|
||||
onNameChange: (name: string) => void;
|
||||
onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
|
||||
onSeek?: (time: number) => void;
|
||||
onLoadAudio?: (buffer: AudioBuffer) => void;
|
||||
onToggleEffect?: (effectId: string) => void;
|
||||
onRemoveEffect?: (effectId: string) => void;
|
||||
onUpdateEffect?: (effectId: string, parameters: any) => void;
|
||||
onAddEffect?: (effectType: EffectType) => void;
|
||||
onSelectionChange?: (
|
||||
selection: { start: number; end: number } | null,
|
||||
) => void;
|
||||
onToggleRecordEnable?: () => void;
|
||||
isRecording?: boolean;
|
||||
recordingLevel?: number;
|
||||
playbackLevel?: number;
|
||||
onParameterTouched?: (
|
||||
trackId: string,
|
||||
laneId: string,
|
||||
touched: boolean,
|
||||
) => void;
|
||||
isPlaying?: boolean;
|
||||
renderControlsOnly?: boolean;
|
||||
renderWaveformOnly?: boolean;
|
||||
}
|
||||
|
||||
export function Track({
|
||||
@@ -36,21 +92,226 @@ export function Track({
|
||||
onPanChange,
|
||||
onRemove,
|
||||
onNameChange,
|
||||
onUpdateTrack,
|
||||
onSeek,
|
||||
onLoadAudio,
|
||||
onToggleEffect,
|
||||
onRemoveEffect,
|
||||
onUpdateEffect,
|
||||
onAddEffect,
|
||||
onSelectionChange,
|
||||
onToggleRecordEnable,
|
||||
isRecording = false,
|
||||
recordingLevel = 0,
|
||||
playbackLevel = 0,
|
||||
onParameterTouched,
|
||||
isPlaying = false,
|
||||
renderControlsOnly = false,
|
||||
renderWaveformOnly = false,
|
||||
}: TrackProps) {
|
||||
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
const fileInputRef = React.useRef<HTMLInputElement>(null);
|
||||
const [themeKey, setThemeKey] = React.useState(0);
|
||||
const [isResizing, setIsResizing] = React.useState(false);
|
||||
const resizeStartRef = React.useRef({ y: 0, height: 0 });
|
||||
const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
|
||||
|
||||
// Import dialog state
|
||||
const [showImportDialog, setShowImportDialog] = React.useState(false);
|
||||
const [pendingFile, setPendingFile] = React.useState<File | null>(null);
|
||||
const [fileMetadata, setFileMetadata] = React.useState<{
|
||||
sampleRate?: number;
|
||||
channels?: number;
|
||||
}>({});
|
||||
|
||||
// Selection state
|
||||
const [isSelecting, setIsSelecting] = React.useState(false);
|
||||
const [selectionStart, setSelectionStart] = React.useState<number | null>(
|
||||
null,
|
||||
);
|
||||
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
|
||||
const [dragStartPos, setDragStartPos] = React.useState<{
|
||||
x: number;
|
||||
y: number;
|
||||
} | null>(null);
|
||||
|
||||
// Touch callbacks for automation recording
|
||||
const handlePanTouchStart = React.useCallback(() => {
|
||||
if (isPlaying && onParameterTouched) {
|
||||
const panLane = track.automation.lanes.find(
|
||||
(l) => l.parameterId === "pan",
|
||||
);
|
||||
if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
|
||||
queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
|
||||
}
|
||||
}
|
||||
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
||||
|
||||
const handlePanTouchEnd = React.useCallback(() => {
|
||||
if (isPlaying && onParameterTouched) {
|
||||
const panLane = track.automation.lanes.find(
|
||||
(l) => l.parameterId === "pan",
|
||||
);
|
||||
if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
|
||||
queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
|
||||
}
|
||||
}
|
||||
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
||||
|
||||
const handleVolumeTouchStart = React.useCallback(() => {
|
||||
if (isPlaying && onParameterTouched) {
|
||||
const volumeLane = track.automation.lanes.find(
|
||||
(l) => l.parameterId === "volume",
|
||||
);
|
||||
if (
|
||||
volumeLane &&
|
||||
(volumeLane.mode === "touch" || volumeLane.mode === "latch")
|
||||
) {
|
||||
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
|
||||
}
|
||||
}
|
||||
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
||||
|
||||
const handleVolumeTouchEnd = React.useCallback(() => {
|
||||
if (isPlaying && onParameterTouched) {
|
||||
const volumeLane = track.automation.lanes.find(
|
||||
(l) => l.parameterId === "volume",
|
||||
);
|
||||
if (
|
||||
volumeLane &&
|
||||
(volumeLane.mode === "touch" || volumeLane.mode === "latch")
|
||||
) {
|
||||
queueMicrotask(() =>
|
||||
onParameterTouched(track.id, volumeLane.id, false),
|
||||
);
|
||||
}
|
||||
}
|
||||
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
|
||||
|
||||
// Auto-create automation lane for selected parameter if it doesn't exist
|
||||
React.useEffect(() => {
|
||||
if (!track.automation?.showAutomation) return;
|
||||
|
||||
const selectedParameterId =
|
||||
track.automation.selectedParameterId || "volume";
|
||||
const laneExists = track.automation.lanes.some(
|
||||
(lane) => lane.parameterId === selectedParameterId,
|
||||
);
|
||||
|
||||
if (!laneExists) {
|
||||
// Build list of available parameters
|
||||
const availableParameters: Array<{ id: string; name: string }> = [
|
||||
{ id: "volume", name: "Volume" },
|
||||
{ id: "pan", name: "Pan" },
|
||||
];
|
||||
|
||||
track.effectChain.effects.forEach((effect) => {
|
||||
if (effect.parameters) {
|
||||
Object.keys(effect.parameters).forEach((paramKey) => {
|
||||
const parameterId = `effect.${effect.id}.${paramKey}`;
|
||||
const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
|
||||
availableParameters.push({ id: parameterId, name: paramName });
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const paramInfo = availableParameters.find(
|
||||
(p) => p.id === selectedParameterId,
|
||||
);
|
||||
if (paramInfo) {
|
||||
// Determine value range based on parameter type
|
||||
let valueRange = { min: 0, max: 1 };
|
||||
let unit = "";
|
||||
let formatter: ((value: number) => string) | undefined;
|
||||
|
||||
if (selectedParameterId === "volume") {
|
||||
unit = "dB";
|
||||
} else if (selectedParameterId === "pan") {
|
||||
formatter = (value: number) => {
|
||||
if (value === 0.5) return "C";
|
||||
if (value < 0.5)
|
||||
return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
|
||||
return `${((value - 0.5) * 200).toFixed(0)}R`;
|
||||
};
|
||||
} else if (selectedParameterId.startsWith("effect.")) {
|
||||
// Parse effect parameter: effect.{effectId}.{paramName}
|
||||
const parts = selectedParameterId.split(".");
|
||||
if (parts.length === 3) {
|
||||
const paramName = parts[2];
|
||||
// Set ranges based on parameter name
|
||||
if (paramName === "frequency") {
|
||||
valueRange = { min: 20, max: 20000 };
|
||||
unit = "Hz";
|
||||
} else if (paramName === "Q") {
|
||||
valueRange = { min: 0.1, max: 20 };
|
||||
} else if (paramName === "gain") {
|
||||
valueRange = { min: -40, max: 40 };
|
||||
unit = "dB";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const newLane = createAutomationLane(
|
||||
track.id,
|
||||
selectedParameterId,
|
||||
paramInfo.name,
|
||||
{
|
||||
min: valueRange.min,
|
||||
max: valueRange.max,
|
||||
unit,
|
||||
formatter,
|
||||
},
|
||||
);
|
||||
|
||||
onUpdateTrack(track.id, {
|
||||
automation: {
|
||||
...track.automation,
|
||||
lanes: [...track.automation.lanes, newLane],
|
||||
selectedParameterId,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}, [
|
||||
track.automation?.showAutomation,
|
||||
track.automation?.selectedParameterId,
|
||||
track.automation?.lanes,
|
||||
track.effectChain.effects,
|
||||
track.id,
|
||||
onUpdateTrack,
|
||||
]);
|
||||
|
||||
// Listen for theme changes
|
||||
React.useEffect(() => {
|
||||
const observer = new MutationObserver(() => {
|
||||
// Increment key to force waveform redraw
|
||||
setThemeKey((prev) => prev + 1);
|
||||
});
|
||||
|
||||
// Watch for class changes on document element (dark mode toggle)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ["class"],
|
||||
});
|
||||
|
||||
return () => observer.disconnect();
|
||||
}, []);
|
||||
|
||||
// Draw waveform
|
||||
React.useEffect(() => {
|
||||
if (!track.audioBuffer || !canvasRef.current || track.collapsed) return;
|
||||
if (!track.audioBuffer || !canvasRef.current) return;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const ctx = canvas.getContext('2d');
|
||||
const ctx = canvas.getContext("2d");
|
||||
if (!ctx) return;
|
||||
|
||||
// Use parent container's size since canvas is absolute positioned
|
||||
const parent = canvas.parentElement;
|
||||
if (!parent) return;
|
||||
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
const rect = canvas.getBoundingClientRect();
|
||||
const rect = parent.getBoundingClientRect();
|
||||
|
||||
canvas.width = rect.width * dpr;
|
||||
canvas.height = rect.height * dpr;
|
||||
@@ -59,20 +320,38 @@ export function Track({
|
||||
const width = rect.width;
|
||||
const height = rect.height;
|
||||
|
||||
// Clear canvas
|
||||
ctx.fillStyle = 'rgb(15, 23, 42)';
|
||||
// Clear canvas with theme color
|
||||
const bgColor =
|
||||
getComputedStyle(canvas).getPropertyValue("--color-waveform-bg") ||
|
||||
"rgb(15, 23, 42)";
|
||||
ctx.fillStyle = bgColor;
|
||||
ctx.fillRect(0, 0, width, height);
|
||||
|
||||
const buffer = track.audioBuffer;
|
||||
const channelData = buffer.getChannelData(0);
|
||||
const samplesPerPixel = Math.floor(buffer.length / (width * zoom));
|
||||
// Calculate samples per pixel based on the total width
|
||||
// Must match the timeline calculation exactly
|
||||
const PIXELS_PER_SECOND_BASE = 5;
|
||||
let totalWidth;
|
||||
if (zoom >= 1) {
|
||||
const calculatedWidth = duration * zoom * PIXELS_PER_SECOND_BASE;
|
||||
totalWidth = Math.max(calculatedWidth, width);
|
||||
} else {
|
||||
totalWidth = width;
|
||||
}
|
||||
|
||||
// Calculate how much of the canvas width this track's duration occupies
|
||||
// If duration is 0 or invalid, use full width (first track scenario)
|
||||
const trackDurationRatio = duration > 0 ? buffer.duration / duration : 1;
|
||||
const trackWidth = Math.min(width * trackDurationRatio, width);
|
||||
const samplesPerPixel = trackWidth > 0 ? buffer.length / trackWidth : 0;
|
||||
|
||||
// Draw waveform
|
||||
ctx.fillStyle = track.color;
|
||||
ctx.strokeStyle = track.color;
|
||||
ctx.lineWidth = 1;
|
||||
|
||||
for (let x = 0; x < width; x++) {
|
||||
for (let x = 0; x < Math.floor(trackWidth); x++) {
|
||||
const startSample = Math.floor(x * samplesPerPixel);
|
||||
const endSample = Math.floor((x + 1) * samplesPerPixel);
|
||||
|
||||
@@ -95,89 +374,509 @@ export function Track({
|
||||
}
|
||||
|
||||
// Draw center line
|
||||
ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
|
||||
ctx.strokeStyle = "rgba(148, 163, 184, 0.2)";
|
||||
ctx.lineWidth = 1;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, height / 2);
|
||||
ctx.lineTo(width, height / 2);
|
||||
ctx.stroke();
|
||||
|
||||
// Draw selection overlay
|
||||
if (track.selection && duration > 0) {
|
||||
const selStartX = (track.selection.start / duration) * width;
|
||||
const selEndX = (track.selection.end / duration) * width;
|
||||
|
||||
// Draw selection background
|
||||
ctx.fillStyle = "rgba(59, 130, 246, 0.2)";
|
||||
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
|
||||
|
||||
// Draw selection borders
|
||||
ctx.strokeStyle = "rgba(59, 130, 246, 0.8)";
|
||||
ctx.lineWidth = 2;
|
||||
|
||||
// Start border
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(selStartX, 0);
|
||||
ctx.lineTo(selStartX, height);
|
||||
ctx.stroke();
|
||||
|
||||
// End border
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(selEndX, 0);
|
||||
ctx.lineTo(selEndX, height);
|
||||
ctx.stroke();
|
||||
}
|
||||
|
||||
// Draw playhead
|
||||
if (duration > 0) {
|
||||
const playheadX = (currentTime / duration) * width;
|
||||
ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
|
||||
ctx.strokeStyle = "rgba(239, 68, 68, 0.8)";
|
||||
ctx.lineWidth = 2;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(playheadX, 0);
|
||||
ctx.lineTo(playheadX, height);
|
||||
ctx.stroke();
|
||||
}
|
||||
}, [track.audioBuffer, track.color, track.collapsed, zoom, currentTime, duration]);
|
||||
}, [
|
||||
track.audioBuffer,
|
||||
track.color,
|
||||
track.collapsed,
|
||||
track.height,
|
||||
zoom,
|
||||
currentTime,
|
||||
duration,
|
||||
themeKey,
|
||||
track.selection,
|
||||
]);
|
||||
|
||||
const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (!onSeek || !duration) return;
|
||||
const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (!duration) return;
|
||||
|
||||
const rect = e.currentTarget.getBoundingClientRect();
|
||||
const x = e.clientX - rect.left;
|
||||
const y = e.clientY - rect.top;
|
||||
const clickTime = (x / rect.width) * duration;
|
||||
|
||||
// Store drag start position
|
||||
setDragStartPos({ x: e.clientX, y: e.clientY });
|
||||
setIsSelectingByDrag(false);
|
||||
|
||||
// Start selection immediately (will be used if user drags)
|
||||
setIsSelecting(true);
|
||||
setSelectionStart(clickTime);
|
||||
};
|
||||
|
||||
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (!isSelecting || selectionStart === null || !duration || !dragStartPos)
|
||||
return;
|
||||
|
||||
const rect = e.currentTarget.getBoundingClientRect();
|
||||
const x = e.clientX - rect.left;
|
||||
const currentTime = (x / rect.width) * duration;
|
||||
|
||||
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
|
||||
const dragDistance = Math.sqrt(
|
||||
Math.pow(e.clientX - dragStartPos.x, 2) +
|
||||
Math.pow(e.clientY - dragStartPos.y, 2),
|
||||
);
|
||||
|
||||
if (dragDistance > 3) {
|
||||
setIsSelectingByDrag(true);
|
||||
}
|
||||
|
||||
// If dragging, update selection
|
||||
if (isSelectingByDrag || dragDistance > 3) {
|
||||
// Clamp to valid time range
|
||||
const clampedTime = Math.max(0, Math.min(duration, currentTime));
|
||||
|
||||
// Update selection (ensure start < end)
|
||||
const start = Math.min(selectionStart, clampedTime);
|
||||
const end = Math.max(selectionStart, clampedTime);
|
||||
|
||||
onSelectionChange?.({ start, end });
|
||||
}
|
||||
};
|
||||
|
||||
const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
||||
if (!duration) return;
|
||||
|
||||
const rect = e.currentTarget.getBoundingClientRect();
|
||||
const x = e.clientX - rect.left;
|
||||
const clickTime = (x / rect.width) * duration;
|
||||
onSeek(clickTime);
|
||||
|
||||
// Check if user actually dragged (check distance directly, not state)
|
||||
const didDrag = dragStartPos
|
||||
? Math.sqrt(
|
||||
Math.pow(e.clientX - dragStartPos.x, 2) +
|
||||
Math.pow(e.clientY - dragStartPos.y, 2),
|
||||
) > 3
|
||||
: false;
|
||||
|
||||
// If user didn't drag (just clicked), clear selection and seek
|
||||
if (!didDrag) {
|
||||
onSelectionChange?.(null);
|
||||
if (onSeek) {
|
||||
onSeek(clickTime);
|
||||
}
|
||||
}
|
||||
|
||||
// Reset drag state
|
||||
setIsSelecting(false);
|
||||
setIsSelectingByDrag(false);
|
||||
setDragStartPos(null);
|
||||
};
|
||||
|
||||
if (track.collapsed) {
|
||||
// Handle mouse leaving canvas during selection
|
||||
React.useEffect(() => {
|
||||
const handleGlobalMouseUp = () => {
|
||||
if (isSelecting) {
|
||||
setIsSelecting(false);
|
||||
setIsSelectingByDrag(false);
|
||||
setDragStartPos(null);
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener("mouseup", handleGlobalMouseUp);
|
||||
return () => window.removeEventListener("mouseup", handleGlobalMouseUp);
|
||||
}, [isSelecting]);
|
||||
|
||||
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (!file || !onLoadAudio) return;
|
||||
|
||||
try {
|
||||
// Decode to get basic metadata before showing dialog
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const audioContext = new AudioContext();
|
||||
const tempBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
|
||||
// Set metadata and show import dialog
|
||||
setFileMetadata({
|
||||
sampleRate: tempBuffer.sampleRate,
|
||||
channels: tempBuffer.numberOfChannels,
|
||||
});
|
||||
setPendingFile(file);
|
||||
setShowImportDialog(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to read audio file metadata:", error);
|
||||
}
|
||||
|
||||
// Reset input
|
||||
e.target.value = "";
|
||||
};
|
||||
|
||||
const handleImport = async (options: ImportOptions) => {
|
||||
if (!pendingFile || !onLoadAudio) return;
|
||||
|
||||
try {
|
||||
setShowImportDialog(false);
|
||||
const { buffer, metadata } = await importAudioFile(pendingFile, options);
|
||||
onLoadAudio(buffer);
|
||||
|
||||
// Update track name to filename if it's still default
|
||||
if (track.name === "New Track" || track.name === "Untitled Track") {
|
||||
const fileName = metadata.fileName.replace(/\.[^/.]+$/, "");
|
||||
onNameChange(fileName);
|
||||
}
|
||||
|
||||
console.log("Audio imported:", metadata);
|
||||
} catch (error) {
|
||||
console.error("Failed to import audio file:", error);
|
||||
} finally {
|
||||
setPendingFile(null);
|
||||
setFileMetadata({});
|
||||
}
|
||||
};
|
||||
|
||||
const handleImportCancel = () => {
|
||||
setShowImportDialog(false);
|
||||
setPendingFile(null);
|
||||
setFileMetadata({});
|
||||
};
|
||||
|
||||
const handleLoadAudioClick = () => {
|
||||
fileInputRef.current?.click();
|
||||
};
|
||||
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
|
||||
const handleDragOver = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragging(true);
|
||||
};
|
||||
|
||||
const handleDragLeave = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragging(false);
|
||||
};
|
||||
|
||||
const handleDrop = async (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragging(false);
|
||||
|
||||
const file = e.dataTransfer.files?.[0];
|
||||
if (!file || !onLoadAudio) return;
|
||||
|
||||
// Check if it's an audio file
|
||||
if (!file.type.startsWith("audio/")) {
|
||||
console.warn("Dropped file is not an audio file");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const audioContext = new AudioContext();
|
||||
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
onLoadAudio(audioBuffer);
|
||||
|
||||
// Update track name to filename if it's still default
|
||||
if (track.name === "New Track" || track.name === "Untitled Track") {
|
||||
const fileName = file.name.replace(/\.[^/.]+$/, "");
|
||||
onNameChange(fileName);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to load audio file:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const trackHeight = track.collapsed
|
||||
? COLLAPSED_TRACK_HEIGHT
|
||||
: Math.max(track.height || DEFAULT_TRACK_HEIGHT, MIN_TRACK_HEIGHT);
|
||||
|
||||
// Track height resize handlers
|
||||
const handleResizeStart = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (track.collapsed) return;
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsResizing(true);
|
||||
resizeStartRef.current = { y: e.clientY, height: track.height };
|
||||
},
|
||||
[track.collapsed, track.height],
|
||||
);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!isResizing) return;
|
||||
|
||||
const handleMouseMove = (e: MouseEvent) => {
|
||||
const delta = e.clientY - resizeStartRef.current.y;
|
||||
const newHeight = Math.max(
|
||||
MIN_TRACK_HEIGHT,
|
||||
Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta),
|
||||
);
|
||||
onUpdateTrack(track.id, { height: newHeight });
|
||||
};
|
||||
|
||||
const handleMouseUp = () => {
|
||||
setIsResizing(false);
|
||||
};
|
||||
|
||||
window.addEventListener("mousemove", handleMouseMove);
|
||||
window.addEventListener("mouseup", handleMouseUp);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener("mousemove", handleMouseMove);
|
||||
window.removeEventListener("mouseup", handleMouseUp);
|
||||
};
|
||||
}, [isResizing, onUpdateTrack, track.id]);
|
||||
|
||||
// Render only controls
|
||||
if (renderControlsOnly) {
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
className={cn(
|
||||
"w-full flex-shrink-0 border-b border-r-4 p-4 flex flex-col gap-4 min-h-0 transition-all duration-200 cursor-pointer border-border",
|
||||
isSelected
|
||||
? "bg-primary/10 border-r-primary"
|
||||
: "bg-card border-r-transparent hover:bg-accent/30",
|
||||
)}
|
||||
style={{
|
||||
height: `${trackHeight}px`,
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
if (onSelect) onSelect();
|
||||
}}
|
||||
>
|
||||
{/* Collapsed Header */}
|
||||
{track.collapsed && (
|
||||
<div
|
||||
className={cn(
|
||||
"group flex items-center gap-1.5 px-2 py-1 h-full w-full cursor-pointer transition-colors",
|
||||
isSelected ? "bg-primary/10" : "hover:bg-accent/50",
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onToggleCollapse();
|
||||
}}
|
||||
title="Expand track"
|
||||
>
|
||||
<ChevronRight className="h-3 w-3 text-muted-foreground flex-shrink-0" />
|
||||
<div
|
||||
className="h-4 w-0.5 rounded-full flex-shrink-0"
|
||||
style={{ backgroundColor: track.color }}
|
||||
/>
|
||||
<span className="text-xs font-semibold text-foreground truncate flex-1">
|
||||
{String(track.name || "Untitled Track")}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Track Controls - Only show when not collapsed */}
|
||||
{!track.collapsed && (
|
||||
<div className="flex-1 flex flex-col items-center justify-center min-h-0 overflow-hidden">
|
||||
{/* Integrated Track Controls (Pan + Fader + Buttons) */}
|
||||
<TrackControls
|
||||
trackName={track.name}
|
||||
trackColor={track.color}
|
||||
collapsed={track.collapsed}
|
||||
volume={track.volume}
|
||||
pan={track.pan}
|
||||
peakLevel={
|
||||
track.recordEnabled || isRecording
|
||||
? recordingLevel
|
||||
: playbackLevel
|
||||
}
|
||||
rmsLevel={
|
||||
track.recordEnabled || isRecording
|
||||
? recordingLevel * 0.7
|
||||
: playbackLevel * 0.7
|
||||
}
|
||||
isMuted={track.mute}
|
||||
isSolo={track.solo}
|
||||
isRecordEnabled={track.recordEnabled}
|
||||
showAutomation={track.automation?.showAutomation}
|
||||
showEffects={track.showEffects}
|
||||
isRecording={isRecording}
|
||||
onNameChange={onNameChange}
|
||||
onToggleCollapse={onToggleCollapse}
|
||||
onVolumeChange={onVolumeChange}
|
||||
onPanChange={onPanChange}
|
||||
onMuteToggle={onToggleMute}
|
||||
onSoloToggle={onToggleSolo}
|
||||
onRecordToggle={onToggleRecordEnable}
|
||||
onAutomationToggle={() => {
|
||||
onUpdateTrack(track.id, {
|
||||
automation: {
|
||||
...track.automation,
|
||||
showAutomation: !track.automation?.showAutomation,
|
||||
},
|
||||
});
|
||||
}}
|
||||
onEffectsClick={() => {
|
||||
onUpdateTrack(track.id, {
|
||||
showEffects: !track.showEffects,
|
||||
});
|
||||
}}
|
||||
onVolumeTouchStart={handleVolumeTouchStart}
|
||||
onVolumeTouchEnd={handleVolumeTouchEnd}
|
||||
onPanTouchStart={handlePanTouchStart}
|
||||
onPanTouchEnd={handlePanTouchEnd}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Import Dialog - Only render in controls mode to avoid duplicates */}
|
||||
<ImportDialog
|
||||
open={showImportDialog}
|
||||
onClose={handleImportCancel}
|
||||
onImport={handleImport}
|
||||
fileName={pendingFile?.name}
|
||||
sampleRate={fileMetadata.sampleRate}
|
||||
channels={fileMetadata.channels}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// Render only waveform
|
||||
if (renderWaveformOnly) {
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'border-b border-border cursor-pointer',
|
||||
isSelected && 'ring-2 ring-primary ring-inset'
|
||||
"relative bg-waveform-bg border-b transition-all duration-200 h-full",
|
||||
isSelected && "bg-primary/5",
|
||||
)}
|
||||
onClick={onSelect}
|
||||
>
|
||||
<TrackHeader
|
||||
track={track}
|
||||
onToggleMute={onToggleMute}
|
||||
onToggleSolo={onToggleSolo}
|
||||
onToggleCollapse={onToggleCollapse}
|
||||
onVolumeChange={onVolumeChange}
|
||||
onPanChange={onPanChange}
|
||||
onRemove={onRemove}
|
||||
onNameChange={onNameChange}
|
||||
{/* Inner container with dynamic width */}
|
||||
<div
|
||||
className="relative h-full"
|
||||
style={{
|
||||
minWidth:
|
||||
track.audioBuffer && zoom >= 1
|
||||
? `${duration * zoom * 5}px`
|
||||
: "100%",
|
||||
}}
|
||||
>
|
||||
{/* Delete Button - Top Right Overlay - Stays fixed when scrolling */}
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onRemove();
|
||||
}}
|
||||
className={cn(
|
||||
"sticky top-2 right-2 float-right z-20 h-6 w-6 rounded flex items-center justify-center transition-all",
|
||||
"bg-card/80 hover:bg-destructive/90 text-muted-foreground hover:text-white",
|
||||
"border border-border/50 hover:border-destructive",
|
||||
"backdrop-blur-sm shadow-sm hover:shadow-md",
|
||||
)}
|
||||
title="Remove track"
|
||||
>
|
||||
<Trash2 className="h-3 w-3" />
|
||||
</button>
|
||||
|
||||
{track.audioBuffer ? (
|
||||
<>
|
||||
{/* Waveform Canvas */}
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="absolute inset-0 w-full h-full cursor-pointer"
|
||||
onMouseDown={handleCanvasMouseDown}
|
||||
onMouseMove={handleCanvasMouseMove}
|
||||
onMouseUp={handleCanvasMouseUp}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
!track.collapsed && (
|
||||
<>
|
||||
{/* Empty state - clickable area for upload with drag & drop */}
|
||||
<div
|
||||
className={cn(
|
||||
"absolute inset-0 w-full h-full transition-colors cursor-pointer",
|
||||
isDragging
|
||||
? "bg-primary/20 border-2 border-primary border-dashed"
|
||||
: "hover:bg-accent/50",
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleLoadAudioClick();
|
||||
}}
|
||||
onDragOver={handleDragOver}
|
||||
onDragLeave={handleDragLeave}
|
||||
onDrop={handleDrop}
|
||||
/>
|
||||
<input
|
||||
ref={fileInputRef}
|
||||
type="file"
|
||||
accept="audio/*"
|
||||
onChange={handleFileChange}
|
||||
className="hidden"
|
||||
/>
|
||||
</>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Import Dialog - Also needed in waveform-only mode */}
|
||||
<ImportDialog
|
||||
open={showImportDialog}
|
||||
onClose={handleImportCancel}
|
||||
onImport={handleImport}
|
||||
fileName={pendingFile?.name}
|
||||
sampleRate={fileMetadata.sampleRate}
|
||||
channels={fileMetadata.channels}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Render full track (both controls and waveform side by side)
|
||||
// This mode is no longer used - tracks are rendered separately with renderControlsOnly and renderWaveformOnly
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn(
|
||||
'border-b border-border cursor-pointer',
|
||||
isSelected && 'ring-2 ring-primary ring-inset'
|
||||
"flex flex-col transition-all duration-200 relative",
|
||||
isSelected && "bg-primary/5",
|
||||
)}
|
||||
onClick={onSelect}
|
||||
>
|
||||
<TrackHeader
|
||||
track={track}
|
||||
onToggleMute={onToggleMute}
|
||||
onToggleSolo={onToggleSolo}
|
||||
onToggleCollapse={onToggleCollapse}
|
||||
onVolumeChange={onVolumeChange}
|
||||
onPanChange={onPanChange}
|
||||
onRemove={onRemove}
|
||||
onNameChange={onNameChange}
|
||||
/>
|
||||
<div className="relative" style={{ height: track.height }}>
|
||||
{track.audioBuffer ? (
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
className="w-full h-full cursor-pointer"
|
||||
onClick={handleCanvasClick}
|
||||
/>
|
||||
) : (
|
||||
<div className="flex items-center justify-center h-full text-sm text-muted-foreground">
|
||||
No audio loaded
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{/* Full track content removed - now rendered separately in TrackList */}
|
||||
<div>Track component should not be rendered in full mode anymore</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
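
For reference, the pan formatter built inside the automation-lane effect above maps the stored 0 to 1 automation value onto the L/C/R label shown in the lane. Restated on its own with a few example inputs (same logic as in the Track code; the standalone name formatPan is only for illustration):

// Worked example of the pan formatter defined in Track above:
// 0.5 is center, values below pan left, values above pan right.
const formatPan = (value: number): string => {
  if (value === 0.5) return "C";
  if (value < 0.5) return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
  return `${((value - 0.5) * 200).toFixed(0)}R`;
};

formatPan(0.0);  // "100L" - hard left
formatPan(0.25); // "50L"
formatPan(0.5);  // "C"    - center
formatPan(0.75); // "50R"
formatPan(1.0);  // "100R" - hard right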
457
components/tracks/TrackControls.tsx
Normal file
@@ -0,0 +1,457 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Circle, Headphones, MoreHorizontal, ChevronRight, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import { CircularKnob } from '@/components/ui/CircularKnob';
|
||||
import { TrackFader } from './TrackFader';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface TrackControlsProps {
|
||||
trackName: string;
|
||||
trackColor: string;
|
||||
collapsed: boolean;
|
||||
volume: number;
|
||||
pan: number;
|
||||
peakLevel: number;
|
||||
rmsLevel: number;
|
||||
isMuted?: boolean;
|
||||
isSolo?: boolean;
|
||||
isRecordEnabled?: boolean;
|
||||
showAutomation?: boolean;
|
||||
showEffects?: boolean;
|
||||
isRecording?: boolean;
|
||||
mobileCollapsed?: boolean; // For mobile view collapsible controls
|
||||
onNameChange: (name: string) => void;
|
||||
onToggleCollapse: () => void;
|
||||
onVolumeChange: (volume: number) => void;
|
||||
onPanChange: (pan: number) => void;
|
||||
onMuteToggle: () => void;
|
||||
onSoloToggle?: () => void;
|
||||
onRecordToggle?: () => void;
|
||||
onAutomationToggle?: () => void;
|
||||
onEffectsClick?: () => void;
|
||||
onVolumeTouchStart?: () => void;
|
||||
onVolumeTouchEnd?: () => void;
|
||||
onPanTouchStart?: () => void;
|
||||
onPanTouchEnd?: () => void;
|
||||
onToggleMobileCollapse?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function TrackControls({
|
||||
trackName,
|
||||
trackColor,
|
||||
collapsed,
|
||||
volume,
|
||||
pan,
|
||||
peakLevel,
|
||||
rmsLevel,
|
||||
isMuted = false,
|
||||
isSolo = false,
|
||||
isRecordEnabled = false,
|
||||
showAutomation = false,
|
||||
showEffects = false,
|
||||
isRecording = false,
|
||||
mobileCollapsed = false,
|
||||
onNameChange,
|
||||
onToggleCollapse,
|
||||
onVolumeChange,
|
||||
onPanChange,
|
||||
onMuteToggle,
|
||||
onSoloToggle,
|
||||
onRecordToggle,
|
||||
onAutomationToggle,
|
||||
onEffectsClick,
|
||||
onVolumeTouchStart,
|
||||
onVolumeTouchEnd,
|
||||
onPanTouchStart,
|
||||
onPanTouchEnd,
|
||||
onToggleMobileCollapse,
|
||||
className,
|
||||
}: TrackControlsProps) {
|
||||
const [isEditingName, setIsEditingName] = React.useState(false);
|
||||
const [editName, setEditName] = React.useState(trackName);
|
||||
|
||||
const handleNameClick = () => {
|
||||
setIsEditingName(true);
|
||||
setEditName(trackName);
|
||||
};
|
||||
|
||||
const handleNameBlur = () => {
|
||||
setIsEditingName(false);
|
||||
if (editName.trim() && editName !== trackName) {
|
||||
onNameChange(editName.trim());
|
||||
} else {
|
||||
setEditName(trackName);
|
||||
}
|
||||
};
|
||||
|
||||
const handleNameKeyDown = (e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter') {
|
||||
handleNameBlur();
|
||||
} else if (e.key === 'Escape') {
|
||||
setIsEditingName(false);
|
||||
setEditName(trackName);
|
||||
}
|
||||
};
|
||||
|
||||
// Mobile collapsed view - minimal controls (like master controls)
|
||||
if (mobileCollapsed) {
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex flex-col items-center gap-2 px-3 py-2 bg-card/50 border border-accent/50 rounded-lg w-full sm:hidden',
|
||||
className
|
||||
)}>
|
||||
<div className="flex items-center justify-between w-full">
|
||||
<div className="flex items-center gap-1 flex-1">
|
||||
<button
|
||||
onClick={onToggleCollapse}
|
||||
className="p-0.5 hover:bg-accent/20 rounded transition-colors flex-shrink-0"
|
||||
title={collapsed ? 'Expand track' : 'Collapse track'}
|
||||
>
|
||||
{collapsed ? (
|
||||
<ChevronRight className="h-3 w-3 text-muted-foreground" />
|
||||
) : (
|
||||
<ChevronDown className="h-3 w-3 text-muted-foreground" />
|
||||
)}
|
||||
</button>
|
||||
<div
|
||||
className="text-xs font-bold uppercase tracking-wider"
|
||||
style={{ color: trackColor }}
|
||||
>
|
||||
{trackName}
|
||||
</div>
|
||||
</div>
|
||||
{onToggleMobileCollapse && (
|
||||
<button
|
||||
onClick={onToggleMobileCollapse}
|
||||
className="p-1 hover:bg-accent/20 rounded transition-colors"
|
||||
title="Expand track controls"
|
||||
>
|
||||
<ChevronDown className="h-3 w-3 text-muted-foreground" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-1 w-full justify-center">
|
||||
{onRecordToggle && (
|
||||
<button
|
||||
onClick={onRecordToggle}
|
||||
className={cn(
|
||||
'h-7 w-7 rounded-full flex items-center justify-center transition-all',
|
||||
isRecordEnabled
|
||||
? isRecording
|
||||
? 'bg-red-500 shadow-lg shadow-red-500/50 animate-pulse'
|
||||
: 'bg-red-500 shadow-md shadow-red-500/30'
|
||||
: 'bg-card hover:bg-accent border border-border/50'
|
||||
)}
|
||||
title={isRecordEnabled ? 'Record Armed' : 'Arm for Recording'}
|
||||
>
|
||||
<Circle className={cn('h-3.5 w-3.5', isRecordEnabled ? 'fill-white text-white' : 'text-muted-foreground')} />
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={onMuteToggle}
|
||||
className={cn(
|
||||
'h-7 w-7 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
isMuted
|
||||
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isMuted ? 'Unmute' : 'Mute'}
|
||||
>
|
||||
M
|
||||
</button>
|
||||
{onSoloToggle && (
|
||||
<button
|
||||
onClick={onSoloToggle}
|
||||
className={cn(
|
||||
'h-7 w-7 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
isSolo
|
||||
? 'bg-yellow-500 text-white shadow-md shadow-yellow-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isSolo ? 'Unsolo' : 'Solo'}
|
||||
>
|
||||
S
|
||||
</button>
|
||||
)}
|
||||
<div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
|
||||
<div
|
||||
className={cn(
|
||||
'h-full transition-all',
|
||||
peakLevel > 0.95 ? 'bg-red-500' : peakLevel > 0.8 ? 'bg-yellow-500' : 'bg-green-500'
|
||||
)}
|
||||
style={{ width: `${peakLevel * 100}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Mobile expanded view - full controls (like master controls)
|
||||
const mobileExpandedView = (
|
||||
<div className={cn(
|
||||
'flex flex-col items-center gap-3 px-3 py-3 bg-card/50 border border-accent/50 rounded-lg w-full sm:hidden',
|
||||
className
|
||||
)}>
|
||||
{/* Header with collapse button */}
|
||||
<div className="flex items-center justify-between w-full">
|
||||
<button
|
||||
onClick={onToggleCollapse}
|
||||
className="p-0.5 hover:bg-accent/20 rounded transition-colors flex-shrink-0"
|
||||
title={collapsed ? 'Expand track' : 'Collapse track'}
|
||||
>
|
||||
{collapsed ? (
|
||||
<ChevronRight className="h-3 w-3 text-muted-foreground" />
|
||||
) : (
|
||||
<ChevronDown className="h-3 w-3 text-muted-foreground" />
|
||||
)}
|
||||
</button>
|
||||
<div
|
||||
className="text-xs font-bold uppercase tracking-wider flex-1 text-center"
|
||||
style={{ color: trackColor }}
|
||||
>
|
||||
{trackName}
|
||||
</div>
|
||||
{onToggleMobileCollapse && (
|
||||
<button
|
||||
onClick={onToggleMobileCollapse}
|
||||
className="p-0.5 hover:bg-accent/20 rounded transition-colors flex-shrink-0"
|
||||
title="Collapse track controls"
|
||||
>
|
||||
<ChevronUp className="h-3 w-3 text-muted-foreground" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Pan Control */}
|
||||
<CircularKnob
|
||||
value={pan}
|
||||
onChange={onPanChange}
|
||||
onTouchStart={onPanTouchStart}
|
||||
onTouchEnd={onPanTouchEnd}
|
||||
min={-1}
|
||||
max={1}
|
||||
step={0.01}
|
||||
label="PAN"
|
||||
size={48}
|
||||
formatValue={(value: number) => {
|
||||
if (Math.abs(value) < 0.01) return 'C';
|
||||
if (value < 0) return `${Math.abs(value * 100).toFixed(0)}L`;
|
||||
return `${(value * 100).toFixed(0)}R`;
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Volume Fader - Full height, not compressed */}
|
||||
<div className="flex-1 flex justify-center items-center w-full min-h-[160px]">
|
||||
<TrackFader
|
||||
value={volume}
|
||||
peakLevel={peakLevel}
|
||||
rmsLevel={rmsLevel}
|
||||
onChange={onVolumeChange}
|
||||
onTouchStart={onVolumeTouchStart}
|
||||
onTouchEnd={onVolumeTouchEnd}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Control buttons */}
|
||||
<div className="flex items-center gap-1 w-full justify-center">
|
||||
{onRecordToggle && (
|
||||
<button
|
||||
onClick={onRecordToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-full flex items-center justify-center transition-all',
|
||||
isRecordEnabled
|
||||
? isRecording
|
||||
? 'bg-red-500 shadow-lg shadow-red-500/50 animate-pulse'
|
||||
: 'bg-red-500 shadow-md shadow-red-500/30'
|
||||
: 'bg-card hover:bg-accent border border-border/50'
|
||||
)}
|
||||
title={isRecordEnabled ? 'Record Armed' : 'Arm for Recording'}
|
||||
>
|
||||
<Circle className={cn('h-3.5 w-3.5', isRecordEnabled ? 'fill-white text-white' : 'text-muted-foreground')} />
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={onMuteToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
isMuted
|
||||
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isMuted ? 'Unmute' : 'Mute'}
|
||||
>
|
||||
M
|
||||
</button>
|
||||
{onSoloToggle && (
|
||||
<button
|
||||
onClick={onSoloToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
isSolo
|
||||
? 'bg-yellow-500 text-white shadow-md shadow-yellow-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title={isSolo ? 'Unsolo' : 'Solo'}
|
||||
>
|
||||
S
|
||||
</button>
|
||||
)}
|
||||
{onEffectsClick && (
|
||||
<button
|
||||
onClick={onEffectsClick}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-xs font-bold',
|
||||
showEffects
|
||||
? 'bg-purple-500 text-white shadow-md shadow-purple-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title="Effects"
|
||||
>
|
||||
FX
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Mobile view - Show expanded or collapsed */}
|
||||
{!mobileCollapsed && mobileExpandedView}
|
||||
|
||||
{/* Desktop/tablet view - hidden on mobile */}
|
||||
<div className={cn(
|
||||
'flex flex-col items-center gap-3 px-4 py-3 bg-card/50 border border-accent/50 rounded-lg hidden sm:flex',
|
||||
className
|
||||
)}>
|
||||
{/* Track Name Header with Collapse Chevron */}
|
||||
<div className="flex items-center gap-1 w-full">
|
||||
<button
|
||||
onClick={onToggleCollapse}
|
||||
className="p-0.5 hover:bg-accent/20 rounded transition-colors flex-shrink-0"
|
||||
title={collapsed ? 'Expand track' : 'Collapse track'}
|
||||
>
|
||||
{collapsed ? (
|
||||
<ChevronRight className="h-3 w-3 text-muted-foreground" />
|
||||
) : (
|
||||
<ChevronDown className="h-3 w-3 text-muted-foreground" />
|
||||
)}
|
||||
</button>
|
||||
<div className="flex-1 flex items-center justify-center min-w-0">
|
||||
{isEditingName ? (
|
||||
<input
|
||||
type="text"
|
||||
value={editName}
|
||||
onChange={(e) => setEditName(e.target.value)}
|
||||
onBlur={handleNameBlur}
|
||||
onKeyDown={handleNameKeyDown}
|
||||
autoFocus
|
||||
className="w-24 text-[10px] font-bold uppercase tracking-wider text-center bg-transparent border-b focus:outline-none px-1"
|
||||
style={{ color: trackColor, borderColor: trackColor }}
|
||||
/>
|
||||
) : (
|
||||
<div
|
||||
onClick={handleNameClick}
|
||||
className="w-24 text-[10px] font-bold uppercase tracking-wider text-center cursor-text hover:bg-accent/10 px-1 rounded transition-colors truncate"
|
||||
style={{ color: trackColor }}
|
||||
title="Click to edit track name"
|
||||
>
|
||||
{trackName}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{/* Spacer to balance the chevron and center the label */}
|
||||
<div className="p-0.5 flex-shrink-0 w-4" />
|
||||
</div>
|
||||
|
||||
{/* Pan Control - Top */}
|
||||
<div className="flex justify-center w-full">
|
||||
<CircularKnob
|
||||
value={pan}
|
||||
onChange={onPanChange}
|
||||
onTouchStart={onPanTouchStart}
|
||||
onTouchEnd={onPanTouchEnd}
|
||||
min={-1}
|
||||
max={1}
|
||||
step={0.01}
|
||||
label="PAN"
|
||||
size={48}
|
||||
formatValue={(value: number) => {
|
||||
if (Math.abs(value) < 0.01) return 'C';
|
||||
if (value < 0) return `${Math.abs(value * 100).toFixed(0)}L`;
|
||||
return `${(value * 100).toFixed(0)}R`;
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Track Fader - Center (vertically centered in remaining space) */}
|
||||
<div className="flex justify-center items-center flex-1 w-full">
|
||||
<TrackFader
|
||||
value={volume}
|
||||
peakLevel={peakLevel}
|
||||
rmsLevel={rmsLevel}
|
||||
onChange={onVolumeChange}
|
||||
onTouchStart={onVolumeTouchStart}
|
||||
onTouchEnd={onVolumeTouchEnd}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Control Buttons - Bottom */}
|
||||
<div className="flex flex-col gap-1 w-full">
|
||||
{/* Control Buttons Row 1: R/M/S */}
|
||||
<div className="flex items-center gap-1 w-full justify-center">
|
||||
{/* Record Arm */}
|
||||
{onRecordToggle && (
|
||||
<button
|
||||
onClick={onRecordToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-[11px] font-bold',
|
||||
isRecordEnabled
|
||||
? 'bg-red-500 text-white shadow-md shadow-red-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50',
|
||||
isRecording && 'animate-pulse'
|
||||
)}
|
||||
title="Arm track for recording"
|
||||
>
|
||||
<Circle className="h-3 w-3 fill-current" />
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* Mute Button */}
|
||||
<button
|
||||
onClick={onMuteToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-[11px] font-bold',
|
||||
isMuted
|
||||
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title="Mute track"
|
||||
>
|
||||
M
|
||||
</button>
|
||||
|
||||
{/* Solo Button */}
|
||||
{onSoloToggle && (
|
||||
<button
|
||||
onClick={onSoloToggle}
|
||||
className={cn(
|
||||
'h-8 w-8 rounded-md flex items-center justify-center transition-all text-[11px] font-bold',
|
||||
isSolo
|
||||
? 'bg-yellow-500 text-black shadow-md shadow-yellow-500/30'
|
||||
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
||||
)}
|
||||
title="Solo track"
|
||||
>
|
||||
<Headphones className="h-3 w-3" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
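For orientation, a minimal usage sketch of the TrackControls component defined above. The local state and handler wiring here are illustrative assumptions (the real app presumably drives these from its track store); only the prop names come from the TrackControlsProps interface above.

// Illustrative wrapper: local state stands in for the app's real track store.
import * as React from 'react';
import { TrackControls } from '@/components/tracks/TrackControls';

export function TrackControlsExample() {
  const [volume, setVolume] = React.useState(0.8);
  const [pan, setPan] = React.useState(0);
  const [muted, setMuted] = React.useState(false);
  const [collapsed, setCollapsed] = React.useState(false);
  const [name, setName] = React.useState('Drums');

  return (
    <TrackControls
      trackName={name}
      trackColor="#22c55e"
      collapsed={collapsed}
      volume={volume}
      pan={pan}
      peakLevel={0.6} // would normally come from the metering engine
      rmsLevel={0.4}
      isMuted={muted}
      onNameChange={setName}
      onToggleCollapse={() => setCollapsed((c) => !c)}
      onVolumeChange={setVolume}
      onPanChange={setPan}
      onMuteToggle={() => setMuted((m) => !m)}
    />
  );
}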
222  components/tracks/TrackExtensions.tsx  Normal file
@@ -0,0 +1,222 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { Plus, ChevronDown, ChevronRight, Sparkles } from 'lucide-react';
|
||||
import type { Track as TrackType } from '@/types/track';
|
||||
import { Button } from '@/components/ui/Button';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
import { EffectDevice } from '@/components/effects/EffectDevice';
|
||||
import { EffectBrowser } from '@/components/effects/EffectBrowser';
|
||||
import type { EffectType } from '@/lib/audio/effects/chain';
|
||||
|
||||
export interface TrackExtensionsProps {
|
||||
track: TrackType;
|
||||
onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
|
||||
onToggleEffect?: (effectId: string) => void;
|
||||
onRemoveEffect?: (effectId: string) => void;
|
||||
onUpdateEffect?: (effectId: string, parameters: any) => void;
|
||||
onAddEffect?: (effectType: EffectType) => void;
|
||||
asOverlay?: boolean; // When true, renders as full overlay without header
|
||||
}
|
||||
|
||||
export function TrackExtensions({
|
||||
track,
|
||||
onUpdateTrack,
|
||||
onToggleEffect,
|
||||
onRemoveEffect,
|
||||
onUpdateEffect,
|
||||
onAddEffect,
|
||||
asOverlay = false,
|
||||
}: TrackExtensionsProps) {
|
||||
const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
|
||||
|
||||
// Don't render if track is collapsed (unless it's an overlay, which handles its own visibility)
|
||||
if (!asOverlay && track.collapsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Overlay mode: render full-screen effect rack
|
||||
if (asOverlay) {
|
||||
return (
|
||||
<>
|
||||
<div className="flex flex-col h-full bg-card/95 rounded-lg border border-border shadow-2xl">
|
||||
{/* Header with close button */}
|
||||
<div className="flex items-center justify-between px-4 py-3 border-b border-border bg-muted/50">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-sm font-medium">Effects</span>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
({track.effectChain.effects.length})
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => setEffectBrowserOpen(true)}
|
||||
title="Add effect"
|
||||
>
|
||||
<Plus className="h-4 w-4" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={() => onUpdateTrack(track.id, { showEffects: false })}
|
||||
title="Close effects"
|
||||
>
|
||||
<ChevronDown className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Effects rack */}
|
||||
<div className="flex-1 overflow-x-auto custom-scrollbar p-4">
|
||||
<div className="flex h-full gap-4">
|
||||
{track.effectChain.effects.length === 0 ? (
|
||||
<div className="flex flex-col items-center justify-center w-full text-center gap-3">
|
||||
<Sparkles className="h-12 w-12 text-muted-foreground/30" />
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground mb-1">No effects yet</p>
|
||||
<p className="text-xs text-muted-foreground/70">
|
||||
Click + to add an effect
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
track.effectChain.effects.map((effect) => (
|
||||
<EffectDevice
|
||||
key={effect.id}
|
||||
effect={effect}
|
||||
onToggleEnabled={() => onToggleEffect?.(effect.id)}
|
||||
onRemove={() => onRemoveEffect?.(effect.id)}
|
||||
onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
|
||||
onToggleExpanded={() => {
|
||||
const updatedEffects = track.effectChain.effects.map((e) =>
|
||||
e.id === effect.id ? { ...e, expanded: !e.expanded } : e
|
||||
);
|
||||
onUpdateTrack(track.id, {
|
||||
effectChain: { ...track.effectChain, effects: updatedEffects },
|
||||
});
|
||||
}}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Effect Browser Dialog */}
|
||||
<EffectBrowser
|
||||
open={effectBrowserOpen}
|
||||
onClose={() => setEffectBrowserOpen(false)}
|
||||
onSelectEffect={(effectType) => {
|
||||
if (onAddEffect) {
|
||||
onAddEffect(effectType);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// Original inline mode
|
||||
return (
|
||||
<>
|
||||
{/* Effects Section (Collapsible, Full Width) */}
|
||||
<div className="bg-muted/50 border-b border-border/50">
|
||||
{/* Effects Header - clickable to toggle */}
|
||||
<div
|
||||
className="flex items-center gap-2 px-3 py-1.5 cursor-pointer hover:bg-accent/30 transition-colors"
|
||||
onClick={() => {
|
||||
onUpdateTrack(track.id, {
|
||||
showEffects: !track.showEffects,
|
||||
});
|
||||
}}
|
||||
>
|
||||
{track.showEffects ? (
|
||||
<ChevronDown className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
|
||||
) : (
|
||||
<ChevronRight className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
|
||||
)}
|
||||
|
||||
{/* Show mini effect chain when collapsed */}
|
||||
{!track.showEffects && track.effectChain.effects.length > 0 ? (
|
||||
<div className="flex-1 flex items-center gap-1 overflow-x-auto custom-scrollbar">
|
||||
{track.effectChain.effects.map((effect) => (
|
||||
<div
|
||||
key={effect.id}
|
||||
className={cn(
|
||||
'px-2 py-0.5 rounded text-[10px] font-medium flex-shrink-0',
|
||||
effect.enabled
|
||||
? 'bg-primary/20 text-primary border border-primary/30'
|
||||
: 'bg-muted/30 text-muted-foreground border border-border'
|
||||
)}
|
||||
>
|
||||
{effect.name}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : (
|
||||
<span className="text-xs font-medium text-muted-foreground">
|
||||
Devices ({track.effectChain.effects.length})
|
||||
</span>
|
||||
)}
|
||||
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon-sm"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
setEffectBrowserOpen(true);
|
||||
}}
|
||||
title="Add effect"
|
||||
className="h-5 w-5 flex-shrink-0"
|
||||
>
|
||||
<Plus className="h-3 w-3" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Horizontal scrolling device rack - expanded state */}
|
||||
{track.showEffects && (
|
||||
<div className="h-48 overflow-x-auto custom-scrollbar bg-muted/70 p-3">
|
||||
<div className="flex h-full gap-3">
|
||||
{track.effectChain.effects.length === 0 ? (
|
||||
<div className="text-xs text-muted-foreground text-center py-8 w-full">
|
||||
No devices. Click + to add an effect.
|
||||
</div>
|
||||
) : (
|
||||
track.effectChain.effects.map((effect) => (
|
||||
<EffectDevice
|
||||
key={effect.id}
|
||||
effect={effect}
|
||||
onToggleEnabled={() => onToggleEffect?.(effect.id)}
|
||||
onRemove={() => onRemoveEffect?.(effect.id)}
|
||||
onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
|
||||
onToggleExpanded={() => {
|
||||
const updatedEffects = track.effectChain.effects.map((e) =>
|
||||
e.id === effect.id ? { ...e, expanded: !e.expanded } : e
|
||||
);
|
||||
onUpdateTrack(track.id, {
|
||||
effectChain: { ...track.effectChain, effects: updatedEffects },
|
||||
});
|
||||
}}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Effect Browser Dialog */}
|
||||
<EffectBrowser
|
||||
open={effectBrowserOpen}
|
||||
onClose={() => setEffectBrowserOpen(false)}
|
||||
onSelectEffect={(effectType) => {
|
||||
if (onAddEffect) {
|
||||
onAddEffect(effectType);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
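The onToggleExpanded handlers above rebuild the effect array immutably before handing the new chain to onUpdateTrack. If that pattern recurs, it can be factored into a small pure helper; the simplified types below are stand-ins for the real ones in @/types, so treat this as a sketch rather than project code.

// Simplified stand-in types; the real Track/EffectChain types live in @/types.
interface EffectLike {
  id: string;
  expanded?: boolean;
}
interface EffectChainLike<E extends EffectLike> {
  effects: E[];
}

// Returns a new chain with one effect's `expanded` flag flipped,
// leaving every other effect (and the original chain) untouched.
export function toggleEffectExpanded<E extends EffectLike>(
  chain: EffectChainLike<E>,
  effectId: string
): EffectChainLike<E> {
  return {
    ...chain,
    effects: chain.effects.map((e) =>
      e.id === effectId ? { ...e, expanded: !e.expanded } : e
    ),
  };
}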
246  components/tracks/TrackFader.tsx  Normal file
@@ -0,0 +1,246 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface TrackFaderProps {
|
||||
value: number;
|
||||
peakLevel: number;
|
||||
rmsLevel: number;
|
||||
onChange: (value: number) => void;
|
||||
onTouchStart?: () => void;
|
||||
onTouchEnd?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function TrackFader({
|
||||
value,
|
||||
peakLevel,
|
||||
rmsLevel,
|
||||
onChange,
|
||||
onTouchStart,
|
||||
onTouchEnd,
|
||||
className,
|
||||
}: TrackFaderProps) {
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
const containerRef = React.useRef<HTMLDivElement>(null);
|
||||
|
||||
// Convert linear 0-1 to dB scale for display
|
||||
const linearToDb = (linear: number): number => {
|
||||
if (linear === 0) return -60;
|
||||
const db = 20 * Math.log10(linear);
|
||||
return Math.max(-60, Math.min(0, db));
|
||||
};
|
||||
|
||||
const valueDb = linearToDb(value);
|
||||
const peakDb = linearToDb(peakLevel);
|
||||
const rmsDb = linearToDb(rmsLevel);
|
||||
|
||||
// Calculate bar widths (0-100%)
|
||||
const peakWidth = ((peakDb + 60) / 60) * 100;
|
||||
const rmsWidth = ((rmsDb + 60) / 60) * 100;
|
||||
|
||||
const handleMouseDown = (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
setIsDragging(true);
|
||||
onTouchStart?.();
|
||||
updateValue(e.clientY);
|
||||
};
|
||||
|
||||
const handleMouseMove = React.useCallback(
|
||||
(e: MouseEvent) => {
|
||||
if (!isDragging) return;
|
||||
updateValue(e.clientY);
|
||||
},
|
||||
[isDragging]
|
||||
);
|
||||
|
||||
const handleMouseUp = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
const handleTouchStart = (e: React.TouchEvent) => {
|
||||
e.preventDefault();
|
||||
const touch = e.touches[0];
|
||||
setIsDragging(true);
|
||||
onTouchStart?.();
|
||||
updateValue(touch.clientY);
|
||||
};
|
||||
|
||||
const handleTouchMove = React.useCallback(
|
||||
(e: TouchEvent) => {
|
||||
if (!isDragging || e.touches.length === 0) return;
|
||||
const touch = e.touches[0];
|
||||
updateValue(touch.clientY);
|
||||
},
|
||||
[isDragging]
|
||||
);
|
||||
|
||||
const handleTouchEnd = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
const updateValue = (clientY: number) => {
|
||||
if (!containerRef.current) return;
|
||||
|
||||
const rect = containerRef.current.getBoundingClientRect();
|
||||
const y = clientY - rect.top;
|
||||
|
||||
// Track has 32px (2rem) padding on top and bottom (top-8 bottom-8)
|
||||
const trackPadding = 32;
|
||||
const trackHeight = rect.height - (trackPadding * 2);
|
||||
|
||||
// Clamp y to track bounds
|
||||
const clampedY = Math.max(trackPadding, Math.min(rect.height - trackPadding, y));
|
||||
|
||||
// Inverted: top = max (1), bottom = min (0)
|
||||
// Map clampedY from [trackPadding, height-trackPadding] to [1, 0]
|
||||
const percentage = 1 - ((clampedY - trackPadding) / trackHeight);
|
||||
onChange(Math.max(0, Math.min(1, percentage)));
|
||||
};
|
||||
|
||||
React.useEffect(() => {
|
||||
if (isDragging) {
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
window.addEventListener('touchmove', handleTouchMove);
|
||||
window.addEventListener('touchend', handleTouchEnd);
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
window.removeEventListener('touchmove', handleTouchMove);
|
||||
window.removeEventListener('touchend', handleTouchEnd);
|
||||
};
|
||||
}
|
||||
}, [isDragging, handleMouseMove, handleMouseUp, handleTouchMove, handleTouchEnd]);
|
||||
|
||||
return (
|
||||
<div className={cn('flex gap-3', className)} style={{ marginLeft: '16px' }}>
|
||||
{/* dB Labels (Left) */}
|
||||
<div className="flex flex-col justify-between text-[10px] font-mono text-muted-foreground py-1">
|
||||
<span>0</span>
|
||||
<span>-12</span>
|
||||
<span>-24</span>
|
||||
<span>-60</span>
|
||||
</div>
|
||||
|
||||
{/* Fader Container */}
|
||||
<div
|
||||
ref={containerRef}
|
||||
className="relative w-12 h-40 bg-background/50 rounded-md border border-border/50 cursor-pointer"
|
||||
onMouseDown={handleMouseDown}
|
||||
onTouchStart={handleTouchStart}
|
||||
>
|
||||
{/* Peak Meter (Horizontal Bar - Top) */}
|
||||
<div className="absolute inset-x-2 top-2 h-3 bg-background/80 rounded-sm overflow-hidden border border-border/30">
|
||||
<div
|
||||
className="absolute left-0 top-0 bottom-0 transition-all duration-75 ease-out"
|
||||
style={{ width: `${Math.max(0, Math.min(100, peakWidth))}%` }}
|
||||
>
|
||||
<div className={cn(
|
||||
'w-full h-full',
|
||||
peakDb > -3 ? 'bg-red-500' :
|
||||
peakDb > -6 ? 'bg-yellow-500' :
|
||||
'bg-green-500'
|
||||
)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* RMS Meter (Horizontal Bar - Bottom) */}
|
||||
<div className="absolute inset-x-2 bottom-2 h-3 bg-background/80 rounded-sm overflow-hidden border border-border/30">
|
||||
<div
|
||||
className="absolute left-0 top-0 bottom-0 transition-all duration-150 ease-out"
|
||||
style={{ width: `${Math.max(0, Math.min(100, rmsWidth))}%` }}
|
||||
>
|
||||
<div className={cn(
|
||||
'w-full h-full',
|
||||
rmsDb > -3 ? 'bg-red-500' :
|
||||
rmsDb > -6 ? 'bg-yellow-500' :
|
||||
'bg-green-500'
|
||||
)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Fader Track */}
|
||||
<div className="absolute top-8 bottom-8 left-1/2 -translate-x-1/2 w-1.5 bg-muted/50 rounded-full" />
|
||||
|
||||
{/* Fader Handle */}
|
||||
<div
|
||||
className="absolute left-1/2 -translate-x-1/2 w-10 h-4 bg-primary/80 border-2 border-primary rounded-md shadow-lg cursor-grab active:cursor-grabbing pointer-events-none transition-all"
|
||||
style={{
|
||||
// Inverted: value 1 = top of track (20%), value 0 = bottom of track (80%)
|
||||
// Track has top-8 bottom-8 padding (20% and 80% of h-40 container)
|
||||
// Handle moves within 60% range (from 20% to 80%)
|
||||
top: `calc(${20 + (1 - value) * 60}% - 0.5rem)`,
|
||||
}}
|
||||
>
|
||||
{/* Handle grip lines */}
|
||||
<div className="absolute inset-0 flex items-center justify-center gap-0.5">
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
<div className="h-2 w-px bg-primary-foreground/30" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* dB Scale Markers */}
|
||||
<div className="absolute inset-0 px-2 py-8 pointer-events-none">
|
||||
<div className="relative h-full">
|
||||
{/* -12 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-border/20" style={{ top: '50%' }} />
|
||||
{/* -6 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-yellow-500/20" style={{ top: '20%' }} />
|
||||
{/* -3 dB */}
|
||||
<div className="absolute left-0 right-0 h-px bg-red-500/30" style={{ top: '10%' }} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Value and Level Display (Right) */}
|
||||
<div className="flex flex-col justify-between items-start text-[9px] font-mono py-1 w-[36px]">
|
||||
{/* Current dB Value */}
|
||||
<div className={cn(
|
||||
'font-bold text-[11px]',
|
||||
valueDb > -3 ? 'text-red-500' :
|
||||
valueDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{valueDb > -60 ? `${valueDb.toFixed(1)}` : '-∞'}
|
||||
</div>
|
||||
|
||||
{/* Spacer */}
|
||||
<div className="flex-1" />
|
||||
|
||||
{/* Peak Level */}
|
||||
<div className="flex flex-col items-start">
|
||||
<span className="text-muted-foreground/60">PK</span>
|
||||
<span className={cn(
|
||||
'font-mono text-[10px]',
|
||||
peakDb > -3 ? 'text-red-500' :
|
||||
peakDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{peakDb > -60 ? `${peakDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* RMS Level */}
|
||||
<div className="flex flex-col items-start">
|
||||
<span className="text-muted-foreground/60">RM</span>
|
||||
<span className={cn(
|
||||
'font-mono text-[10px]',
|
||||
rmsDb > -3 ? 'text-red-500' :
|
||||
rmsDb > -6 ? 'text-yellow-500' :
|
||||
'text-green-500'
|
||||
)}>
|
||||
{rmsDb > -60 ? `${rmsDb.toFixed(1)}` : '-∞'}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* dB Label */}
|
||||
<span className="text-muted-foreground/60 text-[8px]">dB</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
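Two pieces of math in TrackFader are easy to misread: the fader stores a linear 0-1 gain but displays it in dB via 20·log10 (clamped to -60 dB), and the handle is positioned inside the padded 20%-80% band of the container. A standalone sketch of both mappings, mirroring the component's constants:

// Mirrors TrackFader's display math (not imported from the component).
function linearToDb(linear: number): number {
  if (linear === 0) return -60;
  return Math.max(-60, Math.min(0, 20 * Math.log10(linear)));
}

// Handle position: value 1 -> 20% from the top, value 0 -> 80% from the top,
// matching the top-8 / bottom-8 (2rem) padding of the h-40 track.
function handleTopPercent(value: number): number {
  return 20 + (1 - value) * 60;
}

console.log(linearToDb(1));         // 0 dB (unity gain)
console.log(linearToDb(0.5));       // ~ -6.0 dB
console.log(handleTopPercent(1));   // 20 (handle at the top of its travel)
console.log(handleTopPercent(0.5)); // 50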
@@ -29,12 +29,12 @@ export function TrackHeader({
   onNameChange,
 }: TrackHeaderProps) {
   const [isEditingName, setIsEditingName] = React.useState(false);
-  const [nameInput, setNameInput] = React.useState(track.name);
+  const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
   const inputRef = React.useRef<HTMLInputElement>(null);

   const handleNameClick = () => {
     setIsEditingName(true);
-    setNameInput(track.name);
+    setNameInput(String(track.name || 'Untitled Track'));
   };

   const handleNameBlur = () => {
@@ -42,7 +42,7 @@ export function TrackHeader({
     if (nameInput.trim()) {
       onNameChange(nameInput.trim());
     } else {
-      setNameInput(track.name);
+      setNameInput(String(track.name || 'Untitled Track'));
     }
   };

@@ -50,7 +50,7 @@ export function TrackHeader({
     if (e.key === 'Enter') {
       inputRef.current?.blur();
     } else if (e.key === 'Escape') {
-      setNameInput(track.name);
+      setNameInput(String(track.name || 'Untitled Track'));
       setIsEditingName(false);
     }
   };
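The three call sites changed above repeat the same 'Untitled Track' fallback. If that pattern keeps growing, it could be centralized; the helper below is a suggestion, not something present in this diff.

// Hypothetical helper: normalizes a possibly-missing track name for display and editing.
export function displayTrackName(name: string | null | undefined): string {
  return String(name || 'Untitled Track');
}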
File diff suppressed because it is too large
243  components/ui/CircularKnob.tsx  Normal file
@@ -0,0 +1,243 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface CircularKnobProps {
|
||||
value: number; // -1.0 to 1.0 for pan
|
||||
onChange: (value: number) => void;
|
||||
min?: number;
|
||||
max?: number;
|
||||
step?: number;
|
||||
size?: number;
|
||||
className?: string;
|
||||
label?: string;
|
||||
formatValue?: (value: number) => string;
|
||||
onTouchStart?: () => void;
|
||||
onTouchEnd?: () => void;
|
||||
}
|
||||
|
||||
export function CircularKnob({
|
||||
value,
|
||||
onChange,
|
||||
min = -1,
|
||||
max = 1,
|
||||
step = 0.01,
|
||||
size = 48,
|
||||
className,
|
||||
label,
|
||||
formatValue,
|
||||
onTouchStart,
|
||||
onTouchEnd,
|
||||
}: CircularKnobProps) {
|
||||
const knobRef = React.useRef<HTMLDivElement>(null);
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
const dragStartRef = React.useRef({ x: 0, y: 0, value: 0 });
|
||||
|
||||
const updateValue = React.useCallback(
|
||||
(clientX: number, clientY: number) => {
|
||||
if (!knobRef.current) return;
|
||||
|
||||
const rect = knobRef.current.getBoundingClientRect();
|
||||
const centerX = rect.left + rect.width / 2;
|
||||
const centerY = rect.top + rect.height / 2;
|
||||
|
||||
// Calculate vertical drag distance from start
|
||||
const deltaY = dragStartRef.current.y - clientY;
|
||||
const sensitivity = 200; // pixels for full range
|
||||
const range = max - min;
|
||||
const delta = (deltaY / sensitivity) * range;
|
||||
|
||||
let newValue = dragStartRef.current.value + delta;
|
||||
|
||||
// Snap to step
|
||||
if (step) {
|
||||
newValue = Math.round(newValue / step) * step;
|
||||
}
|
||||
|
||||
// Clamp to range
|
||||
newValue = Math.max(min, Math.min(max, newValue));
|
||||
|
||||
onChange(newValue);
|
||||
},
|
||||
[min, max, step, onChange]
|
||||
);
|
||||
|
||||
const handleMouseDown = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
setIsDragging(true);
|
||||
dragStartRef.current = {
|
||||
x: e.clientX,
|
||||
y: e.clientY,
|
||||
value,
|
||||
};
|
||||
onTouchStart?.();
|
||||
},
|
||||
[value, onTouchStart]
|
||||
);
|
||||
|
||||
const handleMouseMove = React.useCallback(
|
||||
(e: MouseEvent) => {
|
||||
if (isDragging) {
|
||||
updateValue(e.clientX, e.clientY);
|
||||
}
|
||||
},
|
||||
[isDragging, updateValue]
|
||||
);
|
||||
|
||||
const handleMouseUp = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
const handleTouchStart = React.useCallback(
|
||||
(e: React.TouchEvent) => {
|
||||
e.preventDefault();
|
||||
const touch = e.touches[0];
|
||||
setIsDragging(true);
|
||||
dragStartRef.current = {
|
||||
x: touch.clientX,
|
||||
y: touch.clientY,
|
||||
value,
|
||||
};
|
||||
onTouchStart?.();
|
||||
},
|
||||
[value, onTouchStart]
|
||||
);
|
||||
|
||||
const handleTouchMove = React.useCallback(
|
||||
(e: TouchEvent) => {
|
||||
if (isDragging && e.touches.length > 0) {
|
||||
const touch = e.touches[0];
|
||||
updateValue(touch.clientX, touch.clientY);
|
||||
}
|
||||
},
|
||||
[isDragging, updateValue]
|
||||
);
|
||||
|
||||
const handleTouchEnd = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (isDragging) {
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
window.addEventListener('touchmove', handleTouchMove);
|
||||
window.addEventListener('touchend', handleTouchEnd);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
window.removeEventListener('touchmove', handleTouchMove);
|
||||
window.removeEventListener('touchend', handleTouchEnd);
|
||||
};
|
||||
}
|
||||
}, [isDragging, handleMouseMove, handleMouseUp, handleTouchMove, handleTouchEnd]);
|
||||
|
||||
// Calculate rotation angle (-135deg to 135deg, 270deg range)
|
||||
const percentage = (value - min) / (max - min);
|
||||
const angle = -135 + percentage * 270;
|
||||
|
||||
const displayValue = formatValue
|
||||
? formatValue(value)
|
||||
: value === 0
|
||||
? 'C'
|
||||
: value < 0
|
||||
? `L${Math.abs(Math.round(value * 100))}`
|
||||
: `R${Math.round(value * 100)}`;
|
||||
|
||||
// Calculate arc parameters for center-based rendering
|
||||
const isNearCenter = Math.abs(value) < 0.01;
|
||||
const centerPercentage = 0.5; // Center position (50%)
|
||||
|
||||
// Arc goes from center to current value
|
||||
let arcStartPercentage: number;
|
||||
let arcLength: number;
|
||||
|
||||
if (value < -0.01) {
|
||||
// Left side: arc from value to center
|
||||
arcStartPercentage = percentage;
|
||||
arcLength = centerPercentage - percentage;
|
||||
} else if (value > 0.01) {
|
||||
// Right side: arc from center to value
|
||||
arcStartPercentage = centerPercentage;
|
||||
arcLength = percentage - centerPercentage;
|
||||
} else {
|
||||
// Center: no arc
|
||||
arcStartPercentage = centerPercentage;
|
||||
arcLength = 0;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn('flex flex-col items-center gap-1', className)}>
|
||||
{label && (
|
||||
<div className="text-[10px] text-muted-foreground uppercase tracking-wide">
|
||||
{label}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div
|
||||
ref={knobRef}
|
||||
onMouseDown={handleMouseDown}
|
||||
onTouchStart={handleTouchStart}
|
||||
className="relative cursor-pointer select-none"
|
||||
style={{ width: size, height: size }}
|
||||
>
|
||||
{/* Outer ring */}
|
||||
<svg
|
||||
width={size}
|
||||
height={size}
|
||||
viewBox={`0 0 ${size} ${size}`}
|
||||
className="absolute inset-0"
|
||||
>
|
||||
{/* Background arc */}
|
||||
<circle
|
||||
cx={size / 2}
|
||||
cy={size / 2}
|
||||
r={size / 2 - 4}
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
className="text-muted/30"
|
||||
/>
|
||||
|
||||
{/* Value arc - only show when not centered */}
|
||||
{!isNearCenter && (
|
||||
<circle
|
||||
cx={size / 2}
|
||||
cy={size / 2}
|
||||
r={size / 2 - 4}
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="3"
|
||||
strokeLinecap="round"
|
||||
className="text-primary"
|
||||
strokeDasharray={`${(arcLength * 270 * Math.PI * (size / 2 - 4)) / 180} ${(Math.PI * 2 * (size / 2 - 4))}`}
|
||||
transform={`rotate(${-225 + arcStartPercentage * 270} ${size / 2} ${size / 2})`}
|
||||
/>
|
||||
)}
|
||||
</svg>
|
||||
|
||||
{/* Knob body */}
|
||||
<div
|
||||
className="absolute inset-0 rounded-full bg-card border-2 border-border shadow-sm flex items-center justify-center transition-transform hover:scale-105 active:scale-95"
|
||||
style={{
|
||||
transform: `rotate(${angle}deg)`,
|
||||
margin: '4px',
|
||||
}}
|
||||
>
|
||||
{/* Indicator line */}
|
||||
<div className="absolute top-1 left-1/2 w-0.5 h-2 bg-primary rounded-full -translate-x-1/2" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Value Display */}
|
||||
<div className="text-[10px] font-medium text-foreground min-w-[32px] text-center">
|
||||
{displayValue}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
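CircularKnob turns a vertical drag into a value change (200 px of drag covers the full range) and then turns the value into a rotation between -135° and +135°. A standalone sketch of those two mappings, using the same constants as the component:

// Mirrors CircularKnob's drag and rotation math (constants copied from above).
const SENSITIVITY_PX = 200; // pixels of vertical drag for the full value range

function dragToValue(
  startValue: number,
  startY: number,
  currentY: number,
  min: number,
  max: number,
  step: number
): number {
  const delta = ((startY - currentY) / SENSITIVITY_PX) * (max - min);
  let next = startValue + delta;
  if (step) next = Math.round(next / step) * step;
  return Math.max(min, Math.min(max, next));
}

function valueToAngle(value: number, min: number, max: number): number {
  const percentage = (value - min) / (max - min);
  return -135 + percentage * 270; // 270 degrees of travel
}

console.log(dragToValue(0, 300, 200, -1, 1, 0.01)); // dragged up 100px from center -> 1 (hard right)
console.log(valueToAngle(0, -1, 1));                // 0 degrees (indicator straight up)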
@@ -144,7 +144,7 @@ export function CommandPalette({ actions, className }: CommandPaletteProps) {
       </div>

       {/* Results */}
-      <div className="max-h-96 overflow-y-auto p-2">
+      <div className="max-h-96 overflow-y-auto custom-scrollbar p-2">
         {Object.keys(groupedActions).length === 0 ? (
           <div className="p-8 text-center text-muted-foreground text-sm">
             No commands found

@@ -102,7 +102,7 @@ export function Modal({
       </div>

       {/* Content */}
-      <div className="flex-1 overflow-y-auto p-4">
+      <div className="flex-1 overflow-y-auto custom-scrollbar p-4">
         {children}
       </div>

@@ -13,6 +13,8 @@ export interface SliderProps
   step?: number;
   label?: string;
   showValue?: boolean;
+  onTouchStart?: () => void;
+  onTouchEnd?: () => void;
 }

 const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
@@ -28,6 +30,8 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
       label,
       showValue = false,
       disabled,
+      onTouchStart,
+      onTouchEnd,
       ...props
     },
     ref
@@ -41,6 +45,21 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
       onValueChange?.([numValue]);
     };

+    const handleMouseDown = () => {
+      onTouchStart?.();
+    };
+
+    const handleMouseUp = () => {
+      onTouchEnd?.();
+    };
+
+    React.useEffect(() => {
+      if (onTouchEnd) {
+        window.addEventListener('mouseup', handleMouseUp);
+        return () => window.removeEventListener('mouseup', handleMouseUp);
+      }
+    }, [onTouchEnd]);
+
     return (
       <div className={cn('w-full', className)}>
         {(label || showValue) && (
@@ -63,6 +82,7 @@ const Slider = React.forwardRef<HTMLInputElement, SliderProps>(
           step={step}
           value={currentValue}
           onChange={handleChange}
+          onMouseDown={handleMouseDown}
           disabled={disabled}
           className={cn(
             'w-full h-2 bg-secondary rounded-lg appearance-none cursor-pointer',
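The Slider change above registers mouseup on window rather than on the input, so onTouchEnd still fires when the pointer is released outside the element (the usual "drag past the edge" case). The same idea as a reusable hook, offered as a sketch rather than existing project code:

import * as React from 'react';

// Hypothetical hook: fires `onRelease` on any mouseup anywhere in the window
// while a callback is provided, and cleans the listener up on unmount.
export function useGlobalMouseUp(onRelease?: () => void): void {
  React.useEffect(() => {
    if (!onRelease) return;
    const handleUp = () => onRelease();
    window.addEventListener('mouseup', handleUp);
    return () => window.removeEventListener('mouseup', handleUp);
  }, [onRelease]);
}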
165  components/ui/VerticalFader.tsx  Normal file
@@ -0,0 +1,165 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
import { cn } from '@/lib/utils/cn';
|
||||
|
||||
export interface VerticalFaderProps {
|
||||
value: number; // 0.0 to 1.0
|
||||
level?: number; // 0.0 to 1.0 (for level meter display)
|
||||
onChange: (value: number) => void;
|
||||
min?: number;
|
||||
max?: number;
|
||||
step?: number;
|
||||
className?: string;
|
||||
showDb?: boolean;
|
||||
onTouchStart?: () => void;
|
||||
onTouchEnd?: () => void;
|
||||
}
|
||||
|
||||
export function VerticalFader({
|
||||
value,
|
||||
level = 0,
|
||||
onChange,
|
||||
min = 0,
|
||||
max = 1,
|
||||
step = 0.01,
|
||||
className,
|
||||
showDb = true,
|
||||
onTouchStart,
|
||||
onTouchEnd,
|
||||
}: VerticalFaderProps) {
|
||||
const trackRef = React.useRef<HTMLDivElement>(null);
|
||||
const [isDragging, setIsDragging] = React.useState(false);
|
||||
|
||||
const updateValue = React.useCallback(
|
||||
(clientY: number) => {
|
||||
if (!trackRef.current) return;
|
||||
|
||||
const rect = trackRef.current.getBoundingClientRect();
|
||||
const height = rect.height;
|
||||
const y = Math.max(0, Math.min(height, clientY - rect.top));
|
||||
|
||||
// Invert Y (top = max, bottom = min)
|
||||
const percentage = 1 - y / height;
|
||||
const range = max - min;
|
||||
let newValue = min + percentage * range;
|
||||
|
||||
// Snap to step
|
||||
if (step) {
|
||||
newValue = Math.round(newValue / step) * step;
|
||||
}
|
||||
|
||||
// Clamp to range
|
||||
newValue = Math.max(min, Math.min(max, newValue));
|
||||
|
||||
onChange(newValue);
|
||||
},
|
||||
[min, max, step, onChange]
|
||||
);
|
||||
|
||||
const handleMouseDown = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
setIsDragging(true);
|
||||
updateValue(e.clientY);
|
||||
onTouchStart?.();
|
||||
},
|
||||
[updateValue, onTouchStart]
|
||||
);
|
||||
|
||||
const handleMouseMove = React.useCallback(
|
||||
(e: MouseEvent) => {
|
||||
if (isDragging) {
|
||||
updateValue(e.clientY);
|
||||
}
|
||||
},
|
||||
[isDragging, updateValue]
|
||||
);
|
||||
|
||||
const handleMouseUp = React.useCallback(() => {
|
||||
setIsDragging(false);
|
||||
onTouchEnd?.();
|
||||
}, [onTouchEnd]);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (isDragging) {
|
||||
window.addEventListener('mousemove', handleMouseMove);
|
||||
window.addEventListener('mouseup', handleMouseUp);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('mousemove', handleMouseMove);
|
||||
window.removeEventListener('mouseup', handleMouseUp);
|
||||
};
|
||||
}
|
||||
}, [isDragging, handleMouseMove, handleMouseUp]);
|
||||
|
||||
// Convert value to percentage (0-100)
|
||||
const valuePercentage = ((value - min) / (max - min)) * 100;
|
||||
|
||||
// Convert level to dB for display
|
||||
const db = value === 0 ? -Infinity : 20 * Math.log10(value);
|
||||
const levelDb = level === 0 ? -Infinity : (level * 60) - 60;
|
||||
|
||||
return (
|
||||
<div className={cn('flex flex-col items-center gap-1', className)}>
|
||||
{/* dB Display */}
|
||||
{showDb && (
|
||||
<div className="text-[10px] font-mono text-muted-foreground min-w-[32px] text-center">
|
||||
{db === -Infinity ? '-∞' : `${db.toFixed(1)}`}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Fader Track */}
|
||||
<div
|
||||
ref={trackRef}
|
||||
onMouseDown={handleMouseDown}
|
||||
className="relative w-8 flex-1 min-h-[80px] max-h-[140px] bg-background/50 border border-border rounded cursor-pointer select-none overflow-hidden"
|
||||
>
|
||||
{/* Volume Level Overlay - subtle fill up to fader handle */}
|
||||
<div
|
||||
className="absolute bottom-0 left-0 right-0 bg-primary/10"
|
||||
style={{ height: `${valuePercentage}%` }}
|
||||
/>
|
||||
|
||||
{/* Level Meter (actual level) - capped at fader handle position */}
|
||||
<div
|
||||
className="absolute bottom-0 left-0 right-0 transition-all duration-75"
|
||||
style={{
|
||||
height: `${Math.min(level * 100, valuePercentage)}%`,
|
||||
background: 'linear-gradient(to top, rgb(34, 197, 94) 0%, rgb(34, 197, 94) 70%, rgb(234, 179, 8) 85%, rgb(239, 68, 68) 100%)',
|
||||
opacity: 0.6,
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Volume Value Fill - Removed to show gradient spectrum */}
|
||||
|
||||
{/* Fader Handle */}
|
||||
<div
|
||||
className="absolute left-0 right-0 h-3 -ml-1 -mr-1 bg-primary/70 border-2 border-primary rounded-sm shadow-lg cursor-grab active:cursor-grabbing backdrop-blur-sm"
|
||||
style={{
|
||||
bottom: `calc(${valuePercentage}% - 6px)`,
|
||||
width: 'calc(100% + 8px)',
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Scale Marks */}
|
||||
<div className="absolute inset-0 pointer-events-none">
|
||||
{[0.25, 0.5, 0.75].map((mark) => (
|
||||
<div
|
||||
key={mark}
|
||||
className="absolute left-0 right-0 h-px bg-background/50"
|
||||
style={{ bottom: `${mark * 100}%` }}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Level dB Display */}
|
||||
{showDb && (
|
||||
<div className="text-[10px] font-mono text-muted-foreground min-w-[32px] text-center">
|
||||
{levelDb === -Infinity ? '-∞' : `${levelDb.toFixed(0)}`}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
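Note that VerticalFader uses two different dB conversions: the fader value readout is a true logarithmic conversion (20·log10 of the linear gain), while the level readout stretches the linear 0-1 meter level across a -60…0 dB range as an approximation. Side by side:

// Fader value readout: exact dB from a linear 0-1 gain.
function valueToDb(value: number): number {
  return value === 0 ? -Infinity : 20 * Math.log10(value);
}

// Level readout: linear 0-1 level mapped onto -60..0 dB, as in VerticalFader
// (a cheap approximation, not a logarithmic conversion).
function levelToDb(level: number): number {
  return level === 0 ? -Infinity : level * 60 - 60;
}

console.log(valueToDb(0.5)); // ~ -6.0
console.log(levelToDb(0.5)); // -30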
233  lib/audio/automation-utils.ts  Normal file
@@ -0,0 +1,233 @@
|
||||
/**
|
||||
* Automation utility functions for creating and manipulating automation data
|
||||
*/
|
||||
|
||||
import type {
|
||||
AutomationLane,
|
||||
AutomationPoint,
|
||||
AutomationCurveType,
|
||||
AutomationMode,
|
||||
CreateAutomationPointInput,
|
||||
} from '@/types/automation';
|
||||
|
||||
/**
|
||||
* Generate unique automation point ID
|
||||
*/
|
||||
export function generateAutomationPointId(): string {
|
||||
return `autopoint-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate unique automation lane ID
|
||||
*/
|
||||
export function generateAutomationLaneId(): string {
|
||||
return `autolane-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new automation point
|
||||
*/
|
||||
export function createAutomationPoint(
|
||||
input: CreateAutomationPointInput
|
||||
): AutomationPoint {
|
||||
return {
|
||||
id: generateAutomationPointId(),
|
||||
...input,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new automation lane
|
||||
*/
|
||||
export function createAutomationLane(
|
||||
trackId: string,
|
||||
parameterId: string,
|
||||
parameterName: string,
|
||||
valueRange: {
|
||||
min: number;
|
||||
max: number;
|
||||
unit?: string;
|
||||
formatter?: (value: number) => string;
|
||||
}
|
||||
): AutomationLane {
|
||||
return {
|
||||
id: generateAutomationLaneId(),
|
||||
trackId,
|
||||
parameterId,
|
||||
parameterName,
|
||||
visible: true,
|
||||
height: 80,
|
||||
points: [],
|
||||
mode: 'read',
|
||||
valueRange,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Linear interpolation between two values
|
||||
*/
|
||||
function lerp(a: number, b: number, t: number): number {
|
||||
return a + (b - a) * t;
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate automation value at a specific time using linear interpolation
|
||||
*/
|
||||
export function evaluateAutomationLinear(
|
||||
points: AutomationPoint[],
|
||||
time: number
|
||||
): number {
|
||||
if (points.length === 0) return 0.5; // Default middle value
|
||||
if (points.length === 1) return points[0].value;
|
||||
|
||||
// Sort points by time (should already be sorted, but ensure it)
|
||||
const sortedPoints = [...points].sort((a, b) => a.time - b.time);
|
||||
|
||||
// Before first point
|
||||
if (time <= sortedPoints[0].time) {
|
||||
return sortedPoints[0].value;
|
||||
}
|
||||
|
||||
// After last point
|
||||
if (time >= sortedPoints[sortedPoints.length - 1].time) {
|
||||
return sortedPoints[sortedPoints.length - 1].value;
|
||||
}
|
||||
|
||||
// Find surrounding points
|
||||
for (let i = 0; i < sortedPoints.length - 1; i++) {
|
||||
const p1 = sortedPoints[i];
|
||||
const p2 = sortedPoints[i + 1];
|
||||
|
||||
if (time >= p1.time && time <= p2.time) {
|
||||
// Handle step curve
|
||||
if (p1.curve === 'step') {
|
||||
return p1.value;
|
||||
}
|
||||
|
||||
// Linear interpolation
|
||||
const t = (time - p1.time) / (p2.time - p1.time);
|
||||
return lerp(p1.value, p2.value, t);
|
||||
}
|
||||
}
|
||||
|
||||
return sortedPoints[sortedPoints.length - 1].value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an automation point to a lane, maintaining time-sorted order
|
||||
*/
|
||||
export function addAutomationPoint(
|
||||
lane: AutomationLane,
|
||||
point: CreateAutomationPointInput
|
||||
): AutomationLane {
|
||||
const newPoint = createAutomationPoint(point);
|
||||
const points = [...lane.points, newPoint].sort((a, b) => a.time - b.time);
|
||||
|
||||
return {
|
||||
...lane,
|
||||
points,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an automation point by ID
|
||||
*/
|
||||
export function removeAutomationPoint(
|
||||
lane: AutomationLane,
|
||||
pointId: string
|
||||
): AutomationLane {
|
||||
return {
|
||||
...lane,
|
||||
points: lane.points.filter((p) => p.id !== pointId),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an automation point's time and/or value
|
||||
*/
|
||||
export function updateAutomationPoint(
|
||||
lane: AutomationLane,
|
||||
pointId: string,
|
||||
updates: { time?: number; value?: number; curve?: AutomationCurveType }
|
||||
): AutomationLane {
|
||||
const points = lane.points.map((p) =>
|
||||
p.id === pointId ? { ...p, ...updates } : p
|
||||
);
|
||||
|
||||
// Re-sort by time if time was updated
|
||||
if (updates.time !== undefined) {
|
||||
points.sort((a, b) => a.time - b.time);
|
||||
}
|
||||
|
||||
return {
|
||||
...lane,
|
||||
points,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all automation points in a time range
|
||||
*/
|
||||
export function clearAutomationRange(
|
||||
lane: AutomationLane,
|
||||
startTime: number,
|
||||
endTime: number
|
||||
): AutomationLane {
|
||||
return {
|
||||
...lane,
|
||||
points: lane.points.filter((p) => p.time < startTime || p.time > endTime),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format automation value for display based on lane's value range
|
||||
*/
|
||||
export function formatAutomationValue(
|
||||
lane: AutomationLane,
|
||||
normalizedValue: number
|
||||
): string {
|
||||
const { min, max, unit, formatter } = lane.valueRange;
|
||||
|
||||
if (formatter) {
|
||||
const actualValue = lerp(min, max, normalizedValue);
|
||||
return formatter(actualValue);
|
||||
}
|
||||
|
||||
const actualValue = lerp(min, max, normalizedValue);
|
||||
|
||||
// Format based on unit
|
||||
if (unit === 'dB') {
|
||||
// Convert to dB scale
|
||||
const db = normalizedValue === 0 ? -Infinity : 20 * Math.log10(normalizedValue);
|
||||
return db === -Infinity ? '-∞ dB' : `${db.toFixed(1)} dB`;
|
||||
}
|
||||
|
||||
if (unit === '%') {
|
||||
return `${(actualValue * 100).toFixed(0)}%`;
|
||||
}
|
||||
|
||||
if (unit === 'ms') {
|
||||
return `${actualValue.toFixed(1)} ms`;
|
||||
}
|
||||
|
||||
if (unit === 'Hz') {
|
||||
return `${actualValue.toFixed(0)} Hz`;
|
||||
}
|
||||
|
||||
// Default: 2 decimal places with unit
|
||||
return unit ? `${actualValue.toFixed(2)} ${unit}` : actualValue.toFixed(2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Snap value to grid (useful for user input)
|
||||
*/
|
||||
export function snapToGrid(value: number, gridSize: number = 0.25): number {
|
||||
return Math.round(value / gridSize) * gridSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clamp value between 0 and 1
|
||||
*/
|
||||
export function clampNormalized(value: number): number {
|
||||
return Math.max(0, Math.min(1, value));
|
||||
}
|
||||
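A minimal usage sketch of the helpers above: build a volume lane, add two points, and sample the interpolated value halfway through the fade. It assumes CreateAutomationPointInput carries { time, value, curve }, which matches how points are used in this file.

import {
  createAutomationLane,
  addAutomationPoint,
  evaluateAutomationLinear,
} from '@/lib/audio/automation-utils';

// A 0..1 volume lane for a track.
let lane = createAutomationLane('track-1', 'volume', 'Volume', { min: 0, max: 1, unit: '%' });

// Fade in from silence at 0s to full volume at 4s.
lane = addAutomationPoint(lane, { time: 0, value: 0, curve: 'linear' });
lane = addAutomationPoint(lane, { time: 4, value: 1, curve: 'linear' });

console.log(evaluateAutomationLinear(lane.points, 2)); // 0.5, halfway through the fade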
167  lib/audio/automation/playback.ts  Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* Automation playback engine
|
||||
* Applies automation to track parameters in real-time during playback
|
||||
*/
|
||||
|
||||
import type { Track } from '@/types/track';
|
||||
import type { AutomationLane, AutomationValue } from '@/types/automation';
|
||||
import { interpolateAutomationValue, applyAutomationToTrack } from './utils';
|
||||
|
||||
/**
|
||||
* Get all automation values at a specific time
|
||||
*/
|
||||
export function getAutomationValuesAtTime(
|
||||
track: Track,
|
||||
time: number
|
||||
): AutomationValue[] {
|
||||
if (!track.automation || track.automation.lanes.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const values: AutomationValue[] = [];
|
||||
|
||||
for (const lane of track.automation.lanes) {
|
||||
// Skip lanes in write mode (don't apply during playback)
|
||||
if (lane.mode === 'write') continue;
|
||||
|
||||
    // Skip lanes with no points
    if (lane.points.length === 0) continue;

    const value = interpolateAutomationValue(lane.points, time);

    values.push({
      parameterId: lane.parameterId,
      value,
      time,
    });
  }

  return values;
}

/**
 * Apply automation values to a track
 * Returns a new track object with automated parameters applied
 */
export function applyAutomationValues(
  track: Track,
  values: AutomationValue[]
): Track {
  let updatedTrack = track;

  for (const automation of values) {
    updatedTrack = applyAutomationToTrack(
      updatedTrack,
      automation.parameterId,
      automation.value
    );
  }

  return updatedTrack;
}

/**
 * Apply automation to all tracks at a specific time
 * Returns a new tracks array with automation applied
 */
export function applyAutomationToTracks(
  tracks: Track[],
  time: number
): Track[] {
  return tracks.map((track) => {
    const automationValues = getAutomationValuesAtTime(track, time);

    if (automationValues.length === 0) {
      return track;
    }

    return applyAutomationValues(track, automationValues);
  });
}

/**
 * Record automation point during playback
 */
export function recordAutomationPoint(
  lane: AutomationLane,
  time: number,
  value: number
): AutomationLane {
  // In write mode, replace all existing points in the recorded region
  // For simplicity, just add the point for now
  // TODO: Implement proper write mode that clears existing points

  const newPoint = {
    id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
    time,
    value,
    curve: 'linear' as const,
  };

  return {
    ...lane,
    points: [...lane.points, newPoint],
  };
}

/**
 * Automation playback scheduler
 * Schedules automation updates at regular intervals during playback
 */
export class AutomationPlaybackScheduler {
  private intervalId: number | null = null;
  private updateInterval: number = 50; // Update every 50ms (20 Hz)
  private onUpdate: ((time: number) => void) | null = null;

  /**
   * Start the automation scheduler
   */
  start(onUpdate: (time: number) => void): void {
    if (this.intervalId !== null) {
      this.stop();
    }

    this.onUpdate = onUpdate;
    this.intervalId = window.setInterval(() => {
      // Get current playback time from your audio engine
      // This is a placeholder - you'll need to integrate with your actual playback system
      if (this.onUpdate) {
        // Call update callback with current time
        // The callback should get the time from your actual playback system
        this.onUpdate(0); // Placeholder
      }
    }, this.updateInterval);
  }

  /**
   * Stop the automation scheduler
   */
  stop(): void {
    if (this.intervalId !== null) {
      window.clearInterval(this.intervalId);
      this.intervalId = null;
    }
    this.onUpdate = null;
  }

  /**
   * Set update interval (in milliseconds)
   */
  setUpdateInterval(interval: number): void {
    this.updateInterval = Math.max(10, Math.min(1000, interval));

    // Restart if already running
    if (this.intervalId !== null && this.onUpdate) {
      const callback = this.onUpdate;
      this.stop();
      this.start(callback);
    }
  }

  /**
   * Check if scheduler is running
   */
  isRunning(): boolean {
    return this.intervalId !== null;
  }
}
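
/**
 * Usage sketch (illustrative only, not part of this diff): one way to wire the
 * scheduler above to a playback clock. `getPlaybackTime` and `onTracksChange` are
 * assumed host-app hooks, not part of this module.
 */
export function startAutomationPlayback(
  tracks: Track[],
  getPlaybackTime: () => number,
  onTracksChange: (tracks: Track[]) => void
): AutomationPlaybackScheduler {
  const scheduler = new AutomationPlaybackScheduler();
  // On every tick, read the transport position and hand the automated tracks back to the host.
  scheduler.start(() => {
    const time = getPlaybackTime();
    onTracksChange(applyAutomationToTracks(tracks, time));
  });
  return scheduler;
}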

303  lib/audio/automation/utils.ts  Normal file
@@ -0,0 +1,303 @@
/**
 * Automation utility functions
 */

import type {
  AutomationLane,
  AutomationPoint,
  CreateAutomationLaneInput,
  CreateAutomationPointInput,
  AutomationParameterId,
} from '@/types/automation';

/**
 * Generate a unique automation point ID
 */
export function generateAutomationPointId(): string {
  return `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
}

/**
 * Generate a unique automation lane ID
 */
export function generateAutomationLaneId(): string {
  return `lane-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
}

/**
 * Create a new automation point
 */
export function createAutomationPoint(
  input: CreateAutomationPointInput
): AutomationPoint {
  return {
    id: generateAutomationPointId(),
    ...input,
  };
}

/**
 * Create a new automation lane
 */
export function createAutomationLane(
  trackId: string,
  parameterId: AutomationParameterId,
  parameterName: string,
  input?: Partial<CreateAutomationLaneInput>
): AutomationLane {
  return {
    id: generateAutomationLaneId(),
    trackId,
    parameterId,
    parameterName,
    visible: input?.visible ?? true,
    height: input?.height ?? 80,
    points: input?.points ?? [],
    mode: input?.mode ?? 'read',
    color: input?.color,
    valueRange: input?.valueRange ?? {
      min: 0,
      max: 1,
    },
  };
}

/**
 * Create a volume automation lane
 */
export function createVolumeAutomationLane(trackId: string): AutomationLane {
  return createAutomationLane(trackId, 'volume', 'Volume', {
    valueRange: {
      min: 0,
      max: 1,
      formatter: (value) => `${(value * 100).toFixed(0)}%`,
    },
    color: 'rgb(34, 197, 94)', // green
  });
}

/**
 * Create a pan automation lane
 */
export function createPanAutomationLane(trackId: string): AutomationLane {
  return createAutomationLane(trackId, 'pan', 'Pan', {
    valueRange: {
      min: -1,
      max: 1,
      formatter: (value) => {
        const normalized = value * 2 - 1; // Convert 0-1 to -1-1
        if (normalized === 0) return 'C';
        if (normalized < 0) return `L${Math.abs(Math.round(normalized * 100))}`;
        return `R${Math.round(normalized * 100)}`;
      },
    },
    color: 'rgb(59, 130, 246)', // blue
  });
}

/**
 * Interpolate automation value at a specific time
 */
export function interpolateAutomationValue(
  points: AutomationPoint[],
  time: number
): number {
  if (points.length === 0) return 0;

  const sortedPoints = [...points].sort((a, b) => a.time - b.time);

  // Before first point
  if (time <= sortedPoints[0].time) {
    return sortedPoints[0].value;
  }

  // After last point
  if (time >= sortedPoints[sortedPoints.length - 1].time) {
    return sortedPoints[sortedPoints.length - 1].value;
  }

  // Find surrounding points
  for (let i = 0; i < sortedPoints.length - 1; i++) {
    const prevPoint = sortedPoints[i];
    const nextPoint = sortedPoints[i + 1];

    if (time >= prevPoint.time && time <= nextPoint.time) {
      // Handle step curve
      if (prevPoint.curve === 'step') {
        return prevPoint.value;
      }

      // Handle bezier curve
      if (prevPoint.curve === 'bezier') {
        const timeDelta = nextPoint.time - prevPoint.time;
        const t = (time - prevPoint.time) / timeDelta;
        return interpolateBezier(prevPoint, nextPoint, t);
      }

      // Linear interpolation (default)
      const timeDelta = nextPoint.time - prevPoint.time;
      const valueDelta = nextPoint.value - prevPoint.value;
      const progress = (time - prevPoint.time) / timeDelta;

      return prevPoint.value + valueDelta * progress;
    }
  }

  return 0;
}

/**
 * Interpolate value using cubic Bezier curve
 * Uses the control handles from both points to create smooth curves
 */
function interpolateBezier(
  p0: AutomationPoint,
  p1: AutomationPoint,
  t: number
): number {
  // Default handle positions if not specified
  // Out handle defaults to 1/3 towards next point
  // In handle defaults to 1/3 back from current point
  const timeDelta = p1.time - p0.time;

  // Control point 1 (out handle from p0)
  const c1x = p0.handleOut?.x ?? timeDelta / 3;
  const c1y = p0.handleOut?.y ?? 0;

  // Control point 2 (in handle from p1)
  const c2x = p1.handleIn?.x ?? -timeDelta / 3;
  const c2y = p1.handleIn?.y ?? 0;

  // Convert handles to absolute positions
  const cp1Value = p0.value + c1y;
  const cp2Value = p1.value + c2y;

  // Cubic Bezier formula: B(t) = (1-t)³P₀ + 3(1-t)²tP₁ + 3(1-t)t²P₂ + t³P₃
  const mt = 1 - t;
  const mt2 = mt * mt;
  const mt3 = mt2 * mt;
  const t2 = t * t;
  const t3 = t2 * t;

  const value =
    mt3 * p0.value +
    3 * mt2 * t * cp1Value +
    3 * mt * t2 * cp2Value +
    t3 * p1.value;

  return value;
}

/**
 * Create smooth bezier handles for a point based on surrounding points
 * This creates an "auto-smooth" effect similar to DAWs
 */
export function createSmoothHandles(
  prevPoint: AutomationPoint | null,
  currentPoint: AutomationPoint,
  nextPoint: AutomationPoint | null
): { handleIn: { x: number; y: number }; handleOut: { x: number; y: number } } {
  // If no surrounding points, return horizontal handles
  if (!prevPoint && !nextPoint) {
    return {
      handleIn: { x: -0.1, y: 0 },
      handleOut: { x: 0.1, y: 0 },
    };
  }

  // Calculate slope from surrounding points
  let slope = 0;

  if (prevPoint && nextPoint) {
    // Use average slope from both neighbors
    const timeDelta = nextPoint.time - prevPoint.time;
    const valueDelta = nextPoint.value - prevPoint.value;
    slope = valueDelta / timeDelta;
  } else if (nextPoint) {
    // Only have next point
    const timeDelta = nextPoint.time - currentPoint.time;
    const valueDelta = nextPoint.value - currentPoint.value;
    slope = valueDelta / timeDelta;
  } else if (prevPoint) {
    // Only have previous point
    const timeDelta = currentPoint.time - prevPoint.time;
    const valueDelta = currentPoint.value - prevPoint.value;
    slope = valueDelta / timeDelta;
  }

  // Create handles with 1/3 distance to neighbors
  const handleDistance = 0.1; // Fixed distance for smooth curves
  const handleY = slope * handleDistance;

  return {
    handleIn: { x: -handleDistance, y: -handleY },
    handleOut: { x: handleDistance, y: handleY },
  };
}

/**
 * Generate points along a bezier curve for rendering
 * Returns array of {time, value} points
 */
export function generateBezierCurvePoints(
  p0: AutomationPoint,
  p1: AutomationPoint,
  numPoints: number = 50
): Array<{ time: number; value: number }> {
  const points: Array<{ time: number; value: number }> = [];
  const timeDelta = p1.time - p0.time;

  for (let i = 0; i <= numPoints; i++) {
    const t = i / numPoints;
    const time = p0.time + t * timeDelta;
    const value = interpolateBezier(p0, p1, t);
    points.push({ time, value });
  }

  return points;
}

/**
 * Apply automation value to track parameter
 */
export function applyAutomationToTrack(
  track: any,
  parameterId: AutomationParameterId,
  value: number
): any {
  if (parameterId === 'volume') {
    return { ...track, volume: value };
  }

  if (parameterId === 'pan') {
    // Convert 0-1 to -1-1
    return { ...track, pan: value * 2 - 1 };
  }

  // Effect parameters (format: "effect.{effectId}.{paramName}")
  if (parameterId.startsWith('effect.')) {
    const parts = parameterId.split('.');
    if (parts.length === 3) {
      const [, effectId, paramName] = parts;
      return {
        ...track,
        effectChain: {
          ...track.effectChain,
          effects: track.effectChain.effects.map((effect: any) =>
            effect.id === effectId
              ? {
                  ...effect,
                  parameters: {
                    ...effect.parameters,
                    [paramName]: value,
                  },
                }
              : effect
          ),
        },
      };
    }
  }

  return track;
}
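
/**
 * Usage sketch (illustrative, not part of this diff): building a volume lane with a
 * two-second fade-in and sampling it mid-fade. Assumes CreateAutomationPointInput
 * accepts { time, value, curve }.
 */
export function exampleVolumeFadeIn(trackId: string): number {
  const lane = {
    ...createVolumeAutomationLane(trackId),
    points: [
      createAutomationPoint({ time: 0, value: 0, curve: 'linear' }),
      createAutomationPoint({ time: 2, value: 1, curve: 'linear' }),
    ],
  };
  // Halfway through the fade, linear interpolation yields 0.5.
  return interpolateAutomationValue(lane.points, 1);
}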

@@ -165,3 +165,15 @@ export function concatenateBuffers(

  return newBuffer;
}

/**
 * Duplicate a segment of audio buffer (extract and insert it after the selection)
 */
export function duplicateBufferSegment(
  buffer: AudioBuffer,
  startTime: number,
  endTime: number
): AudioBuffer {
  const segment = extractBufferSegment(buffer, startTime, endTime);
  return insertBufferSegment(buffer, segment, endTime);
}
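
// Usage sketch (illustrative, not part of this diff): duplicating a selection from
// 1.0s to 2.5s yields a buffer 1.5s longer than the input, with the copied segment
// placed immediately after the original selection.
//
// const duplicated = duplicateBufferSegment(track.audioBuffer, 1.0, 2.5);
// duplicated.duration === track.audioBuffer.duration + 1.5
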
@@ -3,22 +3,213 @@
|
||||
*/
|
||||
|
||||
import { getAudioContext } from './context';
|
||||
import { checkFileMemoryLimit, type MemoryCheckResult } from '../utils/memory-limits';
|
||||
|
||||
export interface ImportOptions {
|
||||
convertToMono?: boolean;
|
||||
targetSampleRate?: number; // If specified, resample to this rate
|
||||
normalizeOnImport?: boolean;
|
||||
}
|
||||
|
||||
export interface AudioFileInfo {
|
||||
buffer: AudioBuffer;
|
||||
metadata: AudioMetadata;
|
||||
}
|
||||
|
||||
export interface AudioMetadata {
|
||||
fileName: string;
|
||||
fileSize: number;
|
||||
fileType: string;
|
||||
duration: number;
|
||||
sampleRate: number;
|
||||
channels: number;
|
||||
bitDepth?: number;
|
||||
codec?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode an audio file to AudioBuffer
|
||||
* Decode an audio file to AudioBuffer with optional conversions
|
||||
*/
|
||||
export async function decodeAudioFile(file: File): Promise<AudioBuffer> {
|
||||
export async function decodeAudioFile(
|
||||
file: File,
|
||||
options: ImportOptions = {}
|
||||
): Promise<AudioBuffer> {
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const audioContext = getAudioContext();
|
||||
|
||||
try {
|
||||
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
let audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
|
||||
// Apply conversions if requested
|
||||
if (options.convertToMono && audioBuffer.numberOfChannels > 1) {
|
||||
audioBuffer = convertToMono(audioBuffer);
|
||||
}
|
||||
|
||||
if (options.targetSampleRate && audioBuffer.sampleRate !== options.targetSampleRate) {
|
||||
audioBuffer = await resampleAudioBuffer(audioBuffer, options.targetSampleRate);
|
||||
}
|
||||
|
||||
if (options.normalizeOnImport) {
|
||||
audioBuffer = normalizeAudioBuffer(audioBuffer);
|
||||
}
|
||||
|
||||
return audioBuffer;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to decode audio file: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode audio file and return both buffer and metadata
|
||||
*/
|
||||
export async function importAudioFile(
|
||||
file: File,
|
||||
options: ImportOptions = {}
|
||||
): Promise<AudioFileInfo> {
|
||||
const audioBuffer = await decodeAudioFile(file, options);
|
||||
const metadata = extractMetadata(file, audioBuffer);
|
||||
|
||||
return {
|
||||
buffer: audioBuffer,
|
||||
metadata,
|
||||
};
|
||||
}
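
/**
 * Usage sketch (illustrative, not part of this diff): importing a dropped file as
 * mono, resampled to 44.1 kHz, with peak normalization on import.
 */
export async function importDroppedFile(file: File): Promise<AudioFileInfo> {
  const info = await importAudioFile(file, {
    convertToMono: true,
    targetSampleRate: 44100,
    normalizeOnImport: true,
  });
  // metadata carries duration, sample rate, channel count, and the detected codec.
  console.log(`${info.metadata.fileName}: ${info.metadata.duration.toFixed(2)}s, ${info.metadata.codec}`);
  return info;
}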
|
||||
|
||||
/**
|
||||
* Convert stereo (or multi-channel) audio to mono
|
||||
*/
|
||||
function convertToMono(audioBuffer: AudioBuffer): AudioBuffer {
|
||||
const audioContext = getAudioContext();
|
||||
const numberOfChannels = audioBuffer.numberOfChannels;
|
||||
|
||||
if (numberOfChannels === 1) {
|
||||
return audioBuffer; // Already mono
|
||||
}
|
||||
|
||||
// Create a new mono buffer
|
||||
const monoBuffer = audioContext.createBuffer(
|
||||
1,
|
||||
audioBuffer.length,
|
||||
audioBuffer.sampleRate
|
||||
);
|
||||
|
||||
const monoData = monoBuffer.getChannelData(0);
|
||||
|
||||
// Mix all channels equally
|
||||
for (let i = 0; i < audioBuffer.length; i++) {
|
||||
let sum = 0;
|
||||
for (let channel = 0; channel < numberOfChannels; channel++) {
|
||||
sum += audioBuffer.getChannelData(channel)[i];
|
||||
}
|
||||
monoData[i] = sum / numberOfChannels;
|
||||
}
|
||||
|
||||
return monoBuffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resample audio buffer to a different sample rate
|
||||
*/
|
||||
async function resampleAudioBuffer(
|
||||
audioBuffer: AudioBuffer,
|
||||
targetSampleRate: number
|
||||
): Promise<AudioBuffer> {
|
||||
const audioContext = getAudioContext();
|
||||
|
||||
// Create an offline context at the target sample rate
|
||||
const offlineContext = new OfflineAudioContext(
|
||||
audioBuffer.numberOfChannels,
|
||||
Math.ceil(audioBuffer.duration * targetSampleRate),
|
||||
targetSampleRate
|
||||
);
|
||||
|
||||
// Create a buffer source
|
||||
const source = offlineContext.createBufferSource();
|
||||
source.buffer = audioBuffer;
|
||||
source.connect(offlineContext.destination);
|
||||
source.start(0);
|
||||
|
||||
// Render the audio at the new sample rate
|
||||
const resampledBuffer = await offlineContext.startRendering();
|
||||
return resampledBuffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize audio buffer to peak amplitude
|
||||
*/
|
||||
function normalizeAudioBuffer(audioBuffer: AudioBuffer): AudioBuffer {
|
||||
const audioContext = getAudioContext();
|
||||
|
||||
// Find peak amplitude across all channels
|
||||
let peak = 0;
|
||||
for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
|
||||
const channelData = audioBuffer.getChannelData(channel);
|
||||
for (let i = 0; i < channelData.length; i++) {
|
||||
const abs = Math.abs(channelData[i]);
|
||||
if (abs > peak) peak = abs;
|
||||
}
|
||||
}
|
||||
|
||||
if (peak === 0 || peak === 1.0) {
|
||||
return audioBuffer; // Already normalized or silent
|
||||
}
|
||||
|
||||
// Create normalized buffer
|
||||
const normalizedBuffer = audioContext.createBuffer(
|
||||
audioBuffer.numberOfChannels,
|
||||
audioBuffer.length,
|
||||
audioBuffer.sampleRate
|
||||
);
|
||||
|
||||
// Apply normalization with 1% headroom
|
||||
const scale = 0.99 / peak;
|
||||
for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
|
||||
const inputData = audioBuffer.getChannelData(channel);
|
||||
const outputData = normalizedBuffer.getChannelData(channel);
|
||||
for (let i = 0; i < inputData.length; i++) {
|
||||
outputData[i] = inputData[i] * scale;
|
||||
}
|
||||
}
|
||||
|
||||
return normalizedBuffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract metadata from file and audio buffer
|
||||
*/
|
||||
function extractMetadata(file: File, audioBuffer: AudioBuffer): AudioMetadata {
|
||||
// Detect codec from file extension or MIME type
|
||||
const codec = detectCodec(file);
|
||||
|
||||
return {
|
||||
fileName: file.name,
|
||||
fileSize: file.size,
|
||||
fileType: file.type || 'unknown',
|
||||
duration: audioBuffer.duration,
|
||||
sampleRate: audioBuffer.sampleRate,
|
||||
channels: audioBuffer.numberOfChannels,
|
||||
codec,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect audio codec from file
|
||||
*/
|
||||
function detectCodec(file: File): string {
|
||||
const ext = file.name.split('.').pop()?.toLowerCase();
|
||||
const mimeType = file.type.toLowerCase();
|
||||
|
||||
if (mimeType.includes('wav') || ext === 'wav') return 'WAV (PCM)';
|
||||
if (mimeType.includes('mpeg') || mimeType.includes('mp3') || ext === 'mp3') return 'MP3';
|
||||
if (mimeType.includes('ogg') || ext === 'ogg') return 'OGG Vorbis';
|
||||
if (mimeType.includes('flac') || ext === 'flac') return 'FLAC';
|
||||
if (mimeType.includes('m4a') || mimeType.includes('aac') || ext === 'm4a') return 'AAC (M4A)';
|
||||
if (ext === 'aiff' || ext === 'aif') return 'AIFF';
|
||||
if (mimeType.includes('webm') || ext === 'webm') return 'WebM Opus';
|
||||
|
||||
return 'Unknown';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get audio file metadata without decoding the entire file
|
||||
*/
|
||||
@@ -50,10 +241,21 @@ export function isSupportedAudioFormat(file: File): boolean {
|
||||
'audio/aac',
|
||||
'audio/m4a',
|
||||
'audio/x-m4a',
|
||||
'audio/aiff',
|
||||
'audio/x-aiff',
|
||||
];
|
||||
|
||||
return supportedFormats.includes(file.type) ||
|
||||
/\.(wav|mp3|ogg|webm|flac|aac|m4a)$/i.test(file.name);
|
||||
/\.(wav|mp3|ogg|webm|flac|aac|m4a|aiff|aif)$/i.test(file.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check memory requirements for an audio file before decoding
|
||||
* @param file File to check
|
||||
* @returns Memory check result with warning if file is large
|
||||
*/
|
||||
export function checkAudioFileMemory(file: File): MemoryCheckResult {
|
||||
return checkFileMemoryLimit(file.size);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -25,11 +25,6 @@ import type { FilterOptions } from './filters';
|
||||
|
||||
// Effect type identifier
|
||||
export type EffectType =
|
||||
// Basic
|
||||
| 'normalize'
|
||||
| 'fadeIn'
|
||||
| 'fadeOut'
|
||||
| 'reverse'
|
||||
// Filters
|
||||
| 'lowpass'
|
||||
| 'highpass'
|
||||
@@ -77,6 +72,7 @@ export interface ChainEffect {
|
||||
type: EffectType;
|
||||
name: string;
|
||||
enabled: boolean;
|
||||
expanded?: boolean; // UI state for effect device expansion
|
||||
parameters?: EffectParameters;
|
||||
}
|
||||
|
||||
@@ -116,7 +112,7 @@ export function createEffect(
|
||||
type,
|
||||
name,
|
||||
enabled: true,
|
||||
parameters,
|
||||
parameters: parameters || getDefaultParameters(type),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -230,14 +226,63 @@ export function loadPreset(preset: EffectPreset): EffectChain {
|
||||
return JSON.parse(JSON.stringify(preset.chain)); // Deep clone
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default parameters for an effect type
|
||||
*/
|
||||
export function getDefaultParameters(type: EffectType): EffectParameters {
|
||||
switch (type) {
|
||||
// Filters
|
||||
case 'lowpass':
|
||||
case 'highpass':
|
||||
return { frequency: 1000, Q: 1 } as FilterOptions;
|
||||
case 'bandpass':
|
||||
case 'notch':
|
||||
return { frequency: 1000, Q: 1 } as FilterOptions;
|
||||
case 'lowshelf':
|
||||
case 'highshelf':
|
||||
return { frequency: 1000, Q: 1, gain: 0 } as FilterOptions;
|
||||
case 'peaking':
|
||||
return { frequency: 1000, Q: 1, gain: 0 } as FilterOptions;
|
||||
|
||||
// Dynamics
|
||||
case 'compressor':
|
||||
return { threshold: -24, ratio: 4, attack: 0.003, release: 0.25, knee: 30, makeupGain: 0 } as CompressorParameters;
|
||||
case 'limiter':
|
||||
return { threshold: -3, attack: 0.001, release: 0.05, makeupGain: 0 } as LimiterParameters;
|
||||
case 'gate':
|
||||
return { threshold: -40, ratio: 10, attack: 0.001, release: 0.1, knee: 0 } as GateParameters;
|
||||
|
||||
// Time-based
|
||||
case 'delay':
|
||||
return { time: 0.5, feedback: 0.3, mix: 0.5 } as DelayParameters;
|
||||
case 'reverb':
|
||||
return { roomSize: 0.5, damping: 0.5, mix: 0.3 } as ReverbParameters;
|
||||
case 'chorus':
|
||||
return { rate: 1.5, depth: 0.002, mix: 0.5 } as ChorusParameters;
|
||||
case 'flanger':
|
||||
return { rate: 0.5, depth: 0.002, feedback: 0.5, mix: 0.5 } as FlangerParameters;
|
||||
case 'phaser':
|
||||
return { rate: 0.5, depth: 0.5, stages: 4, mix: 0.5 } as PhaserParameters;
|
||||
|
||||
// Advanced
|
||||
case 'distortion':
|
||||
return { drive: 0.5, type: 'soft', output: 0.7, mix: 1 } as DistortionParameters;
|
||||
case 'pitch':
|
||||
return { semitones: 0, cents: 0, mix: 1 } as PitchShifterParameters;
|
||||
case 'timestretch':
|
||||
return { rate: 1.0, preservePitch: false, mix: 1 } as TimeStretchParameters;
|
||||
case 'bitcrusher':
|
||||
return { bitDepth: 8, sampleRate: 8000, mix: 1 } as BitcrusherParameters;
|
||||
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
}
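
/**
 * Usage sketch (illustrative, not part of this diff): with the createEffect change
 * above, omitting parameters now falls back to getDefaultParameters, so a compressor
 * starts with threshold -24 dB, ratio 4:1, attack 3 ms, release 250 ms. The exact
 * createEffect call signature shown here is assumed from the surrounding hunk.
 */
// const compressor = createEffect('compressor', 'Compressor');
// compressor.parameters; // { threshold: -24, ratio: 4, attack: 0.003, release: 0.25, knee: 30, makeupGain: 0 }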
|
||||
|
||||
/**
|
||||
* Get effect display name
|
||||
*/
|
||||
export const EFFECT_NAMES: Record<EffectType, string> = {
|
||||
normalize: 'Normalize',
|
||||
fadeIn: 'Fade In',
|
||||
fadeOut: 'Fade Out',
|
||||
reverse: 'Reverse',
|
||||
lowpass: 'Low-Pass Filter',
|
||||
highpass: 'High-Pass Filter',
|
||||
bandpass: 'Band-Pass Filter',
|
||||
|
||||
1059  lib/audio/effects/processor.ts  Normal file
File diff suppressed because it is too large
257  lib/audio/export.ts  Normal file
@@ -0,0 +1,257 @@
|
||||
/**
|
||||
* Audio export utilities
|
||||
* Supports WAV, MP3, and FLAC export
|
||||
*/
|
||||
|
||||
export interface ExportOptions {
|
||||
format: 'wav' | 'mp3' | 'flac';
|
||||
bitDepth?: 16 | 24 | 32; // For WAV and FLAC
|
||||
sampleRate?: number; // If different from source, will resample
|
||||
normalize?: boolean; // Normalize to prevent clipping
|
||||
bitrate?: number; // For MP3 (kbps): 128, 192, 256, 320
|
||||
quality?: number; // For FLAC compression: 0-9 (0=fast/large, 9=slow/small)
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an AudioBuffer to WAV file
|
||||
*/
|
||||
export function audioBufferToWav(
|
||||
audioBuffer: AudioBuffer,
|
||||
options: ExportOptions = { format: 'wav', bitDepth: 16 }
|
||||
): ArrayBuffer {
|
||||
const bitDepth = options.bitDepth ?? 16;
|
||||
const { normalize } = options;
|
||||
const numberOfChannels = audioBuffer.numberOfChannels;
|
||||
const sampleRate = audioBuffer.sampleRate;
|
||||
const length = audioBuffer.length;
|
||||
|
||||
// Get channel data
|
||||
const channels: Float32Array[] = [];
|
||||
for (let i = 0; i < numberOfChannels; i++) {
|
||||
channels.push(audioBuffer.getChannelData(i));
|
||||
}
|
||||
|
||||
// Find peak if normalizing
|
||||
let peak = 1.0;
|
||||
if (normalize) {
|
||||
peak = 0;
|
||||
for (const channel of channels) {
|
||||
for (let i = 0; i < channel.length; i++) {
|
||||
const abs = Math.abs(channel[i]);
|
||||
if (abs > peak) peak = abs;
|
||||
}
|
||||
}
|
||||
// Prevent division by zero and add headroom
|
||||
if (peak === 0) peak = 1.0;
|
||||
else peak = peak * 1.01; // 1% headroom
|
||||
}
|
||||
|
||||
// Calculate sizes
|
||||
const bytesPerSample = bitDepth / 8;
|
||||
const blockAlign = numberOfChannels * bytesPerSample;
|
||||
const dataSize = length * blockAlign;
|
||||
const bufferSize = 44 + dataSize; // 44 bytes for WAV header
|
||||
|
||||
// Create buffer
|
||||
const buffer = new ArrayBuffer(bufferSize);
|
||||
const view = new DataView(buffer);
|
||||
|
||||
// Write WAV header
|
||||
let offset = 0;
|
||||
|
||||
// RIFF chunk descriptor
|
||||
writeString(view, offset, 'RIFF'); offset += 4;
|
||||
view.setUint32(offset, bufferSize - 8, true); offset += 4; // File size - 8
|
||||
writeString(view, offset, 'WAVE'); offset += 4;
|
||||
|
||||
// fmt sub-chunk
|
||||
writeString(view, offset, 'fmt '); offset += 4;
|
||||
view.setUint32(offset, 16, true); offset += 4; // Subchunk size (16 for PCM)
|
||||
view.setUint16(offset, bitDepth === 32 ? 3 : 1, true); offset += 2; // Audio format (1 = PCM, 3 = IEEE float)
|
||||
view.setUint16(offset, numberOfChannels, true); offset += 2;
|
||||
view.setUint32(offset, sampleRate, true); offset += 4;
|
||||
view.setUint32(offset, sampleRate * blockAlign, true); offset += 4; // Byte rate
|
||||
view.setUint16(offset, blockAlign, true); offset += 2;
|
||||
view.setUint16(offset, bitDepth, true); offset += 2;
|
||||
|
||||
// data sub-chunk
|
||||
writeString(view, offset, 'data'); offset += 4;
|
||||
view.setUint32(offset, dataSize, true); offset += 4;
|
||||
|
||||
// Write interleaved audio data
|
||||
if (bitDepth === 16) {
|
||||
for (let i = 0; i < length; i++) {
|
||||
for (let channel = 0; channel < numberOfChannels; channel++) {
|
||||
const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
|
||||
view.setInt16(offset, sample * 0x7fff, true);
|
||||
offset += 2;
|
||||
}
|
||||
}
|
||||
} else if (bitDepth === 24) {
|
||||
for (let i = 0; i < length; i++) {
|
||||
for (let channel = 0; channel < numberOfChannels; channel++) {
|
||||
const sample = Math.max(-1, Math.min(1, channels[channel][i] / peak));
|
||||
const int24 = Math.round(sample * 0x7fffff);
|
||||
view.setUint8(offset, int24 & 0xff); offset++;
|
||||
view.setUint8(offset, (int24 >> 8) & 0xff); offset++;
|
||||
view.setUint8(offset, (int24 >> 16) & 0xff); offset++;
|
||||
}
|
||||
}
|
||||
} else if (bitDepth === 32) {
|
||||
for (let i = 0; i < length; i++) {
|
||||
for (let channel = 0; channel < numberOfChannels; channel++) {
|
||||
const sample = channels[channel][i] / peak;
|
||||
view.setFloat32(offset, sample, true);
|
||||
offset += 4;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
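
/**
 * Usage sketch (illustrative, not part of this diff): exporting a mixdown buffer as
 * 24-bit WAV and handing it to downloadArrayBuffer (defined below) for download.
 */
export function exportWavFile(buffer: AudioBuffer, filename: string = 'mixdown.wav'): void {
  const wav = audioBufferToWav(buffer, { format: 'wav', bitDepth: 24, normalize: true });
  downloadArrayBuffer(wav, filename, 'audio/wav');
}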
|
||||
|
||||
/**
|
||||
* Download an ArrayBuffer as a file
|
||||
*/
|
||||
export function downloadArrayBuffer(
|
||||
arrayBuffer: ArrayBuffer,
|
||||
filename: string,
|
||||
mimeType: string = 'audio/wav'
|
||||
): void {
|
||||
const blob = new Blob([arrayBuffer], { type: mimeType });
|
||||
const url = URL.createObjectURL(blob);
|
||||
const link = document.createElement('a');
|
||||
link.href = url;
|
||||
link.download = filename;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
URL.revokeObjectURL(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an AudioBuffer to MP3
|
||||
*/
|
||||
export async function audioBufferToMp3(
|
||||
audioBuffer: AudioBuffer,
|
||||
options: ExportOptions = { format: 'mp3', bitrate: 192 }
|
||||
): Promise<ArrayBuffer> {
|
||||
// Import Mp3Encoder from lamejs
|
||||
const { Mp3Encoder } = await import('lamejs/src/js/index.js');
|
||||
|
||||
const { bitrate = 192, normalize } = options;
|
||||
const numberOfChannels = Math.min(audioBuffer.numberOfChannels, 2); // MP3 supports max 2 channels
|
||||
const sampleRate = audioBuffer.sampleRate;
|
||||
const samples = audioBuffer.length;
|
||||
|
||||
// Get channel data
|
||||
const left = audioBuffer.getChannelData(0);
|
||||
const right = numberOfChannels > 1 ? audioBuffer.getChannelData(1) : left;
|
||||
|
||||
// Find peak if normalizing
|
||||
let peak = 1.0;
|
||||
if (normalize) {
|
||||
peak = 0;
|
||||
for (let i = 0; i < samples; i++) {
|
||||
peak = Math.max(peak, Math.abs(left[i]), Math.abs(right[i]));
|
||||
}
|
||||
if (peak === 0) peak = 1.0;
|
||||
else peak = peak * 1.01; // 1% headroom
|
||||
}
|
||||
|
||||
// Convert to 16-bit PCM
|
||||
const leftPcm = new Int16Array(samples);
|
||||
const rightPcm = new Int16Array(samples);
|
||||
for (let i = 0; i < samples; i++) {
|
||||
leftPcm[i] = Math.max(-32768, Math.min(32767, (left[i] / peak) * 32767));
|
||||
rightPcm[i] = Math.max(-32768, Math.min(32767, (right[i] / peak) * 32767));
|
||||
}
|
||||
|
||||
// Create MP3 encoder
|
||||
const mp3encoder = new Mp3Encoder(numberOfChannels, sampleRate, bitrate);
|
||||
|
||||
const mp3Data: Int8Array[] = [];
|
||||
const sampleBlockSize = 1152; // Standard MP3 frame size
|
||||
|
||||
// Encode in blocks
|
||||
for (let i = 0; i < samples; i += sampleBlockSize) {
|
||||
const leftChunk = leftPcm.subarray(i, Math.min(i + sampleBlockSize, samples));
|
||||
const rightChunk = numberOfChannels > 1
|
||||
? rightPcm.subarray(i, Math.min(i + sampleBlockSize, samples))
|
||||
: leftChunk;
|
||||
|
||||
const mp3buf = mp3encoder.encodeBuffer(leftChunk, rightChunk);
|
||||
if (mp3buf.length > 0) {
|
||||
mp3Data.push(mp3buf);
|
||||
}
|
||||
}
|
||||
|
||||
// Flush remaining data
|
||||
const mp3buf = mp3encoder.flush();
|
||||
if (mp3buf.length > 0) {
|
||||
mp3Data.push(mp3buf);
|
||||
}
|
||||
|
||||
// Combine all chunks
|
||||
const totalLength = mp3Data.reduce((acc, arr) => acc + arr.length, 0);
|
||||
const result = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
for (const chunk of mp3Data) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
return result.buffer;
|
||||
}
|
||||
|
||||
/**
 * Convert an AudioBuffer to a "FLAC" export
 * Note: this is not a true FLAC encoder. It writes a WAV file, DEFLATE-compresses it
 * with fflate, and wraps it in a small custom container, so the output will not play
 * in standard FLAC decoders.
 */
|
||||
export async function audioBufferToFlac(
|
||||
audioBuffer: AudioBuffer,
|
||||
options: ExportOptions = { format: 'flac', bitDepth: 16 }
|
||||
): Promise<ArrayBuffer> {
|
||||
// For true FLAC encoding, we'd need a proper FLAC encoder
|
||||
// As a workaround, we'll create a compressed WAV using fflate
|
||||
const fflate = await import('fflate');
|
||||
|
||||
const bitDepth = options.bitDepth || 16;
|
||||
|
||||
// First create WAV data
|
||||
const wavBuffer = audioBufferToWav(audioBuffer, {
|
||||
format: 'wav',
|
||||
bitDepth,
|
||||
normalize: options.normalize,
|
||||
});
|
||||
|
||||
// Compress using DEFLATE (similar compression to FLAC but simpler)
|
||||
const quality = Math.max(0, Math.min(9, options.quality || 6)) as 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
|
||||
const compressed = fflate.zlibSync(new Uint8Array(wavBuffer), { level: quality });
|
||||
|
||||
// Create a simple container format
|
||||
// Format: 'FLAC' (4 bytes) + original size (4 bytes) + compressed data
|
||||
const result = new Uint8Array(8 + compressed.length);
|
||||
const view = new DataView(result.buffer);
|
||||
|
||||
// Magic bytes
|
||||
result[0] = 0x66; // 'f'
|
||||
result[1] = 0x4C; // 'L'
|
||||
result[2] = 0x41; // 'A'
|
||||
result[3] = 0x43; // 'C'
|
||||
|
||||
// Original size
|
||||
view.setUint32(4, wavBuffer.byteLength, false);
|
||||
|
||||
// Compressed data
|
||||
result.set(compressed, 8);
|
||||
|
||||
return result.buffer;
|
||||
}
|
||||
|
||||
// Helper to write string to DataView
|
||||
function writeString(view: DataView, offset: number, string: string): void {
|
||||
for (let i = 0; i < string.length; i++) {
|
||||
view.setUint8(offset + i, string.charCodeAt(i));
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,8 @@
|
||||
|
||||
import type { Track, TrackColor } from '@/types/track';
|
||||
import { DEFAULT_TRACK_HEIGHT, TRACK_COLORS } from '@/types/track';
|
||||
import { createEffectChain } from '@/lib/audio/effects/chain';
|
||||
import { createAutomationLane } from '@/lib/audio/automation-utils';
|
||||
|
||||
/**
|
||||
* Generate a unique track ID
|
||||
@@ -15,23 +17,50 @@ export function generateTrackId(): string {
|
||||
/**
|
||||
* Create a new empty track
|
||||
*/
|
||||
export function createTrack(name?: string, color?: TrackColor): Track {
|
||||
export function createTrack(name?: string, color?: TrackColor, height?: number): Track {
|
||||
const colors: TrackColor[] = ['blue', 'green', 'purple', 'orange', 'pink', 'indigo', 'yellow', 'red'];
|
||||
const randomColor = colors[Math.floor(Math.random() * colors.length)];
|
||||
|
||||
// Ensure name is always a string, handle cases where event objects might be passed
|
||||
const trackName = typeof name === 'string' && name.trim() ? name.trim() : 'New Track';
|
||||
|
||||
const trackId = generateTrackId();
|
||||
|
||||
return {
|
||||
id: generateTrackId(),
|
||||
name: name || 'New Track',
|
||||
id: trackId,
|
||||
name: trackName,
|
||||
color: TRACK_COLORS[color || randomColor],
|
||||
height: DEFAULT_TRACK_HEIGHT,
|
||||
height: height ?? DEFAULT_TRACK_HEIGHT,
|
||||
audioBuffer: null,
|
||||
volume: 0.8,
|
||||
pan: 0,
|
||||
mute: false,
|
||||
solo: false,
|
||||
recordEnabled: false,
|
||||
effectChain: createEffectChain(`${trackName} Effects`),
|
||||
automation: {
|
||||
lanes: [
|
||||
createAutomationLane(trackId, 'volume', 'Volume', {
|
||||
min: 0,
|
||||
max: 1,
|
||||
unit: 'dB',
|
||||
}),
|
||||
createAutomationLane(trackId, 'pan', 'Pan', {
|
||||
min: -1,
|
||||
max: 1,
|
||||
formatter: (value: number) => {
|
||||
if (value === 0) return 'C';
|
||||
if (value < 0) return `${Math.abs(value * 100).toFixed(0)}L`;
|
||||
return `${(value * 100).toFixed(0)}R`;
|
||||
},
|
||||
}),
|
||||
],
|
||||
showAutomation: false,
|
||||
},
|
||||
collapsed: false,
|
||||
selected: false,
|
||||
showEffects: false,
|
||||
selection: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -41,9 +70,12 @@ export function createTrack(name?: string, color?: TrackColor): Track {
|
||||
export function createTrackFromBuffer(
|
||||
buffer: AudioBuffer,
|
||||
name?: string,
|
||||
color?: TrackColor
|
||||
color?: TrackColor,
|
||||
height?: number
|
||||
): Track {
|
||||
const track = createTrack(name, color);
|
||||
// Ensure name is a string before passing to createTrack
|
||||
const trackName = typeof name === 'string' && name.trim() ? name.trim() : undefined;
|
||||
const track = createTrack(trackName, color, height);
|
||||
track.audioBuffer = buffer;
|
||||
return track;
|
||||
}
|
||||
|
||||
190  lib/history/commands/multi-track-edit-command.ts  Normal file
@@ -0,0 +1,190 @@
|
||||
/**
|
||||
* Multi-track edit commands for audio operations across tracks
|
||||
*/
|
||||
|
||||
import { BaseCommand } from '../command';
|
||||
import type { Track } from '@/types/track';
|
||||
import type { Selection } from '@/types/selection';
|
||||
import {
|
||||
extractBufferSegment,
|
||||
deleteBufferSegment,
|
||||
insertBufferSegment,
|
||||
duplicateBufferSegment,
|
||||
} from '@/lib/audio/buffer-utils';
|
||||
|
||||
export type MultiTrackEditType = 'cut' | 'copy' | 'delete' | 'paste' | 'duplicate';
|
||||
|
||||
export interface MultiTrackEditParams {
|
||||
type: MultiTrackEditType;
|
||||
trackId: string;
|
||||
beforeBuffer: AudioBuffer | null;
|
||||
afterBuffer: AudioBuffer | null;
|
||||
selection?: Selection;
|
||||
clipboardData?: AudioBuffer;
|
||||
pastePosition?: number;
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Command for multi-track edit operations
|
||||
*/
|
||||
export class MultiTrackEditCommand extends BaseCommand {
|
||||
private type: MultiTrackEditType;
|
||||
private trackId: string;
|
||||
private beforeBuffer: AudioBuffer | null;
|
||||
private afterBuffer: AudioBuffer | null;
|
||||
private selection?: Selection;
|
||||
private clipboardData?: AudioBuffer;
|
||||
private pastePosition?: number;
|
||||
private onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void;
|
||||
|
||||
constructor(params: MultiTrackEditParams) {
|
||||
super();
|
||||
this.type = params.type;
|
||||
this.trackId = params.trackId;
|
||||
this.beforeBuffer = params.beforeBuffer;
|
||||
this.afterBuffer = params.afterBuffer;
|
||||
this.selection = params.selection;
|
||||
this.clipboardData = params.clipboardData;
|
||||
this.pastePosition = params.pastePosition;
|
||||
this.onApply = params.onApply;
|
||||
}
|
||||
|
||||
execute(): void {
|
||||
// For copy, don't modify the buffer, just update selection
|
||||
if (this.type === 'copy') {
|
||||
this.onApply(this.trackId, this.beforeBuffer, this.selection || null);
|
||||
} else {
|
||||
this.onApply(this.trackId, this.afterBuffer, null);
|
||||
}
|
||||
}
|
||||
|
||||
undo(): void {
|
||||
this.onApply(this.trackId, this.beforeBuffer, null);
|
||||
}
|
||||
|
||||
getDescription(): string {
|
||||
switch (this.type) {
|
||||
case 'cut':
|
||||
return 'Cut';
|
||||
case 'copy':
|
||||
return 'Copy';
|
||||
case 'delete':
|
||||
return 'Delete';
|
||||
case 'paste':
|
||||
return 'Paste';
|
||||
case 'duplicate':
|
||||
return 'Duplicate';
|
||||
default:
|
||||
return 'Edit';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory functions to create multi-track edit commands
|
||||
*/
|
||||
|
||||
export function createMultiTrackCutCommand(
|
||||
trackId: string,
|
||||
buffer: AudioBuffer,
|
||||
selection: Selection,
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void
|
||||
): MultiTrackEditCommand {
|
||||
const afterBuffer = deleteBufferSegment(buffer, selection.start, selection.end);
|
||||
|
||||
return new MultiTrackEditCommand({
|
||||
type: 'cut',
|
||||
trackId,
|
||||
beforeBuffer: buffer,
|
||||
afterBuffer,
|
||||
selection,
|
||||
onApply,
|
||||
});
|
||||
}
|
||||
|
||||
export function createMultiTrackCopyCommand(
|
||||
trackId: string,
|
||||
buffer: AudioBuffer,
|
||||
selection: Selection,
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void
|
||||
): MultiTrackEditCommand {
|
||||
// Copy doesn't modify the buffer
|
||||
return new MultiTrackEditCommand({
|
||||
type: 'copy',
|
||||
trackId,
|
||||
beforeBuffer: buffer,
|
||||
afterBuffer: buffer,
|
||||
selection,
|
||||
onApply,
|
||||
});
|
||||
}
|
||||
|
||||
export function createMultiTrackDeleteCommand(
|
||||
trackId: string,
|
||||
buffer: AudioBuffer,
|
||||
selection: Selection,
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void
|
||||
): MultiTrackEditCommand {
|
||||
const afterBuffer = deleteBufferSegment(buffer, selection.start, selection.end);
|
||||
|
||||
return new MultiTrackEditCommand({
|
||||
type: 'delete',
|
||||
trackId,
|
||||
beforeBuffer: buffer,
|
||||
afterBuffer,
|
||||
selection,
|
||||
onApply,
|
||||
});
|
||||
}
|
||||
|
||||
export function createMultiTrackPasteCommand(
|
||||
trackId: string,
|
||||
buffer: AudioBuffer | null,
|
||||
clipboardData: AudioBuffer,
|
||||
pastePosition: number,
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void
|
||||
): MultiTrackEditCommand {
|
||||
const targetBuffer = buffer || createSilentBuffer(clipboardData.sampleRate, clipboardData.numberOfChannels, pastePosition);
|
||||
const afterBuffer = insertBufferSegment(targetBuffer, clipboardData, pastePosition);
|
||||
|
||||
return new MultiTrackEditCommand({
|
||||
type: 'paste',
|
||||
trackId,
|
||||
beforeBuffer: buffer,
|
||||
afterBuffer,
|
||||
clipboardData,
|
||||
pastePosition,
|
||||
onApply,
|
||||
});
|
||||
}
|
||||
|
||||
export function createMultiTrackDuplicateCommand(
|
||||
trackId: string,
|
||||
buffer: AudioBuffer,
|
||||
selection: Selection,
|
||||
onApply: (trackId: string, buffer: AudioBuffer | null, selection: Selection | null) => void
|
||||
): MultiTrackEditCommand {
|
||||
const afterBuffer = duplicateBufferSegment(buffer, selection.start, selection.end);
|
||||
|
||||
return new MultiTrackEditCommand({
|
||||
type: 'duplicate',
|
||||
trackId,
|
||||
beforeBuffer: buffer,
|
||||
afterBuffer,
|
||||
selection,
|
||||
onApply,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to create a silent buffer
|
||||
*/
|
||||
function createSilentBuffer(sampleRate: number, numberOfChannels: number, duration: number): AudioBuffer {
|
||||
const audioContext = new OfflineAudioContext(
|
||||
numberOfChannels,
|
||||
Math.ceil(duration * sampleRate),
|
||||
sampleRate
|
||||
);
|
||||
return audioContext.createBuffer(numberOfChannels, Math.ceil(duration * sampleRate), sampleRate);
|
||||
}
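
/**
 * Usage sketch (illustrative, not part of this diff): creating and driving a cut
 * command directly. In the app this would normally go through the history manager,
 * which is assumed to call execute()/undo() on the command.
 */
export function cutTrackSelection(
  track: Track,
  selection: Selection,
  applyBuffer: (trackId: string, buffer: AudioBuffer | null, sel: Selection | null) => void
): MultiTrackEditCommand {
  if (!track.audioBuffer) {
    throw new Error('Track has no audio to cut');
  }
  const command = createMultiTrackCutCommand(track.id, track.audioBuffer, selection, applyBuffer);
  command.execute(); // removes the selected region from the track
  // command.undo() would restore the original buffer
  return command;
}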
|
||||
138  lib/hooks/useAudioWorker.ts  Normal file
@@ -0,0 +1,138 @@
|
||||
'use client';
|
||||
|
||||
import { useRef, useEffect, useCallback } from 'react';
|
||||
import type { WorkerMessage, WorkerResponse } from '@/lib/workers/audio.worker';
|
||||
|
||||
/**
|
||||
* Hook to use the audio Web Worker for heavy computations
|
||||
* Automatically manages worker lifecycle and message passing
|
||||
*/
|
||||
export function useAudioWorker() {
|
||||
const workerRef = useRef<Worker | null>(null);
|
||||
const callbacksRef = useRef<Map<string, (result: any, error?: string) => void>>(new Map());
|
||||
const messageIdRef = useRef(0);
|
||||
|
||||
// Initialize worker
|
||||
useEffect(() => {
|
||||
// Create worker from the audio worker file
|
||||
workerRef.current = new Worker(
|
||||
new URL('../workers/audio.worker.ts', import.meta.url),
|
||||
{ type: 'module' }
|
||||
);
|
||||
|
||||
// Handle messages from worker
|
||||
workerRef.current.onmessage = (event: MessageEvent<WorkerResponse>) => {
|
||||
const { id, result, error } = event.data;
|
||||
const callback = callbacksRef.current.get(id);
|
||||
|
||||
if (callback) {
|
||||
callback(result, error);
|
||||
callbacksRef.current.delete(id);
|
||||
}
|
||||
};
|
||||
|
||||
// Cleanup on unmount
|
||||
return () => {
|
||||
if (workerRef.current) {
|
||||
workerRef.current.terminate();
|
||||
workerRef.current = null;
|
||||
}
|
||||
callbacksRef.current.clear();
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Send message to worker
|
||||
const sendMessage = useCallback(
|
||||
<T = any>(type: WorkerMessage['type'], payload: any): Promise<T> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!workerRef.current) {
|
||||
reject(new Error('Worker not initialized'));
|
||||
return;
|
||||
}
|
||||
|
||||
const id = `msg-${++messageIdRef.current}`;
|
||||
const message: WorkerMessage = { id, type, payload };
|
||||
|
||||
callbacksRef.current.set(id, (result, error) => {
|
||||
if (error) {
|
||||
reject(new Error(error));
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
});
|
||||
|
||||
workerRef.current.postMessage(message);
|
||||
});
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
// API methods
|
||||
const generatePeaks = useCallback(
|
||||
async (channelData: Float32Array, width: number): Promise<Float32Array> => {
|
||||
const result = await sendMessage<Float32Array>('generatePeaks', {
|
||||
channelData,
|
||||
width,
|
||||
});
|
||||
return new Float32Array(result);
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
const generateMinMaxPeaks = useCallback(
|
||||
async (
|
||||
channelData: Float32Array,
|
||||
width: number
|
||||
): Promise<{ min: Float32Array; max: Float32Array }> => {
|
||||
const result = await sendMessage<{ min: Float32Array; max: Float32Array }>(
|
||||
'generateMinMaxPeaks',
|
||||
{ channelData, width }
|
||||
);
|
||||
return {
|
||||
min: new Float32Array(result.min),
|
||||
max: new Float32Array(result.max),
|
||||
};
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
const normalizePeaks = useCallback(
|
||||
async (peaks: Float32Array, targetMax: number = 1): Promise<Float32Array> => {
|
||||
const result = await sendMessage<Float32Array>('normalizePeaks', {
|
||||
peaks,
|
||||
targetMax,
|
||||
});
|
||||
return new Float32Array(result);
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
const analyzeAudio = useCallback(
|
||||
async (
|
||||
channelData: Float32Array
|
||||
): Promise<{
|
||||
peak: number;
|
||||
rms: number;
|
||||
crestFactor: number;
|
||||
dynamicRange: number;
|
||||
}> => {
|
||||
return sendMessage('analyzeAudio', { channelData });
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
const findPeak = useCallback(
|
||||
async (channelData: Float32Array): Promise<number> => {
|
||||
return sendMessage<number>('findPeak', { channelData });
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
return {
|
||||
generatePeaks,
|
||||
generateMinMaxPeaks,
|
||||
normalizePeaks,
|
||||
analyzeAudio,
|
||||
findPeak,
|
||||
};
|
||||
}
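
/**
 * Usage sketch (illustrative, not part of this diff): computing waveform peaks off
 * the main thread for a canvas of a given pixel width.
 */
export function useWaveformPeaks() {
  const { generatePeaks } = useAudioWorker();

  return useCallback(
    async (buffer: AudioBuffer, width: number): Promise<Float32Array> => {
      // Only the first channel's samples are posted to the worker; the AudioBuffer
      // itself never leaves the main thread.
      return generatePeaks(buffer.getChannelData(0), width);
    },
    [generatePeaks]
  );
}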
|
||||
173  lib/hooks/useAutomationRecording.ts  Normal file
@@ -0,0 +1,173 @@
|
||||
/**
|
||||
* Hook for recording automation data during playback
|
||||
* Supports write, touch, and latch modes
|
||||
*/
|
||||
|
||||
import { useCallback, useRef } from 'react';
|
||||
import type { Track } from '@/types/track';
|
||||
import type { AutomationPoint, AutomationMode } from '@/types/automation';
|
||||
|
||||
export interface AutomationRecordingState {
|
||||
isRecording: boolean;
|
||||
recordingLaneId: string | null;
|
||||
touchActive: boolean; // For touch mode - tracks if control is being touched
|
||||
latchTriggered: boolean; // For latch mode - tracks if recording has started
|
||||
}
|
||||
|
||||
export function useAutomationRecording(
|
||||
track: Track,
|
||||
onUpdateTrack: (trackId: string, updates: Partial<Track>) => void
|
||||
) {
|
||||
const recordingStateRef = useRef<Map<string, AutomationRecordingState>>(new Map());
|
||||
const recordingIntervalRef = useRef<Map<string, number>>(new Map());
|
||||
const lastRecordedValueRef = useRef<Map<string, number>>(new Map());
|
||||
|
||||
/**
|
||||
* Start recording automation for a specific lane
|
||||
*/
|
||||
const startRecording = useCallback((laneId: string, mode: AutomationMode) => {
|
||||
const state: AutomationRecordingState = {
|
||||
isRecording: mode === 'write',
|
||||
recordingLaneId: laneId,
|
||||
touchActive: false,
|
||||
latchTriggered: false,
|
||||
};
|
||||
recordingStateRef.current.set(laneId, state);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Stop recording automation for a specific lane
|
||||
*/
|
||||
const stopRecording = useCallback((laneId: string) => {
|
||||
recordingStateRef.current.delete(laneId);
|
||||
const intervalId = recordingIntervalRef.current.get(laneId);
|
||||
if (intervalId) {
|
||||
clearInterval(intervalId);
|
||||
recordingIntervalRef.current.delete(laneId);
|
||||
}
|
||||
lastRecordedValueRef.current.delete(laneId);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Record a single automation point
|
||||
*/
|
||||
const recordPoint = useCallback((
|
||||
laneId: string,
|
||||
currentTime: number,
|
||||
value: number,
|
||||
mode: AutomationMode
|
||||
) => {
|
||||
const lane = track.automation.lanes.find(l => l.id === laneId);
|
||||
if (!lane) return;
|
||||
|
||||
const state = recordingStateRef.current.get(laneId);
|
||||
if (!state) return;
|
||||
|
||||
// Check if we should record based on mode
|
||||
let shouldRecord = false;
|
||||
|
||||
switch (mode) {
|
||||
case 'write':
|
||||
// Always record in write mode
|
||||
shouldRecord = true;
|
||||
break;
|
||||
|
||||
case 'touch':
|
||||
// Only record when control is being touched
|
||||
shouldRecord = state.touchActive;
|
||||
break;
|
||||
|
||||
case 'latch':
|
||||
// Record from first touch until stop
|
||||
if (state.touchActive && !state.latchTriggered) {
|
||||
state.latchTriggered = true;
|
||||
}
|
||||
shouldRecord = state.latchTriggered;
|
||||
break;
|
||||
|
||||
default:
|
||||
shouldRecord = false;
|
||||
}
|
||||
|
||||
if (!shouldRecord) return;
|
||||
|
||||
// Check if value has changed significantly (avoid redundant points)
|
||||
const lastValue = lastRecordedValueRef.current.get(laneId);
|
||||
if (lastValue !== undefined && Math.abs(lastValue - value) < 0.001) {
|
||||
return; // Skip if value hasn't changed
|
||||
}
|
||||
|
||||
lastRecordedValueRef.current.set(laneId, value);
|
||||
|
||||
// In write mode, clear existing points in the time range
|
||||
let updatedPoints = [...lane.points];
|
||||
if (mode === 'write') {
|
||||
// Remove points that are within a small time window of current time
|
||||
updatedPoints = updatedPoints.filter(p =>
|
||||
Math.abs(p.time - currentTime) > 0.05 // 50ms threshold
|
||||
);
|
||||
}
|
||||
|
||||
// Add new point
|
||||
const newPoint: AutomationPoint = {
|
||||
id: `point-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
|
||||
time: currentTime,
|
||||
value,
|
||||
curve: 'linear',
|
||||
};
|
||||
|
||||
updatedPoints.push(newPoint);
|
||||
|
||||
// Sort points by time
|
||||
updatedPoints.sort((a, b) => a.time - b.time);
|
||||
|
||||
// Update track with new automation points
|
||||
const updatedLanes = track.automation.lanes.map(l =>
|
||||
l.id === laneId ? { ...l, points: updatedPoints } : l
|
||||
);
|
||||
|
||||
onUpdateTrack(track.id, {
|
||||
automation: {
|
||||
...track.automation,
|
||||
lanes: updatedLanes,
|
||||
},
|
||||
});
|
||||
}, [track, onUpdateTrack]);
|
||||
|
||||
/**
|
||||
* Set touch state for touch mode
|
||||
*/
|
||||
const setTouchActive = useCallback((laneId: string, active: boolean) => {
|
||||
const state = recordingStateRef.current.get(laneId);
|
||||
if (state) {
|
||||
state.touchActive = active;
|
||||
}
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Check if a lane is currently recording
|
||||
*/
|
||||
const isRecordingLane = useCallback((laneId: string): boolean => {
|
||||
const state = recordingStateRef.current.get(laneId);
|
||||
return state?.isRecording ?? false;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Cleanup - stop all recording
|
||||
*/
|
||||
const cleanup = useCallback(() => {
|
||||
recordingStateRef.current.forEach((_, laneId) => {
|
||||
stopRecording(laneId);
|
||||
});
|
||||
recordingStateRef.current.clear();
|
||||
}, [stopRecording]);
|
||||
|
||||
return {
|
||||
startRecording,
|
||||
stopRecording,
|
||||
recordPoint,
|
||||
setTouchActive,
|
||||
isRecordingLane,
|
||||
cleanup,
|
||||
};
|
||||
}
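
/**
 * Usage sketch (illustrative, not part of this diff): touch-mode recording from a
 * fader. `currentTime` comes from the transport; pointer handlers toggle the touch
 * state so recordPoint only writes while the control is held.
 */
// const { startRecording, stopRecording, recordPoint, setTouchActive } =
//   useAutomationRecording(track, updateTrack);
//
// onPointerDown: setTouchActive(laneId, true); startRecording(laneId, 'touch');
// onFaderChange: recordPoint(laneId, currentTime, faderValue, 'touch');
// onPointerUp:   setTouchActive(laneId, false); stopRecording(laneId);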
|
||||
70  lib/hooks/useMarkers.ts  Normal file
@@ -0,0 +1,70 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useCallback } from 'react';
|
||||
import type { Marker, CreateMarkerInput } from '@/types/marker';
|
||||
|
||||
export function useMarkers() {
|
||||
const [markers, setMarkers] = useState<Marker[]>([]);
|
||||
|
||||
const addMarker = useCallback((input: CreateMarkerInput): Marker => {
|
||||
const marker: Marker = {
|
||||
...input,
|
||||
id: `marker-${Date.now()}-${Math.random()}`,
|
||||
};
|
||||
setMarkers((prev) => [...prev, marker].sort((a, b) => a.time - b.time));
|
||||
return marker;
|
||||
}, []);
|
||||
|
||||
const updateMarker = useCallback((id: string, updates: Partial<Marker>) => {
|
||||
setMarkers((prev) => {
|
||||
const updated = prev.map((m) =>
|
||||
m.id === id ? { ...m, ...updates } : m
|
||||
);
|
||||
// Re-sort if time changed
|
||||
if ('time' in updates) {
|
||||
return updated.sort((a, b) => a.time - b.time);
|
||||
}
|
||||
return updated;
|
||||
});
|
||||
}, []);
|
||||
|
||||
const removeMarker = useCallback((id: string) => {
|
||||
setMarkers((prev) => prev.filter((m) => m.id !== id));
|
||||
}, []);
|
||||
|
||||
const clearMarkers = useCallback(() => {
|
||||
setMarkers([]);
|
||||
}, []);
|
||||
|
||||
const getMarkerAt = useCallback((time: number, tolerance: number = 0.1): Marker | undefined => {
|
||||
return markers.find((m) => {
|
||||
if (m.type === 'point') {
|
||||
return Math.abs(m.time - time) <= tolerance;
|
||||
} else {
|
||||
// For regions, check if time is within the region
|
||||
return m.endTime !== undefined && time >= m.time && time <= m.endTime;
|
||||
}
|
||||
});
|
||||
}, [markers]);
|
||||
|
||||
const getNextMarker = useCallback((time: number): Marker | undefined => {
|
||||
return markers.find((m) => m.time > time);
|
||||
}, [markers]);
|
||||
|
||||
const getPreviousMarker = useCallback((time: number): Marker | undefined => {
|
||||
const previous = markers.filter((m) => m.time < time);
|
||||
return previous[previous.length - 1];
|
||||
}, [markers]);
|
||||
|
||||
return {
|
||||
markers,
|
||||
addMarker,
|
||||
updateMarker,
|
||||
removeMarker,
|
||||
clearMarkers,
|
||||
getMarkerAt,
|
||||
getNextMarker,
|
||||
getPreviousMarker,
|
||||
setMarkers,
|
||||
};
|
||||
}
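
/**
 * Usage sketch (illustrative, not part of this diff): dropping a point marker at the
 * playhead and jumping to the next marker. Assumes CreateMarkerInput accepts
 * { time, type, label }.
 */
export function useMarkerNavigation(getPlayhead: () => number, seek: (time: number) => void) {
  const { addMarker, getNextMarker } = useMarkers();

  const dropMarker = () =>
    addMarker({ time: getPlayhead(), type: 'point', label: 'Marker' } as CreateMarkerInput);

  const jumpToNext = () => {
    const next = getNextMarker(getPlayhead());
    if (next) seek(next.time);
  };

  return { dropMarker, jumpToNext };
}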
|
||||
@@ -1,53 +1,19 @@
|
||||
import { useState, useCallback, useEffect } from 'react';
|
||||
import { useState, useCallback } from 'react';
|
||||
import type { Track } from '@/types/track';
|
||||
import { createTrack, createTrackFromBuffer } from '@/lib/audio/track-utils';
|
||||
|
||||
const STORAGE_KEY = 'audio-ui-multi-track';
|
||||
|
||||
export function useMultiTrack() {
|
||||
const [tracks, setTracks] = useState<Track[]>(() => {
|
||||
if (typeof window === 'undefined') return [];
|
||||
// Note: localStorage persistence disabled in favor of IndexedDB project management
|
||||
const [tracks, setTracks] = useState<Track[]>([]);
|
||||
|
||||
try {
|
||||
const saved = localStorage.getItem(STORAGE_KEY);
|
||||
if (saved) {
|
||||
const parsed = JSON.parse(saved);
|
||||
// Note: AudioBuffers can't be serialized, so we only restore track metadata
|
||||
return parsed.map((t: any) => ({
|
||||
...t,
|
||||
name: String(t.name || 'Untitled Track'), // Ensure name is always a string
|
||||
audioBuffer: null, // Will need to be reloaded
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load tracks from localStorage:', error);
|
||||
// Clear corrupted data
|
||||
localStorage.removeItem(STORAGE_KEY);
|
||||
}
|
||||
|
||||
return [];
|
||||
});
|
||||
|
||||
// Save tracks to localStorage (without audio buffers)
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined') return;
|
||||
|
||||
try {
|
||||
const trackData = tracks.map(({ audioBuffer, ...track }) => track);
|
||||
localStorage.setItem(STORAGE_KEY, JSON.stringify(trackData));
|
||||
} catch (error) {
|
||||
console.error('Failed to save tracks to localStorage:', error);
|
||||
}
|
||||
}, [tracks]);
|
||||
|
||||
const addTrack = useCallback((name?: string) => {
|
||||
const track = createTrack(name);
|
||||
const addTrack = useCallback((name?: string, height?: number) => {
|
||||
const track = createTrack(name, undefined, height);
|
||||
setTracks((prev) => [...prev, track]);
|
||||
return track;
|
||||
}, []);
|
||||
|
||||
const addTrackFromBuffer = useCallback((buffer: AudioBuffer, name?: string) => {
|
||||
const track = createTrackFromBuffer(buffer, name);
|
||||
const addTrackFromBuffer = useCallback((buffer: AudioBuffer, name?: string, height?: number) => {
|
||||
const track = createTrackFromBuffer(buffer, name, undefined, height);
|
||||
setTracks((prev) => [...prev, track]);
|
||||
return track;
|
||||
}, []);
|
||||
@@ -85,6 +51,10 @@ export function useMultiTrack() {
|
||||
);
|
||||
}, []);
|
||||
|
||||
const loadTracks = useCallback((tracksToLoad: Track[]) => {
|
||||
setTracks(tracksToLoad);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
tracks,
|
||||
addTrack,
|
||||
@@ -94,5 +64,6 @@ export function useMultiTrack() {
|
||||
clearTracks,
|
||||
reorderTracks,
|
||||
setTrackBuffer,
|
||||
loadTracks,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,25 +2,90 @@ import { useState, useCallback, useRef, useEffect } from 'react';
|
||||
import { getAudioContext } from '@/lib/audio/context';
|
||||
import type { Track } from '@/types/track';
|
||||
import { getTrackGain } from '@/lib/audio/track-utils';
|
||||
import { applyEffectChain, updateEffectParameters, toggleEffectBypass, type EffectNodeInfo } from '@/lib/audio/effects/processor';
|
||||
import { evaluateAutomationLinear } from '@/lib/audio/automation-utils';
|
||||
|
||||
export interface MultiTrackPlayerState {
|
||||
isPlaying: boolean;
|
||||
currentTime: number;
|
||||
duration: number;
|
||||
loopEnabled: boolean;
|
||||
loopStart: number;
|
||||
loopEnd: number;
|
||||
playbackRate: number;
|
||||
}
|
||||
|
||||
export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
export interface TrackLevel {
|
||||
trackId: string;
|
||||
level: number;
|
||||
}
|
||||
|
||||
export interface AutomationRecordingCallback {
|
||||
(trackId: string, laneId: string, currentTime: number, value: number): void;
|
||||
}
|
||||
|
||||
export function useMultiTrackPlayer(
|
||||
tracks: Track[],
|
||||
masterVolume: number = 1,
|
||||
onRecordAutomation?: AutomationRecordingCallback
|
||||
) {
|
||||
const [isPlaying, setIsPlaying] = useState(false);
|
||||
const [currentTime, setCurrentTime] = useState(0);
|
||||
const [duration, setDuration] = useState(0);
|
||||
const [trackLevels, setTrackLevels] = useState<Record<string, number>>({});
|
||||
const [masterPeakLevel, setMasterPeakLevel] = useState(0);
|
||||
const [masterRmsLevel, setMasterRmsLevel] = useState(0);
|
||||
const [masterIsClipping, setMasterIsClipping] = useState(false);
|
||||
const [loopEnabled, setLoopEnabled] = useState(false);
|
||||
const [loopStart, setLoopStart] = useState(0);
|
||||
const [loopEnd, setLoopEnd] = useState(0);
|
||||
const [playbackRate, setPlaybackRate] = useState(1.0);
|
||||
|
||||
const audioContextRef = useRef<AudioContext | null>(null);
|
||||
const sourceNodesRef = useRef<AudioBufferSourceNode[]>([]);
|
||||
const gainNodesRef = useRef<GainNode[]>([]);
|
||||
const panNodesRef = useRef<StereoPannerNode[]>([]);
|
||||
const analyserNodesRef = useRef<AnalyserNode[]>([]);
|
||||
const effectNodesRef = useRef<EffectNodeInfo[][]>([]); // Effect nodes per track
|
||||
const masterGainNodeRef = useRef<GainNode | null>(null);
|
||||
const masterAnalyserRef = useRef<AnalyserNode | null>(null);
|
||||
const masterLevelMonitorFrameRef = useRef<number | null>(null);
|
||||
const startTimeRef = useRef<number>(0);
|
||||
const pausedAtRef = useRef<number>(0);
|
||||
const animationFrameRef = useRef<number | null>(null);
|
||||
const levelMonitorFrameRef = useRef<number | null>(null);
|
||||
const automationFrameRef = useRef<number | null>(null);
|
||||
const isMonitoringLevelsRef = useRef<boolean>(false);
|
||||
const tracksRef = useRef<Track[]>(tracks); // Always keep latest tracks
|
||||
const lastRecordedValuesRef = useRef<Map<string, number>>(new Map()); // Track last recorded values to detect changes
|
||||
const onRecordAutomationRef = useRef<AutomationRecordingCallback | undefined>(onRecordAutomation);
|
||||
const loopEnabledRef = useRef<boolean>(false);
|
||||
const loopStartRef = useRef<number>(0);
|
||||
const loopEndRef = useRef<number>(0);
|
||||
const playbackRateRef = useRef<number>(1.0);
|
||||
const isPlayingRef = useRef<boolean>(false);
|
||||
|
||||
// Keep tracksRef in sync with tracks prop
|
||||
useEffect(() => {
|
||||
tracksRef.current = tracks;
|
||||
}, [tracks]);
|
||||
|
||||
// Keep loop refs in sync with state
|
||||
useEffect(() => {
|
||||
loopEnabledRef.current = loopEnabled;
|
||||
loopStartRef.current = loopStart;
|
||||
loopEndRef.current = loopEnd;
|
||||
}, [loopEnabled, loopStart, loopEnd]);
|
||||
|
||||
// Keep playbackRate ref in sync with state
|
||||
useEffect(() => {
|
||||
playbackRateRef.current = playbackRate;
|
||||
}, [playbackRate]);
|
||||
|
||||
// Keep onRecordAutomationRef in sync
|
||||
useEffect(() => {
|
||||
onRecordAutomationRef.current = onRecordAutomation;
|
||||
}, [onRecordAutomation]);
|
||||
|
||||
// Calculate total duration from all tracks
|
||||
useEffect(() => {
|
||||
@@ -31,24 +96,304 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
}
|
||||
}
|
||||
setDuration(maxDuration);
|
||||
// Initialize loop end to duration when duration changes
|
||||
if (loopEnd === 0 || loopEnd > maxDuration) {
|
||||
setLoopEnd(maxDuration);
|
||||
}
|
||||
}, [tracks, loopEnd]);
|
||||
|
||||
// Convert linear amplitude to dB scale normalized to 0-1 range
|
||||
const linearToDbScale = (linear: number): number => {
|
||||
if (linear === 0) return 0;
|
||||
|
||||
// Convert to dB (20 * log10(linear))
|
||||
const db = 20 * Math.log10(linear);
|
||||
|
||||
// Normalize -60dB to 0dB range to 0-1
|
||||
// -60dB or lower = 0%, 0dB = 100%
|
||||
const minDb = -60;
|
||||
const maxDb = 0;
|
||||
const normalized = (db - minDb) / (maxDb - minDb);
|
||||
|
||||
// Clamp to 0-1 range
|
||||
return Math.max(0, Math.min(1, normalized));
|
||||
};
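// Worked example (sketch, not in the original source): a linear peak of 0.5 maps to
//   20 * Math.log10(0.5) ≈ -6.0 dB  ->  (-6.0 - (-60)) / 60 ≈ 0.90
// so a half-amplitude signal still reads near the top of the meter rather than at 50%,
// which matches how level meters are normally scaled.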
|
||||
|
||||
// Monitor playback levels for all tracks
|
||||
const monitorPlaybackLevels = useCallback(() => {
|
||||
if (!isMonitoringLevelsRef.current || analyserNodesRef.current.length === 0) return;
|
||||
|
||||
const levels: Record<string, number> = {};
|
||||
|
||||
analyserNodesRef.current.forEach((analyser, index) => {
|
||||
const track = tracksRef.current[index];
|
||||
if (!track) return;
|
||||
|
||||
const dataArray = new Float32Array(analyser.fftSize);
|
||||
analyser.getFloatTimeDomainData(dataArray);
|
||||
|
||||
// Calculate peak level using float data (-1 to +1 range)
|
||||
let peak = 0;
|
||||
for (let i = 0; i < dataArray.length; i++) {
|
||||
const abs = Math.abs(dataArray[i]);
|
||||
if (abs > peak) {
|
||||
peak = abs;
|
||||
}
|
||||
}
|
||||
|
||||
// Store raw linear peak (will be converted to dB in the fader component)
|
||||
levels[track.id] = peak;
|
||||
});
|
||||
|
||||
setTrackLevels(levels);
|
||||
|
||||
levelMonitorFrameRef.current = requestAnimationFrame(monitorPlaybackLevels);
|
||||
}, []);
|
||||
|
||||
// Monitor master output levels (peak and RMS)
|
||||
const monitorMasterLevels = useCallback(() => {
|
||||
if (!masterAnalyserRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
const analyser = masterAnalyserRef.current;
|
||||
const bufferLength = analyser.fftSize;
|
||||
const dataArray = new Float32Array(bufferLength);
|
||||
|
||||
analyser.getFloatTimeDomainData(dataArray);
|
||||
|
||||
// Calculate peak level (max absolute value)
|
||||
let peak = 0;
|
||||
for (let i = 0; i < bufferLength; i++) {
|
||||
const abs = Math.abs(dataArray[i]);
|
||||
if (abs > peak) {
|
||||
peak = abs;
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate RMS level (root mean square)
|
||||
let sumSquares = 0;
|
||||
for (let i = 0; i < bufferLength; i++) {
|
||||
sumSquares += dataArray[i] * dataArray[i];
|
||||
}
|
||||
const rms = Math.sqrt(sumSquares / bufferLength);
|
||||
|
||||
// Detect clipping (signal >= 1.0)
|
||||
const isClipping = peak >= 1.0;
|
||||
|
||||
setMasterPeakLevel(peak);
|
||||
setMasterRmsLevel(rms);
|
||||
if (isClipping) {
|
||||
setMasterIsClipping(true);
|
||||
}
|
||||
|
||||
masterLevelMonitorFrameRef.current = requestAnimationFrame(monitorMasterLevels);
|
||||
}, []);
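// Reference point (illustrative, assumed values): a full-scale sine wave has a peak of
// 1.0 and an RMS of 1/sqrt(2) ≈ 0.707, so the RMS reading normally sits a few dB below
// the peak reading; only a DC or square-wave-like signal makes the two coincide.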
|
||||
|
||||
// Apply automation values during playback
|
||||
const applyAutomation = useCallback(() => {
|
||||
if (!audioContextRef.current) return;
|
||||
|
||||
const currentTime = pausedAtRef.current + (audioContextRef.current.currentTime - startTimeRef.current);
|
||||
|
||||
tracksRef.current.forEach((track, index) => {
|
||||
// Apply volume automation
|
||||
const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
|
||||
if (volumeLane) {
|
||||
let volumeValue: number | undefined;
|
||||
|
||||
// In write mode, record current track volume (only if value changed)
|
||||
if (volumeLane.mode === 'write' && onRecordAutomationRef.current) {
|
||||
volumeValue = track.volume;
|
||||
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-volume`);
|
||||
|
||||
// Only record if value has changed
|
||||
if (lastValue === undefined || Math.abs(lastValue - volumeValue) > 0.001) {
|
||||
lastRecordedValuesRef.current.set(`${track.id}-volume`, volumeValue);
|
||||
onRecordAutomationRef.current(track.id, volumeLane.id, currentTime, volumeValue);
|
||||
}
|
||||
} else if (volumeLane.points.length > 0) {
|
||||
// Otherwise play back automation
|
||||
volumeValue = evaluateAutomationLinear(volumeLane.points, currentTime);
|
||||
}
|
||||
|
||||
if (volumeValue !== undefined && gainNodesRef.current[index]) {
|
||||
const trackGain = getTrackGain(track, tracks);
|
||||
// Apply both track gain (mute/solo) and automated volume
|
||||
gainNodesRef.current[index].gain.setValueAtTime(
|
||||
trackGain * volumeValue,
|
||||
audioContextRef.current!.currentTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply pan automation
|
||||
const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
|
||||
if (panLane) {
|
||||
let automatedValue: number | undefined;
|
||||
|
||||
// In write mode, record current track pan (only if value changed)
|
||||
if (panLane.mode === 'write' && onRecordAutomationRef.current) {
|
||||
automatedValue = (track.pan + 1) / 2; // Convert -1 to 1 -> 0 to 1
|
||||
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-pan`);
|
||||
|
||||
// Only record if value has changed
|
||||
if (lastValue === undefined || Math.abs(lastValue - automatedValue) > 0.001) {
|
||||
lastRecordedValuesRef.current.set(`${track.id}-pan`, automatedValue);
|
||||
onRecordAutomationRef.current(track.id, panLane.id, currentTime, automatedValue);
|
||||
}
|
||||
} else if (panLane.points.length > 0) {
|
||||
// Otherwise play back automation
|
||||
automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
|
||||
}
|
||||
|
||||
if (automatedValue !== undefined && panNodesRef.current[index]) {
|
||||
// Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
|
||||
const panValue = (automatedValue * 2) - 1;
|
||||
panNodesRef.current[index].pan.setValueAtTime(
|
||||
panValue,
|
||||
audioContextRef.current!.currentTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply effect parameter automation
|
||||
track.automation.lanes.forEach(lane => {
|
||||
// Check if this is an effect parameter (format: effect.{effectId}.{parameterName})
|
||||
if (lane.parameterId.startsWith('effect.')) {
|
||||
const parts = lane.parameterId.split('.');
|
||||
if (parts.length === 3) {
|
||||
const effectId = parts[1];
|
||||
const paramName = parts[2];
|
||||
|
||||
// Find the effect in the track's effect chain
|
||||
const effectIndex = track.effectChain.effects.findIndex(e => e.id === effectId);
|
||||
const effect = track.effectChain.effects[effectIndex];
|
||||
|
||||
if (effectIndex >= 0 && effect) {
|
||||
let automatedValue: number | undefined;
|
||||
|
||||
// In write mode, record current effect parameter value (only if value changed)
|
||||
if (lane.mode === 'write' && onRecordAutomationRef.current && effect.parameters) {
|
||||
const currentValue = (effect.parameters as any)[paramName];
|
||||
if (currentValue !== undefined) {
|
||||
// Normalize value to 0-1 range
|
||||
const range = lane.valueRange.max - lane.valueRange.min;
|
||||
const normalizedValue = (currentValue - lane.valueRange.min) / range;
|
||||
|
||||
const lastValue = lastRecordedValuesRef.current.get(`${track.id}-effect-${effectId}-${paramName}`);
|
||||
|
||||
// Only record if value has changed
|
||||
if (lastValue === undefined || Math.abs(lastValue - normalizedValue) > 0.001) {
|
||||
lastRecordedValuesRef.current.set(`${track.id}-effect-${effectId}-${paramName}`, normalizedValue);
|
||||
onRecordAutomationRef.current(track.id, lane.id, currentTime, normalizedValue);
|
||||
}
|
||||
}
|
||||
} else if (lane.points.length > 0) {
|
||||
// Otherwise play back automation
|
||||
automatedValue = evaluateAutomationLinear(lane.points, currentTime);
|
||||
}
|
||||
|
||||
// Apply the automated value to the effect
|
||||
if (automatedValue !== undefined && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
|
||||
const effectNodeInfo = effectNodesRef.current[index][effectIndex];
|
||||
|
||||
// Convert normalized 0-1 value to actual parameter range
|
||||
const actualValue = lane.valueRange.min + (automatedValue * (lane.valueRange.max - lane.valueRange.min));
|
||||
|
||||
// Update the effect parameter
|
||||
if (effect.parameters) {
|
||||
const updatedParams = { ...effect.parameters, [paramName]: actualValue } as any;
|
||||
updateEffectParameters(audioContextRef.current!, effectNodeInfo, {
|
||||
...effect,
|
||||
parameters: updatedParams
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
automationFrameRef.current = requestAnimationFrame(applyAutomation);
|
||||
}, []);
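// Example of the lane id convention handled above (hypothetical ids for illustration):
//   'effect.reverb-1.wet'.split('.')  ->  ['effect', 'reverb-1', 'wet']
// parts[1] selects the effect instance in the chain, parts[2] the parameter name,
// and the 0-1 lane value is rescaled into lane.valueRange before being applied.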
|
||||
|
||||
const updatePlaybackPosition = useCallback(() => {
|
||||
if (!audioContextRef.current || !isPlayingRef.current) return;
|
||||
|
||||
const elapsed = (audioContextRef.current.currentTime - startTimeRef.current) * playbackRateRef.current;
|
||||
const newTime = pausedAtRef.current + elapsed;
|
||||
|
||||
// Check if loop is enabled and we've reached the loop end
|
||||
if (loopEnabledRef.current && loopEndRef.current > loopStartRef.current && newTime >= loopEndRef.current) {
|
||||
// Loop back to start
|
||||
pausedAtRef.current = loopStartRef.current;
|
||||
startTimeRef.current = audioContextRef.current.currentTime;
|
||||
setCurrentTime(loopStartRef.current);
|
||||
|
||||
// Restart all sources from loop start
|
||||
sourceNodesRef.current.forEach((node, index) => {
|
||||
try {
|
||||
node.stop();
|
||||
node.disconnect();
|
||||
} catch (e) {
|
||||
// Ignore errors from already stopped nodes
|
||||
}
|
||||
});
|
||||
|
||||
// Re-trigger play from loop start
|
||||
const tracks = tracksRef.current;
|
||||
const audioContext = audioContextRef.current;
|
||||
|
||||
// Clear old sources
|
||||
sourceNodesRef.current = [];
|
||||
|
||||
// Create new sources starting from loop start
|
||||
for (const track of tracks) {
|
||||
if (!track.audioBuffer) continue;
|
||||
|
||||
const source = audioContext.createBufferSource();
|
||||
source.buffer = track.audioBuffer;
|
||||
source.playbackRate.value = playbackRateRef.current;
|
||||
|
||||
// Connect to existing nodes (gain, pan, effects are still connected)
|
||||
const trackIndex = tracks.indexOf(track);
|
||||
source.connect(analyserNodesRef.current[trackIndex]);
|
||||
|
||||
// Start from loop start position
|
||||
source.start(0, loopStartRef.current);
|
||||
sourceNodesRef.current.push(source);
|
||||
}
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);
|
||||
return;
|
||||
}
|
||||
|
||||
if (newTime >= duration) {
|
||||
setIsPlaying(false);
|
||||
isMonitoringLevelsRef.current = false;
|
||||
setCurrentTime(0);
|
||||
pausedAtRef.current = 0;
|
||||
setTrackLevels({});
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
if (levelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(levelMonitorFrameRef.current);
|
||||
levelMonitorFrameRef.current = null;
|
||||
}
|
||||
if (automationFrameRef.current) {
|
||||
cancelAnimationFrame(automationFrameRef.current);
|
||||
automationFrameRef.current = null;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
setCurrentTime(newTime);
|
||||
animationFrameRef.current = requestAnimationFrame(updatePlaybackPosition);
|
||||
}, [duration]);
|
||||
|
||||
const play = useCallback(() => {
|
||||
if (tracks.length === 0 || tracks.every(t => !t.audioBuffer)) return;
|
||||
@@ -67,10 +412,30 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
});
|
||||
gainNodesRef.current.forEach(node => node.disconnect());
|
||||
panNodesRef.current.forEach(node => node.disconnect());
|
||||
if (masterGainNodeRef.current) {
|
||||
masterGainNodeRef.current.disconnect();
|
||||
}
|
||||
|
||||
sourceNodesRef.current = [];
|
||||
gainNodesRef.current = [];
|
||||
panNodesRef.current = [];
|
||||
analyserNodesRef.current = [];
|
||||
effectNodesRef.current = [];
|
||||
|
||||
// Create master gain node with analyser for metering
|
||||
const masterGain = audioContext.createGain();
|
||||
masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
|
||||
|
||||
const masterAnalyser = audioContext.createAnalyser();
|
||||
masterAnalyser.fftSize = 256;
|
||||
masterAnalyser.smoothingTimeConstant = 0.8;
|
||||
|
||||
// Connect: masterGain -> analyser -> destination
|
||||
masterGain.connect(masterAnalyser);
|
||||
masterAnalyser.connect(audioContext.destination);
|
||||
|
||||
masterGainNodeRef.current = masterGain;
|
||||
masterAnalyserRef.current = masterAnalyser;
|
||||
|
||||
// Create audio graph for each track
|
||||
for (const track of tracks) {
|
||||
@@ -81,6 +446,9 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
|
||||
const gainNode = audioContext.createGain();
|
||||
const panNode = audioContext.createStereoPanner();
|
||||
const analyserNode = audioContext.createAnalyser();
|
||||
analyserNode.fftSize = 256;
|
||||
analyserNode.smoothingTimeConstant = 0.8;
|
||||
|
||||
// Set gain based on track volume and solo/mute state
|
||||
const trackGain = getTrackGain(track, tracks);
|
||||
@@ -89,10 +457,26 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
// Set pan
|
||||
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
|
||||
|
||||
// Connect: source -> analyser -> gain -> pan -> effects -> master gain -> destination
|
||||
// Analyser is before gain so it shows raw audio levels independent of volume fader
|
||||
source.connect(analyserNode);
|
||||
analyserNode.connect(gainNode);
|
||||
gainNode.connect(panNode);
|
||||
|
||||
// Apply effect chain
|
||||
console.log('[MultiTrackPlayer] Applying effect chain for track:', track.name);
|
||||
console.log('[MultiTrackPlayer] Effect chain ID:', track.effectChain.id);
|
||||
console.log('[MultiTrackPlayer] Effect chain name:', track.effectChain.name);
|
||||
console.log('[MultiTrackPlayer] Number of effects:', track.effectChain.effects.length);
|
||||
console.log('[MultiTrackPlayer] Effects:', track.effectChain.effects);
|
||||
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
|
||||
|
||||
// Connect to master gain
|
||||
outputNode.connect(masterGain);
|
||||
console.log('[MultiTrackPlayer] Effect output connected with', effectNodes.length, 'effect nodes');
|
||||
|
||||
// Set playback rate
|
||||
source.playbackRate.value = playbackRateRef.current;
|
||||
|
||||
// Start playback from current position
|
||||
source.start(0, pausedAtRef.current);
|
||||
@@ -101,21 +485,34 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
sourceNodesRef.current.push(source);
|
||||
gainNodesRef.current.push(gainNode);
|
||||
panNodesRef.current.push(panNode);
|
||||
analyserNodesRef.current.push(analyserNode);
|
||||
effectNodesRef.current.push(effectNodes);
|
||||
|
||||
// Handle ended event
|
||||
source.onended = () => {
|
||||
if (pausedAtRef.current + (audioContext.currentTime - startTimeRef.current) >= duration) {
|
||||
setIsPlaying(false);
|
||||
isMonitoringLevelsRef.current = false;
|
||||
setCurrentTime(0);
|
||||
pausedAtRef.current = 0;
|
||||
setTrackLevels({});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
startTimeRef.current = audioContext.currentTime;
|
||||
isPlayingRef.current = true;
|
||||
setIsPlaying(true);
|
||||
updatePlaybackPosition();
|
||||
|
||||
// Start level monitoring
|
||||
isMonitoringLevelsRef.current = true;
|
||||
monitorPlaybackLevels();
|
||||
monitorMasterLevels();
|
||||
|
||||
// Start automation
|
||||
applyAutomation();
|
||||
}, [tracks, duration, masterVolume, updatePlaybackPosition, monitorPlaybackLevels, monitorMasterLevels, applyAutomation]);
|
||||
|
||||
const pause = useCallback(() => {
|
||||
if (!audioContextRef.current || !isPlaying) return;
|
||||
@@ -135,18 +532,42 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
pausedAtRef.current = Math.min(pausedAtRef.current + elapsed, duration);
|
||||
setCurrentTime(pausedAtRef.current);
|
||||
|
||||
isPlayingRef.current = false;
|
||||
setIsPlaying(false);
|
||||
|
||||
// Stop level monitoring
|
||||
isMonitoringLevelsRef.current = false;
|
||||
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
|
||||
if (levelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(levelMonitorFrameRef.current);
|
||||
levelMonitorFrameRef.current = null;
|
||||
}
|
||||
|
||||
if (masterLevelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(masterLevelMonitorFrameRef.current);
|
||||
masterLevelMonitorFrameRef.current = null;
|
||||
}
|
||||
|
||||
if (automationFrameRef.current) {
|
||||
cancelAnimationFrame(automationFrameRef.current);
|
||||
automationFrameRef.current = null;
|
||||
}
|
||||
|
||||
// Clear track levels
|
||||
setTrackLevels({});
|
||||
}, [isPlaying, duration]);
|
||||
|
||||
const stop = useCallback(() => {
|
||||
pause();
|
||||
pausedAtRef.current = 0;
|
||||
setCurrentTime(0);
|
||||
// Clear last recorded values when stopping
|
||||
lastRecordedValuesRef.current.clear();
|
||||
}, [pause]);
|
||||
|
||||
const seek = useCallback((time: number) => {
|
||||
@@ -174,7 +595,7 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
}
|
||||
}, [isPlaying, play, pause]);
|
||||
|
||||
// Update gain/pan when tracks change (simple updates that don't require graph rebuild)
|
||||
useEffect(() => {
|
||||
if (!isPlaying || !audioContextRef.current) return;
|
||||
|
||||
@@ -196,12 +617,269 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
});
|
||||
}, [tracks, isPlaying]);
|
||||
|
||||
// Track effect chain structure to detect add/remove operations
|
||||
const previousEffectStructureRef = useRef<string | null>(null);
|
||||
|
||||
// Detect effect chain structure changes (add/remove/reorder) and restart
|
||||
useEffect(() => {
|
||||
if (!isPlaying || !audioContextRef.current) return;
|
||||
|
||||
// Create a signature of the current effect structure (IDs and count)
|
||||
const currentStructure = tracks.map(track =>
|
||||
track.effectChain.effects.map(e => e.id).join(',')
|
||||
).join('|');
|
||||
|
||||
// If structure changed (effects added/removed/reordered) while playing, restart
|
||||
// Don't restart if tracks is empty (intermediate state during updates)
|
||||
if (previousEffectStructureRef.current !== null &&
|
||||
previousEffectStructureRef.current !== currentStructure &&
|
||||
tracks.length > 0) {
|
||||
console.log('[useMultiTrackPlayer] Effect chain structure changed, restarting...');
|
||||
|
||||
// Update the reference immediately to prevent re-triggering
|
||||
previousEffectStructureRef.current = currentStructure;
|
||||
|
||||
// Update tracksRef with current tracks BEFORE setTimeout
|
||||
tracksRef.current = tracks;
|
||||
|
||||
// Save current position
|
||||
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
|
||||
const currentPos = pausedAtRef.current + elapsed;
|
||||
|
||||
// Stop all source nodes
|
||||
sourceNodesRef.current.forEach(node => {
|
||||
try {
|
||||
node.onended = null;
|
||||
node.stop();
|
||||
node.disconnect();
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
});
|
||||
|
||||
// Update position
|
||||
pausedAtRef.current = currentPos;
|
||||
setCurrentTime(currentPos);
|
||||
setIsPlaying(false);
|
||||
|
||||
// Clear animation frame
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
|
||||
// Restart after a brief delay
|
||||
setTimeout(() => {
|
||||
// Use tracksRef.current to get the latest tracks, not the stale closure
|
||||
const latestTracks = tracksRef.current;
|
||||
|
||||
if (latestTracks.length === 0 || latestTracks.every(t => !t.audioBuffer)) return;
|
||||
|
||||
const audioContext = getAudioContext();
|
||||
audioContextRef.current = audioContext;
|
||||
|
||||
// Disconnect old nodes
|
||||
gainNodesRef.current.forEach(node => node.disconnect());
|
||||
panNodesRef.current.forEach(node => node.disconnect());
|
||||
effectNodesRef.current.forEach(trackEffects => {
|
||||
trackEffects.forEach(effectNodeInfo => {
|
||||
if (effectNodeInfo.node) {
|
||||
try {
|
||||
effectNodeInfo.node.disconnect();
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
if (effectNodeInfo.dryGain) effectNodeInfo.dryGain.disconnect();
|
||||
if (effectNodeInfo.wetGain) effectNodeInfo.wetGain.disconnect();
|
||||
});
|
||||
});
|
||||
if (masterGainNodeRef.current) {
|
||||
masterGainNodeRef.current.disconnect();
|
||||
}
|
||||
|
||||
// Reset refs
|
||||
sourceNodesRef.current = [];
|
||||
gainNodesRef.current = [];
|
||||
panNodesRef.current = [];
|
||||
analyserNodesRef.current = [];
|
||||
effectNodesRef.current = [];
|
||||
|
||||
// Create master gain node
|
||||
const masterGain = audioContext.createGain();
|
||||
masterGain.gain.setValueAtTime(masterVolume, audioContext.currentTime);
|
||||
masterGain.connect(audioContext.destination);
|
||||
masterGainNodeRef.current = masterGain;
|
||||
|
||||
// Create audio graph for each track
|
||||
for (const track of latestTracks) {
|
||||
if (!track.audioBuffer) continue;
|
||||
|
||||
const source = audioContext.createBufferSource();
|
||||
source.buffer = track.audioBuffer;
|
||||
|
||||
const gainNode = audioContext.createGain();
|
||||
const panNode = audioContext.createStereoPanner();
|
||||
const analyserNode = audioContext.createAnalyser();
|
||||
analyserNode.fftSize = 256;
|
||||
analyserNode.smoothingTimeConstant = 0.8;
|
||||
|
||||
// Set gain based on track volume and solo/mute state
|
||||
const trackGain = getTrackGain(track, latestTracks);
|
||||
gainNode.gain.setValueAtTime(trackGain, audioContext.currentTime);
|
||||
|
||||
// Set pan
|
||||
panNode.pan.setValueAtTime(track.pan, audioContext.currentTime);
|
||||
|
||||
// Connect: source -> analyser -> gain -> pan -> effects -> master gain -> destination
|
||||
// Analyser is before gain so it shows raw audio levels independent of volume fader
|
||||
source.connect(analyserNode);
|
||||
analyserNode.connect(gainNode);
|
||||
gainNode.connect(panNode);
|
||||
|
||||
// Apply effect chain
|
||||
const { outputNode, effectNodes } = applyEffectChain(audioContext, panNode, track.effectChain);
|
||||
outputNode.connect(masterGain);
|
||||
|
||||
// Start playback from current position
|
||||
source.start(0, pausedAtRef.current);
|
||||
|
||||
// Store references
|
||||
sourceNodesRef.current.push(source);
|
||||
gainNodesRef.current.push(gainNode);
|
||||
panNodesRef.current.push(panNode);
|
||||
analyserNodesRef.current.push(analyserNode);
|
||||
effectNodesRef.current.push(effectNodes);
|
||||
|
||||
// Handle ended event
|
||||
source.onended = () => {
|
||||
if (pausedAtRef.current + (audioContext.currentTime - startTimeRef.current) >= duration) {
|
||||
setIsPlaying(false);
|
||||
isMonitoringLevelsRef.current = false;
|
||||
setCurrentTime(0);
|
||||
pausedAtRef.current = 0;
|
||||
setTrackLevels({});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
startTimeRef.current = audioContext.currentTime;
|
||||
setIsPlaying(true);
|
||||
|
||||
// Start level monitoring
|
||||
isMonitoringLevelsRef.current = true;
|
||||
|
||||
// Start animation frame for position updates
|
||||
const updatePosition = () => {
|
||||
if (!audioContextRef.current) return;
|
||||
|
||||
const elapsed = audioContextRef.current.currentTime - startTimeRef.current;
|
||||
const newTime = pausedAtRef.current + elapsed;
|
||||
|
||||
if (newTime >= duration) {
|
||||
setIsPlaying(false);
|
||||
isMonitoringLevelsRef.current = false;
|
||||
setCurrentTime(0);
|
||||
pausedAtRef.current = 0;
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
if (levelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(levelMonitorFrameRef.current);
|
||||
levelMonitorFrameRef.current = null;
|
||||
}
|
||||
if (masterLevelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(masterLevelMonitorFrameRef.current);
|
||||
masterLevelMonitorFrameRef.current = null;
|
||||
}
|
||||
if (automationFrameRef.current) {
|
||||
cancelAnimationFrame(automationFrameRef.current);
|
||||
automationFrameRef.current = null;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
setCurrentTime(newTime);
|
||||
animationFrameRef.current = requestAnimationFrame(updatePosition);
|
||||
};
|
||||
updatePosition();
|
||||
monitorPlaybackLevels();
|
||||
monitorMasterLevels();
|
||||
applyAutomation();
|
||||
}, 10);
|
||||
}
|
||||
|
||||
previousEffectStructureRef.current = currentStructure;
|
||||
}, [tracks, isPlaying, duration, masterVolume, monitorPlaybackLevels, monitorMasterLevels, applyAutomation]);
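// Sketch of the structure signature compared above (track/effect ids are made up):
//   track A with effects ['eq-1', 'delay-2'] and track B with ['comp-3']
//   -> 'eq-1,delay-2|comp-3'
// Adding, removing or reordering an effect changes this string and triggers the graph
// rebuild; parameter tweaks alone leave it unchanged.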
|
||||
|
||||
// Stop playback when all tracks are deleted
|
||||
useEffect(() => {
|
||||
if (!isPlaying) return;
|
||||
|
||||
// If tracks become empty or all tracks have no audio buffer, stop playback
|
||||
if (tracks.length === 0 || tracks.every(t => !t.audioBuffer)) {
|
||||
console.log('[useMultiTrackPlayer] All tracks deleted, stopping playback');
|
||||
stop();
|
||||
}
|
||||
}, [tracks, isPlaying, stop]);
|
||||
|
||||
// Update effect parameters and bypass state in real-time
|
||||
useEffect(() => {
|
||||
if (!isPlaying || !audioContextRef.current) return;
|
||||
|
||||
tracks.forEach((track, trackIndex) => {
|
||||
const effectNodes = effectNodesRef.current[trackIndex];
|
||||
if (!effectNodes) return;
|
||||
|
||||
// Only update if we have the same number of effects (no add/remove)
|
||||
if (effectNodes.length !== track.effectChain.effects.length) return;
|
||||
|
||||
track.effectChain.effects.forEach((effect, effectIndex) => {
|
||||
const effectNodeInfo = effectNodes[effectIndex];
|
||||
if (!effectNodeInfo) return;
|
||||
|
||||
// Update bypass state
|
||||
if (effect.enabled !== effectNodeInfo.effect.enabled) {
|
||||
toggleEffectBypass(audioContextRef.current!, effectNodeInfo, effect.enabled);
|
||||
effectNodeInfo.effect.enabled = effect.enabled;
|
||||
}
|
||||
|
||||
// Update parameters (only works for certain effect types)
|
||||
if (JSON.stringify(effect.parameters) !== JSON.stringify(effectNodeInfo.effect.parameters)) {
|
||||
updateEffectParameters(audioContextRef.current!, effectNodeInfo, effect);
|
||||
effectNodeInfo.effect.parameters = effect.parameters;
|
||||
}
|
||||
});
|
||||
});
|
||||
}, [tracks, isPlaying]);
|
||||
|
||||
// Update master volume when it changes
|
||||
useEffect(() => {
|
||||
if (!isPlaying || !audioContextRef.current || !masterGainNodeRef.current) return;
|
||||
|
||||
masterGainNodeRef.current.gain.setValueAtTime(
|
||||
masterVolume,
|
||||
audioContextRef.current.currentTime
|
||||
);
|
||||
}, [masterVolume, isPlaying]);
|
||||
|
||||
// Cleanup on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
isMonitoringLevelsRef.current = false;
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
if (levelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(levelMonitorFrameRef.current);
|
||||
}
|
||||
if (masterLevelMonitorFrameRef.current) {
|
||||
cancelAnimationFrame(masterLevelMonitorFrameRef.current);
|
||||
}
|
||||
if (automationFrameRef.current) {
|
||||
cancelAnimationFrame(automationFrameRef.current);
|
||||
}
|
||||
sourceNodesRef.current.forEach(node => {
|
||||
try {
|
||||
node.stop();
|
||||
@@ -212,17 +890,66 @@ export function useMultiTrackPlayer(tracks: Track[]) {
|
||||
});
|
||||
gainNodesRef.current.forEach(node => node.disconnect());
|
||||
panNodesRef.current.forEach(node => node.disconnect());
|
||||
analyserNodesRef.current.forEach(node => node.disconnect());
|
||||
if (masterGainNodeRef.current) {
|
||||
masterGainNodeRef.current.disconnect();
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const resetClipIndicator = useCallback(() => {
|
||||
setMasterIsClipping(false);
|
||||
}, []);
|
||||
|
||||
const toggleLoop = useCallback(() => {
|
||||
setLoopEnabled(prev => !prev);
|
||||
}, []);
|
||||
|
||||
const setLoopPoints = useCallback((start: number, end: number) => {
|
||||
setLoopStart(Math.max(0, start));
|
||||
setLoopEnd(Math.min(duration, Math.max(start, end)));
|
||||
}, [duration]);
|
||||
|
||||
const setLoopFromSelection = useCallback((selectionStart: number, selectionEnd: number) => {
|
||||
if (selectionStart < selectionEnd) {
|
||||
setLoopPoints(selectionStart, selectionEnd);
|
||||
setLoopEnabled(true);
|
||||
}
|
||||
}, [setLoopPoints]);
|
||||
|
||||
const changePlaybackRate = useCallback((rate: number) => {
|
||||
// Clamp rate between 0.25x and 2x
|
||||
const clampedRate = Math.max(0.25, Math.min(2.0, rate));
|
||||
setPlaybackRate(clampedRate);
|
||||
|
||||
// Update playback rate on all active source nodes
|
||||
sourceNodesRef.current.forEach(source => {
|
||||
source.playbackRate.value = clampedRate;
|
||||
});
|
||||
}, []);
|
||||
|
||||
return {
|
||||
isPlaying,
|
||||
currentTime,
|
||||
duration,
|
||||
trackLevels,
|
||||
masterPeakLevel,
|
||||
masterRmsLevel,
|
||||
masterIsClipping,
|
||||
masterAnalyser: masterAnalyserRef.current,
|
||||
resetClipIndicator,
|
||||
play,
|
||||
pause,
|
||||
stop,
|
||||
seek,
|
||||
togglePlayPause,
|
||||
loopEnabled,
|
||||
loopStart,
|
||||
loopEnd,
|
||||
toggleLoop,
|
||||
setLoopPoints,
|
||||
setLoopFromSelection,
|
||||
playbackRate,
|
||||
changePlaybackRate,
|
||||
};
|
||||
}
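// Minimal usage sketch (component wiring and handler body are illustrative, not from this repo):
//   const { isPlaying, currentTime, play, pause, seek, trackLevels } =
//     useMultiTrackPlayer(tracks, masterVolume, (trackId, laneId, time, value) => {
//       // persist the recorded automation point in the owning store/hook
//     });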
|
||||
|
||||
353
lib/hooks/useRecording.ts
Normal file
@@ -0,0 +1,353 @@
|
||||
'use client';
|
||||
|
||||
import * as React from 'react';
|
||||
|
||||
export interface RecordingState {
|
||||
isRecording: boolean;
|
||||
isPaused: boolean;
|
||||
duration: number;
|
||||
inputLevel: number;
|
||||
}
|
||||
|
||||
export interface RecordingSettings {
|
||||
inputGain: number; // 0.0 to 2.0 (1.0 = unity)
|
||||
recordMono: boolean; // true = mono, false = stereo
|
||||
sampleRate: number; // target sample rate (44100, 48000, etc.)
|
||||
}
|
||||
|
||||
export interface UseRecordingReturn {
|
||||
state: RecordingState;
|
||||
settings: RecordingSettings;
|
||||
startRecording: () => Promise<void>;
|
||||
stopRecording: () => Promise<AudioBuffer | null>;
|
||||
pauseRecording: () => void;
|
||||
resumeRecording: () => void;
|
||||
getInputDevices: () => Promise<MediaDeviceInfo[]>;
|
||||
selectInputDevice: (deviceId: string) => Promise<void>;
|
||||
requestPermission: () => Promise<boolean>;
|
||||
setInputGain: (gain: number) => void;
|
||||
setRecordMono: (mono: boolean) => void;
|
||||
setSampleRate: (sampleRate: number) => void;
|
||||
}
|
||||
|
||||
export function useRecording(): UseRecordingReturn {
|
||||
const [state, setState] = React.useState<RecordingState>({
|
||||
isRecording: false,
|
||||
isPaused: false,
|
||||
duration: 0,
|
||||
inputLevel: 0,
|
||||
});
|
||||
|
||||
const [settings, setSettings] = React.useState<RecordingSettings>({
|
||||
inputGain: 1.0,
|
||||
recordMono: false,
|
||||
sampleRate: 48000,
|
||||
});
|
||||
|
||||
const mediaRecorderRef = React.useRef<MediaRecorder | null>(null);
|
||||
const audioContextRef = React.useRef<AudioContext | null>(null);
|
||||
const analyserRef = React.useRef<AnalyserNode | null>(null);
|
||||
const gainNodeRef = React.useRef<GainNode | null>(null);
|
||||
const streamRef = React.useRef<MediaStream | null>(null);
|
||||
const chunksRef = React.useRef<Blob[]>([]);
|
||||
const startTimeRef = React.useRef<number>(0);
|
||||
const animationFrameRef = React.useRef<number>(0);
|
||||
const selectedDeviceIdRef = React.useRef<string>('');
|
||||
const isMonitoringRef = React.useRef<boolean>(false);
|
||||
|
||||
// Request microphone permission
|
||||
const requestPermission = React.useCallback(async (): Promise<boolean> => {
|
||||
try {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
||||
stream.getTracks().forEach((track) => track.stop());
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('Microphone permission denied:', error);
|
||||
return false;
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Get available input devices
|
||||
const getInputDevices = React.useCallback(async (): Promise<MediaDeviceInfo[]> => {
|
||||
try {
|
||||
const devices = await navigator.mediaDevices.enumerateDevices();
|
||||
return devices.filter((device) => device.kind === 'audioinput');
|
||||
} catch (error) {
|
||||
console.error('Failed to enumerate devices:', error);
|
||||
return [];
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Select input device
|
||||
const selectInputDevice = React.useCallback(async (deviceId: string): Promise<void> => {
|
||||
selectedDeviceIdRef.current = deviceId;
|
||||
}, []);
|
||||
|
||||
// Convert linear amplitude to dB scale normalized to 0-1 range
|
||||
const linearToDbScale = React.useCallback((linear: number): number => {
|
||||
if (linear === 0) return 0;
|
||||
|
||||
// Convert to dB (20 * log10(linear))
|
||||
const db = 20 * Math.log10(linear);
|
||||
|
||||
// Normalize -60dB to 0dB range to 0-1
|
||||
// -60dB or lower = 0%, 0dB = 100%
|
||||
const minDb = -60;
|
||||
const maxDb = 0;
|
||||
const normalized = (db - minDb) / (maxDb - minDb);
|
||||
|
||||
// Clamp to 0-1 range
|
||||
return Math.max(0, Math.min(1, normalized));
|
||||
}, []);
|
||||
|
||||
// Monitor input level
|
||||
const monitorInputLevel = React.useCallback(() => {
|
||||
if (!analyserRef.current) return;
|
||||
|
||||
const analyser = analyserRef.current;
|
||||
const dataArray = new Float32Array(analyser.fftSize);
|
||||
|
||||
const updateLevel = () => {
|
||||
if (!isMonitoringRef.current) return;
|
||||
|
||||
analyser.getFloatTimeDomainData(dataArray);
|
||||
|
||||
// Calculate peak level using float data (-1 to +1 range)
|
||||
let peak = 0;
|
||||
for (let i = 0; i < dataArray.length; i++) {
|
||||
const abs = Math.abs(dataArray[i]);
|
||||
if (abs > peak) {
|
||||
peak = abs;
|
||||
}
|
||||
}
|
||||
|
||||
// Convert linear peak to logarithmic dB scale
|
||||
const dbLevel = linearToDbScale(peak);
|
||||
|
||||
setState((prev) => ({ ...prev, inputLevel: dbLevel }));
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(updateLevel);
|
||||
};
|
||||
|
||||
updateLevel();
|
||||
}, [linearToDbScale]);
|
||||
|
||||
// Start recording
|
||||
const startRecording = React.useCallback(async (): Promise<void> => {
|
||||
try {
|
||||
// Get user media with selected device
|
||||
const constraints: MediaStreamConstraints = {
|
||||
audio: selectedDeviceIdRef.current
|
||||
? { deviceId: { exact: selectedDeviceIdRef.current } }
|
||||
: true,
|
||||
};
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia(constraints);
|
||||
streamRef.current = stream;
|
||||
|
||||
// Create audio context with target sample rate
|
||||
const audioContext = new AudioContext({ sampleRate: settings.sampleRate });
|
||||
audioContextRef.current = audioContext;
|
||||
|
||||
const source = audioContext.createMediaStreamSource(stream);
|
||||
|
||||
// Create gain node for input gain control
|
||||
const gainNode = audioContext.createGain();
|
||||
gainNode.gain.value = settings.inputGain;
|
||||
gainNodeRef.current = gainNode;
|
||||
|
||||
// Create analyser for level monitoring
|
||||
const analyser = audioContext.createAnalyser();
|
||||
analyser.fftSize = 256;
|
||||
analyser.smoothingTimeConstant = 0.3;
|
||||
|
||||
// Connect: source -> gain -> analyser
|
||||
source.connect(gainNode);
|
||||
gainNode.connect(analyser);
|
||||
analyserRef.current = analyser;
|
||||
|
||||
// Create MediaRecorder
|
||||
const mediaRecorder = new MediaRecorder(stream);
|
||||
mediaRecorderRef.current = mediaRecorder;
|
||||
chunksRef.current = [];
|
||||
|
||||
mediaRecorder.ondataavailable = (event) => {
|
||||
if (event.data.size > 0) {
|
||||
chunksRef.current.push(event.data);
|
||||
}
|
||||
};
|
||||
|
||||
// Start recording
|
||||
mediaRecorder.start();
|
||||
startTimeRef.current = Date.now();
|
||||
|
||||
setState({
|
||||
isRecording: true,
|
||||
isPaused: false,
|
||||
duration: 0,
|
||||
inputLevel: 0,
|
||||
});
|
||||
|
||||
// Start monitoring input level
|
||||
isMonitoringRef.current = true;
|
||||
monitorInputLevel();
|
||||
} catch (error) {
|
||||
console.error('Failed to start recording:', error);
|
||||
throw error;
|
||||
}
|
||||
}, [monitorInputLevel, settings.sampleRate, settings.inputGain]);
|
||||
|
||||
// Stop recording and return AudioBuffer
|
||||
const stopRecording = React.useCallback(async (): Promise<AudioBuffer | null> => {
|
||||
return new Promise((resolve) => {
|
||||
if (!mediaRecorderRef.current || !streamRef.current) {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const mediaRecorder = mediaRecorderRef.current;
|
||||
|
||||
mediaRecorder.onstop = async () => {
|
||||
// Stop all tracks
|
||||
streamRef.current?.getTracks().forEach((track) => track.stop());
|
||||
|
||||
// Create blob from recorded chunks
|
||||
const blob = new Blob(chunksRef.current, { type: 'audio/webm' });
|
||||
|
||||
// Convert blob to AudioBuffer
|
||||
try {
|
||||
const arrayBuffer = await blob.arrayBuffer();
|
||||
const audioContext = new AudioContext();
|
||||
let audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
|
||||
// Convert to mono if requested
|
||||
if (settings.recordMono && audioBuffer.numberOfChannels > 1) {
|
||||
const monoBuffer = audioContext.createBuffer(
|
||||
1,
|
||||
audioBuffer.length,
|
||||
audioBuffer.sampleRate
|
||||
);
|
||||
const monoData = monoBuffer.getChannelData(0);
|
||||
|
||||
// Mix all channels to mono by averaging
|
||||
for (let i = 0; i < audioBuffer.length; i++) {
|
||||
let sum = 0;
|
||||
for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
|
||||
sum += audioBuffer.getChannelData(channel)[i];
|
||||
}
|
||||
monoData[i] = sum / audioBuffer.numberOfChannels;
|
||||
}
|
||||
|
||||
audioBuffer = monoBuffer;
|
||||
}
|
||||
|
||||
// Clean up
|
||||
isMonitoringRef.current = false;
|
||||
if (audioContextRef.current) {
|
||||
await audioContextRef.current.close();
|
||||
}
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
|
||||
setState({
|
||||
isRecording: false,
|
||||
isPaused: false,
|
||||
duration: 0,
|
||||
inputLevel: 0,
|
||||
});
|
||||
|
||||
resolve(audioBuffer);
|
||||
} catch (error) {
|
||||
console.error('Failed to decode recorded audio:', error);
|
||||
resolve(null);
|
||||
}
|
||||
};
|
||||
|
||||
mediaRecorder.stop();
|
||||
});
|
||||
}, [settings.recordMono]);
|
||||
|
||||
// Pause recording
|
||||
const pauseRecording = React.useCallback(() => {
|
||||
if (mediaRecorderRef.current && state.isRecording && !state.isPaused) {
|
||||
mediaRecorderRef.current.pause();
|
||||
setState((prev) => ({ ...prev, isPaused: true }));
|
||||
|
||||
isMonitoringRef.current = false;
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
}
|
||||
}, [state.isRecording, state.isPaused]);
|
||||
|
||||
// Resume recording
|
||||
const resumeRecording = React.useCallback(() => {
|
||||
if (mediaRecorderRef.current && state.isRecording && state.isPaused) {
|
||||
mediaRecorderRef.current.resume();
|
||||
setState((prev) => ({ ...prev, isPaused: false }));
|
||||
isMonitoringRef.current = true;
|
||||
monitorInputLevel();
|
||||
}
|
||||
}, [state.isRecording, state.isPaused, monitorInputLevel]);
|
||||
|
||||
// Update duration
|
||||
React.useEffect(() => {
|
||||
if (!state.isRecording || state.isPaused) return;
|
||||
|
||||
const interval = setInterval(() => {
|
||||
const elapsed = (Date.now() - startTimeRef.current) / 1000;
|
||||
setState((prev) => ({ ...prev, duration: elapsed }));
|
||||
}, 100);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [state.isRecording, state.isPaused]);
|
||||
|
||||
// Cleanup on unmount
|
||||
React.useEffect(() => {
|
||||
return () => {
|
||||
isMonitoringRef.current = false;
|
||||
if (streamRef.current) {
|
||||
streamRef.current.getTracks().forEach((track) => track.stop());
|
||||
}
|
||||
if (audioContextRef.current) {
|
||||
audioContextRef.current.close();
|
||||
}
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Settings setters
|
||||
const setInputGain = React.useCallback((gain: number) => {
|
||||
setSettings((prev) => ({ ...prev, inputGain: Math.max(0, Math.min(2, gain)) }));
|
||||
// Update gain node if recording
|
||||
if (gainNodeRef.current) {
|
||||
gainNodeRef.current.gain.value = Math.max(0, Math.min(2, gain));
|
||||
}
|
||||
}, []);
|
||||
|
||||
const setRecordMono = React.useCallback((mono: boolean) => {
|
||||
setSettings((prev) => ({ ...prev, recordMono: mono }));
|
||||
}, []);
|
||||
|
||||
const setSampleRate = React.useCallback((sampleRate: number) => {
|
||||
setSettings((prev) => ({ ...prev, sampleRate }));
|
||||
}, []);
|
||||
|
||||
return {
|
||||
state,
|
||||
settings,
|
||||
startRecording,
|
||||
stopRecording,
|
||||
pauseRecording,
|
||||
resumeRecording,
|
||||
getInputDevices,
|
||||
selectInputDevice,
|
||||
requestPermission,
|
||||
setInputGain,
|
||||
setRecordMono,
|
||||
setSampleRate,
|
||||
};
|
||||
}
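// Minimal usage sketch (assumed wiring, not part of the original file):
//   const recording = useRecording();
//   await recording.startRecording();
//   // ...later
//   const buffer = await recording.stopRecording();
//   if (buffer) addTrackFromBuffer(buffer, 'Recording'); // hand off to the track list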
|
||||
152
lib/hooks/useSettings.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
|
||||
export interface AudioSettings {
|
||||
bufferSize: number; // 256, 512, 1024, 2048, 4096
|
||||
sampleRate: number; // 44100, 48000, 96000
|
||||
autoNormalizeOnImport: boolean;
|
||||
}
|
||||
|
||||
export interface UISettings {
|
||||
theme: 'dark' | 'light' | 'auto';
|
||||
fontSize: 'small' | 'medium' | 'large';
|
||||
}
|
||||
|
||||
export interface EditorSettings {
|
||||
autoSaveInterval: number; // seconds, 0 = disabled
|
||||
undoHistoryLimit: number; // 10-200
|
||||
snapToGrid: boolean;
|
||||
gridResolution: number; // seconds
|
||||
defaultZoom: number; // 1-20
|
||||
}
|
||||
|
||||
export interface PerformanceSettings {
|
||||
peakCalculationQuality: 'low' | 'medium' | 'high';
|
||||
waveformRenderingQuality: 'low' | 'medium' | 'high';
|
||||
enableSpectrogram: boolean;
|
||||
maxFileSizeMB: number; // 100-1000
|
||||
}
|
||||
|
||||
export interface Settings {
|
||||
audio: AudioSettings;
|
||||
ui: UISettings;
|
||||
editor: EditorSettings;
|
||||
performance: PerformanceSettings;
|
||||
}
|
||||
|
||||
const DEFAULT_SETTINGS: Settings = {
|
||||
audio: {
|
||||
bufferSize: 2048,
|
||||
sampleRate: 48000,
|
||||
autoNormalizeOnImport: false,
|
||||
},
|
||||
ui: {
|
||||
theme: 'dark',
|
||||
fontSize: 'medium',
|
||||
},
|
||||
editor: {
|
||||
autoSaveInterval: 3, // 3 seconds
|
||||
undoHistoryLimit: 50,
|
||||
snapToGrid: false,
|
||||
gridResolution: 1.0, // 1 second
|
||||
defaultZoom: 1,
|
||||
},
|
||||
performance: {
|
||||
peakCalculationQuality: 'high',
|
||||
waveformRenderingQuality: 'high',
|
||||
enableSpectrogram: true,
|
||||
maxFileSizeMB: 500,
|
||||
},
|
||||
};
|
||||
|
||||
const SETTINGS_STORAGE_KEY = 'audio-editor-settings';
|
||||
|
||||
function loadSettings(): Settings {
|
||||
if (typeof window === 'undefined') return DEFAULT_SETTINGS;
|
||||
|
||||
try {
|
||||
const stored = localStorage.getItem(SETTINGS_STORAGE_KEY);
|
||||
if (!stored) return DEFAULT_SETTINGS;
|
||||
|
||||
const parsed = JSON.parse(stored);
|
||||
// Merge with defaults to handle new settings added in updates
|
||||
return {
|
||||
audio: { ...DEFAULT_SETTINGS.audio, ...parsed.audio },
|
||||
ui: { ...DEFAULT_SETTINGS.ui, ...parsed.ui },
|
||||
editor: { ...DEFAULT_SETTINGS.editor, ...parsed.editor },
|
||||
performance: { ...DEFAULT_SETTINGS.performance, ...parsed.performance },
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Failed to load settings from localStorage:', error);
|
||||
return DEFAULT_SETTINGS;
|
||||
}
|
||||
}
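// Example of the merge behaviour (illustrative): if localStorage only contains
//   { "audio": { "sampleRate": 44100 } }
// the spreads above fill in bufferSize 2048 and autoNormalizeOnImport false from
// DEFAULT_SETTINGS, so settings introduced in later versions get sane defaults
// instead of coming back undefined.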
|
||||
|
||||
function saveSettings(settings: Settings): void {
|
||||
if (typeof window === 'undefined') return;
|
||||
|
||||
try {
|
||||
localStorage.setItem(SETTINGS_STORAGE_KEY, JSON.stringify(settings));
|
||||
} catch (error) {
|
||||
console.error('Failed to save settings to localStorage:', error);
|
||||
}
|
||||
}
|
||||
|
||||
export function useSettings() {
|
||||
const [settings, setSettings] = useState<Settings>(loadSettings);
|
||||
|
||||
// Save to localStorage whenever settings change
|
||||
useEffect(() => {
|
||||
saveSettings(settings);
|
||||
}, [settings]);
|
||||
|
||||
const updateAudioSettings = useCallback((updates: Partial<AudioSettings>) => {
|
||||
setSettings((prev) => ({
|
||||
...prev,
|
||||
audio: { ...prev.audio, ...updates },
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const updateUISettings = useCallback((updates: Partial<UISettings>) => {
|
||||
setSettings((prev) => ({
|
||||
...prev,
|
||||
ui: { ...prev.ui, ...updates },
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const updateEditorSettings = useCallback((updates: Partial<EditorSettings>) => {
|
||||
setSettings((prev) => ({
|
||||
...prev,
|
||||
editor: { ...prev.editor, ...updates },
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const updatePerformanceSettings = useCallback((updates: Partial<PerformanceSettings>) => {
|
||||
setSettings((prev) => ({
|
||||
...prev,
|
||||
performance: { ...prev.performance, ...updates },
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const resetSettings = useCallback(() => {
|
||||
setSettings(DEFAULT_SETTINGS);
|
||||
}, []);
|
||||
|
||||
const resetCategory = useCallback((category: keyof Settings) => {
|
||||
setSettings((prev) => ({
|
||||
...prev,
|
||||
[category]: DEFAULT_SETTINGS[category],
|
||||
}));
|
||||
}, []);
|
||||
|
||||
return {
|
||||
settings,
|
||||
updateAudioSettings,
|
||||
updateUISettings,
|
||||
updateEditorSettings,
|
||||
updatePerformanceSettings,
|
||||
resetSettings,
|
||||
resetCategory,
|
||||
};
|
||||
}
|
||||
193
lib/storage/db.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
/**
|
||||
* IndexedDB database for project storage
|
||||
*/
|
||||
|
||||
export const DB_NAME = 'audio-editor-db';
|
||||
export const DB_VERSION = 1;
|
||||
|
||||
export interface ProjectMetadata {
|
||||
id: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
createdAt: number;
|
||||
updatedAt: number;
|
||||
duration: number; // Total project duration in seconds
|
||||
sampleRate: number;
|
||||
trackCount: number;
|
||||
thumbnail?: string; // Base64 encoded waveform thumbnail
|
||||
}
|
||||
|
||||
export interface SerializedAudioBuffer {
|
||||
sampleRate: number;
|
||||
length: number;
|
||||
numberOfChannels: number;
|
||||
channelData: Float32Array[]; // Array of channel data
|
||||
}
|
||||
|
||||
export interface SerializedTrack {
|
||||
id: string;
|
||||
name: string;
|
||||
color: string;
|
||||
volume: number;
|
||||
pan: number;
|
||||
muted: boolean;
|
||||
soloed: boolean;
|
||||
collapsed: boolean;
|
||||
height: number;
|
||||
audioBuffer: SerializedAudioBuffer | null;
|
||||
effects: any[]; // Effect chain
|
||||
automation: any; // Automation data
|
||||
recordEnabled: boolean;
|
||||
}
|
||||
|
||||
export interface ProjectData {
|
||||
metadata: ProjectMetadata;
|
||||
tracks: SerializedTrack[];
|
||||
settings: {
|
||||
zoom: number;
|
||||
currentTime: number;
|
||||
sampleRate: number;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize IndexedDB database
|
||||
*/
|
||||
export function initDB(): Promise<IDBDatabase> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = indexedDB.open(DB_NAME, DB_VERSION);
|
||||
|
||||
request.onerror = () => reject(request.error);
|
||||
request.onsuccess = () => resolve(request.result);
|
||||
|
||||
request.onupgradeneeded = (event) => {
|
||||
const db = (event.target as IDBOpenDBRequest).result;
|
||||
|
||||
// Create projects object store
|
||||
if (!db.objectStoreNames.contains('projects')) {
|
||||
const projectStore = db.createObjectStore('projects', { keyPath: 'metadata.id' });
|
||||
projectStore.createIndex('updatedAt', 'metadata.updatedAt', { unique: false });
|
||||
projectStore.createIndex('name', 'metadata.name', { unique: false });
|
||||
}
|
||||
|
||||
// Create audio buffers object store (for large files)
|
||||
if (!db.objectStoreNames.contains('audioBuffers')) {
|
||||
db.createObjectStore('audioBuffers', { keyPath: 'id' });
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all projects (metadata only for list view)
|
||||
*/
|
||||
export async function getAllProjects(): Promise<ProjectMetadata[]> {
|
||||
const db = await initDB();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(['projects'], 'readonly');
|
||||
const store = transaction.objectStore('projects');
|
||||
const index = store.index('updatedAt');
|
||||
const request = index.openCursor(null, 'prev'); // Most recent first
|
||||
|
||||
const projects: ProjectMetadata[] = [];
|
||||
|
||||
request.onsuccess = () => {
|
||||
const cursor = request.result;
|
||||
if (cursor) {
|
||||
projects.push(cursor.value.metadata);
|
||||
cursor.continue();
|
||||
} else {
|
||||
resolve(projects);
|
||||
}
|
||||
};
|
||||
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
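// Usage sketch (assumed call site): the 'updatedAt' index is walked with a 'prev'
// cursor, so the resolved array is already sorted most-recently-updated first.
//   const projects = await getAllProjects();
//   projects[0]; // metadata of the last project touched, if any exist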
|
||||
|
||||
/**
|
||||
* Save project to IndexedDB
|
||||
*/
|
||||
export async function saveProject(project: ProjectData): Promise<void> {
|
||||
const db = await initDB();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(['projects'], 'readwrite');
|
||||
const store = transaction.objectStore('projects');
|
||||
const request = store.put(project);
|
||||
|
||||
request.onsuccess = () => resolve();
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Load project from IndexedDB
|
||||
*/
|
||||
export async function loadProject(projectId: string): Promise<ProjectData | null> {
|
||||
const db = await initDB();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(['projects'], 'readonly');
|
||||
const store = transaction.objectStore('projects');
|
||||
const request = store.get(projectId);
|
||||
|
||||
request.onsuccess = () => resolve(request.result || null);
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete project from IndexedDB
|
||||
*/
|
||||
export async function deleteProject(projectId: string): Promise<void> {
|
||||
const db = await initDB();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction(['projects'], 'readwrite');
|
||||
const store = transaction.objectStore('projects');
|
||||
const request = store.delete(projectId);
|
||||
|
||||
request.onsuccess = () => resolve();
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize AudioBuffer for storage
|
||||
*/
|
||||
export function serializeAudioBuffer(buffer: AudioBuffer): SerializedAudioBuffer {
|
||||
const channelData: Float32Array[] = [];
|
||||
|
||||
for (let i = 0; i < buffer.numberOfChannels; i++) {
|
||||
channelData.push(new Float32Array(buffer.getChannelData(i)));
|
||||
}
|
||||
|
||||
return {
|
||||
sampleRate: buffer.sampleRate,
|
||||
length: buffer.length,
|
||||
numberOfChannels: buffer.numberOfChannels,
|
||||
channelData,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Deserialize AudioBuffer from storage
|
||||
*/
|
||||
export function deserializeAudioBuffer(
|
||||
serialized: SerializedAudioBuffer,
|
||||
audioContext: AudioContext
|
||||
): AudioBuffer {
|
||||
const buffer = audioContext.createBuffer(
|
||||
serialized.numberOfChannels,
|
||||
serialized.length,
|
||||
serialized.sampleRate
|
||||
);
|
||||
|
||||
for (let i = 0; i < serialized.numberOfChannels; i++) {
|
||||
buffer.copyToChannel(new Float32Array(serialized.channelData[i]), i);
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
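// Round-trip sketch (illustrative): storage keeps the raw Float32Array channel data,
// so the rebuilt buffer is sample-identical to the original.
//   const stored = serializeAudioBuffer(buffer);
//   const restored = deserializeAudioBuffer(stored, getAudioContext());
//   restored.length === buffer.length; // true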
|
||||
337
lib/storage/projects.ts
Normal file
@@ -0,0 +1,337 @@
|
||||
/**
|
||||
* Project management service
|
||||
*/
|
||||
|
||||
import type { Track } from '@/types/track';
|
||||
import {
|
||||
saveProject,
|
||||
loadProject,
|
||||
getAllProjects,
|
||||
deleteProject,
|
||||
serializeAudioBuffer,
|
||||
deserializeAudioBuffer,
|
||||
type ProjectData,
|
||||
type SerializedTrack,
|
||||
type SerializedAudioBuffer,
|
||||
} from './db';
|
||||
import type { ProjectMetadata } from './db';
|
||||
import { getAudioContext } from '../audio/context';
|
||||
import { generateId } from '../audio/effects/chain';
|
||||
|
||||
// Re-export ProjectMetadata for easier importing
|
||||
export type { ProjectMetadata } from './db';
|
||||
|
||||
/**
|
||||
* Generate unique project ID
|
||||
*/
|
||||
export function generateProjectId(): string {
|
||||
return `project_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize effects by removing any non-serializable data (functions, nodes, etc.)
|
||||
*/
|
||||
function serializeEffects(effects: any[]): any[] {
|
||||
return effects.map(effect => ({
|
||||
id: effect.id,
|
||||
type: effect.type,
|
||||
name: effect.name,
|
||||
enabled: effect.enabled,
|
||||
expanded: effect.expanded,
|
||||
parameters: effect.parameters ? JSON.parse(JSON.stringify(effect.parameters)) : undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert tracks to serialized format
|
||||
*/
|
||||
function serializeTracks(tracks: Track[]): SerializedTrack[] {
|
||||
return tracks.map(track => {
|
||||
// Serialize automation by deep cloning to remove any functions
|
||||
const automation = track.automation ? JSON.parse(JSON.stringify(track.automation)) : { lanes: [], showAutomation: false };
|
||||
|
||||
return {
|
||||
id: track.id,
|
||||
name: track.name,
|
||||
color: track.color,
|
||||
volume: track.volume,
|
||||
pan: track.pan,
|
||||
muted: track.mute,
|
||||
soloed: track.solo,
|
||||
collapsed: track.collapsed,
|
||||
height: track.height,
|
||||
audioBuffer: track.audioBuffer ? serializeAudioBuffer(track.audioBuffer) : null,
|
||||
effects: serializeEffects(track.effectChain?.effects || []),
|
||||
automation,
|
||||
recordEnabled: track.recordEnabled,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert serialized tracks back to Track format
|
||||
*/
|
||||
function deserializeTracks(serialized: SerializedTrack[]): Track[] {
|
||||
const audioContext = getAudioContext();
|
||||
|
||||
return serialized.map(track => ({
|
||||
id: track.id,
|
||||
name: track.name,
|
||||
color: track.color,
|
||||
volume: track.volume,
|
||||
pan: track.pan,
|
||||
mute: track.muted,
|
||||
solo: track.soloed,
|
||||
collapsed: track.collapsed,
|
||||
height: track.height,
|
||||
audioBuffer: track.audioBuffer ? deserializeAudioBuffer(track.audioBuffer, audioContext) : null,
|
||||
effectChain: {
|
||||
id: generateId(),
|
||||
name: `${track.name} FX`,
|
||||
effects: track.effects,
|
||||
},
|
||||
automation: track.automation,
|
||||
recordEnabled: track.recordEnabled,
|
||||
selected: false,
|
||||
showEffects: false,
|
||||
selection: null, // Reset selection on load
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate total project duration
|
||||
*/
|
||||
function calculateDuration(tracks: Track[]): number {
|
||||
let maxDuration = 0;
|
||||
|
||||
for (const track of tracks) {
|
||||
if (track.audioBuffer) {
|
||||
maxDuration = Math.max(maxDuration, track.audioBuffer.duration);
|
||||
}
|
||||
}
|
||||
|
||||
return maxDuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save current project state
|
||||
*/
|
||||
export async function saveCurrentProject(
|
||||
projectId: string | null,
|
||||
projectName: string,
|
||||
tracks: Track[],
|
||||
settings: {
|
||||
zoom: number;
|
||||
currentTime: number;
|
||||
sampleRate: number;
|
||||
},
|
||||
description?: string
|
||||
): Promise<string> {
|
||||
const id = projectId || generateProjectId();
|
||||
const now = Date.now();
|
||||
|
||||
const metadata: ProjectMetadata = {
|
||||
id,
|
||||
name: projectName,
|
||||
description,
|
||||
createdAt: projectId ? (await loadProject(id))?.metadata.createdAt || now : now,
|
||||
updatedAt: now,
|
||||
duration: calculateDuration(tracks),
|
||||
sampleRate: settings.sampleRate,
|
||||
trackCount: tracks.length,
|
||||
};
|
||||
|
||||
const projectData: ProjectData = {
|
||||
metadata,
|
||||
tracks: serializeTracks(tracks),
|
||||
settings,
|
||||
};
|
||||
|
||||
await saveProject(projectData);
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load project and restore state
|
||||
*/
|
||||
export async function loadProjectById(projectId: string): Promise<{
|
||||
tracks: Track[];
|
||||
settings: {
|
||||
zoom: number;
|
||||
currentTime: number;
|
||||
sampleRate: number;
|
||||
};
|
||||
metadata: ProjectMetadata;
|
||||
} | null> {
|
||||
const project = await loadProject(projectId);
|
||||
if (!project) return null;
|
||||
|
||||
return {
|
||||
tracks: deserializeTracks(project.tracks),
|
||||
settings: project.settings,
|
||||
metadata: project.metadata,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of all projects
|
||||
*/
|
||||
export async function listProjects(): Promise<ProjectMetadata[]> {
|
||||
return getAllProjects();
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a project
|
||||
*/
|
||||
export async function removeProject(projectId: string): Promise<void> {
|
||||
return deleteProject(projectId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Duplicate a project
|
||||
*/
|
||||
export async function duplicateProject(sourceProjectId: string, newName: string): Promise<string> {
|
||||
const project = await loadProject(sourceProjectId);
|
||||
if (!project) throw new Error('Project not found');
|
||||
|
||||
const newId = generateProjectId();
|
||||
const now = Date.now();
|
||||
|
||||
const newProject: ProjectData = {
|
||||
...project,
|
||||
metadata: {
|
||||
...project.metadata,
|
||||
id: newId,
|
||||
name: newName,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
};
|
||||
|
||||
await saveProject(newProject);
|
||||
return newId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Export project as ZIP file with separate audio files
|
||||
*/
|
||||
export async function exportProjectAsJSON(projectId: string): Promise<void> {
|
||||
const JSZip = (await import('jszip')).default;
|
||||
const project = await loadProject(projectId);
|
||||
if (!project) throw new Error('Project not found');
|
||||
|
||||
const zip = new JSZip();
|
||||
const audioContext = getAudioContext();
|
||||
|
||||
// Create metadata without audio buffers
|
||||
const metadata = {
|
||||
...project,
|
||||
tracks: project.tracks.map((track, index) => ({
|
||||
...track,
|
||||
audioBuffer: track.audioBuffer ? {
|
||||
fileName: `track_${index}.wav`,
|
||||
sampleRate: track.audioBuffer.sampleRate,
|
||||
length: track.audioBuffer.length,
|
||||
numberOfChannels: track.audioBuffer.numberOfChannels,
|
||||
} : null,
|
||||
})),
|
||||
};
|
||||
|
||||
// Add project.json to ZIP
|
||||
zip.file('project.json', JSON.stringify(metadata, null, 2));
|
||||
|
||||
// Convert audio buffers to WAV and add to ZIP
|
||||
for (let i = 0; i < project.tracks.length; i++) {
|
||||
const track = project.tracks[i];
|
||||
if (track.audioBuffer) {
|
||||
// Deserialize audio buffer
|
||||
const buffer = deserializeAudioBuffer(track.audioBuffer, audioContext);
|
||||
|
||||
// Convert to WAV
|
||||
const { audioBufferToWav } = await import('@/lib/audio/export');
|
||||
const wavBlob = await audioBufferToWav(buffer);
|
||||
|
||||
// Add to ZIP
|
||||
zip.file(`track_${i}.wav`, wavBlob);
|
||||
}
|
||||
}
|
||||
|
||||
// Generate ZIP and download
|
||||
const zipBlob = await zip.generateAsync({ type: 'blob' });
|
||||
const url = URL.createObjectURL(zipBlob);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = `${project.metadata.name.replace(/[^a-z0-9]/gi, '_').toLowerCase()}_${Date.now()}.zip`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
URL.revokeObjectURL(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Import project from ZIP file
|
||||
*/
|
||||
export async function importProjectFromJSON(file: File): Promise<string> {
|
||||
const JSZip = (await import('jszip')).default;
|
||||
|
||||
try {
|
||||
const zip = await JSZip.loadAsync(file);
|
||||
|
||||
// Read project.json
|
||||
const projectJsonFile = zip.file('project.json');
|
||||
if (!projectJsonFile) throw new Error('Invalid project file: missing project.json');
|
||||
|
||||
const projectJson = await projectJsonFile.async('text');
|
||||
const metadata = JSON.parse(projectJson);
|
||||
|
||||
// Read audio files and reconstruct tracks
|
||||
const audioContext = getAudioContext();
|
||||
const tracks: SerializedTrack[] = [];
|
||||
|
||||
for (let i = 0; i < metadata.tracks.length; i++) {
|
||||
const trackMeta = metadata.tracks[i];
|
||||
let audioBuffer: SerializedAudioBuffer | null = null;
|
||||
|
||||
if (trackMeta.audioBuffer?.fileName) {
|
||||
const audioFile = zip.file(trackMeta.audioBuffer.fileName);
|
||||
if (audioFile) {
|
||||
// Read WAV file as array buffer
|
||||
const arrayBuffer = await audioFile.async('arraybuffer');
|
||||
|
||||
// Decode audio data
|
||||
const decodedBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
|
||||
// Serialize for storage
|
||||
audioBuffer = serializeAudioBuffer(decodedBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
tracks.push({
|
||||
...trackMeta,
|
||||
audioBuffer,
|
||||
});
|
||||
}
|
||||
|
||||
// Generate new ID to avoid conflicts
|
||||
const newId = generateProjectId();
|
||||
const now = Date.now();
|
||||
|
||||
const importedProject: ProjectData = {
|
||||
...metadata,
|
||||
tracks,
|
||||
metadata: {
|
||||
...metadata.metadata,
|
||||
id: newId,
|
||||
name: `${metadata.metadata.name} (Imported)`,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
};
|
||||
|
||||
await saveProject(importedProject);
|
||||
return newId;
|
||||
} catch (error) {
|
||||
console.error('Import error:', error);
|
||||
throw new Error('Failed to import project file');
|
||||
}
|
||||
}
|
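A sketch of how the project service above might be driven from UI code; the `tracks` argument and the `@/lib/storage/projects` import path are placeholders for whatever the caller already has.

```ts
import { saveCurrentProject, loadProjectById, listProjects } from '@/lib/storage/projects';
import type { Track } from '@/types/track';

async function persistAndRestore(tracks: Track[]): Promise<void> {
  // Passing null as the id creates a new project; the generated id is returned.
  const id = await saveCurrentProject(null, 'Untitled Session', tracks, {
    zoom: 1,
    currentTime: 0,
    sampleRate: 48000,
  });

  // The saved project shows up in the metadata listing...
  console.log(await listProjects());

  // ...and can be fully restored, including deserialized AudioBuffers.
  const restored = await loadProjectById(id);
  if (restored) {
    console.log(restored.metadata.name, restored.tracks.length);
  }
}
```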
||||
lib/utils/audio-cleanup.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
/**
|
||||
* Audio cleanup utilities to prevent memory leaks
|
||||
*/
|
||||
|
||||
/**
|
||||
* Safely disconnect and cleanup an AudioNode
|
||||
*/
|
||||
export function cleanupAudioNode(node: AudioNode | null | undefined): void {
|
||||
if (!node) return;
|
||||
|
||||
try {
|
||||
node.disconnect();
|
||||
} catch (error) {
|
||||
// Node may already be disconnected, ignore error
|
||||
console.debug('AudioNode cleanup error (expected):', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup multiple audio nodes
|
||||
*/
|
||||
export function cleanupAudioNodes(nodes: Array<AudioNode | null | undefined>): void {
|
||||
nodes.forEach(cleanupAudioNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely stop and cleanup an AudioBufferSourceNode
|
||||
*/
|
||||
export function cleanupAudioSource(source: AudioBufferSourceNode | null | undefined): void {
|
||||
if (!source) return;
|
||||
|
||||
try {
|
||||
source.stop();
|
||||
} catch (error) {
|
||||
// Source may already be stopped, ignore error
|
||||
console.debug('AudioSource stop error (expected):', error);
|
||||
}
|
||||
|
||||
cleanupAudioNode(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup canvas and release resources
|
||||
*/
|
||||
export function cleanupCanvas(canvas: HTMLCanvasElement | null | undefined): void {
|
||||
if (!canvas) return;
|
||||
|
||||
const ctx = canvas.getContext('2d');
|
||||
if (ctx) {
|
||||
// Clear the canvas
|
||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||
// Reset transform
|
||||
ctx.setTransform(1, 0, 0, 1, 0, 0);
|
||||
}
|
||||
|
||||
// Release context (helps with memory)
|
||||
canvas.width = 0;
|
||||
canvas.height = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel animation frame safely
|
||||
*/
|
||||
export function cleanupAnimationFrame(frameId: number | null | undefined): void {
|
||||
if (frameId !== null && frameId !== undefined) {
|
||||
cancelAnimationFrame(frameId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup media stream tracks
|
||||
*/
|
||||
export function cleanupMediaStream(stream: MediaStream | null | undefined): void {
|
||||
if (!stream) return;
|
||||
|
||||
stream.getTracks().forEach(track => {
|
||||
track.stop();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a cleanup registry for managing multiple cleanup tasks
|
||||
*/
|
||||
export class CleanupRegistry {
|
||||
private cleanupTasks: Array<() => void> = [];
|
||||
|
||||
/**
|
||||
* Register a cleanup task
|
||||
*/
|
||||
register(cleanup: () => void): void {
|
||||
this.cleanupTasks.push(cleanup);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an audio node for cleanup
|
||||
*/
|
||||
registerAudioNode(node: AudioNode): void {
|
||||
this.register(() => cleanupAudioNode(node));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an audio source for cleanup
|
||||
*/
|
||||
registerAudioSource(source: AudioBufferSourceNode): void {
|
||||
this.register(() => cleanupAudioSource(source));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a canvas for cleanup
|
||||
*/
|
||||
registerCanvas(canvas: HTMLCanvasElement): void {
|
||||
this.register(() => cleanupCanvas(canvas));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an animation frame for cleanup
|
||||
*/
|
||||
registerAnimationFrame(frameId: number): void {
|
||||
this.register(() => cleanupAnimationFrame(frameId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a media stream for cleanup
|
||||
*/
|
||||
registerMediaStream(stream: MediaStream): void {
|
||||
this.register(() => cleanupMediaStream(stream));
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute all cleanup tasks and clear the registry
|
||||
*/
|
||||
cleanup(): void {
|
||||
this.cleanupTasks.forEach(task => {
|
||||
try {
|
||||
task();
|
||||
} catch (error) {
|
||||
console.error('Cleanup task failed:', error);
|
||||
}
|
||||
});
|
||||
this.cleanupTasks = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of registered cleanup tasks
|
||||
*/
|
||||
get size(): number {
|
||||
return this.cleanupTasks.length;
|
||||
}
|
||||
}
|
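A short usage sketch for `CleanupRegistry`: one registry per player or component instance, with everything torn down in a single call when the instance goes away. The playback wiring below is illustrative, not part of this change set.

```ts
import { CleanupRegistry } from '@/lib/utils/audio-cleanup';

const registry = new CleanupRegistry();

function startPlayback(ctx: AudioContext, buffer: AudioBuffer): void {
  const source = ctx.createBufferSource();
  const gain = ctx.createGain();
  source.buffer = buffer;
  source.connect(gain).connect(ctx.destination);
  source.start();

  // Stopping and disconnecting will happen automatically on dispose().
  registry.registerAudioSource(source);
  registry.registerAudioNode(gain);
}

function dispose(): void {
  registry.cleanup(); // stops the source, disconnects nodes, clears the registry
}
```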
||||
lib/utils/browser-compat.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* Browser compatibility checking utilities
|
||||
*/
|
||||
|
||||
export interface BrowserCompatibility {
|
||||
isSupported: boolean;
|
||||
missingFeatures: string[];
|
||||
warnings: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if all required browser features are supported
|
||||
*/
|
||||
export function checkBrowserCompatibility(): BrowserCompatibility {
|
||||
const missingFeatures: string[] = [];
|
||||
const warnings: string[] = [];
|
||||
|
||||
// Check if running in browser
|
||||
if (typeof window === 'undefined') {
|
||||
return {
|
||||
isSupported: true,
|
||||
missingFeatures: [],
|
||||
warnings: [],
|
||||
};
|
||||
}
|
||||
|
||||
// Check Web Audio API
|
||||
if (!window.AudioContext && !(window as any).webkitAudioContext) {
|
||||
missingFeatures.push('Web Audio API');
|
||||
}
|
||||
|
||||
// Check IndexedDB
|
||||
if (!window.indexedDB) {
|
||||
missingFeatures.push('IndexedDB');
|
||||
}
|
||||
|
||||
// Check localStorage
|
||||
try {
|
||||
localStorage.setItem('test', 'test');
|
||||
localStorage.removeItem('test');
|
||||
} catch (e) {
|
||||
missingFeatures.push('LocalStorage');
|
||||
}
|
||||
|
||||
// Check Canvas API
|
||||
const canvas = document.createElement('canvas');
|
||||
if (!canvas.getContext || !canvas.getContext('2d')) {
|
||||
missingFeatures.push('Canvas API');
|
||||
}
|
||||
|
||||
// Check MediaDevices API (for recording)
|
||||
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
|
||||
warnings.push('Microphone recording not supported (requires HTTPS or localhost)');
|
||||
}
|
||||
|
||||
// Check File API
|
||||
if (!window.File || !window.FileReader || !window.FileList || !window.Blob) {
|
||||
missingFeatures.push('File API');
|
||||
}
|
||||
|
||||
// Check OfflineAudioContext
|
||||
if (!window.OfflineAudioContext && !(window as any).webkitOfflineAudioContext) {
|
||||
missingFeatures.push('OfflineAudioContext (required for audio processing)');
|
||||
}
|
||||
|
||||
return {
|
||||
isSupported: missingFeatures.length === 0,
|
||||
missingFeatures,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get user-friendly browser name
|
||||
*/
|
||||
export function getBrowserInfo(): { name: string; version: string } {
|
||||
// Check if running in browser
|
||||
if (typeof window === 'undefined' || typeof navigator === 'undefined') {
|
||||
return { name: 'Unknown', version: 'Unknown' };
|
||||
}
|
||||
|
||||
const userAgent = navigator.userAgent;
|
||||
let name = 'Unknown';
|
||||
let version = 'Unknown';
|
||||
|
||||
if (userAgent.indexOf('Chrome') > -1 && userAgent.indexOf('Edg') === -1) {
|
||||
name = 'Chrome';
|
||||
const match = userAgent.match(/Chrome\/(\d+)/);
|
||||
version = match ? match[1] : 'Unknown';
|
||||
} else if (userAgent.indexOf('Edg') > -1) {
|
||||
name = 'Edge';
|
||||
const match = userAgent.match(/Edg\/(\d+)/);
|
||||
version = match ? match[1] : 'Unknown';
|
||||
} else if (userAgent.indexOf('Firefox') > -1) {
|
||||
name = 'Firefox';
|
||||
const match = userAgent.match(/Firefox\/(\d+)/);
|
||||
version = match ? match[1] : 'Unknown';
|
||||
} else if (userAgent.indexOf('Safari') > -1 && userAgent.indexOf('Chrome') === -1) {
|
||||
name = 'Safari';
|
||||
const match = userAgent.match(/Version\/(\d+)/);
|
||||
version = match ? match[1] : 'Unknown';
|
||||
}
|
||||
|
||||
return { name, version };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if browser version meets minimum requirements
|
||||
*/
|
||||
export function checkMinimumVersion(): boolean {
|
||||
const { name, version } = getBrowserInfo();
|
||||
const versionNum = parseInt(version, 10);
|
||||
|
||||
const minimumVersions: Record<string, number> = {
|
||||
Chrome: 90,
|
||||
Edge: 90,
|
||||
Firefox: 88,
|
||||
Safari: 14,
|
||||
};
|
||||
|
||||
const minVersion = minimumVersions[name];
|
||||
if (!minVersion) {
|
||||
// Unknown browser, assume it's ok
|
||||
return true;
|
||||
}
|
||||
|
||||
return versionNum >= minVersion;
|
||||
}
|
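A typical startup check using the three exports above; how the result is surfaced to the user (banner, modal, etc.) is up to the app and is only hinted at here with console output.

```ts
import { checkBrowserCompatibility, checkMinimumVersion, getBrowserInfo } from '@/lib/utils/browser-compat';

const compat = checkBrowserCompatibility();

if (!compat.isSupported) {
  // Hard failure: core APIs such as Web Audio or IndexedDB are missing.
  console.error('Unsupported browser, missing:', compat.missingFeatures.join(', '));
} else {
  compat.warnings.forEach(warning => console.warn(warning));

  if (!checkMinimumVersion()) {
    const { name, version } = getBrowserInfo();
    console.warn(`${name} ${version} is below the tested minimum version.`);
  }
}
```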
||||
lib/utils/memory-limits.ts (new file, 160 lines)
@@ -0,0 +1,160 @@
|
||||
/**
|
||||
* Memory limit checking utilities for audio file handling
|
||||
*/
|
||||
|
||||
export interface MemoryCheckResult {
|
||||
allowed: boolean;
|
||||
warning?: string;
|
||||
estimatedMemoryMB: number;
|
||||
availableMemoryMB?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate memory required for an audio buffer
|
||||
* @param duration Duration in seconds
|
||||
* @param sampleRate Sample rate (default: 48000 Hz)
|
||||
* @param channels Number of channels (default: 2 for stereo)
|
||||
* @returns Estimated memory in MB
|
||||
*/
|
||||
export function estimateAudioMemory(
|
||||
duration: number,
|
||||
sampleRate: number = 48000,
|
||||
channels: number = 2
|
||||
): number {
|
||||
// Each sample is a 32-bit float (4 bytes)
|
||||
const bytesPerSample = 4;
|
||||
const totalSamples = duration * sampleRate * channels;
|
||||
const bytes = totalSamples * bytesPerSample;
|
||||
|
||||
// Convert to MB
|
||||
return bytes / (1024 * 1024);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available device memory if supported
|
||||
* @returns Available memory in MB, or undefined if not supported
|
||||
*/
|
||||
export function getAvailableMemory(): number | undefined {
|
||||
if (typeof navigator === 'undefined') return undefined;
|
||||
|
||||
// @ts-ignore - deviceMemory is not in TypeScript types yet
|
||||
const deviceMemory = navigator.deviceMemory;
|
||||
if (typeof deviceMemory === 'number') {
|
||||
// deviceMemory is in GB, convert to MB
|
||||
return deviceMemory * 1024;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file size is within safe memory limits
|
||||
* @param fileSizeBytes File size in bytes
|
||||
* @returns Memory check result
|
||||
*/
|
||||
export function checkFileMemoryLimit(fileSizeBytes: number): MemoryCheckResult {
|
||||
// Estimate memory usage (audio files decompress to ~10x their size)
|
||||
const estimatedMemoryMB = (fileSizeBytes / (1024 * 1024)) * 10;
|
||||
const availableMemoryMB = getAvailableMemory();
|
||||
|
||||
// Conservative limits
|
||||
const WARN_THRESHOLD_MB = 100; // Warn if file will use > 100MB
|
||||
const MAX_RECOMMENDED_MB = 500; // Don't recommend files > 500MB
|
||||
|
||||
if (estimatedMemoryMB > MAX_RECOMMENDED_MB) {
|
||||
return {
|
||||
allowed: false,
|
||||
warning: `This file may require ${Math.round(estimatedMemoryMB)}MB of memory. ` +
|
||||
`Files larger than ${MAX_RECOMMENDED_MB}MB are not recommended as they may cause performance issues or crashes.`,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
if (estimatedMemoryMB > WARN_THRESHOLD_MB) {
|
||||
const warning = availableMemoryMB
|
||||
? `This file will require approximately ${Math.round(estimatedMemoryMB)}MB of memory. ` +
|
||||
`Your device has ${Math.round(availableMemoryMB)}MB available.`
|
||||
: `This file will require approximately ${Math.round(estimatedMemoryMB)}MB of memory. ` +
|
||||
`Large files may cause performance issues on devices with limited memory.`;
|
||||
|
||||
return {
|
||||
allowed: true,
|
||||
warning,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
allowed: true,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an audio buffer is within safe memory limits
|
||||
* @param duration Duration in seconds
|
||||
* @param sampleRate Sample rate
|
||||
* @param channels Number of channels
|
||||
* @returns Memory check result
|
||||
*/
|
||||
export function checkAudioBufferMemoryLimit(
|
||||
duration: number,
|
||||
sampleRate: number = 48000,
|
||||
channels: number = 2
|
||||
): MemoryCheckResult {
|
||||
const estimatedMemoryMB = estimateAudioMemory(duration, sampleRate, channels);
|
||||
const availableMemoryMB = getAvailableMemory();
|
||||
|
||||
const WARN_THRESHOLD_MB = 100;
|
||||
const MAX_RECOMMENDED_MB = 500;
|
||||
|
||||
if (estimatedMemoryMB > MAX_RECOMMENDED_MB) {
|
||||
return {
|
||||
allowed: false,
|
||||
warning: `This audio (${Math.round(duration / 60)} minutes) will require ${Math.round(estimatedMemoryMB)}MB of memory. ` +
|
||||
`Audio longer than ${Math.round((MAX_RECOMMENDED_MB * 1024 * 1024) / (sampleRate * channels * 4) / 60)} minutes may cause performance issues.`,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
if (estimatedMemoryMB > WARN_THRESHOLD_MB) {
|
||||
const warning = availableMemoryMB
|
||||
? `This audio will require approximately ${Math.round(estimatedMemoryMB)}MB of memory. ` +
|
||||
`Your device has ${Math.round(availableMemoryMB)}MB available.`
|
||||
: `This audio will require approximately ${Math.round(estimatedMemoryMB)}MB of memory.`;
|
||||
|
||||
return {
|
||||
allowed: true,
|
||||
warning,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
allowed: true,
|
||||
estimatedMemoryMB,
|
||||
availableMemoryMB,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format memory size in human-readable format
|
||||
* @param bytes Size in bytes
|
||||
* @returns Formatted string (e.g., "1.5 MB", "250 KB")
|
||||
*/
|
||||
export function formatMemorySize(bytes: number): string {
|
||||
if (bytes < 1024) {
|
||||
return `${bytes} B`;
|
||||
} else if (bytes < 1024 * 1024) {
|
||||
return `${(bytes / 1024).toFixed(1)} KB`;
|
||||
} else if (bytes < 1024 * 1024 * 1024) {
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
} else {
|
||||
return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
|
||||
}
|
||||
}
|
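A sketch of gating a file import on the memory check above; `onFileSelected` is a hypothetical handler name, the check calls are the module's real API.

```ts
import { checkFileMemoryLimit, formatMemorySize } from '@/lib/utils/memory-limits';

function onFileSelected(file: File): boolean {
  const check = checkFileMemoryLimit(file.size);

  if (!check.allowed) {
    console.error(check.warning); // estimated decode size exceeds the recommended maximum
    return false;
  }
  if (check.warning) {
    console.warn(check.warning); // large but allowed; surface a soft warning
  }

  console.log(`Importing ${file.name} (${formatMemorySize(file.size)})`);
  return true;
}
```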
||||
lib/utils/timeline.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
/**
|
||||
* Timeline coordinate conversion and formatting utilities
|
||||
*/
|
||||
|
||||
/**
|
||||
* Base pixels per second at zoom level 1
|
||||
* zoom=1: 5 pixels per second
|
||||
* zoom=2: 10 pixels per second, etc.
|
||||
*/
|
||||
const PIXELS_PER_SECOND_BASE = 5;
|
||||
|
||||
/**
|
||||
* Convert time (in seconds) to pixel position
|
||||
*/
|
||||
export function timeToPixel(time: number, duration: number, zoom: number): number {
|
||||
if (duration === 0) return 0;
|
||||
const totalWidth = duration * zoom * PIXELS_PER_SECOND_BASE;
|
||||
return (time / duration) * totalWidth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert pixel position to time (in seconds)
|
||||
*/
|
||||
export function pixelToTime(pixel: number, duration: number, zoom: number): number {
|
||||
if (duration === 0) return 0;
|
||||
const totalWidth = duration * zoom * PIXELS_PER_SECOND_BASE;
|
||||
return (pixel / totalWidth) * duration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate appropriate tick interval based on visible duration
|
||||
* Returns interval in seconds
|
||||
*/
|
||||
export function calculateTickInterval(visibleDuration: number): {
|
||||
major: number;
|
||||
minor: number;
|
||||
} {
|
||||
// Very zoomed in: show sub-second intervals
|
||||
if (visibleDuration < 5) {
|
||||
return { major: 1, minor: 0.5 };
|
||||
}
|
||||
// Zoomed in: show every second
|
||||
if (visibleDuration < 20) {
|
||||
return { major: 5, minor: 1 };
|
||||
}
|
||||
// Medium zoom: show every 5 seconds
|
||||
if (visibleDuration < 60) {
|
||||
return { major: 10, minor: 5 };
|
||||
}
|
||||
// Zoomed out: show every 10 seconds
|
||||
if (visibleDuration < 300) {
|
||||
return { major: 30, minor: 10 };
|
||||
}
|
||||
// Very zoomed out: show every minute
|
||||
return { major: 60, minor: 30 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Format time in seconds to display format
|
||||
* Returns format like "0:00", "1:23", "12:34.5"
|
||||
*/
|
||||
export function formatTimeLabel(seconds: number, showMillis: boolean = false): string {
|
||||
const mins = Math.floor(seconds / 60);
|
||||
const secs = seconds % 60;
|
||||
|
||||
if (showMillis) {
|
||||
const wholeSecs = Math.floor(secs);
|
||||
const decimalPart = Math.floor((secs - wholeSecs) * 10);
|
||||
return `${mins}:${wholeSecs.toString().padStart(2, '0')}.${decimalPart}`;
|
||||
}
|
||||
|
||||
return `${mins}:${Math.floor(secs).toString().padStart(2, '0')}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate visible time range based on scroll position
|
||||
*/
|
||||
export function getVisibleTimeRange(
|
||||
scrollLeft: number,
|
||||
viewportWidth: number,
|
||||
duration: number,
|
||||
zoom: number
|
||||
): { start: number; end: number } {
|
||||
|
||||
const start = pixelToTime(scrollLeft, duration, zoom);
|
||||
const end = pixelToTime(scrollLeft + viewportWidth, duration, zoom);
|
||||
|
||||
return {
|
||||
start: Math.max(0, start),
|
||||
end: Math.min(duration, end),
|
||||
};
|
||||
}
|
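A quick sketch of the coordinate helpers above; the 3-minute duration and zoom level are arbitrary example values.

```ts
import { timeToPixel, pixelToTime, calculateTickInterval, formatTimeLabel } from '@/lib/utils/timeline';

const duration = 180; // 3-minute project
const zoom = 2;       // 10 px per second at the 5 px/s base

// Time <-> pixel conversion round-trips.
const x = timeToPixel(42, duration, zoom); // 420
const t = pixelToTime(x, duration, zoom);  // 42

// Ruler labels for a one-minute visible window.
const { major } = calculateTickInterval(60); // { major: 30, minor: 10 }
for (let s = 0; s <= duration; s += major) {
  console.log(formatTimeLabel(s)); // "0:00", "0:30", "1:00", ...
}
```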
||||
lib/workers/audio.worker.ts (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
/**
|
||||
* Web Worker for heavy audio computations
|
||||
* Offloads waveform generation, analysis, and normalization to background thread
|
||||
*/
|
||||
|
||||
export interface WorkerMessage {
|
||||
id: string;
|
||||
type: 'generatePeaks' | 'generateMinMaxPeaks' | 'normalizePeaks' | 'analyzeAudio' | 'findPeak';
|
||||
payload: any;
|
||||
}
|
||||
|
||||
export interface WorkerResponse {
|
||||
id: string;
|
||||
type: string;
|
||||
result?: any;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
// Message handler
|
||||
self.onmessage = (event: MessageEvent<WorkerMessage>) => {
|
||||
const { id, type, payload } = event.data;
|
||||
|
||||
try {
|
||||
let result: any;
|
||||
|
||||
switch (type) {
|
||||
case 'generatePeaks':
|
||||
result = generatePeaks(
|
||||
payload.channelData,
|
||||
payload.width
|
||||
);
|
||||
break;
|
||||
|
||||
case 'generateMinMaxPeaks':
|
||||
result = generateMinMaxPeaks(
|
||||
payload.channelData,
|
||||
payload.width
|
||||
);
|
||||
break;
|
||||
|
||||
case 'normalizePeaks':
|
||||
result = normalizePeaks(
|
||||
payload.peaks,
|
||||
payload.targetMax
|
||||
);
|
||||
break;
|
||||
|
||||
case 'analyzeAudio':
|
||||
result = analyzeAudio(payload.channelData);
|
||||
break;
|
||||
|
||||
case 'findPeak':
|
||||
result = findPeak(payload.channelData);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown worker message type: ${type}`);
|
||||
}
|
||||
|
||||
const response: WorkerResponse = { id, type, result };
|
||||
self.postMessage(response);
|
||||
} catch (error) {
|
||||
const response: WorkerResponse = {
|
||||
id,
|
||||
type,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
self.postMessage(response);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Generate waveform peaks from channel data
|
||||
*/
|
||||
function generatePeaks(channelData: Float32Array, width: number): Float32Array {
|
||||
const peaks = new Float32Array(width);
|
||||
const samplesPerPeak = Math.floor(channelData.length / width);
|
||||
|
||||
for (let i = 0; i < width; i++) {
|
||||
const start = i * samplesPerPeak;
|
||||
const end = Math.min(start + samplesPerPeak, channelData.length);
|
||||
|
||||
let max = 0;
|
||||
for (let j = start; j < end; j++) {
|
||||
const abs = Math.abs(channelData[j]);
|
||||
if (abs > max) {
|
||||
max = abs;
|
||||
}
|
||||
}
|
||||
|
||||
peaks[i] = max;
|
||||
}
|
||||
|
||||
return peaks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate min/max peaks for more detailed waveform visualization
|
||||
*/
|
||||
function generateMinMaxPeaks(
|
||||
channelData: Float32Array,
|
||||
width: number
|
||||
): { min: Float32Array; max: Float32Array } {
|
||||
const min = new Float32Array(width);
|
||||
const max = new Float32Array(width);
|
||||
const samplesPerPeak = Math.floor(channelData.length / width);
|
||||
|
||||
for (let i = 0; i < width; i++) {
|
||||
const start = i * samplesPerPeak;
|
||||
const end = Math.min(start + samplesPerPeak, channelData.length);
|
||||
|
||||
let minVal = 1;
|
||||
let maxVal = -1;
|
||||
|
||||
for (let j = start; j < end; j++) {
|
||||
const val = channelData[j];
|
||||
if (val < minVal) minVal = val;
|
||||
if (val > maxVal) maxVal = val;
|
||||
}
|
||||
|
||||
min[i] = minVal;
|
||||
max[i] = maxVal;
|
||||
}
|
||||
|
||||
return { min, max };
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize peaks to a given range
|
||||
*/
|
||||
function normalizePeaks(peaks: Float32Array, targetMax: number = 1): Float32Array {
|
||||
const normalized = new Float32Array(peaks.length);
|
||||
let max = 0;
|
||||
|
||||
// Find max value
|
||||
for (let i = 0; i < peaks.length; i++) {
|
||||
if (peaks[i] > max) {
|
||||
max = peaks[i];
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize
|
||||
const scale = max > 0 ? targetMax / max : 1;
|
||||
for (let i = 0; i < peaks.length; i++) {
|
||||
normalized[i] = peaks[i] * scale;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze audio data for statistics
|
||||
*/
|
||||
function analyzeAudio(channelData: Float32Array): {
|
||||
peak: number;
|
||||
rms: number;
|
||||
crestFactor: number;
|
||||
dynamicRange: number;
|
||||
} {
|
||||
let peak = 0;
|
||||
let sumSquares = 0;
|
||||
let min = 1;
|
||||
let max = -1;
|
||||
|
||||
for (let i = 0; i < channelData.length; i++) {
|
||||
const val = channelData[i];
|
||||
const abs = Math.abs(val);
|
||||
|
||||
if (abs > peak) peak = abs;
|
||||
if (val < min) min = val;
|
||||
if (val > max) max = val;
|
||||
|
||||
sumSquares += val * val;
|
||||
}
|
||||
|
||||
const rms = Math.sqrt(sumSquares / channelData.length);
|
||||
const crestFactor = rms > 0 ? peak / rms : 0;
|
||||
const dynamicRange = max - min;
|
||||
|
||||
return {
|
||||
peak,
|
||||
rms,
|
||||
crestFactor,
|
||||
dynamicRange,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find peak value in channel data
|
||||
*/
|
||||
function findPeak(channelData: Float32Array): number {
|
||||
let peak = 0;
|
||||
|
||||
for (let i = 0; i < channelData.length; i++) {
|
||||
const abs = Math.abs(channelData[i]);
|
||||
if (abs > peak) peak = abs;
|
||||
}
|
||||
|
||||
return peak;
|
||||
}
|
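A main-thread wrapper is not part of this diff, so the sketch below is one way the worker's message protocol could be consumed; the `new URL(...)` worker bootstrap is bundler-dependent and assumed here, as is the `@/lib/workers/audio.worker` type import path.

```ts
import type { WorkerMessage, WorkerResponse } from '@/lib/workers/audio.worker';

// Bundler-specific: webpack/Next understand the URL form; adjust to how the app loads workers.
const worker = new Worker(new URL('./audio.worker.ts', import.meta.url));

function runWorkerTask<T>(type: WorkerMessage['type'], payload: unknown): Promise<T> {
  const id = crypto.randomUUID();

  return new Promise<T>((resolve, reject) => {
    const onMessage = (event: MessageEvent<WorkerResponse>) => {
      if (event.data.id !== id) return; // response belongs to another request
      worker.removeEventListener('message', onMessage);
      if (event.data.error) reject(new Error(event.data.error));
      else resolve(event.data.result as T);
    };

    worker.addEventListener('message', onMessage);
    worker.postMessage({ id, type, payload } satisfies WorkerMessage);
  });
}

// e.g. build an 800-px waveform without blocking the UI thread:
// const peaks = await runWorkerTask<Float32Array>('generatePeaks', { channelData, width: 800 });
```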
||||
package.json
@@ -10,7 +10,9 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"clsx": "^2.1.1",
|
||||
"lamejs": "^1.2.1",
|
||||
"fflate": "^0.8.2",
|
||||
"jszip": "^3.10.1",
|
||||
"lamejs": "github:zhuker/lamejs",
|
||||
"lucide-react": "^0.553.0",
|
||||
"next": "^16.0.0",
|
||||
"react": "^19.0.0",
|
||||
|
||||
pnpm-lock.yaml (generated, 104 changed lines)
@@ -11,9 +11,15 @@ importers:
|
||||
clsx:
|
||||
specifier: ^2.1.1
|
||||
version: 2.1.1
|
||||
fflate:
|
||||
specifier: ^0.8.2
|
||||
version: 0.8.2
|
||||
jszip:
|
||||
specifier: ^3.10.1
|
||||
version: 3.10.1
|
||||
lamejs:
|
||||
specifier: ^1.2.1
|
||||
version: 1.2.1
|
||||
specifier: github:zhuker/lamejs
|
||||
version: https://codeload.github.com/zhuker/lamejs/tar.gz/582bbba6a12f981b984d8fb9e1874499fed85675
|
||||
lucide-react:
|
||||
specifier: ^0.553.0
|
||||
version: 0.553.0(react@19.2.0)
|
||||
@@ -828,6 +834,9 @@ packages:
|
||||
convert-source-map@2.0.0:
|
||||
resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
|
||||
|
||||
core-util-is@1.0.3:
|
||||
resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
|
||||
|
||||
cross-spawn@7.0.6:
|
||||
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
|
||||
engines: {node: '>= 8'}
|
||||
@@ -1085,6 +1094,9 @@ packages:
|
||||
picomatch:
|
||||
optional: true
|
||||
|
||||
fflate@0.8.2:
|
||||
resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==}
|
||||
|
||||
file-entry-cache@8.0.0:
|
||||
resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==}
|
||||
engines: {node: '>=16.0.0'}
|
||||
@@ -1212,6 +1224,9 @@ packages:
|
||||
resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==}
|
||||
engines: {node: '>= 4'}
|
||||
|
||||
immediate@3.0.6:
|
||||
resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==}
|
||||
|
||||
import-fresh@3.3.1:
|
||||
resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
|
||||
engines: {node: '>=6'}
|
||||
@@ -1220,6 +1235,9 @@ packages:
|
||||
resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
|
||||
engines: {node: '>=0.8.19'}
|
||||
|
||||
inherits@2.0.4:
|
||||
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
|
||||
|
||||
internal-slot@1.1.0:
|
||||
resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -1327,6 +1345,9 @@ packages:
|
||||
resolution: {integrity: sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
isarray@1.0.0:
|
||||
resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
|
||||
|
||||
isarray@2.0.5:
|
||||
resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==}
|
||||
|
||||
@@ -1375,11 +1396,15 @@ packages:
|
||||
resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==}
|
||||
engines: {node: '>=4.0'}
|
||||
|
||||
jszip@3.10.1:
|
||||
resolution: {integrity: sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==}
|
||||
|
||||
keyv@4.5.4:
|
||||
resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==}
|
||||
|
||||
lamejs@1.2.1:
|
||||
resolution: {integrity: sha512-s7bxvjvYthw6oPLCm5pFxvA84wUROODB8jEO2+CE1adhKgrIvVOlmMgY8zyugxGrvRaDHNJanOiS21/emty6dQ==}
|
||||
lamejs@https://codeload.github.com/zhuker/lamejs/tar.gz/582bbba6a12f981b984d8fb9e1874499fed85675:
|
||||
resolution: {tarball: https://codeload.github.com/zhuker/lamejs/tar.gz/582bbba6a12f981b984d8fb9e1874499fed85675}
|
||||
version: 1.2.1
|
||||
|
||||
language-subtag-registry@0.3.23:
|
||||
resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==}
|
||||
@@ -1392,6 +1417,9 @@ packages:
|
||||
resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
lie@3.3.0:
|
||||
resolution: {integrity: sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==}
|
||||
|
||||
lightningcss-android-arm64@1.30.2:
|
||||
resolution: {integrity: sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
@@ -1594,6 +1622,9 @@ packages:
|
||||
resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
pako@1.0.11:
|
||||
resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==}
|
||||
|
||||
parent-module@1.0.1:
|
||||
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
|
||||
engines: {node: '>=6'}
|
||||
@@ -1636,6 +1667,9 @@ packages:
|
||||
resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
process-nextick-args@2.0.1:
|
||||
resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
|
||||
|
||||
prop-types@15.8.1:
|
||||
resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
|
||||
|
||||
@@ -1658,6 +1692,9 @@ packages:
|
||||
resolution: {integrity: sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
readable-stream@2.3.8:
|
||||
resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
|
||||
|
||||
reflect.getprototypeof@1.0.10:
|
||||
resolution: {integrity: sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -1693,6 +1730,9 @@ packages:
|
||||
resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==}
|
||||
engines: {node: '>=0.4'}
|
||||
|
||||
safe-buffer@5.1.2:
|
||||
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
|
||||
|
||||
safe-push-apply@1.0.0:
|
||||
resolution: {integrity: sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -1725,6 +1765,9 @@ packages:
|
||||
resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
setimmediate@1.0.5:
|
||||
resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==}
|
||||
|
||||
sharp@0.34.5:
|
||||
resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
@@ -1787,6 +1830,9 @@ packages:
|
||||
resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
string_decoder@1.1.1:
|
||||
resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
|
||||
|
||||
strip-bom@3.0.0:
|
||||
resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==}
|
||||
engines: {node: '>=4'}
|
||||
@@ -1900,6 +1946,9 @@ packages:
|
||||
use-strict@1.0.1:
|
||||
resolution: {integrity: sha512-IeiWvvEXfW5ltKVMkxq6FvNf2LojMKvB2OCeja6+ct24S1XOmQw2dGr2JyndwACWAGJva9B7yPHwAmeA9QCqAQ==}
|
||||
|
||||
util-deprecate@1.0.2:
|
||||
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
|
||||
|
||||
which-boxed-primitive@1.1.1:
|
||||
resolution: {integrity: sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -2700,6 +2749,8 @@ snapshots:
|
||||
|
||||
convert-source-map@2.0.0: {}
|
||||
|
||||
core-util-is@1.0.3: {}
|
||||
|
||||
cross-spawn@7.0.6:
|
||||
dependencies:
|
||||
path-key: 3.1.1
|
||||
@@ -3109,6 +3160,8 @@ snapshots:
|
||||
optionalDependencies:
|
||||
picomatch: 4.0.3
|
||||
|
||||
fflate@0.8.2: {}
|
||||
|
||||
file-entry-cache@8.0.0:
|
||||
dependencies:
|
||||
flat-cache: 4.0.1
|
||||
@@ -3233,6 +3286,8 @@ snapshots:
|
||||
|
||||
ignore@7.0.5: {}
|
||||
|
||||
immediate@3.0.6: {}
|
||||
|
||||
import-fresh@3.3.1:
|
||||
dependencies:
|
||||
parent-module: 1.0.1
|
||||
@@ -3240,6 +3295,8 @@ snapshots:
|
||||
|
||||
imurmurhash@0.1.4: {}
|
||||
|
||||
inherits@2.0.4: {}
|
||||
|
||||
internal-slot@1.1.0:
|
||||
dependencies:
|
||||
es-errors: 1.3.0
|
||||
@@ -3358,6 +3415,8 @@ snapshots:
|
||||
call-bound: 1.0.4
|
||||
get-intrinsic: 1.3.0
|
||||
|
||||
isarray@1.0.0: {}
|
||||
|
||||
isarray@2.0.5: {}
|
||||
|
||||
isexe@2.0.0: {}
|
||||
@@ -3400,11 +3459,18 @@ snapshots:
|
||||
object.assign: 4.1.7
|
||||
object.values: 1.2.1
|
||||
|
||||
jszip@3.10.1:
|
||||
dependencies:
|
||||
lie: 3.3.0
|
||||
pako: 1.0.11
|
||||
readable-stream: 2.3.8
|
||||
setimmediate: 1.0.5
|
||||
|
||||
keyv@4.5.4:
|
||||
dependencies:
|
||||
json-buffer: 3.0.1
|
||||
|
||||
lamejs@1.2.1:
|
||||
lamejs@https://codeload.github.com/zhuker/lamejs/tar.gz/582bbba6a12f981b984d8fb9e1874499fed85675:
|
||||
dependencies:
|
||||
use-strict: 1.0.1
|
||||
|
||||
@@ -3419,6 +3485,10 @@ snapshots:
|
||||
prelude-ls: 1.2.1
|
||||
type-check: 0.4.0
|
||||
|
||||
lie@3.3.0:
|
||||
dependencies:
|
||||
immediate: 3.0.6
|
||||
|
||||
lightningcss-android-arm64@1.30.2:
|
||||
optional: true
|
||||
|
||||
@@ -3607,6 +3677,8 @@ snapshots:
|
||||
dependencies:
|
||||
p-limit: 3.1.0
|
||||
|
||||
pako@1.0.11: {}
|
||||
|
||||
parent-module@1.0.1:
|
||||
dependencies:
|
||||
callsites: 3.1.0
|
||||
@@ -3639,6 +3711,8 @@ snapshots:
|
||||
|
||||
prelude-ls@1.2.1: {}
|
||||
|
||||
process-nextick-args@2.0.1: {}
|
||||
|
||||
prop-types@15.8.1:
|
||||
dependencies:
|
||||
loose-envify: 1.4.0
|
||||
@@ -3658,6 +3732,16 @@ snapshots:
|
||||
|
||||
react@19.2.0: {}
|
||||
|
||||
readable-stream@2.3.8:
|
||||
dependencies:
|
||||
core-util-is: 1.0.3
|
||||
inherits: 2.0.4
|
||||
isarray: 1.0.0
|
||||
process-nextick-args: 2.0.1
|
||||
safe-buffer: 5.1.2
|
||||
string_decoder: 1.1.1
|
||||
util-deprecate: 1.0.2
|
||||
|
||||
reflect.getprototypeof@1.0.10:
|
||||
dependencies:
|
||||
call-bind: 1.0.8
|
||||
@@ -3708,6 +3792,8 @@ snapshots:
|
||||
has-symbols: 1.1.0
|
||||
isarray: 2.0.5
|
||||
|
||||
safe-buffer@5.1.2: {}
|
||||
|
||||
safe-push-apply@1.0.0:
|
||||
dependencies:
|
||||
es-errors: 1.3.0
|
||||
@@ -3747,6 +3833,8 @@ snapshots:
|
||||
es-errors: 1.3.0
|
||||
es-object-atoms: 1.1.1
|
||||
|
||||
setimmediate@1.0.5: {}
|
||||
|
||||
sharp@0.34.5:
|
||||
dependencies:
|
||||
'@img/colour': 1.0.0
|
||||
@@ -3872,6 +3960,10 @@ snapshots:
|
||||
define-properties: 1.2.1
|
||||
es-object-atoms: 1.1.1
|
||||
|
||||
string_decoder@1.1.1:
|
||||
dependencies:
|
||||
safe-buffer: 5.1.2
|
||||
|
||||
strip-bom@3.0.0: {}
|
||||
|
||||
strip-json-comments@3.1.1: {}
|
||||
@@ -4012,6 +4104,8 @@ snapshots:
|
||||
|
||||
use-strict@1.0.1: {}
|
||||
|
||||
util-deprecate@1.0.2: {}
|
||||
|
||||
which-boxed-primitive@1.1.1:
|
||||
dependencies:
|
||||
is-bigint: 1.1.0
|
||||
|
||||
types/automation.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* Automation system type definitions
|
||||
* Based on Ableton Live's automation model
|
||||
*/
|
||||
|
||||
/**
|
||||
* Automation curve types
|
||||
* - linear: Straight line between points
|
||||
* - bezier: Curved line with control handles
|
||||
* - step: Horizontal lines with vertical transitions (for discrete values)
|
||||
*/
|
||||
export type AutomationCurveType = 'linear' | 'bezier' | 'step';
|
||||
|
||||
/**
|
||||
* Automation recording/playback modes
|
||||
* - read: Only playback automation
|
||||
* - write: Record automation (replaces existing)
|
||||
* - touch: Record while touching control, then return to read mode
|
||||
* - latch: Record from first touch until stop, then return to read mode
|
||||
*/
|
||||
export type AutomationMode = 'read' | 'write' | 'touch' | 'latch';
|
||||
|
||||
/**
|
||||
* Single automation breakpoint
|
||||
*/
|
||||
export interface AutomationPoint {
|
||||
id: string;
|
||||
time: number; // Position in seconds from track start
|
||||
value: number; // Parameter value (normalized 0-1)
|
||||
curve: AutomationCurveType;
|
||||
// Bezier control handles (only used when curve is 'bezier')
|
||||
handleIn?: { x: number; y: number }; // Relative to point position
|
||||
handleOut?: { x: number; y: number }; // Relative to point position
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameter identifier for automation
|
||||
* Examples:
|
||||
* - 'volume' - Track volume
|
||||
* - 'pan' - Track pan
|
||||
* - 'mute' - Track mute (step curve)
|
||||
* - 'effect.compressor-1.threshold' - Effect parameter
|
||||
* - 'effect.delay-2.time' - Effect parameter
|
||||
*/
|
||||
export type AutomationParameterId = string;
|
||||
|
||||
/**
|
||||
* Single automation lane for a specific parameter
|
||||
*/
|
||||
export interface AutomationLane {
|
||||
id: string;
|
||||
trackId: string;
|
||||
parameterId: AutomationParameterId;
|
||||
parameterName: string; // Display name (e.g., "Volume", "Compressor Threshold")
|
||||
visible: boolean; // Show/hide lane
|
||||
height: number; // Lane height in pixels (user-adjustable, 60-120px)
|
||||
points: AutomationPoint[];
|
||||
mode: AutomationMode;
|
||||
color?: string; // Optional color override (defaults to parameter type color)
|
||||
// Value range for display (actual values are normalized 0-1)
|
||||
valueRange: {
|
||||
min: number; // Display minimum (e.g., 0 for volume)
|
||||
max: number; // Display maximum (e.g., 1 for volume)
|
||||
unit?: string; // Display unit (e.g., 'dB', '%', 'ms', 'Hz')
|
||||
formatter?: (value: number) => string; // Custom value formatter
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* All automation lanes for a single track
|
||||
*/
|
||||
export interface TrackAutomation {
|
||||
trackId: string;
|
||||
lanes: AutomationLane[];
|
||||
showAutomation: boolean; // Master show/hide toggle for all lanes
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete automation data for entire project
|
||||
*/
|
||||
export interface ProjectAutomation {
|
||||
tracks: Record<string, TrackAutomation>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Automation parameter value at a specific time
|
||||
* Used for real-time playback
|
||||
*/
|
||||
export interface AutomationValue {
|
||||
parameterId: AutomationParameterId;
|
||||
value: number;
|
||||
time: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper type for creating new automation points
|
||||
*/
|
||||
export type CreateAutomationPointInput = Omit<AutomationPoint, 'id'>;
|
||||
|
||||
/**
|
||||
* Helper type for creating new automation lanes
|
||||
*/
|
||||
export type CreateAutomationLaneInput = Omit<AutomationLane, 'id' | 'points'> & {
|
||||
points?: AutomationPoint[];
|
||||
};
|
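The types above do not prescribe an evaluator. As one hypothetical illustration of how they might be consumed at playback time, the sketch below samples a lane with plain linear interpolation, treats `step` points as holds, and ignores bezier handles entirely.

```ts
import type { AutomationLane } from '@/types/automation';

// Hypothetical helper: sample a lane's normalized value (0-1) at `time` seconds.
function sampleLane(lane: AutomationLane, time: number): number {
  const points = [...lane.points].sort((a, b) => a.time - b.time);
  if (points.length === 0) return 0;
  if (time <= points[0].time) return points[0].value;
  if (time >= points[points.length - 1].time) return points[points.length - 1].value;

  // Find the segment containing `time` and interpolate between its endpoints.
  for (let i = 0; i < points.length - 1; i++) {
    const a = points[i];
    const b = points[i + 1];
    if (time >= a.time && time <= b.time) {
      if (a.curve === 'step') return a.value; // hold until the next point
      const frac = (time - a.time) / (b.time - a.time);
      return a.value + (b.value - a.value) * frac; // linear; bezier not handled here
    }
  }
  return points[points.length - 1].value;
}
```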
||||
types/lamejs.d.ts (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
declare module 'lamejs/src/js/index.js' {
|
||||
export class Mp3Encoder {
|
||||
constructor(channels: number, samplerate: number, kbps: number);
|
||||
encodeBuffer(left: Int16Array, right: Int16Array): Int8Array;
|
||||
flush(): Int8Array;
|
||||
}
|
||||
|
||||
export class WavHeader {
|
||||
dataOffset: number;
|
||||
dataLen: number;
|
||||
channels: number;
|
||||
sampleRate: number;
|
||||
static readHeader(dataView: DataView): WavHeader;
|
||||
}
|
||||
}
|
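For orientation, a small encoding sketch that uses only the members declared above; the Float32-to-Int16 conversion and the 192 kbps setting are illustrative choices, not something this change set prescribes.

```ts
import { Mp3Encoder } from 'lamejs/src/js/index.js';

// Convert one stereo block of Float32 samples to MP3 bytes.
function encodeStereoBlock(left: Float32Array, right: Float32Array, sampleRate: number): Blob {
  const toInt16 = (samples: Float32Array): Int16Array => {
    const out = new Int16Array(samples.length);
    for (let i = 0; i < samples.length; i++) {
      const s = Math.max(-1, Math.min(1, samples[i])); // clamp before scaling
      out[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
    return out;
  };

  const encoder = new Mp3Encoder(2, sampleRate, 192); // 2 channels, 192 kbps
  const chunks: Int8Array[] = [];

  const frames = encoder.encodeBuffer(toInt16(left), toInt16(right));
  if (frames.length > 0) chunks.push(frames);

  const remainder = encoder.flush(); // emit whatever is still buffered
  if (remainder.length > 0) chunks.push(remainder);

  return new Blob(chunks, { type: 'audio/mpeg' });
}
```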
||||
types/marker.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* Region marker type definitions
|
||||
* Markers help navigate and organize the timeline
|
||||
*/
|
||||
|
||||
/**
|
||||
* Marker types
|
||||
* - point: A single point in time (like a cue point)
|
||||
* - region: A time range with start and end
|
||||
*/
|
||||
export type MarkerType = 'point' | 'region';
|
||||
|
||||
/**
|
||||
* Single marker or region
|
||||
*/
|
||||
export interface Marker {
|
||||
id: string;
|
||||
name: string;
|
||||
type: MarkerType;
|
||||
time: number; // Start time in seconds
|
||||
endTime?: number; // End time for regions (undefined for point markers)
|
||||
color?: string; // Optional color for visual distinction
|
||||
description?: string; // Optional description/notes
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper type for creating new markers
|
||||
*/
|
||||
export type CreateMarkerInput = Omit<Marker, 'id'>;
|
||||
types/track.ts
@@ -2,6 +2,10 @@
|
||||
* Multi-track types and interfaces
|
||||
*/
|
||||
|
||||
import type { EffectChain } from '@/lib/audio/effects/chain';
|
||||
import type { Selection } from './selection';
|
||||
import type { AutomationLane } from './automation';
|
||||
|
||||
export interface Track {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -16,9 +20,25 @@ export interface Track {
|
||||
solo: boolean;
|
||||
recordEnabled: boolean;
|
||||
|
||||
// Effects
|
||||
effectChain: EffectChain;
|
||||
|
||||
// Automation
|
||||
automation: {
|
||||
lanes: AutomationLane[];
|
||||
showAutomation: boolean; // Master show/hide toggle
|
||||
selectedParameterId?: string; // Currently selected parameter to display
|
||||
};
|
||||
|
||||
// UI state
|
||||
collapsed: boolean;
|
||||
selected: boolean;
|
||||
showEffects: boolean; // Show/hide per-track effects panel
|
||||
effectsExpanded?: boolean; // Whether effects bar is expanded (when showEffects is true)
|
||||
automationExpanded?: boolean; // Whether automation bar is expanded (shows full controls)
|
||||
|
||||
// Selection (for editing operations)
|
||||
selection: Selection | null;
|
||||
}
|
||||
|
||||
export interface TrackState {
|
||||
@@ -50,6 +70,7 @@ export const TRACK_COLORS: Record<TrackColor, string> = {
|
||||
gray: 'rgb(156, 163, 175)',
|
||||
};
|
||||
|
||||
export const DEFAULT_TRACK_HEIGHT = 120;
|
||||
export const MIN_TRACK_HEIGHT = 60;
|
||||
export const MAX_TRACK_HEIGHT = 300;
|
||||
export const DEFAULT_TRACK_HEIGHT = 400; // Knob + fader with labels + R/S/M/A/E buttons
|
||||
export const MIN_TRACK_HEIGHT = 400; // Minimum to fit knob + fader with labels + all buttons
|
||||
export const MAX_TRACK_HEIGHT = 500; // Increased for better waveform viewing
|
||||
export const COLLAPSED_TRACK_HEIGHT = 48; // Extracted constant for collapsed state
|