audio-ui/components/tracks/Track.tsx
Sebastian Krüger 546c80e4ae feat: display dB values and use gauge icon for level meters
Changed the level meter display to show professional dB values instead
of percentages, and replaced the volume icon with a gauge icon.

Display Changes:
- Show dB values (-60 to 0) instead of percentages (0-100%)
- Use a monospace font for consistent digit alignment
- Show "-∞" for silence (level = 0) instead of "-60"
- Round to the nearest integer dB (no decimals, for a cleaner display)

Icon Updates:
- Replaced Volume2 icon with Gauge icon for playback levels
- Kept Mic icon for recording input levels
- The Gauge icon better represents metering/measurement than audio volume control

dB Conversion:
- Formula: dB = (normalized * 60) - 60
- Reverse of: normalized = (dB + 60) / 60
- Range: -60dB (silence) to 0dB (full scale)

Display Examples:
  0.00 (0%)   = -∞    (silence)
  0.17 (17%)  = -50dB (very quiet)
  0.50 (50%)  = -30dB (moderate)
  0.83 (83%)  = -10dB (loud)
  1.00 (100%) = 0dB   (full scale)
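
For reference, a minimal TypeScript sketch of this conversion (the formatDb
helper name is illustrative, not part of the component):

  // Convert a normalized meter level (0-1) to a dB label for display.
  // Uses the same -60dB floor as the meter readout; 0 maps to "-∞".
  function formatDb(level: number): string {
    if (level === 0) return '-∞';
    const db = level * 60 - 60; // reverse of normalized = (dB + 60) / 60
    return db.toFixed(0);       // e.g. 0.70 -> "-18", 1.00 -> "0"
  }

  formatDb(0.17); // "-50"
  formatDb(0.83); // "-10"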

Benefits:
- Professional dB notation matching industry standards
- Clearer for audio engineers and musicians
- Easier to identify levels relative to 0dBFS
- Gauge icon better conveys "measurement"
- Monospace font prevents number jumping

Now meters show "-18" instead of "70%", making it immediately clear
where you are in the dynamic range.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-18 15:28:35 +01:00


'use client';
import * as React from 'react';
import { Volume2, VolumeX, Headphones, Trash2, ChevronDown, ChevronRight, CircleArrowOutUpRight, Upload, Plus, Mic, Gauge } from 'lucide-react';
import type { Track as TrackType } from '@/types/track';
import { Button } from '@/components/ui/Button';
import { Slider } from '@/components/ui/Slider';
import { cn } from '@/lib/utils/cn';
import { EffectBrowser } from '@/components/effects/EffectBrowser';
import { EffectDevice } from '@/components/effects/EffectDevice';
import type { EffectType } from '@/lib/audio/effects/chain';
import { InputLevelMeter } from '@/components/recording/InputLevelMeter';
export interface TrackProps {
track: TrackType;
zoom: number;
currentTime: number;
duration: number;
isSelected?: boolean;
onSelect?: () => void;
onToggleMute: () => void;
onToggleSolo: () => void;
onToggleCollapse: () => void;
onVolumeChange: (volume: number) => void;
onPanChange: (pan: number) => void;
onRemove: () => void;
onNameChange: (name: string) => void;
onSeek?: (time: number) => void;
onLoadAudio?: (buffer: AudioBuffer) => void;
onToggleEffect?: (effectId: string) => void;
onRemoveEffect?: (effectId: string) => void;
onUpdateEffect?: (effectId: string, parameters: any) => void;
onAddEffect?: (effectType: EffectType) => void;
onSelectionChange?: (selection: { start: number; end: number } | null) => void;
onToggleRecordEnable?: () => void;
isRecording?: boolean;
recordingLevel?: number;
playbackLevel?: number;
}
export function Track({
track,
zoom,
currentTime,
duration,
isSelected,
onSelect,
onToggleMute,
onToggleSolo,
onToggleCollapse,
onVolumeChange,
onPanChange,
onRemove,
onNameChange,
onSeek,
onLoadAudio,
onToggleEffect,
onRemoveEffect,
onUpdateEffect,
onAddEffect,
onSelectionChange,
onToggleRecordEnable,
isRecording = false,
recordingLevel = 0,
playbackLevel = 0,
}: TrackProps) {
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const containerRef = React.useRef<HTMLDivElement>(null);
const fileInputRef = React.useRef<HTMLInputElement>(null);
const [isEditingName, setIsEditingName] = React.useState(false);
const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
const [showEffects, setShowEffects] = React.useState(false);
const [themeKey, setThemeKey] = React.useState(0);
const inputRef = React.useRef<HTMLInputElement>(null);
// Selection state
const [isSelecting, setIsSelecting] = React.useState(false);
const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
const handleNameClick = () => {
setIsEditingName(true);
setNameInput(String(track.name || 'Untitled Track'));
};
const handleNameBlur = () => {
setIsEditingName(false);
if (nameInput.trim()) {
onNameChange(nameInput.trim());
} else {
setNameInput(String(track.name || 'Untitled Track'));
}
};
const handleNameKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter') {
inputRef.current?.blur();
} else if (e.key === 'Escape') {
setNameInput(String(track.name || 'Untitled Track'));
setIsEditingName(false);
}
};
React.useEffect(() => {
if (isEditingName && inputRef.current) {
inputRef.current.focus();
inputRef.current.select();
}
}, [isEditingName]);
// Listen for theme changes
React.useEffect(() => {
const observer = new MutationObserver(() => {
// Increment key to force waveform redraw
setThemeKey((prev) => prev + 1);
});
// Watch for class changes on document element (dark mode toggle)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['class'],
});
return () => observer.disconnect();
}, []);
// Draw waveform
React.useEffect(() => {
if (!track.audioBuffer || !canvasRef.current) return;
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
if (!ctx) return;
// Use parent container's size since canvas is absolute positioned
const parent = canvas.parentElement;
if (!parent) return;
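// Render the canvas backing store at devicePixelRatio resolution so the waveform
// stays sharp on high-DPI displays, then scale the context back to CSS pixels.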
const dpr = window.devicePixelRatio || 1;
const rect = parent.getBoundingClientRect();
canvas.width = rect.width * dpr;
canvas.height = rect.height * dpr;
ctx.scale(dpr, dpr);
const width = rect.width;
const height = rect.height;
// Clear canvas with theme color
const bgColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || 'rgb(15, 23, 42)';
ctx.fillStyle = bgColor;
ctx.fillRect(0, 0, width, height);
const buffer = track.audioBuffer;
const channelData = buffer.getChannelData(0);
const samplesPerPixel = Math.floor(buffer.length / (width * zoom));
// Draw waveform
ctx.fillStyle = track.color;
ctx.strokeStyle = track.color;
ctx.lineWidth = 1;
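// For each horizontal pixel, scan its slice of samples and draw a vertical line
// from the highest peak to the lowest (min/max peak waveform rendering).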
for (let x = 0; x < width; x++) {
const startSample = Math.floor(x * samplesPerPixel);
const endSample = Math.floor((x + 1) * samplesPerPixel);
let min = 1.0;
let max = -1.0;
for (let i = startSample; i < endSample && i < channelData.length; i++) {
const sample = channelData[i];
if (sample < min) min = sample;
if (sample > max) max = sample;
}
const y1 = (height / 2) * (1 - max);
const y2 = (height / 2) * (1 - min);
ctx.beginPath();
ctx.moveTo(x, y1);
ctx.lineTo(x, y2);
ctx.stroke();
}
// Draw center line
ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
ctx.lineWidth = 1;
ctx.beginPath();
ctx.moveTo(0, height / 2);
ctx.lineTo(width, height / 2);
ctx.stroke();
// Draw selection overlay
if (track.selection && duration > 0) {
const selStartX = (track.selection.start / duration) * width;
const selEndX = (track.selection.end / duration) * width;
// Draw selection background
ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
// Draw selection borders
ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
ctx.lineWidth = 2;
// Start border
ctx.beginPath();
ctx.moveTo(selStartX, 0);
ctx.lineTo(selStartX, height);
ctx.stroke();
// End border
ctx.beginPath();
ctx.moveTo(selEndX, 0);
ctx.lineTo(selEndX, height);
ctx.stroke();
}
// Draw playhead
if (duration > 0) {
const playheadX = (currentTime / duration) * width;
ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
ctx.lineWidth = 2;
ctx.beginPath();
ctx.moveTo(playheadX, 0);
ctx.lineTo(playheadX, height);
ctx.stroke();
}
}, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration, themeKey, track.selection]);
const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
const clickTime = (x / rect.width) * duration;
// Store drag start position
setDragStartPos({ x: e.clientX, y: e.clientY });
setIsSelectingByDrag(false);
// Start selection immediately (will be used if user drags)
setIsSelecting(true);
setSelectionStart(clickTime);
};
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const pointerTime = (x / rect.width) * duration; // avoid shadowing the currentTime prop
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
const dragDistance = Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
);
if (dragDistance > 3) {
setIsSelectingByDrag(true);
}
// If dragging, update selection
if (isSelectingByDrag || dragDistance > 3) {
// Clamp to valid time range
const clampedTime = Math.max(0, Math.min(duration, pointerTime));
// Update selection (ensure start < end)
const start = Math.min(selectionStart, clampedTime);
const end = Math.max(selectionStart, clampedTime);
onSelectionChange?.({ start, end });
}
};
const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const clickTime = (x / rect.width) * duration;
// Check if user actually dragged (check distance directly, not state)
const didDrag = dragStartPos
? Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
) > 3
: false;
// If user didn't drag (just clicked), clear selection and seek
if (!didDrag) {
onSelectionChange?.(null);
if (onSeek) {
onSeek(clickTime);
}
}
// Reset drag state
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
};
// Handle mouse leaving canvas during selection
React.useEffect(() => {
const handleGlobalMouseUp = () => {
if (isSelecting) {
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
}
};
window.addEventListener('mouseup', handleGlobalMouseUp);
return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
}, [isSelecting]);
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (!file || !onLoadAudio) return;
try {
const arrayBuffer = await file.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
onLoadAudio(audioBuffer);
// Update track name to filename if it's still default
if (track.name === 'New Track' || track.name === 'Untitled Track') {
const fileName = file.name.replace(/\.[^/.]+$/, '');
onNameChange(fileName);
}
} catch (error) {
console.error('Failed to load audio file:', error);
}
// Reset input
e.target.value = '';
};
const handleLoadAudioClick = () => {
fileInputRef.current?.click();
};
const [isDragging, setIsDragging] = React.useState(false);
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(true);
};
const handleDragLeave = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
};
const handleDrop = async (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
const file = e.dataTransfer.files?.[0];
if (!file || !onLoadAudio) return;
// Check if it's an audio file
if (!file.type.startsWith('audio/')) {
console.warn('Dropped file is not an audio file');
return;
}
try {
const arrayBuffer = await file.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
onLoadAudio(audioBuffer);
// Update track name to filename if it's still default
if (track.name === 'New Track' || track.name === 'Untitled Track') {
const fileName = file.name.replace(/\.[^/.]+$/, '');
onNameChange(fileName);
}
} catch (error) {
console.error('Failed to load audio file:', error);
}
};
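// Collapsed tracks render as a fixed 48px strip; expanded tracks use their stored height.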
const trackHeight = track.collapsed ? 48 : track.height;
return (
<div
ref={containerRef}
className={cn(
'flex flex-col',
isSelected && 'ring-2 ring-primary ring-inset'
)}
>
{/* Top: Track Row (Control Panel + Waveform) */}
<div className="flex" style={{ height: trackHeight }}>
{/* Left: Track Control Panel (Fixed Width) */}
<div
className="w-72 flex-shrink-0 bg-card border-r border-border border-b border-border p-3 flex flex-col gap-2"
onClick={(e) => e.stopPropagation()}
>
{/* Track Name & Collapse Toggle */}
<div className="flex items-center gap-2">
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleCollapse}
title={track.collapsed ? 'Expand track' : 'Collapse track'}
>
{track.collapsed ? (
<ChevronRight className="h-4 w-4" />
) : (
<ChevronDown className="h-4 w-4" />
)}
</Button>
<div
className="w-1 h-8 rounded-full flex-shrink-0"
style={{ backgroundColor: track.color }}
/>
<div className="flex-1 min-w-0">
{isEditingName ? (
<input
ref={inputRef}
type="text"
value={nameInput}
onChange={(e) => setNameInput(e.target.value)}
onBlur={handleNameBlur}
onKeyDown={handleNameKeyDown}
className="w-full px-2 py-1 text-sm font-medium bg-background border border-border rounded"
/>
) : (
<div
onClick={handleNameClick}
className="px-2 py-1 text-sm font-medium text-foreground truncate cursor-pointer hover:bg-accent rounded"
title={String(track.name || 'Untitled Track')}
>
{String(track.name || 'Untitled Track')}
</div>
)}
</div>
{/* Record Enable Button */}
{onToggleRecordEnable && (
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleRecordEnable}
title="Arm track for recording"
className={cn(
track.recordEnabled && 'bg-red-500/20 hover:bg-red-500/30',
isRecording && 'animate-pulse'
)}
>
<Mic className={cn('h-4 w-4', track.recordEnabled && 'text-red-500')} />
</Button>
)}
{/* Solo Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleSolo}
title="Solo track"
className={cn(track.solo && 'bg-yellow-500/20 hover:bg-yellow-500/30')}
>
<Headphones className={cn('h-4 w-4', track.solo && 'text-yellow-500')} />
</Button>
{/* Mute Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleMute}
title="Mute track"
className={cn(track.mute && 'bg-red-500/20 hover:bg-red-500/30')}
>
{track.mute ? (
<VolumeX className="h-4 w-4 text-red-500" />
) : (
<Volume2 className="h-4 w-4" />
)}
</Button>
{/* Remove Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onRemove}
title="Remove track"
className="text-destructive hover:text-destructive hover:bg-destructive/10"
>
<Trash2 className="h-4 w-4" />
</Button>
</div>
{/* Track Controls - Only show when not collapsed */}
{!track.collapsed && (
<>
{/* Volume */}
<div className="flex items-center gap-2">
<label className="text-xs text-muted-foreground flex items-center gap-1 w-16 flex-shrink-0">
<Volume2 className="h-3.5 w-3.5" />
Volume
</label>
<div className="flex-1">
<Slider
value={track.volume}
onChange={onVolumeChange}
min={0}
max={1}
step={0.01}
/>
</div>
<span className="text-xs text-muted-foreground w-10 text-right flex-shrink-0">
{Math.round(track.volume * 100)}%
</span>
</div>
{/* Pan */}
<div className="flex items-center gap-2">
<label className="text-xs text-muted-foreground flex items-center gap-1 w-16 flex-shrink-0">
<CircleArrowOutUpRight className="h-3 w-3" />
Pan
</label>
<div className="flex-1">
<Slider
value={track.pan}
onChange={onPanChange}
min={-1}
max={1}
step={0.01}
/>
</div>
<span className="text-xs text-muted-foreground w-10 text-right flex-shrink-0">
{track.pan === 0
? 'C'
: track.pan < 0
? `L${Math.abs(Math.round(track.pan * 100))}`
: `R${Math.round(track.pan * 100)}`}
</span>
</div>
{/* Level Meter (shows input when recording, output level otherwise) */}
<div className="flex items-center gap-2">
<label className="text-xs text-muted-foreground flex items-center gap-1 w-16 flex-shrink-0">
{track.recordEnabled || isRecording ? (
<>
<Mic className="h-3 w-3" />
Input
</>
) : (
<>
<Gauge className="h-3 w-3" />
Level
</>
)}
</label>
<div className="flex-1">
<InputLevelMeter
level={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
orientation="horizontal"
/>
</div>
<span className="text-xs text-muted-foreground w-12 text-right flex-shrink-0 font-mono">
{(() => {
const level = track.recordEnabled || isRecording ? recordingLevel : playbackLevel;
// Convert normalized (0-1) back to dB
// normalized = (dB - (-60)) / 60, so dB = (normalized * 60) - 60
const db = (level * 60) - 60;
return level === 0 ? '-∞' : db.toFixed(0);
})()}
</span>
</div>
</>
)}
</div>
{/* Right: Waveform Area (Flexible Width) */}
<div
className="flex-1 relative bg-waveform-bg border-b border-border cursor-pointer"
onClick={onSelect}
>
{track.audioBuffer ? (
<canvas
ref={canvasRef}
className="absolute inset-0 w-full h-full cursor-crosshair"
onMouseDown={handleCanvasMouseDown}
onMouseMove={handleCanvasMouseMove}
onMouseUp={handleCanvasMouseUp}
/>
) : (
!track.collapsed && (
<>
<div
className={cn(
"absolute inset-0 flex flex-col items-center justify-center text-sm text-muted-foreground hover:text-foreground transition-colors cursor-pointer group",
isDragging ? "bg-primary/20 text-primary border-2 border-primary border-dashed" : "hover:bg-accent/50"
)}
onClick={(e) => {
e.stopPropagation();
handleLoadAudioClick();
}}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
>
<Upload className="h-6 w-6 mb-2 opacity-50 group-hover:opacity-100" />
<p>{isDragging ? 'Drop audio file here' : 'Click to load audio file'}</p>
<p className="text-xs opacity-75 mt-1">or drag & drop</p>
</div>
<input
ref={fileInputRef}
type="file"
accept="audio/*"
onChange={handleFileChange}
className="hidden"
/>
</>
)
)}
</div>
</div>
{/* Bottom: Effects Section (Collapsible, Full Width) */}
{!track.collapsed && (
<div className="bg-muted/50 border-b border-border/50">
{/* Effects Header - clickable to toggle */}
<div
className="flex items-center gap-2 px-3 py-1.5 cursor-pointer hover:bg-accent/30 transition-colors"
onClick={() => setShowEffects(!showEffects)}
>
{showEffects ? (
<ChevronDown className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
) : (
<ChevronRight className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
)}
{/* Show mini effect chain when collapsed */}
{!showEffects && track.effectChain.effects.length > 0 ? (
<div className="flex-1 flex items-center gap-1 overflow-x-auto custom-scrollbar">
{track.effectChain.effects.map((effect) => (
<div
key={effect.id}
className={cn(
'px-2 py-0.5 rounded text-[10px] font-medium flex-shrink-0',
effect.enabled
? 'bg-primary/20 text-primary border border-primary/30'
: 'bg-muted/30 text-muted-foreground border border-border'
)}
>
{effect.name}
</div>
))}
</div>
) : (
<span className="text-xs font-medium text-muted-foreground">
Devices ({track.effectChain.effects.length})
</span>
)}
<Button
variant="ghost"
size="icon-sm"
onClick={(e) => {
e.stopPropagation();
setEffectBrowserOpen(true);
}}
title="Add effect"
className="h-5 w-5 flex-shrink-0"
>
<Plus className="h-3 w-3" />
</Button>
</div>
{/* Horizontal scrolling device rack - expanded state */}
{showEffects && (
<div className="h-48 overflow-x-auto custom-scrollbar bg-muted/70">
<div className="flex h-full">
{track.effectChain.effects.length === 0 ? (
<div className="text-xs text-muted-foreground text-center py-8 w-full">
No devices. Click + to add an effect.
</div>
) : (
track.effectChain.effects.map((effect) => (
<EffectDevice
key={effect.id}
effect={effect}
onToggleEnabled={() => onToggleEffect?.(effect.id)}
onRemove={() => onRemoveEffect?.(effect.id)}
onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
/>
))
)}
</div>
</div>
)}
</div>
)}
{/* Effect Browser Dialog */}
<EffectBrowser
open={effectBrowserOpen}
onClose={() => setEffectBrowserOpen(false)}
onSelectEffect={(effectType) => {
if (onAddEffect) {
onAddEffect(effectType);
}
}}
/>
</div>
);
}