audio-ui/components/tracks/Track.tsx
Sebastian Krüger d7dfb8a746 feat: implement synchronized horizontal scrolling for track waveforms
Refactored track layout to use a two-column design where:
- Left column: All track control panels (fixed, vertical scroll)
- Right column: All waveforms (shared horizontal scroll container)

This ensures all track waveforms scroll together horizontally when
zoomed in, providing a more cohesive DAW-like experience.

Changes:
- Added renderControlsOnly and renderWaveformOnly props to Track component
- Track component now supports conditional rendering of just controls or just waveform
- TrackList renders each track twice: once for controls, once for waveforms
- Waveforms share a common scrollable container for synchronized scrolling
- Track controls stay fixed while waveforms scroll horizontally together (sketched below)
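
Sketch of the resulting TrackList structure (simplified; the shared Track
props are elided as {...handlers}):

    <div className="flex">
      {/* Left column: fixed control panels */}
      <div className="flex flex-col">
        {tracks.map((track) => (
          <Track key={track.id} track={track} renderControlsOnly {...handlers} />
        ))}
      </div>
      {/* Right column: one shared horizontal scroll container */}
      <div className="flex-1 overflow-x-auto">
        {tracks.map((track) => (
          <Track key={track.id} track={track} renderWaveformOnly {...handlers} />
        ))}
      </div>
    </div>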

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-19 11:05:27 +01:00

920 lines
28 KiB
TypeScript

"use client";
import * as React from "react";
import {
Volume2,
VolumeX,
Headphones,
Trash2,
ChevronDown,
ChevronRight,
ChevronUp,
UnfoldHorizontal,
Upload,
Mic,
Gauge,
Circle,
Sparkles,
} from "lucide-react";
import type { Track as TrackType } from "@/types/track";
import {
COLLAPSED_TRACK_HEIGHT,
MIN_TRACK_HEIGHT,
MAX_TRACK_HEIGHT,
} from "@/types/track";
import { Button } from "@/components/ui/Button";
import { Slider } from "@/components/ui/Slider";
import { cn } from "@/lib/utils/cn";
import type { EffectType } from "@/lib/audio/effects/chain";
import { TrackControls } from "./TrackControls";
import { AutomationLane } from "@/components/automation/AutomationLane";
import type {
AutomationLane as AutomationLaneType,
AutomationPoint as AutomationPointType,
} from "@/types/automation";
import { createAutomationPoint } from "@/lib/audio/automation/utils";
import { createAutomationLane } from "@/lib/audio/automation-utils";
import { EffectDevice } from "@/components/effects/EffectDevice";
import { EffectBrowser } from "@/components/effects/EffectBrowser";
import { ImportDialog } from "@/components/dialogs/ImportDialog";
import { importAudioFile, type ImportOptions } from "@/lib/audio/decoder";
export interface TrackProps {
track: TrackType;
zoom: number;
currentTime: number;
duration: number;
isSelected?: boolean;
onSelect?: () => void;
onToggleMute: () => void;
onToggleSolo: () => void;
onToggleCollapse: () => void;
onVolumeChange: (volume: number) => void;
onPanChange: (pan: number) => void;
onRemove: () => void;
onNameChange: (name: string) => void;
onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
onSeek?: (time: number) => void;
onLoadAudio?: (buffer: AudioBuffer) => void;
onToggleEffect?: (effectId: string) => void;
onRemoveEffect?: (effectId: string) => void;
onUpdateEffect?: (effectId: string, parameters: any) => void;
onAddEffect?: (effectType: EffectType) => void;
onSelectionChange?: (
selection: { start: number; end: number } | null,
) => void;
onToggleRecordEnable?: () => void;
isRecording?: boolean;
recordingLevel?: number;
playbackLevel?: number;
onParameterTouched?: (
trackId: string,
laneId: string,
touched: boolean,
) => void;
isPlaying?: boolean;
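  /** Render only the fixed control panel (left column of the split layout). */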
renderControlsOnly?: boolean;
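  /** Render only the waveform (right column, inside the shared horizontal scroll container). */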
renderWaveformOnly?: boolean;
}
export function Track({
track,
zoom,
currentTime,
duration,
isSelected,
onSelect,
onToggleMute,
onToggleSolo,
onToggleCollapse,
onVolumeChange,
onPanChange,
onRemove,
onNameChange,
onUpdateTrack,
onSeek,
onLoadAudio,
onToggleEffect,
onRemoveEffect,
onUpdateEffect,
onAddEffect,
onSelectionChange,
onToggleRecordEnable,
isRecording = false,
recordingLevel = 0,
playbackLevel = 0,
onParameterTouched,
isPlaying = false,
renderControlsOnly = false,
renderWaveformOnly = false,
}: TrackProps) {
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const containerRef = React.useRef<HTMLDivElement>(null);
const fileInputRef = React.useRef<HTMLInputElement>(null);
const [isEditingName, setIsEditingName] = React.useState(false);
const [nameInput, setNameInput] = React.useState(
String(track.name || "Untitled Track"),
);
const [themeKey, setThemeKey] = React.useState(0);
const inputRef = React.useRef<HTMLInputElement>(null);
const [isResizing, setIsResizing] = React.useState(false);
const resizeStartRef = React.useRef({ y: 0, height: 0 });
// Import dialog state
const [showImportDialog, setShowImportDialog] = React.useState(false);
const [pendingFile, setPendingFile] = React.useState<File | null>(null);
const [fileMetadata, setFileMetadata] = React.useState<{
sampleRate?: number;
channels?: number;
}>({});
// Selection state
const [isSelecting, setIsSelecting] = React.useState(false);
const [selectionStart, setSelectionStart] = React.useState<number | null>(
null,
);
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
const [dragStartPos, setDragStartPos] = React.useState<{
x: number;
y: number;
} | null>(null);
// Touch callbacks for automation recording
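  // These notify the automation engine when the user grabs or releases the
  // pan/volume controls; in "touch" and "latch" modes that is what marks
  // where recorded values start and stop overwriting the lane.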
const handlePanTouchStart = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const panLane = track.automation.lanes.find(
(l) => l.parameterId === "pan",
);
if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handlePanTouchEnd = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const panLane = track.automation.lanes.find(
(l) => l.parameterId === "pan",
);
if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handleVolumeTouchStart = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const volumeLane = track.automation.lanes.find(
(l) => l.parameterId === "volume",
);
if (
volumeLane &&
(volumeLane.mode === "touch" || volumeLane.mode === "latch")
) {
queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
const handleVolumeTouchEnd = React.useCallback(() => {
if (isPlaying && onParameterTouched) {
const volumeLane = track.automation.lanes.find(
(l) => l.parameterId === "volume",
);
if (
volumeLane &&
(volumeLane.mode === "touch" || volumeLane.mode === "latch")
) {
queueMicrotask(() =>
onParameterTouched(track.id, volumeLane.id, false),
);
}
}
}, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);
// Auto-create automation lane for selected parameter if it doesn't exist
React.useEffect(() => {
if (!track.automation?.showAutomation) return;
const selectedParameterId =
track.automation.selectedParameterId || "volume";
const laneExists = track.automation.lanes.some(
(lane) => lane.parameterId === selectedParameterId,
);
if (!laneExists) {
// Build list of available parameters
const availableParameters: Array<{ id: string; name: string }> = [
{ id: "volume", name: "Volume" },
{ id: "pan", name: "Pan" },
];
track.effectChain.effects.forEach((effect) => {
if (effect.parameters) {
Object.keys(effect.parameters).forEach((paramKey) => {
const parameterId = `effect.${effect.id}.${paramKey}`;
const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
availableParameters.push({ id: parameterId, name: paramName });
});
}
});
const paramInfo = availableParameters.find(
(p) => p.id === selectedParameterId,
);
if (paramInfo) {
// Determine value range based on parameter type
let valueRange = { min: 0, max: 1 };
let unit = "";
let formatter: ((value: number) => string) | undefined;
if (selectedParameterId === "volume") {
unit = "dB";
} else if (selectedParameterId === "pan") {
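          // Pan is stored as 0..1 with 0.5 = center; format it DAW-style,
          // e.g. 0.25 -> "50L", 0.5 -> "C", 0.75 -> "50R".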
formatter = (value: number) => {
if (value === 0.5) return "C";
if (value < 0.5)
return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
return `${((value - 0.5) * 200).toFixed(0)}R`;
};
} else if (selectedParameterId.startsWith("effect.")) {
// Parse effect parameter: effect.{effectId}.{paramName}
const parts = selectedParameterId.split(".");
if (parts.length === 3) {
const paramName = parts[2];
// Set ranges based on parameter name
if (paramName === "frequency") {
valueRange = { min: 20, max: 20000 };
unit = "Hz";
} else if (paramName === "Q") {
valueRange = { min: 0.1, max: 20 };
} else if (paramName === "gain") {
valueRange = { min: -40, max: 40 };
unit = "dB";
}
}
}
const newLane = createAutomationLane(
track.id,
selectedParameterId,
paramInfo.name,
{
min: valueRange.min,
max: valueRange.max,
unit,
formatter,
},
);
onUpdateTrack(track.id, {
automation: {
...track.automation,
lanes: [...track.automation.lanes, newLane],
selectedParameterId,
},
});
}
}
}, [
track.automation?.showAutomation,
track.automation?.selectedParameterId,
track.automation?.lanes,
track.effectChain.effects,
track.id,
onUpdateTrack,
]);
const handleNameClick = () => {
setIsEditingName(true);
setNameInput(String(track.name || "Untitled Track"));
};
const handleNameBlur = () => {
setIsEditingName(false);
if (nameInput.trim()) {
onNameChange(nameInput.trim());
} else {
setNameInput(String(track.name || "Untitled Track"));
}
};
const handleNameKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Enter") {
inputRef.current?.blur();
} else if (e.key === "Escape") {
setNameInput(String(track.name || "Untitled Track"));
setIsEditingName(false);
}
};
React.useEffect(() => {
if (isEditingName && inputRef.current) {
inputRef.current.focus();
inputRef.current.select();
}
}, [isEditingName]);
// Listen for theme changes
React.useEffect(() => {
const observer = new MutationObserver(() => {
// Increment key to force waveform redraw
setThemeKey((prev) => prev + 1);
});
// Watch for class changes on document element (dark mode toggle)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ["class"],
});
return () => observer.disconnect();
}, []);
// Draw waveform
React.useEffect(() => {
if (!track.audioBuffer || !canvasRef.current) return;
const canvas = canvasRef.current;
const ctx = canvas.getContext("2d");
if (!ctx) return;
// Use parent container's size since canvas is absolute positioned
const parent = canvas.parentElement;
if (!parent) return;
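    // Size the backing store at devicePixelRatio resolution and scale the
    // context, so drawing code works in CSS pixels but renders crisply on HiDPI.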
const dpr = window.devicePixelRatio || 1;
const rect = parent.getBoundingClientRect();
canvas.width = rect.width * dpr;
canvas.height = rect.height * dpr;
ctx.scale(dpr, dpr);
const width = rect.width;
const height = rect.height;
// Clear canvas with theme color
const bgColor =
getComputedStyle(canvas).getPropertyValue("--color-waveform-bg") ||
"rgb(15, 23, 42)";
ctx.fillStyle = bgColor;
ctx.fillRect(0, 0, width, height);
const buffer = track.audioBuffer;
const channelData = buffer.getChannelData(0);
    // Guard against 0 samples per pixel at high zoom; otherwise min/max in the
    // loop below would stay uninitialized and draw full-height garbage lines.
    const samplesPerPixel = Math.max(
      1,
      Math.floor(buffer.length / (width * zoom)),
    );
// Draw waveform
ctx.fillStyle = track.color;
ctx.strokeStyle = track.color;
ctx.lineWidth = 1;
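    // Min/max peak rendering: for each pixel column, scan its slice of samples
    // and draw a vertical line spanning the column's min and max amplitude.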
for (let x = 0; x < width; x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      if (startSample >= channelData.length) break; // past the end of the buffer
      const endSample = Math.floor((x + 1) * samplesPerPixel);
      let min = 1.0;
      let max = -1.0;
for (let i = startSample; i < endSample && i < channelData.length; i++) {
const sample = channelData[i];
if (sample < min) min = sample;
if (sample > max) max = sample;
}
const y1 = (height / 2) * (1 - max);
const y2 = (height / 2) * (1 - min);
ctx.beginPath();
ctx.moveTo(x, y1);
ctx.lineTo(x, y2);
ctx.stroke();
}
// Draw center line
ctx.strokeStyle = "rgba(148, 163, 184, 0.2)";
ctx.lineWidth = 1;
ctx.beginPath();
ctx.moveTo(0, height / 2);
ctx.lineTo(width, height / 2);
ctx.stroke();
// Draw selection overlay
if (track.selection && duration > 0) {
const selStartX = (track.selection.start / duration) * width;
const selEndX = (track.selection.end / duration) * width;
// Draw selection background
ctx.fillStyle = "rgba(59, 130, 246, 0.2)";
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
// Draw selection borders
ctx.strokeStyle = "rgba(59, 130, 246, 0.8)";
ctx.lineWidth = 2;
// Start border
ctx.beginPath();
ctx.moveTo(selStartX, 0);
ctx.lineTo(selStartX, height);
ctx.stroke();
// End border
ctx.beginPath();
ctx.moveTo(selEndX, 0);
ctx.lineTo(selEndX, height);
ctx.stroke();
}
// Draw playhead
if (duration > 0) {
const playheadX = (currentTime / duration) * width;
ctx.strokeStyle = "rgba(239, 68, 68, 0.8)";
ctx.lineWidth = 2;
ctx.beginPath();
ctx.moveTo(playheadX, 0);
ctx.lineTo(playheadX, height);
ctx.stroke();
}
}, [
track.audioBuffer,
track.color,
track.collapsed,
track.height,
zoom,
currentTime,
duration,
themeKey,
track.selection,
]);
const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const clickTime = (x / rect.width) * duration;
// Store drag start position
setDragStartPos({ x: e.clientX, y: e.clientY });
setIsSelectingByDrag(false);
// Start selection immediately (will be used if user drags)
setIsSelecting(true);
setSelectionStart(clickTime);
};
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!isSelecting || selectionStart === null || !duration || !dragStartPos)
return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
    const timeAtCursor = (x / rect.width) * duration;
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
const dragDistance = Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) +
Math.pow(e.clientY - dragStartPos.y, 2),
);
if (dragDistance > 3) {
setIsSelectingByDrag(true);
}
// If dragging, update selection
if (isSelectingByDrag || dragDistance > 3) {
// Clamp to valid time range
      const clampedTime = Math.max(0, Math.min(duration, timeAtCursor));
// Update selection (ensure start < end)
const start = Math.min(selectionStart, clampedTime);
const end = Math.max(selectionStart, clampedTime);
onSelectionChange?.({ start, end });
}
};
const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const clickTime = (x / rect.width) * duration;
// Check if user actually dragged (check distance directly, not state)
const didDrag = dragStartPos
? Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) +
Math.pow(e.clientY - dragStartPos.y, 2),
) > 3
: false;
// If user didn't drag (just clicked), clear selection and seek
if (!didDrag) {
onSelectionChange?.(null);
if (onSeek) {
onSeek(clickTime);
}
}
// Reset drag state
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
};
// Handle mouse leaving canvas during selection
React.useEffect(() => {
const handleGlobalMouseUp = () => {
if (isSelecting) {
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
}
};
window.addEventListener("mouseup", handleGlobalMouseUp);
return () => window.removeEventListener("mouseup", handleGlobalMouseUp);
}, [isSelecting]);
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (!file || !onLoadAudio) return;
try {
// Decode to get basic metadata before showing dialog
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const tempBuffer = await audioContext.decodeAudioData(arrayBuffer);
      // The context was only needed to read metadata; close it so the browser
      // can release its audio resources.
      void audioContext.close();
// Set metadata and show import dialog
setFileMetadata({
sampleRate: tempBuffer.sampleRate,
channels: tempBuffer.numberOfChannels,
});
setPendingFile(file);
setShowImportDialog(true);
} catch (error) {
console.error("Failed to read audio file metadata:", error);
}
// Reset input
e.target.value = "";
};
const handleImport = async (options: ImportOptions) => {
if (!pendingFile || !onLoadAudio) return;
try {
setShowImportDialog(false);
const { buffer, metadata } = await importAudioFile(pendingFile, options);
onLoadAudio(buffer);
// Update track name to filename if it's still default
if (track.name === "New Track" || track.name === "Untitled Track") {
const fileName = metadata.fileName.replace(/\.[^/.]+$/, "");
onNameChange(fileName);
}
console.log("Audio imported:", metadata);
} catch (error) {
console.error("Failed to import audio file:", error);
} finally {
setPendingFile(null);
setFileMetadata({});
}
};
const handleImportCancel = () => {
setShowImportDialog(false);
setPendingFile(null);
setFileMetadata({});
};
const handleLoadAudioClick = () => {
fileInputRef.current?.click();
};
const [isDragging, setIsDragging] = React.useState(false);
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(true);
};
const handleDragLeave = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
};
const handleDrop = async (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
const file = e.dataTransfer.files?.[0];
if (!file || !onLoadAudio) return;
// Check if it's an audio file
if (!file.type.startsWith("audio/")) {
console.warn("Dropped file is not an audio file");
return;
}
try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      // Close the decode-only context to release its audio resources
      void audioContext.close();
onLoadAudio(audioBuffer);
// Update track name to filename if it's still default
if (track.name === "New Track" || track.name === "Untitled Track") {
const fileName = file.name.replace(/\.[^/.]+$/, "");
onNameChange(fileName);
}
} catch (error) {
console.error("Failed to load audio file:", error);
}
};
const trackHeight = track.collapsed ? COLLAPSED_TRACK_HEIGHT : track.height;
// Track height resize handlers
const handleResizeStart = React.useCallback(
(e: React.MouseEvent) => {
if (track.collapsed) return;
e.preventDefault();
e.stopPropagation();
setIsResizing(true);
resizeStartRef.current = { y: e.clientY, height: track.height };
},
[track.collapsed, track.height],
);
React.useEffect(() => {
if (!isResizing) return;
const handleMouseMove = (e: MouseEvent) => {
const delta = e.clientY - resizeStartRef.current.y;
const newHeight = Math.max(
MIN_TRACK_HEIGHT,
Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta),
);
onUpdateTrack(track.id, { height: newHeight });
};
const handleMouseUp = () => {
setIsResizing(false);
};
window.addEventListener("mousemove", handleMouseMove);
window.addEventListener("mouseup", handleMouseUp);
return () => {
window.removeEventListener("mousemove", handleMouseMove);
window.removeEventListener("mouseup", handleMouseUp);
};
}, [isResizing, onUpdateTrack, track.id]);
// Render only controls
if (renderControlsOnly) {
return (
<div
className={cn(
"w-48 flex-shrink-0 border-b border-r-4 p-2 flex flex-col gap-2 min-h-0 transition-all duration-200 cursor-pointer border-border",
isSelected
? "bg-primary/10 border-r-primary"
: "bg-card border-r-transparent hover:bg-accent/30",
)}
style={{ height: trackHeight }}
onClick={(e) => {
e.stopPropagation();
if (onSelect) onSelect();
}}
>
{/* Track Name Row - Integrated collapse (DAW style) */}
<div
className={cn(
"group flex items-center gap-1.5 px-1 py-0.5 rounded cursor-pointer transition-colors",
isSelected ? "bg-primary/10" : "hover:bg-accent/50",
)}
onClick={(e) => {
if (!isEditingName) {
e.stopPropagation();
onToggleCollapse();
}
}}
title={track.collapsed ? "Expand track" : "Collapse track"}
>
{/* Small triangle indicator */}
<div
className={cn(
"flex-shrink-0 transition-colors",
isSelected
? "text-primary"
: "text-muted-foreground group-hover:text-foreground",
)}
>
{track.collapsed ? (
<ChevronRight className="h-3 w-3" />
) : (
<ChevronDown className="h-3 w-3" />
)}
</div>
{/* Color stripe (thicker when selected) */}
<div
className={cn(
"h-5 rounded-full flex-shrink-0 transition-all",
isSelected ? "w-1" : "w-0.5",
)}
style={{ backgroundColor: track.color }}
></div>
<div className="flex-1 min-w-0">
{isEditingName ? (
<input
ref={inputRef}
type="text"
value={nameInput}
onChange={(e) => setNameInput(e.target.value)}
onBlur={handleNameBlur}
onKeyDown={handleNameKeyDown}
onClick={(e) => e.stopPropagation()}
className="w-full px-1 py-0.5 text-xs font-semibold bg-background border border-border rounded"
/>
) : (
<div
onClick={(e) => {
e.stopPropagation();
handleNameClick();
}}
className="px-1 py-0.5 text-xs font-semibold text-foreground truncate"
title={String(track.name || "Untitled Track")}
>
{String(track.name || "Untitled Track")}
</div>
)}
</div>
</div>
{/* Track Controls - Only show when not collapsed */}
{!track.collapsed && (
<div className="flex-1 flex flex-col items-center justify-between min-h-0 overflow-hidden">
{/* Integrated Track Controls (Pan + Fader + Buttons) */}
<TrackControls
volume={track.volume}
pan={track.pan}
peakLevel={
track.recordEnabled || isRecording
? recordingLevel
: playbackLevel
}
rmsLevel={
track.recordEnabled || isRecording
? recordingLevel * 0.7
: playbackLevel * 0.7
}
isMuted={track.mute}
isSolo={track.solo}
isRecordEnabled={track.recordEnabled}
showAutomation={track.automation?.showAutomation}
showEffects={track.showEffects}
isRecording={isRecording}
onVolumeChange={onVolumeChange}
onPanChange={onPanChange}
onMuteToggle={onToggleMute}
onSoloToggle={onToggleSolo}
onRecordToggle={onToggleRecordEnable}
onAutomationToggle={() => {
onUpdateTrack(track.id, {
automation: {
...track.automation,
showAutomation: !track.automation?.showAutomation,
},
});
}}
onEffectsClick={() => {
onUpdateTrack(track.id, {
showEffects: !track.showEffects,
});
}}
onVolumeTouchStart={handleVolumeTouchStart}
onVolumeTouchEnd={handleVolumeTouchEnd}
onPanTouchStart={handlePanTouchStart}
onPanTouchEnd={handlePanTouchEnd}
/>
</div>
)}
</div>
);
}
// Render only waveform
if (renderWaveformOnly) {
return (
<div
className={cn(
"relative bg-waveform-bg border-b transition-all duration-200",
isSelected && "bg-primary/5",
)}
style={{ height: trackHeight }}
>
      {/* Inner container: when zoomed in, width grows to ~100 px per second of audio (times zoom) so the shared parent container can scroll horizontally */}
<div
className="relative h-full"
style={{
minWidth:
track.audioBuffer && zoom > 1
? `${duration * zoom * 100}px`
: "100%",
}}
>
{/* Delete Button - Top Right Overlay */}
<button
onClick={(e) => {
e.stopPropagation();
onRemove();
}}
className={cn(
"absolute top-2 right-2 z-20 h-6 w-6 rounded flex items-center justify-center transition-all",
"bg-card/80 hover:bg-destructive/90 text-muted-foreground hover:text-white",
"border border-border/50 hover:border-destructive",
"backdrop-blur-sm shadow-sm hover:shadow-md",
)}
title="Remove track"
>
<Trash2 className="h-3 w-3" />
</button>
{track.audioBuffer ? (
<>
{/* Waveform Canvas */}
<canvas
ref={canvasRef}
className="absolute inset-0 w-full h-full cursor-pointer"
onMouseDown={handleCanvasMouseDown}
onMouseMove={handleCanvasMouseMove}
onMouseUp={handleCanvasMouseUp}
/>
</>
) : (
!track.collapsed && (
<>
<div
className={cn(
"absolute inset-0 flex flex-col items-center justify-center text-sm text-muted-foreground hover:text-foreground transition-colors cursor-pointer group",
isDragging
? "bg-primary/20 text-primary border-2 border-primary border-dashed"
: "hover:bg-accent/50",
)}
onClick={(e) => {
e.stopPropagation();
handleLoadAudioClick();
}}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
>
<Upload className="h-6 w-6 mb-2 opacity-50 group-hover:opacity-100" />
<p>
{isDragging
? "Drop audio file here"
: "Click to load audio file"}
</p>
<p className="text-xs opacity-75 mt-1">or drag & drop</p>
</div>
<input
ref={fileInputRef}
type="file"
accept="audio/*"
onChange={handleFileChange}
className="hidden"
/>
</>
)
)}
</div>{" "}
{/* Close inner container with minWidth */}
</div>
);
}
// Render full track (both controls and waveform side by side)
return (
<div
ref={containerRef}
className={cn(
"flex flex-col transition-all duration-200 relative",
isSelected && "bg-primary/5",
)}
>
{/* Full track content removed - now rendered separately in TrackList */}
<div>Track component should not be rendered in full mode anymore</div>
</div>
);
}