"use client";
|
|
|
|
import * as React from "react";
|
|
import {
|
|
Volume2,
|
|
VolumeX,
|
|
Headphones,
|
|
Trash2,
|
|
ChevronDown,
|
|
ChevronRight,
|
|
ChevronUp,
|
|
UnfoldHorizontal,
|
|
Upload,
|
|
Mic,
|
|
Gauge,
|
|
Circle,
|
|
Sparkles,
|
|
} from "lucide-react";
|
|
import type { Track as TrackType } from "@/types/track";
|
|
import {
|
|
COLLAPSED_TRACK_HEIGHT,
|
|
MIN_TRACK_HEIGHT,
|
|
MAX_TRACK_HEIGHT,
|
|
DEFAULT_TRACK_HEIGHT,
|
|
} from "@/types/track";
|
|
import { Button } from "@/components/ui/Button";
|
|
import { Slider } from "@/components/ui/Slider";
|
|
import { cn } from "@/lib/utils/cn";
|
|
import type { EffectType } from "@/lib/audio/effects/chain";
|
|
import { TrackControls } from "./TrackControls";
|
|
import { AutomationLane } from "@/components/automation/AutomationLane";
|
|
import type {
|
|
AutomationLane as AutomationLaneType,
|
|
AutomationPoint as AutomationPointType,
|
|
} from "@/types/automation";
|
|
import { createAutomationPoint } from "@/lib/audio/automation/utils";
|
|
import { createAutomationLane } from "@/lib/audio/automation-utils";
|
|
import { EffectDevice } from "@/components/effects/EffectDevice";
|
|
import { EffectBrowser } from "@/components/effects/EffectBrowser";
|
|
import { ImportDialog } from "@/components/dialogs/ImportDialog";
|
|
import { importAudioFile, type ImportOptions } from "@/lib/audio/decoder";
|
|
|
|
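/**
 * Track — a single audio track row in the editor.
 *
 * The component has two active render modes, selected via props:
 *  - `renderControlsOnly`: the track header (name, fader, pan, mute/solo/record,
 *    automation and effects toggles) rendered in the controls column.
 *  - `renderWaveformOnly`: the zoomable waveform lane rendered in the scrollable
 *    timeline area, with click-to-seek, drag selection, and file drag & drop.
 * The combined (side-by-side) mode at the bottom of this file is kept only as a
 * fallback and is no longer used by TrackList.
 */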
export interface TrackProps {
  track: TrackType;
  zoom: number;
  currentTime: number;
  duration: number;
  isSelected?: boolean;
  onSelect?: () => void;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
  onSeek?: (time: number) => void;
  onLoadAudio?: (buffer: AudioBuffer) => void;
  onToggleEffect?: (effectId: string) => void;
  onRemoveEffect?: (effectId: string) => void;
  onUpdateEffect?: (effectId: string, parameters: any) => void;
  onAddEffect?: (effectType: EffectType) => void;
  onSelectionChange?: (
    selection: { start: number; end: number } | null,
  ) => void;
  onToggleRecordEnable?: () => void;
  isRecording?: boolean;
  recordingLevel?: number;
  playbackLevel?: number;
  onParameterTouched?: (
    trackId: string,
    laneId: string,
    touched: boolean,
  ) => void;
  isPlaying?: boolean;
  renderControlsOnly?: boolean;
  renderWaveformOnly?: boolean;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  isSelected,
  onSelect,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onUpdateTrack,
  onSeek,
  onLoadAudio,
  onToggleEffect,
  onRemoveEffect,
  onUpdateEffect,
  onAddEffect,
  onSelectionChange,
  onToggleRecordEnable,
  isRecording = false,
  recordingLevel = 0,
  playbackLevel = 0,
  onParameterTouched,
  isPlaying = false,
  renderControlsOnly = false,
  renderWaveformOnly = false,
}: TrackProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);
  const fileInputRef = React.useRef<HTMLInputElement>(null);
  const [themeKey, setThemeKey] = React.useState(0);
  const [isResizing, setIsResizing] = React.useState(false);
  const resizeStartRef = React.useRef({ y: 0, height: 0 });
  const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);

  // Import dialog state
  const [showImportDialog, setShowImportDialog] = React.useState(false);
  const [pendingFile, setPendingFile] = React.useState<File | null>(null);
  const [fileMetadata, setFileMetadata] = React.useState<{
    sampleRate?: number;
    channels?: number;
  }>({});

  // Selection state
  const [isSelecting, setIsSelecting] = React.useState(false);
  const [selectionStart, setSelectionStart] = React.useState<number | null>(
    null,
  );
  const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
  const [dragStartPos, setDragStartPos] = React.useState<{
    x: number;
    y: number;
  } | null>(null);

  // Touch callbacks for automation recording
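  // These fire onParameterTouched(trackId, laneId, touched) only while the
  // transport is playing and the matching lane is in "touch" or "latch" mode,
  // so the parent can start/stop writing automation for that lane. The calls
  // are wrapped in queueMicrotask so the parent update runs after the current
  // event handler instead of inside it.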
  const handlePanTouchStart = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const panLane = track.automation.lanes.find(
        (l) => l.parameterId === "pan",
      );
      if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
        queueMicrotask(() => onParameterTouched(track.id, panLane.id, true));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handlePanTouchEnd = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const panLane = track.automation.lanes.find(
        (l) => l.parameterId === "pan",
      );
      if (panLane && (panLane.mode === "touch" || panLane.mode === "latch")) {
        queueMicrotask(() => onParameterTouched(track.id, panLane.id, false));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handleVolumeTouchStart = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const volumeLane = track.automation.lanes.find(
        (l) => l.parameterId === "volume",
      );
      if (
        volumeLane &&
        (volumeLane.mode === "touch" || volumeLane.mode === "latch")
      ) {
        queueMicrotask(() => onParameterTouched(track.id, volumeLane.id, true));
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  const handleVolumeTouchEnd = React.useCallback(() => {
    if (isPlaying && onParameterTouched) {
      const volumeLane = track.automation.lanes.find(
        (l) => l.parameterId === "volume",
      );
      if (
        volumeLane &&
        (volumeLane.mode === "touch" || volumeLane.mode === "latch")
      ) {
        queueMicrotask(() =>
          onParameterTouched(track.id, volumeLane.id, false),
        );
      }
    }
  }, [isPlaying, onParameterTouched, track.id, track.automation.lanes]);

  // Auto-create automation lane for selected parameter if it doesn't exist
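  // When the automation view is open, make sure a lane exists for the
  // currently selected parameter ("volume" by default). Volume and pan are
  // always offered; every parameter of every effect in the chain is exposed
  // as "effect.{effectId}.{paramKey}", and known effect parameters
  // (frequency, Q, gain) get sensible ranges and units. The pan formatter
  // maps the normalized 0..1 value to an L/C/R readout (0 = 100L, 0.5 = C,
  // 1 = 100R).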
  React.useEffect(() => {
    if (!track.automation?.showAutomation) return;

    const selectedParameterId =
      track.automation.selectedParameterId || "volume";
    const laneExists = track.automation.lanes.some(
      (lane) => lane.parameterId === selectedParameterId,
    );

    if (!laneExists) {
      // Build list of available parameters
      const availableParameters: Array<{ id: string; name: string }> = [
        { id: "volume", name: "Volume" },
        { id: "pan", name: "Pan" },
      ];

      track.effectChain.effects.forEach((effect) => {
        if (effect.parameters) {
          Object.keys(effect.parameters).forEach((paramKey) => {
            const parameterId = `effect.${effect.id}.${paramKey}`;
            const paramName = `${effect.name} - ${paramKey.charAt(0).toUpperCase() + paramKey.slice(1)}`;
            availableParameters.push({ id: parameterId, name: paramName });
          });
        }
      });

      const paramInfo = availableParameters.find(
        (p) => p.id === selectedParameterId,
      );
      if (paramInfo) {
        // Determine value range based on parameter type
        let valueRange = { min: 0, max: 1 };
        let unit = "";
        let formatter: ((value: number) => string) | undefined;

        if (selectedParameterId === "volume") {
          unit = "dB";
        } else if (selectedParameterId === "pan") {
          formatter = (value: number) => {
            if (value === 0.5) return "C";
            if (value < 0.5)
              return `${Math.abs((0.5 - value) * 200).toFixed(0)}L`;
            return `${((value - 0.5) * 200).toFixed(0)}R`;
          };
        } else if (selectedParameterId.startsWith("effect.")) {
          // Parse effect parameter: effect.{effectId}.{paramName}
          const parts = selectedParameterId.split(".");
          if (parts.length === 3) {
            const paramName = parts[2];
            // Set ranges based on parameter name
            if (paramName === "frequency") {
              valueRange = { min: 20, max: 20000 };
              unit = "Hz";
            } else if (paramName === "Q") {
              valueRange = { min: 0.1, max: 20 };
            } else if (paramName === "gain") {
              valueRange = { min: -40, max: 40 };
              unit = "dB";
            }
          }
        }

        const newLane = createAutomationLane(
          track.id,
          selectedParameterId,
          paramInfo.name,
          {
            min: valueRange.min,
            max: valueRange.max,
            unit,
            formatter,
          },
        );

        onUpdateTrack(track.id, {
          automation: {
            ...track.automation,
            lanes: [...track.automation.lanes, newLane],
            selectedParameterId,
          },
        });
      }
    }
  }, [
    track.automation?.showAutomation,
    track.automation?.selectedParameterId,
    track.automation?.lanes,
    track.effectChain.effects,
    track.id,
    onUpdateTrack,
  ]);

  // Listen for theme changes
  React.useEffect(() => {
    const observer = new MutationObserver(() => {
      // Increment key to force waveform redraw
      setThemeKey((prev) => prev + 1);
    });

    // Watch for class changes on document element (dark mode toggle)
    observer.observe(document.documentElement, {
      attributes: true,
      attributeFilter: ["class"],
    });

    return () => observer.disconnect();
  }, []);

  // Draw waveform
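  // Renders a min/max peak waveform of channel 0 into the canvas, sized to the
  // parent element and scaled for devicePixelRatio so it stays crisp on HiDPI
  // displays. The same pass paints the selection overlay and the playhead, and
  // re-runs when the theme changes (themeKey) so colors follow light/dark mode.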
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext("2d");
    if (!ctx) return;

    // Use parent container's size since canvas is absolute positioned
    const parent = canvas.parentElement;
    if (!parent) return;

    const dpr = window.devicePixelRatio || 1;
    const rect = parent.getBoundingClientRect();

    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);

    const width = rect.width;
    const height = rect.height;

    // Clear canvas with theme color
    const bgColor =
      getComputedStyle(canvas).getPropertyValue("--color-waveform-bg") ||
      "rgb(15, 23, 42)";
    ctx.fillStyle = bgColor;
    ctx.fillRect(0, 0, width, height);

    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    // Calculate samples per pixel based on the total width
    // Must match the timeline calculation exactly
    const PIXELS_PER_SECOND_BASE = 5;
    let totalWidth;
    if (zoom >= 1) {
      const calculatedWidth = duration * zoom * PIXELS_PER_SECOND_BASE;
      totalWidth = Math.max(calculatedWidth, width);
    } else {
      totalWidth = width;
    }

    // Calculate how much of the canvas width this track's duration occupies
    // If duration is 0 or invalid, use full width (first track scenario)
    const trackDurationRatio = duration > 0 ? buffer.duration / duration : 1;
    const trackWidth = Math.min(width * trackDurationRatio, width);
    const samplesPerPixel = trackWidth > 0 ? buffer.length / trackWidth : 0;

    // Draw waveform
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;
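    // One vertical line per pixel column: scan that column's window of samples
    // for its min and max, then draw from the max down to the min around the
    // vertical center. The guards above keep trackWidth within the canvas and
    // avoid a division by zero when trackWidth is 0.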
    for (let x = 0; x < Math.floor(trackWidth); x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      const endSample = Math.floor((x + 1) * samplesPerPixel);

      let min = 1.0;
      let max = -1.0;

      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }

      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);

      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }

    // Draw center line
    ctx.strokeStyle = "rgba(148, 163, 184, 0.2)";
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();

    // Draw selection overlay
    if (track.selection && duration > 0) {
      const selStartX = (track.selection.start / duration) * width;
      const selEndX = (track.selection.end / duration) * width;

      // Draw selection background
      ctx.fillStyle = "rgba(59, 130, 246, 0.2)";
      ctx.fillRect(selStartX, 0, selEndX - selStartX, height);

      // Draw selection borders
      ctx.strokeStyle = "rgba(59, 130, 246, 0.8)";
      ctx.lineWidth = 2;

      // Start border
      ctx.beginPath();
      ctx.moveTo(selStartX, 0);
      ctx.lineTo(selStartX, height);
      ctx.stroke();

      // End border
      ctx.beginPath();
      ctx.moveTo(selEndX, 0);
      ctx.lineTo(selEndX, height);
      ctx.stroke();
    }

    // Draw playhead
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = "rgba(239, 68, 68, 0.8)";
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
  }, [
    track.audioBuffer,
    track.color,
    track.collapsed,
    track.height,
    zoom,
    currentTime,
    duration,
    themeKey,
    track.selection,
  ]);
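  // Canvas mouse interaction: mousedown records a potential selection start,
  // movement beyond a 3 px threshold turns it into a drag selection reported
  // through onSelectionChange, and a plain click (no drag) clears the
  // selection and seeks to the clicked time.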
  const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!duration) return;

    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const y = e.clientY - rect.top;
    const clickTime = (x / rect.width) * duration;

    // Store drag start position
    setDragStartPos({ x: e.clientX, y: e.clientY });
    setIsSelectingByDrag(false);

    // Start selection immediately (will be used if user drags)
    setIsSelecting(true);
    setSelectionStart(clickTime);
  };
  const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!isSelecting || selectionStart === null || !duration || !dragStartPos)
      return;

    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    // Time under the pointer (named to avoid shadowing the currentTime prop)
    const pointerTime = (x / rect.width) * duration;

    // Check if user has moved enough to be considered dragging (threshold: 3 pixels)
    const dragDistance = Math.sqrt(
      Math.pow(e.clientX - dragStartPos.x, 2) +
        Math.pow(e.clientY - dragStartPos.y, 2),
    );

    if (dragDistance > 3) {
      setIsSelectingByDrag(true);
    }

    // If dragging, update selection
    if (isSelectingByDrag || dragDistance > 3) {
      // Clamp to valid time range
      const clampedTime = Math.max(0, Math.min(duration, pointerTime));

      // Update selection (ensure start < end)
      const start = Math.min(selectionStart, clampedTime);
      const end = Math.max(selectionStart, clampedTime);

      onSelectionChange?.({ start, end });
    }
  };
  const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!duration) return;

    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;

    // Check if user actually dragged (check distance directly, not state)
    const didDrag = dragStartPos
      ? Math.sqrt(
          Math.pow(e.clientX - dragStartPos.x, 2) +
            Math.pow(e.clientY - dragStartPos.y, 2),
        ) > 3
      : false;

    // If user didn't drag (just clicked), clear selection and seek
    if (!didDrag) {
      onSelectionChange?.(null);
      if (onSeek) {
        onSeek(clickTime);
      }
    }

    // Reset drag state
    setIsSelecting(false);
    setIsSelectingByDrag(false);
    setDragStartPos(null);
  };

  // Handle mouse leaving canvas during selection
  React.useEffect(() => {
    const handleGlobalMouseUp = () => {
      if (isSelecting) {
        setIsSelecting(false);
        setIsSelectingByDrag(false);
        setDragStartPos(null);
      }
    };

    window.addEventListener("mouseup", handleGlobalMouseUp);
    return () => window.removeEventListener("mouseup", handleGlobalMouseUp);
  }, [isSelecting]);
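  // File import flow: the selected file is decoded once up front just to read
  // its sample rate and channel count for the ImportDialog; the actual import
  // (with the options chosen in the dialog) then goes through importAudioFile
  // and hands the resulting AudioBuffer to onLoadAudio.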
  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file || !onLoadAudio) return;

    try {
      // Decode to get basic metadata before showing dialog
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const tempBuffer = await audioContext.decodeAudioData(arrayBuffer);
      // Release the temporary context used only for this metadata probe
      void audioContext.close();

      // Set metadata and show import dialog
      setFileMetadata({
        sampleRate: tempBuffer.sampleRate,
        channels: tempBuffer.numberOfChannels,
      });
      setPendingFile(file);
      setShowImportDialog(true);
    } catch (error) {
      console.error("Failed to read audio file metadata:", error);
    }

    // Reset input
    e.target.value = "";
  };

  const handleImport = async (options: ImportOptions) => {
    if (!pendingFile || !onLoadAudio) return;

    try {
      setShowImportDialog(false);
      const { buffer, metadata } = await importAudioFile(pendingFile, options);
      onLoadAudio(buffer);

      // Update track name to filename if it's still default
      if (track.name === "New Track" || track.name === "Untitled Track") {
        const fileName = metadata.fileName.replace(/\.[^/.]+$/, "");
        onNameChange(fileName);
      }

      console.log("Audio imported:", metadata);
    } catch (error) {
      console.error("Failed to import audio file:", error);
    } finally {
      setPendingFile(null);
      setFileMetadata({});
    }
  };

  const handleImportCancel = () => {
    setShowImportDialog(false);
    setPendingFile(null);
    setFileMetadata({});
  };

  const handleLoadAudioClick = () => {
    fileInputRef.current?.click();
  };
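  // Drag & drop: an audio file dropped onto an empty track lane is decoded and
  // loaded directly, bypassing the ImportDialog used by the click-to-upload path.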
  const [isDragging, setIsDragging] = React.useState(false);

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(true);
  };

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
  };

  const handleDrop = async (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);

    const file = e.dataTransfer.files?.[0];
    if (!file || !onLoadAudio) return;

    // Check if it's an audio file
    if (!file.type.startsWith("audio/")) {
      console.warn("Dropped file is not an audio file");
      return;
    }

    try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      // Release the temporary context once decoding is done
      void audioContext.close();
      onLoadAudio(audioBuffer);

      // Update track name to filename if it's still default
      if (track.name === "New Track" || track.name === "Untitled Track") {
        const fileName = file.name.replace(/\.[^/.]+$/, "");
        onNameChange(fileName);
      }
    } catch (error) {
      console.error("Failed to load audio file:", error);
    }
  };

  const trackHeight = track.collapsed
    ? COLLAPSED_TRACK_HEIGHT
    : Math.max(track.height || DEFAULT_TRACK_HEIGHT, MIN_TRACK_HEIGHT);

  // Track height resize handlers
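  // handleResizeStart begins a vertical drag that adjusts the expanded track
  // height, clamped between MIN_TRACK_HEIGHT and MAX_TRACK_HEIGHT; while
  // isResizing is set, window-level mousemove/mouseup listeners track the drag.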
  const handleResizeStart = React.useCallback(
    (e: React.MouseEvent) => {
      if (track.collapsed) return;
      e.preventDefault();
      e.stopPropagation();
      setIsResizing(true);
      resizeStartRef.current = { y: e.clientY, height: track.height };
    },
    [track.collapsed, track.height],
  );

  React.useEffect(() => {
    if (!isResizing) return;

    const handleMouseMove = (e: MouseEvent) => {
      const delta = e.clientY - resizeStartRef.current.y;
      const newHeight = Math.max(
        MIN_TRACK_HEIGHT,
        Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta),
      );
      onUpdateTrack(track.id, { height: newHeight });
    };

    const handleMouseUp = () => {
      setIsResizing(false);
    };

    window.addEventListener("mousemove", handleMouseMove);
    window.addEventListener("mouseup", handleMouseUp);

    return () => {
      window.removeEventListener("mousemove", handleMouseMove);
      window.removeEventListener("mouseup", handleMouseUp);
    };
  }, [isResizing, onUpdateTrack, track.id]);

  // Render only controls
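  // Controls-only mode: collapsed tracks show a slim expand row (chevron,
  // color strip, name); expanded tracks render the full TrackControls strip
  // with level metering fed from the recording or playback levels.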
  if (renderControlsOnly) {
    return (
      <>
        <div
          className={cn(
            "w-full flex-shrink-0 border-b border-r-4 p-4 flex flex-col gap-4 min-h-0 transition-all duration-200 cursor-pointer border-border",
            isSelected
              ? "bg-primary/10 border-r-primary"
              : "bg-card border-r-transparent hover:bg-accent/30",
          )}
          style={{
            height: `${trackHeight}px`,
          }}
          onClick={(e) => {
            e.stopPropagation();
            if (onSelect) onSelect();
          }}
        >
          {/* Collapsed Header */}
          {track.collapsed && (
            <div
              className={cn(
                "group flex items-center gap-1.5 px-2 py-1 h-full w-full cursor-pointer transition-colors",
                isSelected ? "bg-primary/10" : "hover:bg-accent/50",
              )}
              onClick={(e) => {
                e.stopPropagation();
                onToggleCollapse();
              }}
              title="Expand track"
            >
              <ChevronRight className="h-3 w-3 text-muted-foreground flex-shrink-0" />
              <div
                className="h-4 w-0.5 rounded-full flex-shrink-0"
                style={{ backgroundColor: track.color }}
              />
              <span className="text-xs font-semibold text-foreground truncate flex-1">
                {String(track.name || "Untitled Track")}
              </span>
            </div>
          )}

          {/* Track Controls - Only show when not collapsed */}
          {!track.collapsed && (
            <div className="flex-1 flex flex-col items-center justify-center min-h-0 overflow-hidden">
              {/* Integrated Track Controls (Pan + Fader + Buttons) */}
              <TrackControls
                trackName={track.name}
                trackColor={track.color}
                collapsed={track.collapsed}
                volume={track.volume}
                pan={track.pan}
                peakLevel={
                  track.recordEnabled || isRecording
                    ? recordingLevel
                    : playbackLevel
                }
                rmsLevel={
                  track.recordEnabled || isRecording
                    ? recordingLevel * 0.7
                    : playbackLevel * 0.7
                }
                isMuted={track.mute}
                isSolo={track.solo}
                isRecordEnabled={track.recordEnabled}
                showAutomation={track.automation?.showAutomation}
                showEffects={track.showEffects}
                isRecording={isRecording}
                onNameChange={onNameChange}
                onToggleCollapse={onToggleCollapse}
                onVolumeChange={onVolumeChange}
                onPanChange={onPanChange}
                onMuteToggle={onToggleMute}
                onSoloToggle={onToggleSolo}
                onRecordToggle={onToggleRecordEnable}
                onAutomationToggle={() => {
                  onUpdateTrack(track.id, {
                    automation: {
                      ...track.automation,
                      showAutomation: !track.automation?.showAutomation,
                    },
                  });
                }}
                onEffectsClick={() => {
                  onUpdateTrack(track.id, {
                    showEffects: !track.showEffects,
                  });
                }}
                onVolumeTouchStart={handleVolumeTouchStart}
                onVolumeTouchEnd={handleVolumeTouchEnd}
                onPanTouchStart={handlePanTouchStart}
                onPanTouchEnd={handlePanTouchEnd}
              />
            </div>
          )}
        </div>

        {/* Import Dialog (also rendered by the waveform-only instance) */}
        <ImportDialog
          open={showImportDialog}
          onClose={handleImportCancel}
          onImport={handleImport}
          fileName={pendingFile?.name}
          sampleRate={fileMetadata.sampleRate}
          channels={fileMetadata.channels}
        />
      </>
    );
  }

  // Render only waveform
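  // Waveform-only mode: the scrollable timeline lane. The inner container grows
  // to duration * zoom * 5 px when zoomed in (matching the waveform drawing
  // code above), the delete button stays pinned with `sticky`, and an empty
  // track becomes a click/drop target for loading audio.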
  if (renderWaveformOnly) {
    return (
      <div
        className={cn(
          "relative bg-waveform-bg border-b transition-all duration-200 h-full",
          isSelected && "bg-primary/5",
        )}
      >
        {/* Inner container with dynamic width */}
        <div
          className="relative h-full"
          style={{
            minWidth:
              track.audioBuffer && zoom >= 1
                ? `${duration * zoom * 5}px`
                : "100%",
          }}
        >
          {/* Delete Button - Top Right Overlay - Stays fixed when scrolling */}
          <button
            onClick={(e) => {
              e.stopPropagation();
              onRemove();
            }}
            className={cn(
              "sticky top-2 right-2 float-right z-20 h-6 w-6 rounded flex items-center justify-center transition-all",
              "bg-card/80 hover:bg-destructive/90 text-muted-foreground hover:text-white",
              "border border-border/50 hover:border-destructive",
              "backdrop-blur-sm shadow-sm hover:shadow-md",
            )}
            title="Remove track"
          >
            <Trash2 className="h-3 w-3" />
          </button>

          {track.audioBuffer ? (
            <>
              {/* Waveform Canvas */}
              <canvas
                ref={canvasRef}
                className="absolute inset-0 w-full h-full cursor-pointer"
                onMouseDown={handleCanvasMouseDown}
                onMouseMove={handleCanvasMouseMove}
                onMouseUp={handleCanvasMouseUp}
              />
            </>
          ) : (
            !track.collapsed && (
              <>
                {/* Empty state - clickable area for upload with drag & drop */}
                <div
                  className={cn(
                    "absolute inset-0 w-full h-full transition-colors cursor-pointer",
                    isDragging
                      ? "bg-primary/20 border-2 border-primary border-dashed"
                      : "hover:bg-accent/50",
                  )}
                  onClick={(e) => {
                    e.stopPropagation();
                    handleLoadAudioClick();
                  }}
                  onDragOver={handleDragOver}
                  onDragLeave={handleDragLeave}
                  onDrop={handleDrop}
                />
                <input
                  ref={fileInputRef}
                  type="file"
                  accept="audio/*"
                  onChange={handleFileChange}
                  className="hidden"
                />
              </>
            )
          )}
        </div>

        {/* Import Dialog - Also needed in waveform-only mode */}
        <ImportDialog
          open={showImportDialog}
          onClose={handleImportCancel}
          onImport={handleImport}
          fileName={pendingFile?.name}
          sampleRate={fileMetadata.sampleRate}
          channels={fileMetadata.channels}
        />
      </div>
    );
  }

  // Render full track (both controls and waveform side by side)
  // This mode is no longer used - tracks are rendered separately with renderControlsOnly and renderWaveformOnly
  return (
    <div
      ref={containerRef}
      className={cn(
        "flex flex-col transition-all duration-200 relative",
        isSelected && "bg-primary/5",
      )}
    >
      {/* Full track content removed - now rendered separately in TrackList */}
      <div>Track component should not be rendered in full mode anymore</div>
    </div>
  );
}