Added comprehensive automation lane UI with Ableton-style design: **Automation Components:** - AutomationLane: Canvas-based rendering with grid lines, curves, and points - AutomationHeader: Parameter name, mode selector, value display - AutomationPoint: Interactive draggable points with hover states **Automation Utilities:** - createAutomationLane/Point: Factory functions - evaluateAutomationLinear: Interpolation between points - formatAutomationValue: Display formatting with unit support - addAutomationPoint/updateAutomationPoint/removeAutomationPoint **Track Integration:** - Added "A" toggle button in track control panel - Automation lanes render below waveform - Default lanes for Volume (orange) and Pan (green) - Support for add/edit/delete automation points - Click to add, drag to move, double-click to delete **Visual Design:** - Dark background with horizontal grid lines - Colored curves with semi-transparent fill (20% opacity) - 4-6px automation points, 8px on hover - Mode indicators (Read/Write/Touch/Latch) with colors - Value labels and current value display Ready for playback integration in next step. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
770 lines
26 KiB
TypeScript
770 lines
26 KiB
TypeScript
'use client';
|
|
|
|
import * as React from 'react';
|
|
import { Volume2, VolumeX, Headphones, Trash2, ChevronDown, ChevronRight, UnfoldHorizontal, Upload, Mic, Gauge, Circle } from 'lucide-react';
|
|
import type { Track as TrackType } from '@/types/track';
|
|
import { COLLAPSED_TRACK_HEIGHT, MIN_TRACK_HEIGHT, MAX_TRACK_HEIGHT } from '@/types/track';
|
|
import { Button } from '@/components/ui/Button';
|
|
import { Slider } from '@/components/ui/Slider';
|
|
import { cn } from '@/lib/utils/cn';
|
|
import type { EffectType } from '@/lib/audio/effects/chain';
|
|
import { VerticalFader } from '@/components/ui/VerticalFader';
|
|
import { CircularKnob } from '@/components/ui/CircularKnob';
|
|
import { AutomationLane } from '@/components/automation/AutomationLane';
|
|
import type { AutomationLane as AutomationLaneType, AutomationPoint as AutomationPointType } from '@/types/automation';
|
|
import { createAutomationPoint } from '@/lib/audio/automation/utils';
|
|
|
|
/**
 * Props for the {@link Track} component — one horizontal track row consisting
 * of a control panel (name, pan, volume, R/S/M/A buttons), a waveform canvas,
 * and optional automation lanes.
 */
export interface TrackProps {
  /** The track model: audio buffer, mix settings, automation, layout state. */
  track: TrackType;
  /** Horizontal zoom factor used when rendering the waveform. */
  zoom: number;
  /** Playhead position in seconds (drives the red playhead line). */
  currentTime: number;
  /** Total project duration in seconds; 0 disables seek/selection. */
  duration: number;
  /** Whether this track is the currently selected one (highlights the row). */
  isSelected?: boolean;
  /** Called when the user clicks the control panel to select this track. */
  onSelect?: () => void;
  /** Toggle the track's mute state. */
  onToggleMute: () => void;
  /** Toggle the track's solo state. */
  onToggleSolo: () => void;
  /** Toggle the collapsed (header-only) layout for this track. */
  onToggleCollapse: () => void;
  /** Volume fader change, value in [0, 1]. */
  onVolumeChange: (volume: number) => void;
  /** Pan knob change, value in [-1, 1]. */
  onPanChange: (pan: number) => void;
  /** Delete this track. */
  onRemove: () => void;
  /** Rename this track (called with the trimmed new name). */
  onNameChange: (name: string) => void;
  /** Partial-update hook used for height, automation, and other track fields. */
  onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
  /** Seek the transport to a time (seconds); invoked on waveform click. */
  onSeek?: (time: number) => void;
  /** Receives the decoded buffer after a file is loaded or dropped. */
  onLoadAudio?: (buffer: AudioBuffer) => void;
  /** Enable/disable an effect in this track's chain. */
  onToggleEffect?: (effectId: string) => void;
  /** Remove an effect from this track's chain. */
  onRemoveEffect?: (effectId: string) => void;
  // NOTE(review): `parameters` is untyped; the effect-parameter shape is not
  // visible from this file. Narrowing it here would break existing callbacks
  // (contravariant position) — tighten at the source type if possible.
  onUpdateEffect?: (effectId: string, parameters: any) => void;
  /** Append a new effect of the given type to this track's chain. */
  onAddEffect?: (effectType: EffectType) => void;
  /** Time-range selection changed (drag on the waveform); null clears it. */
  onSelectionChange?: (selection: { start: number; end: number } | null) => void;
  /** Toggle record-arm for this track. */
  onToggleRecordEnable?: () => void;
  /** Whether a recording is currently in progress (pulses the arm button). */
  isRecording?: boolean;
  /** Input level [0, 1] shown on the fader meter while armed/recording. */
  recordingLevel?: number;
  /** Playback level [0, 1] shown on the fader meter otherwise. */
  playbackLevel?: number;
}
|
|
|
|
export function Track({
|
|
track,
|
|
zoom,
|
|
currentTime,
|
|
duration,
|
|
isSelected,
|
|
onSelect,
|
|
onToggleMute,
|
|
onToggleSolo,
|
|
onToggleCollapse,
|
|
onVolumeChange,
|
|
onPanChange,
|
|
onRemove,
|
|
onNameChange,
|
|
onUpdateTrack,
|
|
onSeek,
|
|
onLoadAudio,
|
|
onToggleEffect,
|
|
onRemoveEffect,
|
|
onUpdateEffect,
|
|
onAddEffect,
|
|
onSelectionChange,
|
|
onToggleRecordEnable,
|
|
isRecording = false,
|
|
recordingLevel = 0,
|
|
playbackLevel = 0,
|
|
}: TrackProps) {
|
|
const canvasRef = React.useRef<HTMLCanvasElement>(null);
|
|
const containerRef = React.useRef<HTMLDivElement>(null);
|
|
const fileInputRef = React.useRef<HTMLInputElement>(null);
|
|
const [isEditingName, setIsEditingName] = React.useState(false);
|
|
const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
|
|
const [themeKey, setThemeKey] = React.useState(0);
|
|
const inputRef = React.useRef<HTMLInputElement>(null);
|
|
const [isResizing, setIsResizing] = React.useState(false);
|
|
const resizeStartRef = React.useRef({ y: 0, height: 0 });
|
|
|
|
// Selection state
|
|
const [isSelecting, setIsSelecting] = React.useState(false);
|
|
const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
|
|
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
|
|
const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
|
|
|
|
const handleNameClick = () => {
|
|
setIsEditingName(true);
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
};
|
|
|
|
const handleNameBlur = () => {
|
|
setIsEditingName(false);
|
|
if (nameInput.trim()) {
|
|
onNameChange(nameInput.trim());
|
|
} else {
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
}
|
|
};
|
|
|
|
const handleNameKeyDown = (e: React.KeyboardEvent) => {
|
|
if (e.key === 'Enter') {
|
|
inputRef.current?.blur();
|
|
} else if (e.key === 'Escape') {
|
|
setNameInput(String(track.name || 'Untitled Track'));
|
|
setIsEditingName(false);
|
|
}
|
|
};
|
|
|
|
React.useEffect(() => {
|
|
if (isEditingName && inputRef.current) {
|
|
inputRef.current.focus();
|
|
inputRef.current.select();
|
|
}
|
|
}, [isEditingName]);
|
|
|
|
// Listen for theme changes
|
|
React.useEffect(() => {
|
|
const observer = new MutationObserver(() => {
|
|
// Increment key to force waveform redraw
|
|
setThemeKey((prev) => prev + 1);
|
|
});
|
|
|
|
// Watch for class changes on document element (dark mode toggle)
|
|
observer.observe(document.documentElement, {
|
|
attributes: true,
|
|
attributeFilter: ['class'],
|
|
});
|
|
|
|
return () => observer.disconnect();
|
|
}, []);
|
|
|
|
// Draw waveform
|
|
React.useEffect(() => {
|
|
if (!track.audioBuffer || !canvasRef.current) return;
|
|
|
|
const canvas = canvasRef.current;
|
|
const ctx = canvas.getContext('2d');
|
|
if (!ctx) return;
|
|
|
|
// Use parent container's size since canvas is absolute positioned
|
|
const parent = canvas.parentElement;
|
|
if (!parent) return;
|
|
|
|
const dpr = window.devicePixelRatio || 1;
|
|
const rect = parent.getBoundingClientRect();
|
|
|
|
canvas.width = rect.width * dpr;
|
|
canvas.height = rect.height * dpr;
|
|
ctx.scale(dpr, dpr);
|
|
|
|
const width = rect.width;
|
|
const height = rect.height;
|
|
|
|
// Clear canvas with theme color
|
|
const bgColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg') || 'rgb(15, 23, 42)';
|
|
ctx.fillStyle = bgColor;
|
|
ctx.fillRect(0, 0, width, height);
|
|
|
|
const buffer = track.audioBuffer;
|
|
const channelData = buffer.getChannelData(0);
|
|
const samplesPerPixel = Math.floor(buffer.length / (width * zoom));
|
|
|
|
// Draw waveform
|
|
ctx.fillStyle = track.color;
|
|
ctx.strokeStyle = track.color;
|
|
ctx.lineWidth = 1;
|
|
|
|
for (let x = 0; x < width; x++) {
|
|
const startSample = Math.floor(x * samplesPerPixel);
|
|
const endSample = Math.floor((x + 1) * samplesPerPixel);
|
|
|
|
let min = 1.0;
|
|
let max = -1.0;
|
|
|
|
for (let i = startSample; i < endSample && i < channelData.length; i++) {
|
|
const sample = channelData[i];
|
|
if (sample < min) min = sample;
|
|
if (sample > max) max = sample;
|
|
}
|
|
|
|
const y1 = (height / 2) * (1 - max);
|
|
const y2 = (height / 2) * (1 - min);
|
|
|
|
ctx.beginPath();
|
|
ctx.moveTo(x, y1);
|
|
ctx.lineTo(x, y2);
|
|
ctx.stroke();
|
|
}
|
|
|
|
// Draw center line
|
|
ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
|
|
ctx.lineWidth = 1;
|
|
ctx.beginPath();
|
|
ctx.moveTo(0, height / 2);
|
|
ctx.lineTo(width, height / 2);
|
|
ctx.stroke();
|
|
|
|
// Draw selection overlay
|
|
if (track.selection && duration > 0) {
|
|
const selStartX = (track.selection.start / duration) * width;
|
|
const selEndX = (track.selection.end / duration) * width;
|
|
|
|
// Draw selection background
|
|
ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
|
|
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
|
|
|
|
// Draw selection borders
|
|
ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
|
|
ctx.lineWidth = 2;
|
|
|
|
// Start border
|
|
ctx.beginPath();
|
|
ctx.moveTo(selStartX, 0);
|
|
ctx.lineTo(selStartX, height);
|
|
ctx.stroke();
|
|
|
|
// End border
|
|
ctx.beginPath();
|
|
ctx.moveTo(selEndX, 0);
|
|
ctx.lineTo(selEndX, height);
|
|
ctx.stroke();
|
|
}
|
|
|
|
// Draw playhead
|
|
if (duration > 0) {
|
|
const playheadX = (currentTime / duration) * width;
|
|
ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
|
|
ctx.lineWidth = 2;
|
|
ctx.beginPath();
|
|
ctx.moveTo(playheadX, 0);
|
|
ctx.lineTo(playheadX, height);
|
|
ctx.stroke();
|
|
}
|
|
}, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration, themeKey, track.selection]);
|
|
|
|
const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!duration) return;
|
|
|
|
const rect = e.currentTarget.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const y = e.clientY - rect.top;
|
|
const clickTime = (x / rect.width) * duration;
|
|
|
|
// Store drag start position
|
|
setDragStartPos({ x: e.clientX, y: e.clientY });
|
|
setIsSelectingByDrag(false);
|
|
|
|
// Start selection immediately (will be used if user drags)
|
|
setIsSelecting(true);
|
|
setSelectionStart(clickTime);
|
|
};
|
|
|
|
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
|
|
|
|
const rect = e.currentTarget.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const currentTime = (x / rect.width) * duration;
|
|
|
|
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
|
|
const dragDistance = Math.sqrt(
|
|
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
|
|
);
|
|
|
|
if (dragDistance > 3) {
|
|
setIsSelectingByDrag(true);
|
|
}
|
|
|
|
// If dragging, update selection
|
|
if (isSelectingByDrag || dragDistance > 3) {
|
|
// Clamp to valid time range
|
|
const clampedTime = Math.max(0, Math.min(duration, currentTime));
|
|
|
|
// Update selection (ensure start < end)
|
|
const start = Math.min(selectionStart, clampedTime);
|
|
const end = Math.max(selectionStart, clampedTime);
|
|
|
|
onSelectionChange?.({ start, end });
|
|
}
|
|
};
|
|
|
|
const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
|
|
if (!duration) return;
|
|
|
|
const rect = e.currentTarget.getBoundingClientRect();
|
|
const x = e.clientX - rect.left;
|
|
const clickTime = (x / rect.width) * duration;
|
|
|
|
// Check if user actually dragged (check distance directly, not state)
|
|
const didDrag = dragStartPos
|
|
? Math.sqrt(
|
|
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
|
|
) > 3
|
|
: false;
|
|
|
|
// If user didn't drag (just clicked), clear selection and seek
|
|
if (!didDrag) {
|
|
onSelectionChange?.(null);
|
|
if (onSeek) {
|
|
onSeek(clickTime);
|
|
}
|
|
}
|
|
|
|
// Reset drag state
|
|
setIsSelecting(false);
|
|
setIsSelectingByDrag(false);
|
|
setDragStartPos(null);
|
|
};
|
|
|
|
// Handle mouse leaving canvas during selection
|
|
React.useEffect(() => {
|
|
const handleGlobalMouseUp = () => {
|
|
if (isSelecting) {
|
|
setIsSelecting(false);
|
|
setIsSelectingByDrag(false);
|
|
setDragStartPos(null);
|
|
}
|
|
};
|
|
|
|
window.addEventListener('mouseup', handleGlobalMouseUp);
|
|
return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
|
|
}, [isSelecting]);
|
|
|
|
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
|
const file = e.target.files?.[0];
|
|
if (!file || !onLoadAudio) return;
|
|
|
|
try {
|
|
const arrayBuffer = await file.arrayBuffer();
|
|
const audioContext = new AudioContext();
|
|
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
onLoadAudio(audioBuffer);
|
|
|
|
// Update track name to filename if it's still default
|
|
if (track.name === 'New Track' || track.name === 'Untitled Track') {
|
|
const fileName = file.name.replace(/\.[^/.]+$/, '');
|
|
onNameChange(fileName);
|
|
}
|
|
} catch (error) {
|
|
console.error('Failed to load audio file:', error);
|
|
}
|
|
|
|
// Reset input
|
|
e.target.value = '';
|
|
};
|
|
|
|
const handleLoadAudioClick = () => {
|
|
fileInputRef.current?.click();
|
|
};
|
|
|
|
const [isDragging, setIsDragging] = React.useState(false);
|
|
|
|
const handleDragOver = (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(true);
|
|
};
|
|
|
|
const handleDragLeave = (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(false);
|
|
};
|
|
|
|
const handleDrop = async (e: React.DragEvent) => {
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsDragging(false);
|
|
|
|
const file = e.dataTransfer.files?.[0];
|
|
if (!file || !onLoadAudio) return;
|
|
|
|
// Check if it's an audio file
|
|
if (!file.type.startsWith('audio/')) {
|
|
console.warn('Dropped file is not an audio file');
|
|
return;
|
|
}
|
|
|
|
try {
|
|
const arrayBuffer = await file.arrayBuffer();
|
|
const audioContext = new AudioContext();
|
|
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
onLoadAudio(audioBuffer);
|
|
|
|
// Update track name to filename if it's still default
|
|
if (track.name === 'New Track' || track.name === 'Untitled Track') {
|
|
const fileName = file.name.replace(/\.[^/.]+$/, '');
|
|
onNameChange(fileName);
|
|
}
|
|
} catch (error) {
|
|
console.error('Failed to load audio file:', error);
|
|
}
|
|
};
|
|
|
|
const trackHeight = track.collapsed ? COLLAPSED_TRACK_HEIGHT : track.height;
|
|
|
|
// Track height resize handlers
|
|
const handleResizeStart = React.useCallback(
|
|
(e: React.MouseEvent) => {
|
|
if (track.collapsed) return;
|
|
e.preventDefault();
|
|
e.stopPropagation();
|
|
setIsResizing(true);
|
|
resizeStartRef.current = { y: e.clientY, height: track.height };
|
|
},
|
|
[track.collapsed, track.height]
|
|
);
|
|
|
|
React.useEffect(() => {
|
|
if (!isResizing) return;
|
|
|
|
const handleMouseMove = (e: MouseEvent) => {
|
|
const delta = e.clientY - resizeStartRef.current.y;
|
|
const newHeight = Math.max(
|
|
MIN_TRACK_HEIGHT,
|
|
Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta)
|
|
);
|
|
onUpdateTrack(track.id, { height: newHeight });
|
|
};
|
|
|
|
const handleMouseUp = () => {
|
|
setIsResizing(false);
|
|
};
|
|
|
|
window.addEventListener('mousemove', handleMouseMove);
|
|
window.addEventListener('mouseup', handleMouseUp);
|
|
|
|
return () => {
|
|
window.removeEventListener('mousemove', handleMouseMove);
|
|
window.removeEventListener('mouseup', handleMouseUp);
|
|
};
|
|
}, [isResizing, onUpdateTrack, track.id]);
|
|
|
|
return (
|
|
<div
|
|
ref={containerRef}
|
|
className={cn(
|
|
'flex flex-col transition-all duration-200 relative',
|
|
isSelected && 'bg-primary/5'
|
|
)}
|
|
>
|
|
{/* Top: Track Row (Control Panel + Waveform) */}
|
|
<div className="flex" style={{ height: trackHeight }}>
|
|
{/* Left: Track Control Panel (Fixed Width) - Ableton Style */}
|
|
<div
|
|
className={cn(
|
|
"w-48 flex-shrink-0 border-b border-r-4 p-2 flex flex-col gap-2 min-h-0 transition-all duration-200 cursor-pointer border-border",
|
|
isSelected
|
|
? "bg-primary/10 border-r-primary"
|
|
: "bg-card border-r-transparent hover:bg-accent/30"
|
|
)}
|
|
onClick={(e) => {
|
|
e.stopPropagation();
|
|
if (onSelect) onSelect();
|
|
}}
|
|
>
|
|
{/* Track Name Row - Integrated collapse (DAW style) */}
|
|
<div
|
|
className={cn(
|
|
"group flex items-center gap-1.5 px-1 py-0.5 rounded cursor-pointer transition-colors",
|
|
isSelected ? "bg-primary/10" : "hover:bg-accent/50"
|
|
)}
|
|
onClick={(e) => {
|
|
if (!isEditingName) {
|
|
e.stopPropagation();
|
|
onToggleCollapse();
|
|
}
|
|
}}
|
|
title={track.collapsed ? 'Expand track' : 'Collapse track'}
|
|
>
|
|
{/* Small triangle indicator */}
|
|
<div className={cn(
|
|
"flex-shrink-0 transition-colors",
|
|
isSelected ? "text-primary" : "text-muted-foreground group-hover:text-foreground"
|
|
)}>
|
|
{track.collapsed ? (
|
|
<ChevronRight className="h-3 w-3" />
|
|
) : (
|
|
<ChevronDown className="h-3 w-3" />
|
|
)}
|
|
</div>
|
|
|
|
{/* Color stripe (thicker when selected) */}
|
|
<div
|
|
className={cn(
|
|
"h-5 rounded-full flex-shrink-0 transition-all",
|
|
isSelected ? "w-1" : "w-0.5"
|
|
)}
|
|
style={{ backgroundColor: track.color }}
|
|
/>
|
|
|
|
{/* Track name (editable) */}
|
|
<div className="flex-1 min-w-0">
|
|
{isEditingName ? (
|
|
<input
|
|
ref={inputRef}
|
|
type="text"
|
|
value={nameInput}
|
|
onChange={(e) => setNameInput(e.target.value)}
|
|
onBlur={handleNameBlur}
|
|
onKeyDown={handleNameKeyDown}
|
|
onClick={(e) => e.stopPropagation()}
|
|
className="w-full px-1 py-0.5 text-xs font-semibold bg-background border border-border rounded"
|
|
/>
|
|
) : (
|
|
<div
|
|
onClick={(e) => {
|
|
e.stopPropagation();
|
|
handleNameClick();
|
|
}}
|
|
className="px-1 py-0.5 text-xs font-semibold text-foreground truncate"
|
|
title={String(track.name || 'Untitled Track')}
|
|
>
|
|
{String(track.name || 'Untitled Track')}
|
|
</div>
|
|
)}
|
|
</div>
|
|
</div>
|
|
|
|
{/* Track Controls - Only show when not collapsed */}
|
|
{!track.collapsed && (
|
|
<div className="flex-1 flex flex-col items-center justify-between min-h-0 overflow-hidden">
|
|
{/* Pan Knob */}
|
|
<div className="flex-shrink-0">
|
|
<CircularKnob
|
|
value={track.pan}
|
|
onChange={onPanChange}
|
|
min={-1}
|
|
max={1}
|
|
step={0.01}
|
|
size={48}
|
|
label="PAN"
|
|
/>
|
|
</div>
|
|
|
|
{/* Vertical Volume Fader with integrated meter */}
|
|
<div className="flex-1 flex items-center justify-center min-h-0">
|
|
<VerticalFader
|
|
value={track.volume}
|
|
level={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
|
|
onChange={onVolumeChange}
|
|
min={0}
|
|
max={1}
|
|
step={0.01}
|
|
showDb={true}
|
|
/>
|
|
</div>
|
|
|
|
{/* Inline Button Row - Below fader */}
|
|
<div className="flex-shrink-0 w-full">
|
|
{/* R/S/M inline row with icons */}
|
|
<div className="flex items-center gap-1 justify-center">
|
|
{/* Record Arm */}
|
|
{onToggleRecordEnable && (
|
|
<button
|
|
onClick={onToggleRecordEnable}
|
|
className={cn(
|
|
'h-6 w-6 rounded flex items-center justify-center transition-all',
|
|
track.recordEnabled
|
|
? 'bg-red-500 text-white shadow-md shadow-red-500/30'
|
|
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50',
|
|
isRecording && 'animate-pulse'
|
|
)}
|
|
title="Arm track for recording"
|
|
>
|
|
<Circle className="h-3 w-3 fill-current" />
|
|
</button>
|
|
)}
|
|
|
|
{/* Solo Button */}
|
|
<button
|
|
onClick={onToggleSolo}
|
|
className={cn(
|
|
'h-6 w-6 rounded flex items-center justify-center transition-all',
|
|
track.solo
|
|
? 'bg-yellow-500 text-black shadow-md shadow-yellow-500/30'
|
|
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
|
)}
|
|
title="Solo track"
|
|
>
|
|
<Headphones className="h-3 w-3" />
|
|
</button>
|
|
|
|
{/* Mute Button */}
|
|
<button
|
|
onClick={onToggleMute}
|
|
className={cn(
|
|
'h-6 w-6 rounded flex items-center justify-center transition-all',
|
|
track.mute
|
|
? 'bg-blue-500 text-white shadow-md shadow-blue-500/30'
|
|
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
|
)}
|
|
title="Mute track"
|
|
>
|
|
{track.mute ? <VolumeX className="h-3 w-3" /> : <Volume2 className="h-3 w-3" />}
|
|
</button>
|
|
|
|
{/* Automation Toggle */}
|
|
<button
|
|
onClick={() => {
|
|
onUpdateTrack(track.id, {
|
|
automation: {
|
|
...track.automation,
|
|
showAutomation: !track.automation?.showAutomation,
|
|
},
|
|
});
|
|
}}
|
|
className={cn(
|
|
'h-6 w-6 rounded flex items-center justify-center transition-all text-[10px] font-bold',
|
|
track.automation?.showAutomation
|
|
? 'bg-primary text-primary-foreground shadow-md shadow-primary/30'
|
|
: 'bg-card hover:bg-accent text-muted-foreground border border-border/50'
|
|
)}
|
|
title={track.automation?.showAutomation ? 'Hide automation' : 'Show automation'}
|
|
>
|
|
A
|
|
</button>
|
|
</div>
|
|
</div>
|
|
</div>
|
|
)}
|
|
</div>
|
|
|
|
{/* Right: Waveform Area (Flexible Width) */}
|
|
<div
|
|
className="flex-1 relative bg-waveform-bg border-b border-l border-border"
|
|
>
|
|
{/* Delete Button - Top Right Overlay */}
|
|
<button
|
|
onClick={(e) => {
|
|
e.stopPropagation();
|
|
onRemove();
|
|
}}
|
|
className={cn(
|
|
'absolute top-2 right-2 z-20 h-6 w-6 rounded flex items-center justify-center transition-all',
|
|
'bg-card/80 hover:bg-destructive/90 text-muted-foreground hover:text-white',
|
|
'border border-border/50 hover:border-destructive',
|
|
'backdrop-blur-sm shadow-sm hover:shadow-md'
|
|
)}
|
|
title="Remove track"
|
|
>
|
|
<Trash2 className="h-3 w-3" />
|
|
</button>
|
|
|
|
{track.audioBuffer ? (
|
|
<>
|
|
{/* Waveform Canvas */}
|
|
<canvas
|
|
ref={canvasRef}
|
|
className="absolute inset-0 w-full h-full cursor-pointer"
|
|
onMouseDown={handleCanvasMouseDown}
|
|
onMouseMove={handleCanvasMouseMove}
|
|
onMouseUp={handleCanvasMouseUp}
|
|
/>
|
|
</>
|
|
) : (
|
|
!track.collapsed && (
|
|
<>
|
|
<div
|
|
className={cn(
|
|
"absolute inset-0 flex flex-col items-center justify-center text-sm text-muted-foreground hover:text-foreground transition-colors cursor-pointer group",
|
|
isDragging ? "bg-primary/20 text-primary border-2 border-primary border-dashed" : "hover:bg-accent/50"
|
|
)}
|
|
onClick={(e) => {
|
|
e.stopPropagation();
|
|
handleLoadAudioClick();
|
|
}}
|
|
onDragOver={handleDragOver}
|
|
onDragLeave={handleDragLeave}
|
|
onDrop={handleDrop}
|
|
>
|
|
<Upload className="h-6 w-6 mb-2 opacity-50 group-hover:opacity-100" />
|
|
<p>{isDragging ? 'Drop audio file here' : 'Click to load audio file'}</p>
|
|
<p className="text-xs opacity-75 mt-1">or drag & drop</p>
|
|
</div>
|
|
<input
|
|
ref={fileInputRef}
|
|
type="file"
|
|
accept="audio/*"
|
|
onChange={handleFileChange}
|
|
className="hidden"
|
|
/>
|
|
</>
|
|
)
|
|
)}
|
|
</div>
|
|
</div>
|
|
|
|
{/* Automation Lanes */}
|
|
{!track.collapsed && track.automation?.showAutomation && (
|
|
<div className="bg-background/30">
|
|
{track.automation.lanes.map((lane) => (
|
|
<AutomationLane
|
|
key={lane.id}
|
|
lane={lane}
|
|
duration={duration}
|
|
zoom={zoom}
|
|
currentTime={currentTime}
|
|
onUpdateLane={(updates) => {
|
|
const updatedLanes = track.automation.lanes.map((l) =>
|
|
l.id === lane.id ? { ...l, ...updates } : l
|
|
);
|
|
onUpdateTrack(track.id, {
|
|
automation: { ...track.automation, lanes: updatedLanes },
|
|
});
|
|
}}
|
|
onAddPoint={(time, value) => {
|
|
const newPoint = createAutomationPoint({
|
|
time,
|
|
value,
|
|
curve: 'linear',
|
|
});
|
|
const updatedLanes = track.automation.lanes.map((l) =>
|
|
l.id === lane.id
|
|
? { ...l, points: [...l.points, newPoint] }
|
|
: l
|
|
);
|
|
onUpdateTrack(track.id, {
|
|
automation: { ...track.automation, lanes: updatedLanes },
|
|
});
|
|
}}
|
|
onUpdatePoint={(pointId, updates) => {
|
|
const updatedLanes = track.automation.lanes.map((l) =>
|
|
l.id === lane.id
|
|
? {
|
|
...l,
|
|
points: l.points.map((p) =>
|
|
p.id === pointId ? { ...p, ...updates } : p
|
|
),
|
|
}
|
|
: l
|
|
);
|
|
onUpdateTrack(track.id, {
|
|
automation: { ...track.automation, lanes: updatedLanes },
|
|
});
|
|
}}
|
|
onRemovePoint={(pointId) => {
|
|
const updatedLanes = track.automation.lanes.map((l) =>
|
|
l.id === lane.id
|
|
? { ...l, points: l.points.filter((p) => p.id !== pointId) }
|
|
: l
|
|
);
|
|
onUpdateTrack(track.id, {
|
|
automation: { ...track.automation, lanes: updatedLanes },
|
|
});
|
|
}}
|
|
/>
|
|
))}
|
|
</div>
|
|
)}
|
|
|
|
{/* Track Height Resize Handle */}
|
|
{!track.collapsed && (
|
|
<div
|
|
className={cn(
|
|
'absolute bottom-0 left-0 right-0 h-1 cursor-ns-resize hover:bg-primary/50 transition-all duration-200 z-20 group',
|
|
isResizing && 'bg-primary/50 h-1.5'
|
|
)}
|
|
onMouseDown={handleResizeStart}
|
|
title="Drag to resize track height"
|
|
>
|
|
<div className="absolute inset-x-0 bottom-0 h-px bg-border group-hover:bg-primary transition-colors duration-200" />
|
|
</div>
|
|
)}
|
|
</div>
|
|
);
|
|
}
|