audio-ui/components/tracks/Track.tsx
Sebastian Krüger ecf7f060ec feat: Complete Ableton-style track layout redesign
Track height & spacing improvements:
- Increased DEFAULT_TRACK_HEIGHT from 180px to 240px for vertical controls
- Increased MIN_TRACK_HEIGHT from 60px to 120px
- Increased MAX_TRACK_HEIGHT from 300px to 400px
- Added COLLAPSED_TRACK_HEIGHT constant (48px); the updated constants are sketched after this list
- Reduced control panel gap from 8px to 6px for tighter spacing
- Added min-h-0 and overflow-hidden to prevent flex overflow
- Optimized fader container with max-h-[140px] constraint
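
The resulting constants in types/track.ts would look roughly like this (values from the bullets above; the exact file layout is assumed):

    // types/track.ts (sketch)
    export const DEFAULT_TRACK_HEIGHT = 240;  // was 180
    export const MIN_TRACK_HEIGHT = 120;      // was 60
    export const MAX_TRACK_HEIGHT = 400;      // was 300
    export const COLLAPSED_TRACK_HEIGHT = 48; // new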

Clip-style waveform visualization (Ableton-like):
- Wrapped waveform canvas in visible "clip" container
- Added border, rounded corners, and shadow for clip identity
- Added 16px clip header bar showing track name
- Implemented hover state for better interactivity
- Added gradient background from-foreground/5 to-transparent

Track height resize functionality:
- Added draggable bottom-edge resize handle
- Implemented cursor-ns-resize with hover feedback
- Constrained resizing to the MIN/MAX height range (clamp helper sketched after this list)
- Real-time height updates with smooth visual feedback
- Active state highlighting during resize
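
The drag handlers further down in Track.tsx reduce to clamping the pointer delta into that range; an equivalent standalone helper (illustrative only):

    import { MIN_TRACK_HEIGHT, MAX_TRACK_HEIGHT } from '@/types/track';

    /** Mirrors the mousemove logic in Track.tsx's resize effect. */
    function clampTrackHeight(startHeight: number, deltaY: number): number {
      return Math.max(MIN_TRACK_HEIGHT, Math.min(MAX_TRACK_HEIGHT, startHeight + deltaY));
    }

    // clampTrackHeight(240,  500) === 400  (MAX_TRACK_HEIGHT)
    // clampTrackHeight(240, -500) === 120  (MIN_TRACK_HEIGHT)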

Effects section visual integration:
- Changed from solid background to gradient (from-muted/80 to-muted/60)
- Reduced device rack height from 192px to 176px for better proportion
- Improved visual hierarchy and connection to track row

Flexible VerticalFader component:
- Changed from fixed h-32 (128px) to flex-1 layout
- Added min-h-[80px] and max-h-[140px] constraints
- Allows the parent container to control the actual height (see the layout sketch after this list)
- Maintains readability and proportions at all sizes
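
A layout-only sketch of the change in VerticalFader.tsx; the fader internals and the exact props type are assumptions based on how Track.tsx calls it:

    import * as React from 'react';

    export interface VerticalFaderProps {
      value: number;
      onChange: (value: number) => void;
      level?: number;   // meter level drawn next to the fader
      min?: number;
      max?: number;
      step?: number;
      showDb?: boolean; // show a dB readout
    }

    export function VerticalFader(_props: VerticalFaderProps) {
      return (
        // Before: fixed 'h-32' (128px); now the parent controls the height within bounds
        <div className="flex-1 min-h-[80px] max-h-[140px]">
          {/* ...fader track, handle, meter and dB readout unchanged... */}
        </div>
      );
    }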

CSS enhancements (globals.css):
- Added .track-clip-container utility class
- Added .track-clip-header utility class
- Theme-aware clip styling for light/dark modes
- OKLCH color space for consistent appearance

Visual results:
- Professional DAW appearance matching Ableton Live
- Clear clip/region boundaries for audio editing
- Better proportions for vertical controls (240px tracks)
- Resizable track heights (120-400px range)
- Improved visual hierarchy and organization

Files modified:
- types/track.ts (height constants)
- components/tracks/Track.tsx (layout + clip styling + resize)
- components/ui/VerticalFader.tsx (flexible height)
- app/globals.css (clip styling)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-18 16:41:58 +01:00

'use client';
import * as React from 'react';
import { Volume2, VolumeX, Headphones, Trash2, ChevronDown, ChevronRight, UnfoldHorizontal, Upload, Plus, Mic, Gauge } from 'lucide-react';
import type { Track as TrackType } from '@/types/track';
import { COLLAPSED_TRACK_HEIGHT, MIN_TRACK_HEIGHT, MAX_TRACK_HEIGHT } from '@/types/track';
import { Button } from '@/components/ui/Button';
import { Slider } from '@/components/ui/Slider';
import { cn } from '@/lib/utils/cn';
import { EffectBrowser } from '@/components/effects/EffectBrowser';
import { EffectDevice } from '@/components/effects/EffectDevice';
import { createEffect, type EffectType } from '@/lib/audio/effects/chain';
import { VerticalFader } from '@/components/ui/VerticalFader';
import { CircularKnob } from '@/components/ui/CircularKnob';
import { AutomationLane } from '@/components/automation/AutomationLane';
import type { AutomationLane as AutomationLaneType, AutomationPoint as AutomationPointType } from '@/types/automation';
import { createAutomationPoint } from '@/lib/audio/automation/utils';
export interface TrackProps {
track: TrackType;
zoom: number;
currentTime: number;
duration: number;
isSelected?: boolean;
onSelect?: () => void;
onToggleMute: () => void;
onToggleSolo: () => void;
onToggleCollapse: () => void;
onVolumeChange: (volume: number) => void;
onPanChange: (pan: number) => void;
onRemove: () => void;
onNameChange: (name: string) => void;
onSeek?: (time: number) => void;
onLoadAudio?: (buffer: AudioBuffer) => void;
onToggleEffect?: (effectId: string) => void;
onRemoveEffect?: (effectId: string) => void;
onUpdateEffect?: (effectId: string, parameters: any) => void;
onAddEffect?: (effectType: EffectType) => void;
onSelectionChange?: (selection: { start: number; end: number } | null) => void;
onToggleRecordEnable?: () => void;
/** Merge partial updates (e.g. height, automation lanes) into the track; used by the resize and automation handlers below. */
onUpdateTrack: (trackId: string, updates: Partial<TrackType>) => void;
isRecording?: boolean;
recordingLevel?: number;
playbackLevel?: number;
}
export function Track({
track,
zoom,
currentTime,
duration,
isSelected,
onSelect,
onToggleMute,
onToggleSolo,
onToggleCollapse,
onVolumeChange,
onPanChange,
onRemove,
onNameChange,
onSeek,
onLoadAudio,
onToggleEffect,
onRemoveEffect,
onUpdateEffect,
onAddEffect,
onSelectionChange,
onToggleRecordEnable,
onUpdateTrack,
isRecording = false,
recordingLevel = 0,
playbackLevel = 0,
}: TrackProps) {
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const containerRef = React.useRef<HTMLDivElement>(null);
const fileInputRef = React.useRef<HTMLInputElement>(null);
const [isEditingName, setIsEditingName] = React.useState(false);
const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
const [effectBrowserOpen, setEffectBrowserOpen] = React.useState(false);
const [showEffects, setShowEffects] = React.useState(false);
const [themeKey, setThemeKey] = React.useState(0);
const inputRef = React.useRef<HTMLInputElement>(null);
const [isResizing, setIsResizing] = React.useState(false);
const resizeStartRef = React.useRef({ y: 0, height: 0 });
// Selection state
const [isSelecting, setIsSelecting] = React.useState(false);
const [selectionStart, setSelectionStart] = React.useState<number | null>(null);
const [isSelectingByDrag, setIsSelectingByDrag] = React.useState(false);
const [dragStartPos, setDragStartPos] = React.useState<{ x: number; y: number } | null>(null);
const handleNameClick = () => {
setIsEditingName(true);
setNameInput(String(track.name || 'Untitled Track'));
};
const handleNameBlur = () => {
setIsEditingName(false);
if (nameInput.trim()) {
onNameChange(nameInput.trim());
} else {
setNameInput(String(track.name || 'Untitled Track'));
}
};
const handleNameKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter') {
inputRef.current?.blur();
} else if (e.key === 'Escape') {
setNameInput(String(track.name || 'Untitled Track'));
setIsEditingName(false);
}
};
React.useEffect(() => {
if (isEditingName && inputRef.current) {
inputRef.current.focus();
inputRef.current.select();
}
}, [isEditingName]);
// Listen for theme changes
React.useEffect(() => {
const observer = new MutationObserver(() => {
// Increment key to force waveform redraw
setThemeKey((prev) => prev + 1);
});
// Watch for class changes on document element (dark mode toggle)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['class'],
});
return () => observer.disconnect();
}, []);
// Draw waveform
React.useEffect(() => {
if (!track.audioBuffer || !canvasRef.current) return;
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
if (!ctx) return;
// Use parent container's size since canvas is absolute positioned
const parent = canvas.parentElement;
if (!parent) return;
const dpr = window.devicePixelRatio || 1;
const rect = parent.getBoundingClientRect();
canvas.width = rect.width * dpr;
canvas.height = rect.height * dpr;
ctx.scale(dpr, dpr);
const width = rect.width;
const height = rect.height;
// Clear canvas with theme color
const bgColor = getComputedStyle(canvas).getPropertyValue('--color-waveform-bg').trim() || 'rgb(15, 23, 42)';
ctx.fillStyle = bgColor;
ctx.fillRect(0, 0, width, height);
const buffer = track.audioBuffer;
const channelData = buffer.getChannelData(0);
// At least 1 sample per pixel so the min/max scan below always advances
const samplesPerPixel = Math.max(1, Math.floor(buffer.length / (width * zoom)));
// Draw waveform
ctx.fillStyle = track.color;
ctx.strokeStyle = track.color;
ctx.lineWidth = 1;
for (let x = 0; x < width; x++) {
const startSample = Math.floor(x * samplesPerPixel);
const endSample = Math.floor((x + 1) * samplesPerPixel);
let min = 1.0;
let max = -1.0;
for (let i = startSample; i < endSample && i < channelData.length; i++) {
const sample = channelData[i];
if (sample < min) min = sample;
if (sample > max) max = sample;
}
const y1 = (height / 2) * (1 - max);
const y2 = (height / 2) * (1 - min);
ctx.beginPath();
ctx.moveTo(x, y1);
ctx.lineTo(x, y2);
ctx.stroke();
}
// Draw center line
ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
ctx.lineWidth = 1;
ctx.beginPath();
ctx.moveTo(0, height / 2);
ctx.lineTo(width, height / 2);
ctx.stroke();
// Draw selection overlay
if (track.selection && duration > 0) {
const selStartX = (track.selection.start / duration) * width;
const selEndX = (track.selection.end / duration) * width;
// Draw selection background
ctx.fillStyle = 'rgba(59, 130, 246, 0.2)';
ctx.fillRect(selStartX, 0, selEndX - selStartX, height);
// Draw selection borders
ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
ctx.lineWidth = 2;
// Start border
ctx.beginPath();
ctx.moveTo(selStartX, 0);
ctx.lineTo(selStartX, height);
ctx.stroke();
// End border
ctx.beginPath();
ctx.moveTo(selEndX, 0);
ctx.lineTo(selEndX, height);
ctx.stroke();
}
// Draw playhead
if (duration > 0) {
const playheadX = (currentTime / duration) * width;
ctx.strokeStyle = 'rgba(239, 68, 68, 0.8)';
ctx.lineWidth = 2;
ctx.beginPath();
ctx.moveTo(playheadX, 0);
ctx.lineTo(playheadX, height);
ctx.stroke();
}
}, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration, themeKey, track.selection]);
const handleCanvasMouseDown = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
const clickTime = (x / rect.width) * duration;
// Store drag start position
setDragStartPos({ x: e.clientX, y: e.clientY });
setIsSelectingByDrag(false);
// Start selection immediately (will be used if user drags)
setIsSelecting(true);
setSelectionStart(clickTime);
};
const handleCanvasMouseMove = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!isSelecting || selectionStart === null || !duration || !dragStartPos) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const pointerTime = (x / rect.width) * duration; // avoid shadowing the currentTime prop
// Check if user has moved enough to be considered dragging (threshold: 3 pixels)
const dragDistance = Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
);
if (dragDistance > 3) {
setIsSelectingByDrag(true);
}
// If dragging, update selection
if (isSelectingByDrag || dragDistance > 3) {
// Clamp to valid time range
const clampedTime = Math.max(0, Math.min(duration, pointerTime));
// Update selection (ensure start < end)
const start = Math.min(selectionStart, clampedTime);
const end = Math.max(selectionStart, clampedTime);
onSelectionChange?.({ start, end });
}
};
const handleCanvasMouseUp = (e: React.MouseEvent<HTMLCanvasElement>) => {
if (!duration) return;
const rect = e.currentTarget.getBoundingClientRect();
const x = e.clientX - rect.left;
const clickTime = (x / rect.width) * duration;
// Check if user actually dragged (check distance directly, not state)
const didDrag = dragStartPos
? Math.sqrt(
Math.pow(e.clientX - dragStartPos.x, 2) + Math.pow(e.clientY - dragStartPos.y, 2)
) > 3
: false;
// If user didn't drag (just clicked), clear selection and seek
if (!didDrag) {
onSelectionChange?.(null);
if (onSeek) {
onSeek(clickTime);
}
}
// Reset drag state
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
};
// End selection on a mouseup anywhere (covers the pointer leaving the canvas mid-drag)
React.useEffect(() => {
const handleGlobalMouseUp = () => {
if (isSelecting) {
setIsSelecting(false);
setIsSelectingByDrag(false);
setDragStartPos(null);
}
};
window.addEventListener('mouseup', handleGlobalMouseUp);
return () => window.removeEventListener('mouseup', handleGlobalMouseUp);
}, [isSelecting]);
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (!file || !onLoadAudio) return;
try {
const arrayBuffer = await file.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
// Close the temporary decoding context so repeated loads don't exhaust the browser's AudioContext limit
await audioContext.close();
onLoadAudio(audioBuffer);
// Update track name to filename if it's still default
if (track.name === 'New Track' || track.name === 'Untitled Track') {
const fileName = file.name.replace(/\.[^/.]+$/, '');
onNameChange(fileName);
}
} catch (error) {
console.error('Failed to load audio file:', error);
}
// Reset input
e.target.value = '';
};
const handleLoadAudioClick = () => {
fileInputRef.current?.click();
};
const [isDragging, setIsDragging] = React.useState(false);
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(true);
};
const handleDragLeave = (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
};
const handleDrop = async (e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragging(false);
const file = e.dataTransfer.files?.[0];
if (!file || !onLoadAudio) return;
// Check if it's an audio file
if (!file.type.startsWith('audio/')) {
console.warn('Dropped file is not an audio file');
return;
}
try {
const arrayBuffer = await file.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
// Close the temporary decoding context so repeated drops don't exhaust the browser's AudioContext limit
await audioContext.close();
onLoadAudio(audioBuffer);
// Update track name to filename if it's still default
if (track.name === 'New Track' || track.name === 'Untitled Track') {
const fileName = file.name.replace(/\.[^/.]+$/, '');
onNameChange(fileName);
}
} catch (error) {
console.error('Failed to load audio file:', error);
}
};
const trackHeight = track.collapsed ? COLLAPSED_TRACK_HEIGHT : track.height;
// Track height resize handlers
const handleResizeStart = React.useCallback(
(e: React.MouseEvent) => {
if (track.collapsed) return;
e.preventDefault();
e.stopPropagation();
setIsResizing(true);
resizeStartRef.current = { y: e.clientY, height: track.height };
},
[track.collapsed, track.height]
);
React.useEffect(() => {
if (!isResizing) return;
const handleMouseMove = (e: MouseEvent) => {
const delta = e.clientY - resizeStartRef.current.y;
const newHeight = Math.max(
MIN_TRACK_HEIGHT,
Math.min(MAX_TRACK_HEIGHT, resizeStartRef.current.height + delta)
);
onUpdateTrack(track.id, { height: newHeight });
};
const handleMouseUp = () => {
setIsResizing(false);
};
window.addEventListener('mousemove', handleMouseMove);
window.addEventListener('mouseup', handleMouseUp);
return () => {
window.removeEventListener('mousemove', handleMouseMove);
window.removeEventListener('mouseup', handleMouseUp);
};
}, [isResizing, onUpdateTrack, track.id]);
return (
<div
ref={containerRef}
className={cn(
'relative flex flex-col', // relative so the absolute-positioned resize handle anchors to this track
isSelected && 'ring-2 ring-primary ring-inset'
)}
>
{/* Top: Track Row (Control Panel + Waveform) */}
<div className="flex" style={{ height: trackHeight }}>
{/* Left: Track Control Panel (Fixed Width) - Ableton Style */}
<div
className="w-48 flex-shrink-0 bg-card border-r border-border border-b border-border p-2 flex flex-col gap-1.5 min-h-0"
onClick={(e) => e.stopPropagation()}
>
{/* Track Name (Full Width) */}
<div className="flex items-center gap-1">
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleCollapse}
title={track.collapsed ? 'Expand track' : 'Collapse track'}
className="flex-shrink-0 h-6 w-6"
>
{track.collapsed ? (
<ChevronRight className="h-3 w-3" />
) : (
<ChevronDown className="h-3 w-3" />
)}
</Button>
<div
className="w-1 h-6 rounded-full flex-shrink-0"
style={{ backgroundColor: track.color }}
/>
<div className="flex-1 min-w-0">
{isEditingName ? (
<input
ref={inputRef}
type="text"
value={nameInput}
onChange={(e) => setNameInput(e.target.value)}
onBlur={handleNameBlur}
onKeyDown={handleNameKeyDown}
className="w-full px-1 py-0.5 text-xs font-medium bg-background border border-border rounded"
/>
) : (
<div
onClick={handleNameClick}
className="px-1 py-0.5 text-xs font-medium text-foreground truncate cursor-pointer hover:bg-accent rounded"
title={String(track.name || 'Untitled Track')}
>
{String(track.name || 'Untitled Track')}
</div>
)}
</div>
</div>
{/* Compact Button Row */}
<div className="flex items-center justify-center gap-1">
{/* Record Enable Button */}
{onToggleRecordEnable && (
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleRecordEnable}
title="Arm track for recording"
className={cn(
'h-6 w-6',
track.recordEnabled && 'bg-red-500/20 hover:bg-red-500/30',
isRecording && 'animate-pulse'
)}
>
<div
className={cn(
'h-3 w-3 rounded-full border-2',
track.recordEnabled ? 'bg-red-500 border-red-500' : 'border-current'
)}
/>
</Button>
)}
{/* Solo Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleSolo}
title="Solo track"
className={cn(
'h-6 w-6 text-[10px] font-bold',
track.solo && 'bg-yellow-500/20 hover:bg-yellow-500/30 text-yellow-500'
)}
>
S
</Button>
{/* Mute Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onToggleMute}
title="Mute track"
className={cn(
'h-6 w-6 text-[10px] font-bold',
track.mute && 'bg-red-500/20 hover:bg-red-500/30 text-red-500'
)}
>
M
</Button>
{/* Remove Button */}
<Button
variant="ghost"
size="icon-sm"
onClick={onRemove}
title="Remove track"
className="h-6 w-6 text-destructive hover:text-destructive hover:bg-destructive/10"
>
<Trash2 className="h-3 w-3" />
</Button>
</div>
{/* Track Controls - Only show when not collapsed */}
{!track.collapsed && (
<div className="flex-1 flex flex-col items-center justify-between py-1 min-h-0 overflow-hidden">
{/* Pan Knob */}
<div className="flex-shrink-0">
<CircularKnob
value={track.pan}
onChange={onPanChange}
min={-1}
max={1}
step={0.01}
size={40}
label="PAN"
/>
</div>
{/* Vertical Volume Fader with integrated meter */}
<div className="flex-1 flex items-center justify-center min-h-0 max-h-[140px]">
<VerticalFader
value={track.volume}
level={track.recordEnabled || isRecording ? recordingLevel : playbackLevel}
onChange={onVolumeChange}
min={0}
max={1}
step={0.01}
showDb={true}
/>
</div>
</div>
)}
</div>
{/* Right: Waveform Area (Flexible Width) */}
<div
className="flex-1 relative bg-waveform-bg border-b border-border"
onClick={onSelect}
>
{track.audioBuffer ? (
<div className="absolute inset-2 bg-card/30 border border-primary/20 rounded-sm shadow-sm overflow-hidden transition-colors hover:border-primary/40">
{/* Clip Header */}
<div className="absolute top-0 left-0 right-0 h-4 bg-gradient-to-b from-foreground/5 to-transparent pointer-events-none z-10 px-2 flex items-center">
<span className="text-[9px] text-foreground/60 font-medium truncate">
{track.name}
</span>
</div>
{/* Waveform Canvas */}
<canvas
ref={canvasRef}
className="absolute inset-0 w-full h-full cursor-crosshair"
onMouseDown={handleCanvasMouseDown}
onMouseMove={handleCanvasMouseMove}
onMouseUp={handleCanvasMouseUp}
/>
</div>
) : (
!track.collapsed && (
<>
<div
className={cn(
"absolute inset-0 flex flex-col items-center justify-center text-sm text-muted-foreground hover:text-foreground transition-colors cursor-pointer group",
isDragging ? "bg-primary/20 text-primary border-2 border-primary border-dashed" : "hover:bg-accent/50"
)}
onClick={(e) => {
e.stopPropagation();
handleLoadAudioClick();
}}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
>
<Upload className="h-6 w-6 mb-2 opacity-50 group-hover:opacity-100" />
<p>{isDragging ? 'Drop audio file here' : 'Click to load audio file'}</p>
<p className="text-xs opacity-75 mt-1">or drag & drop</p>
</div>
<input
ref={fileInputRef}
type="file"
accept="audio/*"
onChange={handleFileChange}
className="hidden"
/>
</>
)
)}
</div>
</div>
{/* Bottom: Effects Section (Collapsible, Full Width) - Ableton Style */}
{!track.collapsed && (
<div className="bg-gradient-to-b from-muted/80 to-muted/60 border-b border-border shadow-inner">
{/* Effects Header - clickable to toggle */}
<div
className="flex items-center gap-2 px-3 py-1.5 cursor-pointer hover:bg-accent/30 transition-colors border-b border-border/30"
onClick={() => setShowEffects(!showEffects)}
>
{showEffects ? (
<ChevronDown className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
) : (
<ChevronRight className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
)}
{/* Show mini effect chain when collapsed */}
{!showEffects && track.effectChain.effects.length > 0 ? (
<div className="flex-1 flex items-center gap-1 overflow-x-auto custom-scrollbar">
{track.effectChain.effects.map((effect) => (
<div
key={effect.id}
className={cn(
'px-2 py-0.5 rounded text-[10px] font-medium flex-shrink-0',
effect.enabled
? 'bg-primary/20 text-primary border border-primary/30'
: 'bg-muted/30 text-muted-foreground border border-border'
)}
>
{effect.name}
</div>
))}
</div>
) : (
<span className="text-xs font-medium text-muted-foreground">
Devices ({track.effectChain.effects.length})
</span>
)}
<Button
variant="ghost"
size="icon-sm"
onClick={(e) => {
e.stopPropagation();
setEffectBrowserOpen(true);
}}
title="Add effect"
className="h-5 w-5 flex-shrink-0"
>
<Plus className="h-3 w-3" />
</Button>
</div>
{/* Horizontal scrolling device rack - expanded state */}
{showEffects && (
<div className="h-44 overflow-x-auto custom-scrollbar bg-background/50 p-2">
<div className="flex h-full gap-2">
{track.effectChain.effects.length === 0 ? (
<div className="text-xs text-muted-foreground text-center py-8 w-full">
No devices. Click + to add an effect.
</div>
) : (
track.effectChain.effects.map((effect) => (
<EffectDevice
key={effect.id}
effect={effect}
onToggleEnabled={() => onToggleEffect?.(effect.id)}
onRemove={() => onRemoveEffect?.(effect.id)}
onUpdateParameters={(params) => onUpdateEffect?.(effect.id, params)}
/>
))
)}
</div>
</div>
)}
</div>
)}
{/* Automation Lanes */}
{!track.collapsed && track.automation?.showAutomation && (
<div className="bg-background/30">
{track.automation.lanes.map((lane) => (
<AutomationLane
key={lane.id}
lane={lane}
duration={duration}
zoom={zoom}
currentTime={currentTime}
onUpdateLane={(updates) => {
const updatedLanes = track.automation.lanes.map((l) =>
l.id === lane.id ? { ...l, ...updates } : l
);
onUpdateTrack(track.id, {
automation: { ...track.automation, lanes: updatedLanes },
});
}}
onAddPoint={(time, value) => {
const newPoint = createAutomationPoint({
time,
value,
curve: 'linear',
});
const updatedLanes = track.automation.lanes.map((l) =>
l.id === lane.id
? { ...l, points: [...l.points, newPoint] }
: l
);
onUpdateTrack(track.id, {
automation: { ...track.automation, lanes: updatedLanes },
});
}}
onUpdatePoint={(pointId, updates) => {
const updatedLanes = track.automation.lanes.map((l) =>
l.id === lane.id
? {
...l,
points: l.points.map((p) =>
p.id === pointId ? { ...p, ...updates } : p
),
}
: l
);
onUpdateTrack(track.id, {
automation: { ...track.automation, lanes: updatedLanes },
});
}}
onRemovePoint={(pointId) => {
const updatedLanes = track.automation.lanes.map((l) =>
l.id === lane.id
? { ...l, points: l.points.filter((p) => p.id !== pointId) }
: l
);
onUpdateTrack(track.id, {
automation: { ...track.automation, lanes: updatedLanes },
});
}}
/>
))}
</div>
)}
{/* Track Height Resize Handle */}
{!track.collapsed && (
<div
className={cn(
'absolute bottom-0 left-0 right-0 h-1 cursor-ns-resize hover:bg-primary/50 transition-colors z-20 group',
isResizing && 'bg-primary/50'
)}
onMouseDown={handleResizeStart}
title="Drag to resize track height"
>
<div className="absolute inset-x-0 bottom-0 h-px bg-border group-hover:bg-primary" />
</div>
)}
{/* Effect Browser Dialog */}
<EffectBrowser
open={effectBrowserOpen}
onClose={() => setEffectBrowserOpen(false)}
onSelectEffect={(effectType) => {
if (onAddEffect) {
onAddEffect(effectType);
}
}}
/>
</div>
);
}