'use client';

import * as React from 'react';
import {
  Volume2,
  VolumeX,
  Headphones,
  Trash2,
  ChevronDown,
  ChevronRight,
  CircleArrowOutUpRight,
  Upload,
} from 'lucide-react';

import type { Track as TrackType } from '@/types/track';
import { Button } from '@/components/ui/Button';
import { Slider } from '@/components/ui/Slider';
import { cn } from '@/lib/utils/cn';

export interface TrackProps {
  track: TrackType;
  zoom: number;
  currentTime: number;
  duration: number;
  isSelected?: boolean;
  onSelect?: () => void;
  onToggleMute: () => void;
  onToggleSolo: () => void;
  onToggleCollapse: () => void;
  onVolumeChange: (volume: number) => void;
  onPanChange: (pan: number) => void;
  onRemove: () => void;
  onNameChange: (name: string) => void;
  onSeek?: (time: number) => void;
  onLoadAudio?: (buffer: AudioBuffer) => void;
}

export function Track({
  track,
  zoom,
  currentTime,
  duration,
  isSelected,
  onSelect,
  onToggleMute,
  onToggleSolo,
  onToggleCollapse,
  onVolumeChange,
  onPanChange,
  onRemove,
  onNameChange,
  onSeek,
  onLoadAudio,
}: TrackProps) {
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const containerRef = React.useRef<HTMLDivElement>(null);
  const fileInputRef = React.useRef<HTMLInputElement>(null);

  const [isEditingName, setIsEditingName] = React.useState(false);
  const [nameInput, setNameInput] = React.useState(String(track.name || 'Untitled Track'));
  const inputRef = React.useRef<HTMLInputElement>(null);

  // Inline track-name editing
  const handleNameClick = () => {
    setIsEditingName(true);
    setNameInput(String(track.name || 'Untitled Track'));
  };

  const handleNameBlur = () => {
    setIsEditingName(false);
    if (nameInput.trim()) {
      onNameChange(nameInput.trim());
    } else {
      setNameInput(String(track.name || 'Untitled Track'));
    }
  };

  const handleNameKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter') {
      inputRef.current?.blur();
    } else if (e.key === 'Escape') {
      setNameInput(String(track.name || 'Untitled Track'));
      setIsEditingName(false);
    }
  };

  // Focus and select the name input when editing starts
  React.useEffect(() => {
    if (isEditingName && inputRef.current) {
      inputRef.current.focus();
      inputRef.current.select();
    }
  }, [isEditingName]);

  // Draw waveform
  React.useEffect(() => {
    if (!track.audioBuffer || !canvasRef.current) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    // Use the parent container's size since the canvas is absolutely positioned
    const parent = canvas.parentElement;
    if (!parent) return;

    const dpr = window.devicePixelRatio || 1;
    const rect = parent.getBoundingClientRect();
    canvas.width = rect.width * dpr;
    canvas.height = rect.height * dpr;
    ctx.scale(dpr, dpr);

    const width = rect.width;
    const height = rect.height;

    // Clear canvas
    ctx.fillStyle = 'rgb(15, 23, 42)';
    ctx.fillRect(0, 0, width, height);

    const buffer = track.audioBuffer;
    const channelData = buffer.getChannelData(0);
    const samplesPerPixel = Math.floor(buffer.length / (width * zoom));

    // Draw waveform: one min/max column per horizontal pixel
    ctx.fillStyle = track.color;
    ctx.strokeStyle = track.color;
    ctx.lineWidth = 1;

    for (let x = 0; x < width; x++) {
      const startSample = Math.floor(x * samplesPerPixel);
      const endSample = Math.floor((x + 1) * samplesPerPixel);

      let min = 1.0;
      let max = -1.0;
      for (let i = startSample; i < endSample && i < channelData.length; i++) {
        const sample = channelData[i];
        if (sample < min) min = sample;
        if (sample > max) max = sample;
      }

      const y1 = (height / 2) * (1 - max);
      const y2 = (height / 2) * (1 - min);

      ctx.beginPath();
      ctx.moveTo(x, y1);
      ctx.lineTo(x, y2);
      ctx.stroke();
    }

    // Draw center line
    ctx.strokeStyle = 'rgba(148, 163, 184, 0.2)';
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(0, height / 2);
    ctx.lineTo(width, height / 2);
    ctx.stroke();

    // Draw playhead
    if (duration > 0) {
      const playheadX = (currentTime / duration) * width;
      ctx.strokeStyle = 'rgba(59, 130, 246, 0.8)';
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(playheadX, 0);
      ctx.lineTo(playheadX, height);
      ctx.stroke();
    }
  }, [track.audioBuffer, track.color, track.collapsed, track.height, zoom, currentTime, duration]);

  // Click-to-seek within the waveform area
  const handleCanvasClick = (e: React.MouseEvent<HTMLCanvasElement>) => {
    if (!onSeek || !duration) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = e.clientX - rect.left;
    const clickTime = (x / rect.width) * duration;
    onSeek(clickTime);
  };

  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file || !onLoadAudio) return;

    try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      onLoadAudio(audioBuffer);

      // Update track name to the filename if it's still the default
      if (track.name === 'New Track' || track.name === 'Untitled Track') {
        const fileName = file.name.replace(/\.[^/.]+$/, '');
        onNameChange(fileName);
      }
    } catch (error) {
      console.error('Failed to load audio file:', error);
    }

    // Reset the input so the same file can be selected again
    e.target.value = '';
  };

  const handleLoadAudioClick = () => {
    fileInputRef.current?.click();
  };

  // Drag-and-drop loading of audio files
  const [isDragging, setIsDragging] = React.useState(false);

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(true);
  };

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);
  };

  const handleDrop = async (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);

    const file = e.dataTransfer.files?.[0];
    if (!file || !onLoadAudio) return;

    // Check if it's an audio file
    if (!file.type.startsWith('audio/')) {
      console.warn('Dropped file is not an audio file');
      return;
    }

    try {
      const arrayBuffer = await file.arrayBuffer();
      const audioContext = new AudioContext();
      const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
      onLoadAudio(audioBuffer);

      // Update track name to the filename if it's still the default
      if (track.name === 'New Track' || track.name === 'Untitled Track') {
        const fileName = file.name.replace(/\.[^/.]+$/, '');
        onNameChange(fileName);
      }
    } catch (error) {
      console.error('Failed to load audio file:', error);
    }
  };

  const trackHeight = track.collapsed ? 48 : track.height;

  return (
    <div
      ref={containerRef}
      className={cn(
        'flex border-b border-border bg-card',
        isSelected && 'ring-1 ring-primary' // assumed selected-state styling
      )}
      style={{ height: trackHeight }}
      onClick={onSelect}
    >
      {/* Left: Track Control Panel (Fixed Width) */}
      <div
        className="flex w-56 shrink-0 flex-col gap-2 border-r border-border p-2"
        onClick={(e) => e.stopPropagation()}
      >
        {/* Track Name & Collapse Toggle */}
        <div className="flex items-center gap-1">
          {/* Button and Slider props below assume the shadcn-style APIs of the local ui components */}
          <Button variant="ghost" size="icon" onClick={onToggleCollapse}>
            {track.collapsed ? (
              <ChevronRight className="h-4 w-4" />
            ) : (
              <ChevronDown className="h-4 w-4" />
            )}
          </Button>
          {isEditingName ? (
            <input
              ref={inputRef}
              value={nameInput}
              onChange={(e) => setNameInput(e.target.value)}
              onBlur={handleNameBlur}
              onKeyDown={handleNameKeyDown}
              className="w-full px-2 py-1 text-sm font-medium bg-background border border-border rounded"
            />
          ) : (
            <span
              className="min-w-0 flex-1 cursor-text truncate px-2 py-1 text-sm font-medium"
              onClick={handleNameClick}
            >
              {String(track.name || 'Untitled Track')}
            </span>
          )}
          {/* Solo Button (track.solo / track.muted are assumed flags on TrackType) */}
          <Button
            variant={track.solo ? 'default' : 'ghost'}
            size="icon"
            onClick={onToggleSolo}
          >
            <Headphones className="h-4 w-4" />
          </Button>

          {/* Mute Button */}
          <Button
            variant={track.muted ? 'default' : 'ghost'}
            size="icon"
            onClick={onToggleMute}
          >
            {track.muted ? <VolumeX className="h-4 w-4" /> : <Volume2 className="h-4 w-4" />}
          </Button>

          {/* Remove Button */}
          <Button variant="ghost" size="icon" onClick={onRemove}>
            <Trash2 className="h-4 w-4" />
          </Button>
        </div>
        {/* Track Controls - Only show when not collapsed */}
        {!track.collapsed && (
          <>
            {/* Volume */}
            <div className="flex items-center gap-2">
              <Slider
                value={[track.volume]}
                min={0}
                max={1}
                step={0.01}
                onValueChange={([value]) => onVolumeChange(value)}
              />
              <span className="w-10 text-right text-xs text-muted-foreground">
                {Math.round(track.volume * 100)}%
              </span>
            </div>

            {/* Pan */}
            <div className="flex items-center gap-2">
              <Slider
                value={[track.pan]}
                min={-1}
                max={1}
                step={0.01}
                onValueChange={([value]) => onPanChange(value)}
              />
              <span className="w-10 text-right text-xs text-muted-foreground">
                {track.pan === 0
                  ? 'C'
                  : track.pan < 0
                    ? `L${Math.abs(Math.round(track.pan * 100))}`
                    : `R${Math.round(track.pan * 100)}`}
              </span>
            </div>
          </>
        )}
      </div>
      {/* Right: Waveform Area (Flexible Width) */}
      <div className="relative flex-1">
        {track.audioBuffer ? (
          <canvas
            ref={canvasRef}
            className="absolute inset-0 h-full w-full cursor-pointer"
            onClick={handleCanvasClick}
          />
        ) : (
          !track.collapsed && (
            <>
              <div
                className={cn(
                  'flex h-full cursor-pointer flex-col items-center justify-center gap-1 text-muted-foreground',
                  isDragging && 'bg-primary/10' // assumed drag-highlight styling
                )}
                onClick={(e) => {
                  e.stopPropagation();
                  handleLoadAudioClick();
                }}
                onDragOver={handleDragOver}
                onDragLeave={handleDragLeave}
                onDrop={handleDrop}
              >
                <Upload className="h-5 w-5" />
                <span className="text-sm">
                  {isDragging ? 'Drop audio file here' : 'Click to load audio file'}
                </span>
                <span className="text-xs">or drag & drop</span>
              </div>

              {/* Hidden file input opened programmatically via handleLoadAudioClick */}
              <input
                ref={fileInputRef}
                type="file"
                accept="audio/*"
                className="hidden"
                onChange={handleFileChange}
              />
            </>
          )
        )}
      </div>
    </div>
  );
}
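
/*
 * Usage sketch (hypothetical): how a parent timeline component might wire up
 * <Track>. The parent-side names here (track.id, zoom, playback, selectedTrackId,
 * updateTrack, removeTrack, seekTo) are assumptions for illustration only and are
 * not defined by this module.
 *
 * <Track
 *   key={track.id}
 *   track={track}
 *   zoom={zoom}
 *   currentTime={playback.currentTime}
 *   duration={playback.duration}
 *   isSelected={selectedTrackId === track.id}
 *   onSelect={() => setSelectedTrackId(track.id)}
 *   onToggleMute={() => updateTrack(track.id, { muted: !track.muted })}
 *   onToggleSolo={() => updateTrack(track.id, { solo: !track.solo })}
 *   onToggleCollapse={() => updateTrack(track.id, { collapsed: !track.collapsed })}
 *   onVolumeChange={(volume) => updateTrack(track.id, { volume })}
 *   onPanChange={(pan) => updateTrack(track.id, { pan })}
 *   onRemove={() => removeTrack(track.id)}
 *   onNameChange={(name) => updateTrack(track.id, { name })}
 *   onSeek={(time) => seekTo(time)}
 *   onLoadAudio={(buffer) => updateTrack(track.id, { audioBuffer: buffer })}
 * />
 */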