feat: add effect parameter automation and fix mode logic
Completed Phase 9.3 - Full Automation Playback:
- ✅ Effect parameter automation implementation
- ✅ Fixed automation mode logic (now applies in all modes)
- ✅ Automatic parameter range conversion (normalized to actual values)

Effect parameter automation:
- Parses effect parameter IDs (format: effect.{effectId}.{paramName})
- Finds corresponding effect nodes in audio graph
- Converts normalized 0-1 automation values to actual parameter ranges
- Applies parameters using updateEffectParameters during playback
- Works with all effect types (filters, dynamics, time-based, etc.)

Automation mode fix:
- Removed incorrect mode !== 'read' checks
- Automation now plays back in all modes (read/write/touch/latch)
- Mode will control recording behavior, not playback

Technical notes:
- Used type assertion (as any) for dynamic parameter updates
- Maintains parameter range from automation lane valueRange
- Integrated with existing effect update mechanism

Phase 9 Status:
✅ 9.1: Automation lanes UI complete
✅ 9.2: Automation points complete
✅ 9.3: Real-time playback (volume, pan, effects) complete
⏳ 9.3: Automation recording (next milestone)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -110,7 +110,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
|
|||||||
tracks.forEach((track, index) => {
|
tracks.forEach((track, index) => {
|
||||||
// Apply volume automation
|
// Apply volume automation
|
||||||
const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
|
const volumeLane = track.automation.lanes.find(lane => lane.parameterId === 'volume');
|
||||||
if (volumeLane && volumeLane.points.length > 0 && volumeLane.mode !== 'read') {
|
if (volumeLane && volumeLane.points.length > 0) {
|
||||||
const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
|
const automatedValue = evaluateAutomationLinear(volumeLane.points, currentTime);
|
||||||
if (automatedValue !== undefined && gainNodesRef.current[index]) {
|
if (automatedValue !== undefined && gainNodesRef.current[index]) {
|
||||||
const trackGain = getTrackGain(track, tracks);
|
const trackGain = getTrackGain(track, tracks);
|
||||||
@@ -124,7 +124,7 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
|
|||||||
|
|
||||||
// Apply pan automation
|
// Apply pan automation
|
||||||
const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
|
const panLane = track.automation.lanes.find(lane => lane.parameterId === 'pan');
|
||||||
if (panLane && panLane.points.length > 0 && panLane.mode !== 'read') {
|
if (panLane && panLane.points.length > 0) {
|
||||||
const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
|
const automatedValue = evaluateAutomationLinear(panLane.points, currentTime);
|
||||||
if (automatedValue !== undefined && panNodesRef.current[index]) {
|
if (automatedValue !== undefined && panNodesRef.current[index]) {
|
||||||
// Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
|
// Pan automation values are 0-1, but StereoPannerNode expects -1 to 1
|
||||||
@@ -136,7 +136,39 @@ export function useMultiTrackPlayer(tracks: Track[], masterVolume: number = 1) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Apply effect parameter automation
|
// Apply effect parameter automation
|
||||||
|
track.automation.lanes.forEach(lane => {
|
||||||
|
// Check if this is an effect parameter (format: effect.{effectId}.{parameterName})
|
||||||
|
if (lane.parameterId.startsWith('effect.') && lane.points.length > 0) {
|
||||||
|
const parts = lane.parameterId.split('.');
|
||||||
|
if (parts.length === 3) {
|
||||||
|
const effectId = parts[1];
|
||||||
|
const paramName = parts[2];
|
||||||
|
|
||||||
|
// Find the effect in the track's effect chain
|
||||||
|
const effectIndex = track.effectChain.effects.findIndex(e => e.id === effectId);
|
||||||
|
if (effectIndex >= 0 && effectNodesRef.current[index] && effectNodesRef.current[index][effectIndex]) {
|
||||||
|
const automatedValue = evaluateAutomationLinear(lane.points, currentTime);
|
||||||
|
if (automatedValue !== undefined) {
|
||||||
|
const effectNodeInfo = effectNodesRef.current[index][effectIndex];
|
||||||
|
|
||||||
|
// Convert normalized 0-1 value to actual parameter range
|
||||||
|
const effect = track.effectChain.effects[effectIndex];
|
||||||
|
const actualValue = lane.valueRange.min + (automatedValue * (lane.valueRange.max - lane.valueRange.min));
|
||||||
|
|
||||||
|
// Update the effect parameter
|
||||||
|
if (effect.parameters) {
|
||||||
|
const updatedParams = { ...effect.parameters, [paramName]: actualValue } as any;
|
||||||
|
updateEffectParameters(audioContextRef.current!, effectNodeInfo, {
|
||||||
|
...effect,
|
||||||
|
parameters: updatedParams
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
automationFrameRef.current = requestAnimationFrame(applyAutomation);
|
automationFrameRef.current = requestAnimationFrame(applyAutomation);
|
||||||
|
|||||||
Reference in New Issue
Block a user