From 988bbfd1a99506dbc33bb91c47c5802f185991a9 Mon Sep 17 00:00:00 2001 From: Skyler Lehmkuhl Date: Sun, 2 Nov 2025 01:27:22 -0500 Subject: [PATCH] Add automation and drag nodes into connections in the graph --- daw-backend/src/audio/engine.rs | 187 +++++ daw-backend/src/audio/node_graph/graph.rs | 24 +- .../src/audio/node_graph/node_trait.rs | 6 + .../src/audio/node_graph/nodes/add_as_any.sh | 46 ++ .../src/audio/node_graph/nodes/adsr.rs | 8 + .../src/audio/node_graph/nodes/audio_to_cv.rs | 8 + .../node_graph/nodes/automation_input.rs | 288 +++++++ .../src/audio/node_graph/nodes/bit_crusher.rs | 8 + .../src/audio/node_graph/nodes/chorus.rs | 8 + .../src/audio/node_graph/nodes/compressor.rs | 8 + .../src/audio/node_graph/nodes/constant.rs | 8 + .../src/audio/node_graph/nodes/delay.rs | 8 + .../src/audio/node_graph/nodes/distortion.rs | 8 + .../node_graph/nodes/envelope_follower.rs | 8 + daw-backend/src/audio/node_graph/nodes/eq.rs | 8 + .../src/audio/node_graph/nodes/filter.rs | 8 + .../src/audio/node_graph/nodes/flanger.rs | 8 + .../src/audio/node_graph/nodes/fm_synth.rs | 8 + .../src/audio/node_graph/nodes/gain.rs | 8 + daw-backend/src/audio/node_graph/nodes/lfo.rs | 8 + .../src/audio/node_graph/nodes/limiter.rs | 8 + .../src/audio/node_graph/nodes/math.rs | 8 + .../src/audio/node_graph/nodes/midi_input.rs | 8 + .../src/audio/node_graph/nodes/midi_to_cv.rs | 8 + .../src/audio/node_graph/nodes/mixer.rs | 8 + daw-backend/src/audio/node_graph/nodes/mod.rs | 2 + .../audio/node_graph/nodes/multi_sampler.rs | 8 + .../src/audio/node_graph/nodes/noise.rs | 8 + .../src/audio/node_graph/nodes/oscillator.rs | 8 + .../audio/node_graph/nodes/oscilloscope.rs | 8 + .../src/audio/node_graph/nodes/output.rs | 8 + daw-backend/src/audio/node_graph/nodes/pan.rs | 8 + .../src/audio/node_graph/nodes/phaser.rs | 8 + .../src/audio/node_graph/nodes/quantizer.rs | 8 + .../src/audio/node_graph/nodes/reverb.rs | 8 + .../audio/node_graph/nodes/ring_modulator.rs | 8 + .../src/audio/node_graph/nodes/sample_hold.rs | 8 + .../audio/node_graph/nodes/simple_sampler.rs | 8 + .../audio/node_graph/nodes/slew_limiter.rs | 8 + .../src/audio/node_graph/nodes/splitter.rs | 8 + .../src/audio/node_graph/nodes/template_io.rs | 16 + .../src/audio/node_graph/nodes/vocoder.rs | 8 + .../audio/node_graph/nodes/voice_allocator.rs | 11 +- .../node_graph/nodes/wavetable_oscillator.rs | 8 + daw-backend/src/audio/track.rs | 6 +- daw-backend/src/command/types.rs | 26 + daw-backend/src/lib.rs | 1 + src-tauri/src/audio.rs | 99 +++ src-tauri/src/lib.rs | 5 + src/actions/index.js | 133 ++++ src/assets/focus-animation.svg | 49 +- src/assets/focus-music.svg | 99 ++- src/assets/focus-video.svg | 141 +++- src/main.js | 733 +++++++++++++++++- src/models/animation.js | 91 +++ src/nodeTypes.js | 25 + src/startscreen.js | 17 +- src/state.js | 2 + src/styles.css | 12 + src/timeline.js | 201 ++++- src/utils.js | 45 +- src/widgets.js | 151 +++- 62 files changed, 2622 insertions(+), 82 deletions(-) create mode 100755 daw-backend/src/audio/node_graph/nodes/add_as_any.sh create mode 100644 daw-backend/src/audio/node_graph/nodes/automation_input.rs diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs index 6e389be..ef20869 100644 --- a/daw-backend/src/audio/engine.rs +++ b/daw-backend/src/audio/engine.rs @@ -729,6 +729,7 @@ impl Engine { "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), "AudioToCV" => 
Box::new(AudioToCVNode::new("Audio→CV".to_string())), + "AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())), @@ -803,6 +804,7 @@ impl Engine { "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())), + "AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())), @@ -1117,6 +1119,77 @@ impl Engine { } } } + + Command::AutomationAddKeyframe(track_id, node_id, time, value, interpolation_str, ease_out, ease_in) => { + use crate::audio::node_graph::nodes::{AutomationInputNode, AutomationKeyframe, InterpolationType}; + + // Parse interpolation type + let interpolation = match interpolation_str.to_lowercase().as_str() { + "linear" => InterpolationType::Linear, + "bezier" => InterpolationType::Bezier, + "step" => InterpolationType::Step, + "hold" => InterpolationType::Hold, + _ => { + eprintln!("Unknown interpolation type: {}, defaulting to Linear", interpolation_str); + InterpolationType::Linear + } + }; + + if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + let graph = &mut track.instrument_graph; + let node_idx = NodeIndex::new(node_id as usize); + + if let Some(graph_node) = graph.get_graph_node_mut(node_idx) { + // Downcast to AutomationInputNode using as_any_mut + if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() { + let keyframe = AutomationKeyframe { + time, + value, + interpolation, + ease_out, + ease_in, + }; + auto_node.add_keyframe(keyframe); + } else { + eprintln!("Node {} is not an AutomationInputNode", node_id); + } + } + } + } + + Command::AutomationRemoveKeyframe(track_id, node_id, time) => { + use crate::audio::node_graph::nodes::AutomationInputNode; + + if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + let graph = &mut track.instrument_graph; + let node_idx = NodeIndex::new(node_id as usize); + + if let Some(graph_node) = graph.get_graph_node_mut(node_idx) { + if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() { + auto_node.remove_keyframe_at_time(time, 0.001); // 1ms tolerance + } else { + eprintln!("Node {} is not an AutomationInputNode", node_id); + } + } + } + } + + Command::AutomationSetName(track_id, node_id, name) => { + use crate::audio::node_graph::nodes::AutomationInputNode; + + if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + let graph = &mut track.instrument_graph; + let node_idx = NodeIndex::new(node_id as usize); + + if let Some(graph_node) = graph.get_graph_node_mut(node_idx) { + if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() { + auto_node.set_display_name(name); + } else { + eprintln!("Node {} is not an AutomationInputNode", node_id); + } + } + } + } } } @@ -1185,6 +1258,71 @@ impl Engine { QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id))) } } + +
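// The query arms below mirror the command arms above: resolve the MIDI + // track, look up the node by its graph index, then downcast via as_any() + // to reach the AutomationInputNode-specific accessors. +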
Query::GetAutomationKeyframes(track_id, node_id) => { + use crate::audio::node_graph::nodes::{AutomationInputNode, InterpolationType}; + use crate::command::types::AutomationKeyframeData; + + if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) { + let graph = &track.instrument_graph; + let node_idx = NodeIndex::new(node_id as usize); + + if let Some(graph_node) = graph.get_graph_node(node_idx) { + // Downcast to AutomationInputNode + if let Some(auto_node) = graph_node.node.as_any().downcast_ref::<AutomationInputNode>() { + let keyframes: Vec<AutomationKeyframeData> = auto_node.keyframes() + .iter() + .map(|kf| { + let interpolation_str = match kf.interpolation { + InterpolationType::Linear => "linear", + InterpolationType::Bezier => "bezier", + InterpolationType::Step => "step", + InterpolationType::Hold => "hold", + }.to_string(); + + AutomationKeyframeData { + time: kf.time, + value: kf.value, + interpolation: interpolation_str, + ease_out: kf.ease_out, + ease_in: kf.ease_in, + } + }) + .collect(); + + QueryResponse::AutomationKeyframes(Ok(keyframes)) + } else { + QueryResponse::AutomationKeyframes(Err(format!("Node {} is not an AutomationInputNode", node_id))) + } + } else { + QueryResponse::AutomationKeyframes(Err(format!("Node {} not found in track {}", node_id, track_id))) + } + } else { + QueryResponse::AutomationKeyframes(Err(format!("Track {} not found or is not a MIDI track", track_id))) + } + } + + Query::GetAutomationName(track_id, node_id) => { + use crate::audio::node_graph::nodes::AutomationInputNode; + + if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) { + let graph = &track.instrument_graph; + let node_idx = NodeIndex::new(node_id as usize); + + if let Some(graph_node) = graph.get_graph_node(node_idx) { + // Downcast to AutomationInputNode + if let Some(auto_node) = graph_node.node.as_any().downcast_ref::<AutomationInputNode>() { + QueryResponse::AutomationName(Ok(auto_node.display_name().to_string())) + } else { + QueryResponse::AutomationName(Err(format!("Node {} is not an AutomationInputNode", node_id))) + } + } else { + QueryResponse::AutomationName(Err(format!("Node {} not found in track {}", node_id, track_id))) + } + } else { + QueryResponse::AutomationName(Err(format!("Track {} not found or is not a MIDI track", track_id))) + } + } }; // Send response back @@ -1503,6 +1641,11 @@ impl EngineController { let _ = self.command_tx.push(Command::MoveClip(track_id, clip_id, new_start_time)); } + /// Send a generic command to the audio thread + pub fn send_command(&mut self, command: Command) { + let _ = self.command_tx.push(command); + } + /// Get current playhead position in samples pub fn get_playhead_samples(&self) -> u64 { self.playhead.load(Ordering::Relaxed) } @@ -1871,4 +2014,48 @@ impl EngineController { Err("Query timeout".to_string()) } + + /// Query automation keyframes from an AutomationInput node + pub fn query_automation_keyframes(&mut self, track_id: TrackId, node_id: u32) -> Result<Vec<AutomationKeyframeData>, String> { + // Send query + if let Err(_) = self.query_tx.push(Query::GetAutomationKeyframes(track_id, node_id)) { + return Err("Failed to send query - queue full".to_string()); + } + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = std::time::Duration::from_millis(100); + + while start.elapsed() < timeout { + if let Ok(QueryResponse::AutomationKeyframes(result)) = self.query_response_rx.pop() { + return result; + } + // Small sleep to avoid busy-waiting + std::thread::sleep(std::time::Duration::from_micros(50)); + } + + Err("Query timeout".to_string()) }
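+ + // NOTE: these polling helpers pop and drop any response that is not the + // expected variant, so queries are assumed to be issued one at a time.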
+ + /// Query automation node display name + pub fn query_automation_name(&mut self, track_id: TrackId, node_id: u32) -> Result<String, String> { + // Send query + if let Err(_) = self.query_tx.push(Query::GetAutomationName(track_id, node_id)) { + return Err("Failed to send query - queue full".to_string()); + } + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = std::time::Duration::from_millis(100); + + while start.elapsed() < timeout { + if let Ok(QueryResponse::AutomationName(result)) = self.query_response_rx.pop() { + return result; + } + // Small sleep to avoid busy-waiting + std::thread::sleep(std::time::Duration::from_micros(50)); + } + + Err("Query timeout".to_string()) + } } diff --git a/daw-backend/src/audio/node_graph/graph.rs b/daw-backend/src/audio/node_graph/graph.rs index 7bff3b5..5178d7c 100644 --- a/daw-backend/src/audio/node_graph/graph.rs +++ b/daw-backend/src/audio/node_graph/graph.rs @@ -81,6 +81,9 @@ pub struct InstrumentGraph { /// UI positions for nodes (node_index -> (x, y)) node_positions: std::collections::HashMap<NodeIndex, (f32, f32)>, + + /// Current playback time (for automation nodes) + playback_time: f64, } impl InstrumentGraph { @@ -98,6 +101,7 @@ // Pre-allocate MIDI input buffers (max 128 events per port) midi_input_buffers: (0..16).map(|_| Vec::with_capacity(128)).collect(), node_positions: std::collections::HashMap::new(), + playback_time: 0.0, } } @@ -319,7 +323,19 @@ } /// Process the graph and produce audio output - pub fn process(&mut self, output_buffer: &mut [f32], midi_events: &[MidiEvent]) { + pub fn process(&mut self, output_buffer: &mut [f32], midi_events: &[MidiEvent], playback_time: f64) { + // Update playback time + self.playback_time = playback_time; + + // Update playback time for all automation nodes before processing + use super::nodes::AutomationInputNode; + for node in self.graph.node_weights_mut() { + // Try to downcast to AutomationInputNode and update its playback time + if let Some(auto_node) = node.node.as_any_mut().downcast_mut::<AutomationInputNode>() { + auto_node.set_playback_time(playback_time); + } + } + // Use the requested output buffer size for processing let process_size = output_buffer.len(); @@ -504,6 +520,11 @@ self.get_node(idx).and_then(|node| node.get_oscilloscope_cv_data(sample_count)) } + /// Get node by index (read-only) + pub fn get_graph_node(&self, idx: NodeIndex) -> Option<&GraphNode> { + self.graph.node_weight(idx) + } + /// Get node mutably by index /// Note: Due to lifetime constraints with trait objects, this returns a mutable reference /// to the GraphNode, from which you can access the node @@ -816,6 +837,7 @@ "MidiInput" => Box::new(MidiInputNode::new("MIDI Input")), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")), "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV")), + "AutomationInput" => Box::new(AutomationInputNode::new("Automation")), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope")), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input")), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output")), diff --git a/daw-backend/src/audio/node_graph/node_trait.rs b/daw-backend/src/audio/node_graph/node_trait.rs index ec4a3c7..f11ec1a 100644 --- a/daw-backend/src/audio/node_graph/node_trait.rs +++ b/daw-backend/src/audio/node_graph/node_trait.rs @@ -70,4 +70,10 @@ pub trait AudioNode: Send { fn get_oscilloscope_cv_data(&self, _sample_count: usize) -> Option<Vec<f32>> { None } +
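// Unlike the optional hooks above, as_any()/as_any_mut() have no default + // body, so every node must implement them; the add_as_any.sh helper script + // below (added in this commit) appends the boilerplate impls to each file. +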
/// Downcast to `&mut dyn Any` for type-specific operations + fn as_any_mut(&mut self) -> &mut dyn std::any::Any; + + /// Downcast to `&dyn Any` for type-specific read-only operations + fn as_any(&self) -> &dyn std::any::Any; } diff --git a/daw-backend/src/audio/node_graph/nodes/add_as_any.sh b/daw-backend/src/audio/node_graph/nodes/add_as_any.sh new file mode 100755 index 0000000..039eb6b --- /dev/null +++ b/daw-backend/src/audio/node_graph/nodes/add_as_any.sh @@ -0,0 +1,46 @@ +#!/bin/bash +for file in *.rs; do + if [ "$file" = "mod.rs" ]; then + continue + fi + + echo "Processing $file" + + # Create a backup + cp "$file" "$file.bak" + + # Add as_any() method right after as_any_mut() + awk ' + { + lines[NR] = $0 + if (/fn as_any_mut\(&mut self\)/) { + # Found as_any_mut, look for its closing brace + found_method = NR + } + if (found_method > 0 && /^ }$/ && !inserted) { + closing_brace = NR + inserted = 1 + } + } + END { + for (i = 1; i <= NR; i++) { + print lines[i] + if (i == closing_brace) { + print "" + print " fn as_any(&self) -> &dyn std::any::Any {" + print " self" + print " }" + } + } + } + ' "$file.bak" > "$file" + + # Verify the change was made + if grep -q "fn as_any(&self)" "$file"; then + echo " ✓ Successfully added as_any() to $file" + rm "$file.bak" + else + echo " ✗ Failed to add as_any() to $file - restoring backup" + mv "$file.bak" "$file" + fi +done diff --git a/daw-backend/src/audio/node_graph/nodes/adsr.rs b/daw-backend/src/audio/node_graph/nodes/adsr.rs index 4f62f38..c65de34 100644 --- a/daw-backend/src/audio/node_graph/nodes/adsr.rs +++ b/daw-backend/src/audio/node_graph/nodes/adsr.rs @@ -212,4 +212,12 @@ impl AudioNode for ADSRNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/audio_to_cv.rs b/daw-backend/src/audio/node_graph/nodes/audio_to_cv.rs index 3c89495..c59c956 100644 --- a/daw-backend/src/audio/node_graph/nodes/audio_to_cv.rs +++ b/daw-backend/src/audio/node_graph/nodes/audio_to_cv.rs @@ -148,4 +148,12 @@ impl AudioNode for AudioToCVNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/automation_input.rs b/daw-backend/src/audio/node_graph/nodes/automation_input.rs new file mode 100644 index 0000000..0d1ca39 --- /dev/null +++ b/daw-backend/src/audio/node_graph/nodes/automation_input.rs @@ -0,0 +1,288 @@ +use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType}; +use crate::audio::midi::MidiEvent; +use serde::{Deserialize, Serialize}; +use std::sync::{Arc, RwLock}; + +/// Interpolation type for automation curves +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum InterpolationType { + Linear, + Bezier, + Step, + Hold, +} + +/// A single keyframe in an automation curve +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AutomationKeyframe { + /// Time in seconds (absolute project time) + pub time: f64, + /// CV output value + pub value: f32, + /// Interpolation type to next keyframe + pub interpolation: InterpolationType, + /// Bezier ease-out control point (for bezier interpolation) + pub ease_out: (f32, f32), + /// Bezier ease-in control point (for bezier interpolation) + pub ease_in: (f32, f32), +} + +impl 
AutomationKeyframe { + pub fn new(time: f64, value: f32) -> Self { + Self { + time, + value, + interpolation: InterpolationType::Linear, + ease_out: (0.58, 1.0), + ease_in: (0.42, 0.0), + } + } +} + +/// Automation Input Node - outputs CV signal controlled by timeline curves +pub struct AutomationInputNode { + name: String, + display_name: String, // User-editable name shown in UI + keyframes: Vec<AutomationKeyframe>, + outputs: Vec<NodePort>, + parameters: Vec<Parameter>, + /// Shared playback time (set by the graph before processing) + playback_time: Arc<RwLock<f64>>, +} + +impl AutomationInputNode { + pub fn new(name: impl Into<String>) -> Self { + let name = name.into(); + + let outputs = vec![ + NodePort::new("CV Out", SignalType::CV, 0), + ]; + + Self { + name: name.clone(), + display_name: "Automation".to_string(), + keyframes: Vec::new(), + outputs, + parameters: Vec::new(), + playback_time: Arc::new(RwLock::new(0.0)), + } + } + + /// Set the playback time (called by graph before processing) + pub fn set_playback_time(&mut self, time: f64) { + if let Ok(mut playback) = self.playback_time.write() { + *playback = time; + } + } + + /// Get the display name (shown in UI) + pub fn display_name(&self) -> &str { + &self.display_name + } + + /// Set the display name + pub fn set_display_name(&mut self, name: String) { + self.display_name = name; + } + + /// Add a keyframe to the curve (maintains sorted order by time) + pub fn add_keyframe(&mut self, keyframe: AutomationKeyframe) { + // Find insertion position to maintain sorted order + let pos = self.keyframes.binary_search_by(|kf| { + kf.time.partial_cmp(&keyframe.time).unwrap_or(std::cmp::Ordering::Equal) + }); + + match pos { + Ok(idx) => { + // Replace existing keyframe at same time + self.keyframes[idx] = keyframe; + } + Err(idx) => { + // Insert at correct position + self.keyframes.insert(idx, keyframe); + } + } + } + + /// Remove keyframe at specific time (with tolerance) + pub fn remove_keyframe_at_time(&mut self, time: f64, tolerance: f64) -> bool { + if let Some(idx) = self.keyframes.iter().position(|kf| (kf.time - time).abs() < tolerance) { + self.keyframes.remove(idx); + true + } else { + false + } + } + + /// Update an existing keyframe + pub fn update_keyframe(&mut self, keyframe: AutomationKeyframe) { + // Remove old keyframe at this time, then add new one + self.remove_keyframe_at_time(keyframe.time, 0.001); + self.add_keyframe(keyframe); + } + + /// Get all keyframes + pub fn keyframes(&self) -> &[AutomationKeyframe] { + &self.keyframes + } + + /// Clear all keyframes + pub fn clear_keyframes(&mut self) { + self.keyframes.clear(); + } + + /// Evaluate curve at a specific time + fn evaluate_at_time(&self, time: f64) -> f32 { + if self.keyframes.is_empty() { + return 0.0; + } + + // Before first keyframe + if time <= self.keyframes[0].time { + return self.keyframes[0].value; + } + + // After last keyframe + let last_idx = self.keyframes.len() - 1; + if time >= self.keyframes[last_idx].time { + return self.keyframes[last_idx].value; + } + + // Find bracketing keyframes + for i in 0..self.keyframes.len() - 1 { + let kf1 = &self.keyframes[i]; + let kf2 = &self.keyframes[i + 1]; + + if time >= kf1.time && time <= kf2.time { + return self.interpolate(kf1, kf2, time); + } + } + + 0.0 + } + + /// Interpolate between two keyframes + fn interpolate(&self, kf1: &AutomationKeyframe, kf2: &AutomationKeyframe, time: f64) -> f32 { + // Calculate normalized position between keyframes (0.0 to 1.0) + let t = if kf2.time == kf1.time { + 0.0 + } else { + ((time - kf1.time) / (kf2.time - kf1.time)) as
f32 + }; + + match kf1.interpolation { + InterpolationType::Linear => { + // Simple linear interpolation + kf1.value + (kf2.value - kf1.value) * t + } + InterpolationType::Bezier => { + // Cubic bezier interpolation using control points + let eased_t = self.cubic_bezier_ease(t, kf1.ease_out, kf2.ease_in); + kf1.value + (kf2.value - kf1.value) * eased_t + } + InterpolationType::Step | InterpolationType::Hold => { + // Hold value until next keyframe + kf1.value + } + } + } + + /// Cubic bezier easing function + fn cubic_bezier_ease(&self, t: f32, ease_out: (f32, f32), ease_in: (f32, f32)) -> f32 { + // Simplified cubic bezier for 0,0 -> easeOut -> easeIn -> 1,1 + let u = 1.0 - t; + 3.0 * u * u * t * ease_out.1 + + 3.0 * u * t * t * ease_in.1 + + t * t * t + } +} + +impl AudioNode for AutomationInputNode { + fn category(&self) -> NodeCategory { + NodeCategory::Input + } + + fn inputs(&self) -> &[NodePort] { + &[] // No inputs + } + + fn outputs(&self) -> &[NodePort] { + &self.outputs + } + + fn parameters(&self) -> &[Parameter] { + &self.parameters + } + + fn set_parameter(&mut self, _id: u32, _value: f32) { + // No parameters + } + + fn get_parameter(&self, _id: u32) -> f32 { + 0.0 + } + + fn process( + &mut self, + _inputs: &[&[f32]], + outputs: &mut [&mut [f32]], + _midi_inputs: &[&[MidiEvent]], + _midi_outputs: &mut [&mut Vec<MidiEvent>], + sample_rate: u32, + ) { + if outputs.is_empty() { + return; + } + + let output = &mut outputs[0]; + let length = output.len(); + + // Get the starting playback time + let playhead = if let Ok(playback) = self.playback_time.read() { + *playback + } else { + 0.0 + }; + + // Calculate time per sample + let sample_duration = 1.0 / sample_rate as f64; + + // Evaluate curve for each sample + for i in 0..length { + let time = playhead + (i as f64 * sample_duration); + output[i] = self.evaluate_at_time(time); + } + } + + fn reset(&mut self) { + // No state to reset + } + + fn node_type(&self) -> &str { + "AutomationInput" + } + + fn name(&self) -> &str { + &self.name + } + + fn clone_node(&self) -> Box<dyn AudioNode> { + Box::new(Self { + name: self.name.clone(), + display_name: self.display_name.clone(), + keyframes: self.keyframes.clone(), + outputs: self.outputs.clone(), + parameters: self.parameters.clone(), + playback_time: Arc::new(RwLock::new(0.0)), + }) + } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } +} diff --git a/daw-backend/src/audio/node_graph/nodes/bit_crusher.rs b/daw-backend/src/audio/node_graph/nodes/bit_crusher.rs index 7452d86..c105925 100644 --- a/daw-backend/src/audio/node_graph/nodes/bit_crusher.rs +++ b/daw-backend/src/audio/node_graph/nodes/bit_crusher.rs @@ -184,4 +184,12 @@ impl AudioNode for BitCrusherNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/chorus.rs b/daw-backend/src/audio/node_graph/nodes/chorus.rs index f86f915..8ae7859 100644 --- a/daw-backend/src/audio/node_graph/nodes/chorus.rs +++ b/daw-backend/src/audio/node_graph/nodes/chorus.rs @@ -231,4 +231,12 @@ impl AudioNode for ChorusNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/compressor.rs b/daw-backend/src/audio/node_graph/nodes/compressor.rs index 495a4bf..742380e
100644 --- a/daw-backend/src/audio/node_graph/nodes/compressor.rs +++ b/daw-backend/src/audio/node_graph/nodes/compressor.rs @@ -250,4 +250,12 @@ impl AudioNode for CompressorNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/constant.rs b/daw-backend/src/audio/node_graph/nodes/constant.rs index 5e96937..4a8ee1c 100644 --- a/daw-backend/src/audio/node_graph/nodes/constant.rs +++ b/daw-backend/src/audio/node_graph/nodes/constant.rs @@ -110,4 +110,12 @@ impl AudioNode for ConstantNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/delay.rs b/daw-backend/src/audio/node_graph/nodes/delay.rs index 0787d82..0960618 100644 --- a/daw-backend/src/audio/node_graph/nodes/delay.rs +++ b/daw-backend/src/audio/node_graph/nodes/delay.rs @@ -208,4 +208,12 @@ impl AudioNode for DelayNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/distortion.rs b/daw-backend/src/audio/node_graph/nodes/distortion.rs index 41fb25f..424b106 100644 --- a/daw-backend/src/audio/node_graph/nodes/distortion.rs +++ b/daw-backend/src/audio/node_graph/nodes/distortion.rs @@ -254,4 +254,12 @@ impl AudioNode for DistortionNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/envelope_follower.rs b/daw-backend/src/audio/node_graph/nodes/envelope_follower.rs index 693eb1c..ebe6436 100644 --- a/daw-backend/src/audio/node_graph/nodes/envelope_follower.rs +++ b/daw-backend/src/audio/node_graph/nodes/envelope_follower.rs @@ -155,4 +155,12 @@ impl AudioNode for EnvelopeFollowerNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/eq.rs b/daw-backend/src/audio/node_graph/nodes/eq.rs index 5fde158..f711e89 100644 --- a/daw-backend/src/audio/node_graph/nodes/eq.rs +++ b/daw-backend/src/audio/node_graph/nodes/eq.rs @@ -256,4 +256,12 @@ impl AudioNode for EQNode { node.update_filters(); Box::new(node) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/filter.rs b/daw-backend/src/audio/node_graph/nodes/filter.rs index 6487d06..16e4cc0 100644 --- a/daw-backend/src/audio/node_graph/nodes/filter.rs +++ b/daw-backend/src/audio/node_graph/nodes/filter.rs @@ -198,4 +198,12 @@ impl AudioNode for FilterNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/flanger.rs b/daw-backend/src/audio/node_graph/nodes/flanger.rs index 26c2dde..883c385 100644 --- a/daw-backend/src/audio/node_graph/nodes/flanger.rs +++ b/daw-backend/src/audio/node_graph/nodes/flanger.rs @@ -240,4 +240,12 @@ impl AudioNode for 
FlangerNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/fm_synth.rs b/daw-backend/src/audio/node_graph/nodes/fm_synth.rs index 6940e16..7efda85 100644 --- a/daw-backend/src/audio/node_graph/nodes/fm_synth.rs +++ b/daw-backend/src/audio/node_graph/nodes/fm_synth.rs @@ -300,4 +300,12 @@ impl AudioNode for FMSynthNode { fn clone_node(&self) -> Box<dyn AudioNode> { Box::new(Self::new(self.name.clone())) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/gain.rs b/daw-backend/src/audio/node_graph/nodes/gain.rs index 02cbb62..f4c10bc 100644 --- a/daw-backend/src/audio/node_graph/nodes/gain.rs +++ b/daw-backend/src/audio/node_graph/nodes/gain.rs @@ -127,4 +127,12 @@ impl AudioNode for GainNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/lfo.rs b/daw-backend/src/audio/node_graph/nodes/lfo.rs index e6e2dab..7aecfc9 100644 --- a/daw-backend/src/audio/node_graph/nodes/lfo.rs +++ b/daw-backend/src/audio/node_graph/nodes/lfo.rs @@ -219,4 +219,12 @@ impl AudioNode for LFONode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/limiter.rs b/daw-backend/src/audio/node_graph/nodes/limiter.rs index 9dd71dd..b9f6388 100644 --- a/daw-backend/src/audio/node_graph/nodes/limiter.rs +++ b/daw-backend/src/audio/node_graph/nodes/limiter.rs @@ -212,4 +212,12 @@ impl AudioNode for LimiterNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/math.rs b/daw-backend/src/audio/node_graph/nodes/math.rs index 4f3c52f..fbdf993 100644 --- a/daw-backend/src/audio/node_graph/nodes/math.rs +++ b/daw-backend/src/audio/node_graph/nodes/math.rs @@ -161,4 +161,12 @@ impl AudioNode for MathNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/midi_input.rs b/daw-backend/src/audio/node_graph/nodes/midi_input.rs index ee67d2f..20cf01a 100644 --- a/daw-backend/src/audio/node_graph/nodes/midi_input.rs +++ b/daw-backend/src/audio/node_graph/nodes/midi_input.rs @@ -102,4 +102,12 @@ impl AudioNode for MidiInputNode { fn handle_midi(&mut self, event: &MidiEvent) { self.pending_events.push(*event); } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/midi_to_cv.rs b/daw-backend/src/audio/node_graph/nodes/midi_to_cv.rs index f2e26cc..77f4d41 100644 --- a/daw-backend/src/audio/node_graph/nodes/midi_to_cv.rs +++ b/daw-backend/src/audio/node_graph/nodes/midi_to_cv.rs @@ -183,4 +183,12 @@ impl AudioNode for MidiToCVNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) ->
&dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/mixer.rs b/daw-backend/src/audio/node_graph/nodes/mixer.rs index 89cc9d2..76e7517 100644 --- a/daw-backend/src/audio/node_graph/nodes/mixer.rs +++ b/daw-backend/src/audio/node_graph/nodes/mixer.rs @@ -142,4 +142,12 @@ impl AudioNode for MixerNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/mod.rs b/daw-backend/src/audio/node_graph/nodes/mod.rs index 252ab77..9445e8c 100644 --- a/daw-backend/src/audio/node_graph/nodes/mod.rs +++ b/daw-backend/src/audio/node_graph/nodes/mod.rs @@ -1,5 +1,6 @@ mod adsr; mod audio_to_cv; +mod automation_input; mod bit_crusher; mod chorus; mod compressor; @@ -39,6 +40,7 @@ mod wavetable_oscillator; pub use adsr::ADSRNode; pub use audio_to_cv::AudioToCVNode; +pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType}; pub use bit_crusher::BitCrusherNode; pub use chorus::ChorusNode; pub use compressor::CompressorNode; diff --git a/daw-backend/src/audio/node_graph/nodes/multi_sampler.rs b/daw-backend/src/audio/node_graph/nodes/multi_sampler.rs index 88b51e5..3f4e4c7 100644 --- a/daw-backend/src/audio/node_graph/nodes/multi_sampler.rs +++ b/daw-backend/src/audio/node_graph/nodes/multi_sampler.rs @@ -508,4 +508,12 @@ impl AudioNode for MultiSamplerNode { fn clone_node(&self) -> Box<dyn AudioNode> { Box::new(Self::new(self.name.clone())) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/noise.rs b/daw-backend/src/audio/node_graph/nodes/noise.rs index e500319..cc1bb39 100644 --- a/daw-backend/src/audio/node_graph/nodes/noise.rs +++ b/daw-backend/src/audio/node_graph/nodes/noise.rs @@ -194,4 +194,12 @@ impl AudioNode for NoiseGeneratorNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/oscillator.rs b/daw-backend/src/audio/node_graph/nodes/oscillator.rs index 3d88a11..cfbb376 100644 --- a/daw-backend/src/audio/node_graph/nodes/oscillator.rs +++ b/daw-backend/src/audio/node_graph/nodes/oscillator.rs @@ -194,4 +194,12 @@ impl AudioNode for OscillatorNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/oscilloscope.rs b/daw-backend/src/audio/node_graph/nodes/oscilloscope.rs index 64488cc..3c8e80e 100644 --- a/daw-backend/src/audio/node_graph/nodes/oscilloscope.rs +++ b/daw-backend/src/audio/node_graph/nodes/oscilloscope.rs @@ -299,4 +299,12 @@ impl AudioNode for OscilloscopeNode { fn get_oscilloscope_cv_data(&self, sample_count: usize) -> Option<Vec<f32>> { Some(self.read_cv_samples(sample_count)) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/output.rs b/daw-backend/src/audio/node_graph/nodes/output.rs index 3481c20..8286c79 100644 --- a/daw-backend/src/audio/node_graph/nodes/output.rs +++ b/daw-backend/src/audio/node_graph/nodes/output.rs @@ -93,4 +93,12 @@ impl AudioNode for AudioOutputNode { outputs:
self.outputs.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/pan.rs b/daw-backend/src/audio/node_graph/nodes/pan.rs index c2db78b..b91d2d7 100644 --- a/daw-backend/src/audio/node_graph/nodes/pan.rs +++ b/daw-backend/src/audio/node_graph/nodes/pan.rs @@ -165,4 +165,12 @@ impl AudioNode for PanNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/phaser.rs b/daw-backend/src/audio/node_graph/nodes/phaser.rs index d0723df..42ec906 100644 --- a/daw-backend/src/audio/node_graph/nodes/phaser.rs +++ b/daw-backend/src/audio/node_graph/nodes/phaser.rs @@ -286,4 +286,12 @@ impl AudioNode for PhaserNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/quantizer.rs b/daw-backend/src/audio/node_graph/nodes/quantizer.rs index 3bf2331..6ff3063 100644 --- a/daw-backend/src/audio/node_graph/nodes/quantizer.rs +++ b/daw-backend/src/audio/node_graph/nodes/quantizer.rs @@ -221,4 +221,12 @@ impl AudioNode for QuantizerNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/reverb.rs b/daw-backend/src/audio/node_graph/nodes/reverb.rs index 723d5f8..bd4d50c 100644 --- a/daw-backend/src/audio/node_graph/nodes/reverb.rs +++ b/daw-backend/src/audio/node_graph/nodes/reverb.rs @@ -310,4 +310,12 @@ impl AudioNode for ReverbNode { fn clone_node(&self) -> Box<dyn AudioNode> { Box::new(Self::new(self.name.clone())) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/ring_modulator.rs b/daw-backend/src/audio/node_graph/nodes/ring_modulator.rs index e9f2b61..90c4099 100644 --- a/daw-backend/src/audio/node_graph/nodes/ring_modulator.rs +++ b/daw-backend/src/audio/node_graph/nodes/ring_modulator.rs @@ -134,4 +134,12 @@ impl AudioNode for RingModulatorNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/sample_hold.rs b/daw-backend/src/audio/node_graph/nodes/sample_hold.rs index dc5b8d0..4aa7f95 100644 --- a/daw-backend/src/audio/node_graph/nodes/sample_hold.rs +++ b/daw-backend/src/audio/node_graph/nodes/sample_hold.rs @@ -134,4 +134,12 @@ impl AudioNode for SampleHoldNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/simple_sampler.rs b/daw-backend/src/audio/node_graph/nodes/simple_sampler.rs index f1b692a..33d9f6c 100644 --- a/daw-backend/src/audio/node_graph/nodes/simple_sampler.rs +++ b/daw-backend/src/audio/node_graph/nodes/simple_sampler.rs @@ -275,4 +275,12 @@ impl AudioNode for SimpleSamplerNode { fn clone_node(&self) -> Box<dyn AudioNode> { Box::new(Self::new(self.name.clone())) } + + fn as_any_mut(&mut self) -> &mut dyn
std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs b/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs index 7fed1dc..b9e44a6 100644 --- a/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs +++ b/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs @@ -153,4 +153,12 @@ impl AudioNode for SlewLimiterNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/splitter.rs b/daw-backend/src/audio/node_graph/nodes/splitter.rs index ff6b168..092b9ea 100644 --- a/daw-backend/src/audio/node_graph/nodes/splitter.rs +++ b/daw-backend/src/audio/node_graph/nodes/splitter.rs @@ -101,4 +101,12 @@ impl AudioNode for SplitterNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/template_io.rs b/daw-backend/src/audio/node_graph/nodes/template_io.rs index b9fae6c..d72ff6a 100644 --- a/daw-backend/src/audio/node_graph/nodes/template_io.rs +++ b/daw-backend/src/audio/node_graph/nodes/template_io.rs @@ -85,6 +85,14 @@ impl AudioNode for TemplateInputNode { fn handle_midi(&mut self, _event: &MidiEvent) { // Pass through to connected nodes } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } /// Template Output node - represents the audio output from one voice in a VoiceAllocator @@ -173,4 +181,12 @@ impl AudioNode for TemplateOutputNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/vocoder.rs b/daw-backend/src/audio/node_graph/nodes/vocoder.rs index 8240048..ce1285d 100644 --- a/daw-backend/src/audio/node_graph/nodes/vocoder.rs +++ b/daw-backend/src/audio/node_graph/nodes/vocoder.rs @@ -359,4 +359,12 @@ impl AudioNode for VocoderNode { node.setup_bands(); Box::new(node) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/voice_allocator.rs b/daw-backend/src/audio/node_graph/nodes/voice_allocator.rs index 522c2b3..efefd30 100644 --- a/daw-backend/src/audio/node_graph/nodes/voice_allocator.rs +++ b/daw-backend/src/audio/node_graph/nodes/voice_allocator.rs @@ -288,7 +288,8 @@ impl AudioNode for VoiceAllocatorNode { mix_slice.fill(0.0); // Process this voice's graph with its MIDI events - self.voice_instances[voice_idx].process(mix_slice, &midi_events); + // Note: playback_time is 0.0 since voice allocator doesn't track time + self.voice_instances[voice_idx].process(mix_slice, &midi_events, 0.0); // Mix into output (accumulate) for (i, sample) in mix_slice.iter().enumerate() { @@ -341,4 +342,12 @@ impl AudioNode for VoiceAllocatorNode { parameters: self.parameters.clone(), }) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/node_graph/nodes/wavetable_oscillator.rs b/daw-backend/src/audio/node_graph/nodes/wavetable_oscillator.rs index 1f35a92..c8a93ee 100644 --- 
a/daw-backend/src/audio/node_graph/nodes/wavetable_oscillator.rs +++ b/daw-backend/src/audio/node_graph/nodes/wavetable_oscillator.rs @@ -283,4 +283,12 @@ impl AudioNode for WavetableOscillatorNode { fn clone_node(&self) -> Box<dyn AudioNode> { Box::new(Self::new(self.name.clone())) } + + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/daw-backend/src/audio/track.rs b/daw-backend/src/audio/track.rs index 76ebedb..840dc39 100644 --- a/daw-backend/src/audio/track.rs +++ b/daw-backend/src/audio/track.rs @@ -383,7 +383,7 @@ impl MidiTrack { // Create a silent buffer to process the note-offs let buffer_size = 512 * 2; // stereo let mut silent_buffer = vec![0.0f32; buffer_size]; - self.instrument_graph.process(&mut silent_buffer, &note_offs); + self.instrument_graph.process(&mut silent_buffer, &note_offs, 0.0); } /// Queue a live MIDI event (from virtual keyboard or MIDI controller) @@ -405,7 +405,7 @@ impl MidiTrack { _channels: u32, ) { // Generate audio using instrument graph with live MIDI events - self.instrument_graph.process(output, &self.live_midi_queue); + self.instrument_graph.process(output, &self.live_midi_queue, 0.0); // Clear the queue after processing self.live_midi_queue.clear(); @@ -445,7 +445,7 @@ impl MidiTrack { midi_events.extend(self.live_midi_queue.drain(..)); // Generate audio using instrument graph - self.instrument_graph.process(output, &midi_events); + self.instrument_graph.process(output, &midi_events, playhead_seconds); // Evaluate and apply automation let effective_volume = self.evaluate_automation_at_time(playhead_seconds); diff --git a/daw-backend/src/command/types.rs b/daw-backend/src/command/types.rs index 4145d10..a753b75 100644 --- a/daw-backend/src/command/types.rs +++ b/daw-backend/src/command/types.rs @@ -149,6 +149,14 @@ pub enum Command { MultiSamplerUpdateLayer(TrackId, u32, usize, u8, u8, u8, u8, u8), /// Remove a layer from a MultiSampler node (track_id, node_id, layer_index) MultiSamplerRemoveLayer(TrackId, u32, usize), + + // Automation Input Node commands + /// Add or update a keyframe on an AutomationInput node (track_id, node_id, time, value, interpolation, ease_out, ease_in) + AutomationAddKeyframe(TrackId, u32, f64, f32, String, (f32, f32), (f32, f32)), + /// Remove a keyframe from an AutomationInput node (track_id, node_id, time) + AutomationRemoveKeyframe(TrackId, u32, f64), + /// Set the display name of an AutomationInput node (track_id, node_id, name) + AutomationSetName(TrackId, u32, String), } /// Events sent from audio thread back to UI/control thread @@ -212,6 +220,10 @@ pub enum Query { GetOscilloscopeData(TrackId, u32, usize), /// Get MIDI clip data (track_id, clip_id) GetMidiClip(TrackId, MidiClipId), + /// Get keyframes from an AutomationInput node (track_id, node_id) + GetAutomationKeyframes(TrackId, u32), + /// Get the display name of an AutomationInput node (track_id, node_id) + GetAutomationName(TrackId, u32), } /// Oscilloscope data from a node @@ -230,6 +242,16 @@ pub struct MidiClipData { pub events: Vec<MidiEvent>, } +/// Automation keyframe data for serialization +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct AutomationKeyframeData { + pub time: f64, + pub value: f32, + pub interpolation: String, + pub ease_out: (f32, f32), + pub ease_in: (f32, f32), +} + /// Responses to synchronous queries #[derive(Debug)] pub enum QueryResponse { @@ -239,4 +261,8 @@ pub enum QueryResponse { OscilloscopeData(Result<OscilloscopeData, String>), /// MIDI clip data MidiClipData(Result<MidiClipData, String>),
+ /// Automation keyframes + AutomationKeyframes(Result<Vec<AutomationKeyframeData>, String>), + /// Automation node name + AutomationName(Result<String, String>), } diff --git a/daw-backend/src/lib.rs b/daw-backend/src/lib.rs index b0858d4..f31b729 100644 --- a/daw-backend/src/lib.rs +++ b/daw-backend/src/lib.rs @@ -19,6 +19,7 @@ pub use audio::{ }; pub use audio::node_graph::{GraphPreset, InstrumentGraph, PresetMetadata, SerializedConnection, SerializedNode}; pub use command::{AudioEvent, Command, OscilloscopeData}; +pub use command::types::AutomationKeyframeData; pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; diff --git a/src-tauri/src/audio.rs b/src-tauri/src/audio.rs index fb2781c..93092aa 100644 --- a/src-tauri/src/audio.rs +++ b/src-tauri/src/audio.rs @@ -1208,6 +1208,105 @@ pub async fn get_oscilloscope_data( } } +// ===== Automation Input Node Commands ===== + +#[tauri::command] +pub async fn automation_add_keyframe( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + node_id: u32, + keyframe: daw_backend::AutomationKeyframeData, +) -> Result<(), String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + controller.send_command(daw_backend::Command::AutomationAddKeyframe( + track_id, + node_id, + keyframe.time, + keyframe.value, + keyframe.interpolation, + keyframe.ease_out, + keyframe.ease_in, + )); + Ok(()) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn automation_remove_keyframe( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + node_id: u32, + time: f64, +) -> Result<(), String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + controller.send_command(daw_backend::Command::AutomationRemoveKeyframe( + track_id, + node_id, + time, + )); + Ok(()) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn automation_get_keyframes( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + node_id: u32, +) -> Result<Vec<daw_backend::AutomationKeyframeData>, String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + controller.query_automation_keyframes(track_id, node_id) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn automation_set_name( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + node_id: u32, + name: String, +) -> Result<(), String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + controller.send_command(daw_backend::Command::AutomationSetName( + track_id, + node_id, + name, + )); + Ok(()) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn automation_get_name( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + node_id: u32, +) -> Result<String, String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + controller.query_automation_name(track_id, node_id) + } else { + Err("Audio not initialized".to_string()) + } +} + #[derive(serde::Serialize, Clone)] #[serde(tag = "type")] pub enum SerializedAudioEvent { diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 1c5eee3..01dd0eb 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -236,6 +236,11 @@ pub fn run() { audio::multi_sampler_update_layer, audio::multi_sampler_remove_layer, audio::get_oscilloscope_data, + audio::automation_add_keyframe, +
audio::automation_remove_keyframe, + audio::automation_get_keyframes, + audio::automation_set_name, + audio::automation_get_name, ]) // .manage(window_counter) .build(tauri::generate_context!()) diff --git a/src/actions/index.js b/src/actions/index.js index 3a22f18..8060023 100644 --- a/src/actions/index.js +++ b/src/actions/index.js @@ -34,6 +34,127 @@ function uuidv4() { ); } +/** + * Initialize a timeline curve for an AutomationInput node + * Creates the curve with a default keyframe at time 0 + * @param {number} trackId - Track ID + * @param {number} nodeId - Backend node ID + */ +async function initializeAutomationCurve(trackId, nodeId) { + try { + // Find the audio/MIDI track + const track = context.activeObject.audioTracks?.find(t => t.audioTrackId === trackId); + if (!track) { + console.error(`Track ${trackId} not found`); + return; + } + + // Create curve parameter name: "automation.{nodeId}" + const curveName = `automation.${nodeId}`; + + // Check if curve already exists + if (track.animationData.curves[curveName]) { + console.log(`Curve ${curveName} already exists`); + return; + } + + // Create the curve with a default keyframe at time 0, value 0 + const curve = track.animationData.getOrCreateCurve(curveName); + curve.addKeyframe({ + time: 0, + value: 0, + interpolation: 'linear', + easeIn: { x: 0.42, y: 0 }, + easeOut: { x: 0.58, y: 1 }, + idx: `${Date.now()}-${Math.random()}` + }); + + console.log(`Initialized automation curve: ${curveName}`); + + // Redraw timeline if it's open + if (context.timeline?.requestRedraw) { + context.timeline.requestRedraw(); + } + } catch (err) { + console.error('Failed to initialize automation curve:', err); + } +} + +/** + * Update automation node name based on its connection + * If the source node is an AutomationInput, generate a friendly name from the target + * @param {number} trackId - Track ID + * @param {number} fromNode - Source node ID + * @param {number} toNode - Target node ID + * @param {string} toPortClass - Target port name (frontend) + */ +async function updateAutomationName(trackId, fromNode, toNode, toPortClass) { + try { + // Get the full graph state to find node types and port information + const graphStateJson = await invoke('graph_get_state', { trackId }); + const graphState = JSON.parse(graphStateJson); + + // Find the source node + const sourceNode = graphState.nodes.find(n => n.id === fromNode); + if (!sourceNode || sourceNode.node_type !== 'AutomationInput') { + return; // Not an AutomationInput, nothing to do + } + + // Find the target node + const targetNode = graphState.nodes.find(n => n.id === toNode); + if (!targetNode) { + return; + } + + // Find the connection from this AutomationInput to the target node + const connection = graphState.connections.find(c => + c.from_node === fromNode && c.to_node === toNode + ); + + if (!connection) { + return; + } + + // Use the backend port name from the connection + // This will be something like "cutoff", "frequency", etc. 
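+ // Note: the toPortClass argument is the frontend's port identifier; the + // backend port name carried on the connection is authoritative here, so + // toPortClass is currently unused.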
+ const portName = connection.to_port; + + // Generate a friendly name: "{TargetType} {PortName}" + // e.g., "Filter cutoff" or "Oscillator frequency" + const name = `${targetNode.node_type} ${portName}`; + + // Set the automation name in the backend + await invoke('automation_set_name', { + trackId: trackId, + nodeId: fromNode, + name + }); + + // Update the node UI display if the node editor is open + if (context.nodeEditor) { + const nameElement = document.getElementById(`automation-name-${fromNode}`); + if (nameElement) { + nameElement.textContent = name; + } + } + + // Invalidate the timeline cache for this automation node + if (context.timelineWidget) { + const cacheKey = `${trackId}:${fromNode}`; + context.timelineWidget.automationNameCache.delete(cacheKey); + + // Trigger a redraw to fetch and display the new name + if (context.timelineWidget.requestRedraw) { + context.timelineWidget.requestRedraw(); + } + } + + console.log(`Auto-named automation node ${fromNode}: "${name}"`); + } catch (err) { + console.error('Failed to update automation name:', err); + } +} // Dependencies that will be injected let undoStack = null; let redoStack = null; @@ -56,6 +177,9 @@ let config = null; * @param {Function} deps.invoke - Tauri invoke function * @param {Object} deps.config - Application config object */ +// Export the auto-naming function for use in main.js +export { updateAutomationName }; + export function initializeActions(deps) { undoStack = deps.undoStack; redoStack = deps.redoStack; @@ -1977,6 +2101,12 @@ export const actions = { posX: action.position.x, posY: action.position.y }); + + // If this is an AutomationInput node, create a timeline curve for it + if (action.nodeType === 'AutomationInput') { + await initializeAutomationCurve(action.trackId, result); + } + // Reload the entire graph to show the restored node if (context.reloadNodeEditor) { await context.reloadNodeEditor(); } @@ -2083,6 +2213,9 @@ ); } } + + // Auto-name AutomationInput nodes when connected + await updateAutomationName(action.trackId, action.fromNode, action.toNode, action.toPortClass); } finally { if (context.nodeEditorState) { context.nodeEditorState.suppressActionRecording = false; diff --git a/src/assets/focus-animation.svg b/src/assets/focus-animation.svg index 50b2ed1..31a7035 100644 --- a/src/assets/focus-animation.svg +++ b/src/assets/focus-animation.svg @@ -1,5 +1,48 @@ [SVG markup not preserved in this copy of the patch] diff --git a/src/assets/focus-music.svg b/src/assets/focus-music.svg index e415f3c..2f5b7ff 100644 --- a/src/assets/focus-music.svg +++ b/src/assets/focus-music.svg @@ -1,10 +1,93 @@ [SVG markup not preserved in this copy of the patch] diff --git a/src/assets/focus-video.svg b/src/assets/focus-video.svg index a806538..7ff7f0b 100644 --- a/src/assets/focus-video.svg +++ b/src/assets/focus-video.svg @@ -1,9 +1,136 @@ [SVG markup not preserved in this copy of the patch] diff --git a/src/main.js b/src/main.js index 90ec49a..816e9e4 100644 --- a/src/main.js +++ b/src/main.js @@ -49,6 +49,7 @@ import { multiplyMatrices, growBoundingBox, createMissingTexturePattern, + distanceToLineSegment, } from "./utils.js"; import { backgroundColor, @@ -107,7 +108,7 @@ import { initializeGraphicsObjectDependencies } from "./models/graphics-object.js"; import { createRoot } from "./models/root.js"; -import { actions, initializeActions } from "./actions/index.js"; +import { actions, initializeActions, updateAutomationName } from "./actions/index.js"; // Layout system import { defaultLayouts, getLayout, getLayoutNames } from "./layouts.js"; @@
-1540,6 +1541,11 @@ function _newFile(width, height, fps, layoutKey) { config.defaultLayout = layoutKey; console.log('[_newFile] Switching to layout:', layoutKey); switchLayout(layoutKey); + + // Set default time format to measures for music mode + if (layoutKey === 'audioDaw' && context.timelineWidget?.timelineState) { + context.timelineWidget.timelineState.timeFormat = 'measures'; + } } // Define frameRate as a non-configurable property with a backing variable @@ -4160,6 +4166,8 @@ function timelineV2() { const currentTime = context.activeObject?.currentTime || 0; const timeFormat = timelineWidget.timelineState.timeFormat; const framerate = timelineWidget.timelineState.framerate; + const bpm = timelineWidget.timelineState.bpm; + const timeSignature = timelineWidget.timelineState.timeSignature; if (timeFormat === 'frames') { // Frames mode: show frame number and framerate @@ -4173,6 +4181,22 @@ function timelineV2() {
FPS
`; + } else if (timeFormat === 'measures') { + // Measures mode: show measure.beat, BPM, and time signature + const { measure, beat } = timelineWidget.timelineState.timeToMeasure(currentTime); + + timeDisplay.innerHTML = ` +
${measure}.${beat}
+
BAR
+
+
${bpm}
+
BPM
+
+
+
${timeSignature.numerator}/${timeSignature.denominator}
+
TIME
+
+ `; } else { // Seconds mode: show MM:SS.mmm or HH:MM:SS.mmm const totalSeconds = Math.floor(currentTime); @@ -4243,6 +4267,129 @@ function timelineV2() { } console.log('[FPS Edit] Done'); } + } else if (action === 'edit-bpm') { + // Clicked on BPM - show input to edit BPM + const currentBpm = timelineWidget.timelineState.bpm; + const newBpm = prompt('Enter BPM (Beats Per Minute):', currentBpm); + + if (newBpm !== null && !isNaN(newBpm) && newBpm > 0) { + const bpm = parseFloat(newBpm); + timelineWidget.timelineState.bpm = bpm; + context.config.bpm = bpm; + updateTimeDisplay(); + if (timelineWidget.requestRedraw) { + timelineWidget.requestRedraw(); + } + } + } else if (action === 'edit-time-signature') { + // Clicked on time signature - show custom dropdown with common options + const currentTimeSig = timelineWidget.timelineState.timeSignature; + const currentValue = `${currentTimeSig.numerator}/${currentTimeSig.denominator}`; + + // Create a custom dropdown list + const dropdown = document.createElement('div'); + dropdown.className = 'time-signature-dropdown'; + dropdown.style.position = 'absolute'; + dropdown.style.left = e.clientX + 'px'; + dropdown.style.top = e.clientY + 'px'; + dropdown.style.fontSize = '14px'; + dropdown.style.backgroundColor = 'var(--background-color)'; + dropdown.style.color = 'var(--label-color)'; + dropdown.style.border = '1px solid var(--shadow)'; + dropdown.style.borderRadius = '4px'; + dropdown.style.zIndex = '10000'; + dropdown.style.maxHeight = '300px'; + dropdown.style.overflowY = 'auto'; + dropdown.style.boxShadow = '0 4px 8px rgba(0,0,0,0.3)'; + + // Common time signatures + const commonTimeSigs = ['2/4', '3/4', '4/4', '5/4', '6/8', '7/8', '9/8', '12/8', 'Other...']; + + commonTimeSigs.forEach(sig => { + const item = document.createElement('div'); + item.textContent = sig; + item.style.padding = '8px 12px'; + item.style.cursor = 'pointer'; + item.style.backgroundColor = 'var(--background-color)'; + item.style.color = 'var(--label-color)'; + + if (sig === currentValue) { + item.style.backgroundColor = 'var(--foreground-color)'; + } + + item.addEventListener('mouseenter', () => { + item.style.backgroundColor = 'var(--foreground-color)'; + }); + + item.addEventListener('mouseleave', () => { + if (sig !== currentValue) { + item.style.backgroundColor = 'var(--background-color)'; + } + }); + + item.addEventListener('click', () => { + document.body.removeChild(dropdown); + + if (sig === 'Other...') { + // Show prompt for custom time signature + const newTimeSig = prompt( + 'Enter time signature (e.g., "4/4", "3/4", "6/8"):', + currentValue + ); + + if (newTimeSig !== null) { + const match = newTimeSig.match(/^(\d+)\/(\d+)$/); + if (match) { + const numerator = parseInt(match[1]); + const denominator = parseInt(match[2]); + + if (numerator > 0 && denominator > 0) { + timelineWidget.timelineState.timeSignature = { numerator, denominator }; + context.config.timeSignature = { numerator, denominator }; + updateTimeDisplay(); + if (timelineWidget.requestRedraw) { + timelineWidget.requestRedraw(); + } + } + } else { + alert('Invalid time signature format. 
Please use format like "4/4" or "6/8".'); + } + } + } else { + // Parse the selected common time signature + const match = sig.match(/^(\d+)\/(\d+)$/); + if (match) { + const numerator = parseInt(match[1]); + const denominator = parseInt(match[2]); + timelineWidget.timelineState.timeSignature = { numerator, denominator }; + context.config.timeSignature = { numerator, denominator }; + updateTimeDisplay(); + if (timelineWidget.requestRedraw) { + timelineWidget.requestRedraw(); + } + } + } + }); + + dropdown.appendChild(item); + }); + + document.body.appendChild(dropdown); + dropdown.focus(); + + // Close dropdown when clicking outside + const closeDropdown = (event) => { + if (!dropdown.contains(event.target)) { + if (document.body.contains(dropdown)) { + document.body.removeChild(dropdown); + } + document.removeEventListener('click', closeDropdown); + } + }; + + setTimeout(() => { + document.addEventListener('click', closeDropdown); + }, 0); } }); @@ -6814,6 +6961,15 @@ function nodeEditor() { drawflowDiv.addEventListener('dragover', (e) => { e.preventDefault(); e.dataTransfer.dropEffect = 'copy'; + + // Check if dragging over a connection for insertion + const nodeType = e.dataTransfer.getData('text/plain') || draggedNodeType; + if (nodeType) { + const nodeDef = nodeTypes[nodeType]; + if (nodeDef) { + checkConnectionInsertionDuringDrag(e, nodeDef); + } + } }); drawflowDiv.addEventListener('drop', (e) => { @@ -6873,10 +7029,21 @@ function nodeEditor() { // Add the node console.log(`Adding node ${nodeType} at (${x}, ${y}) with parent ${parentNodeId}`); - addNode(nodeType, x, y, parentNodeId); + const newNodeId = addNode(nodeType, x, y, parentNodeId); - // Clear the draggedNodeType + // Check if we should insert into a connection + if (pendingInsertionFromDrag && newNodeId) { + console.log('Pending insertion detected, will insert node into connection'); + // Defer insertion until after node is fully created + setTimeout(() => { + performConnectionInsertion(newNodeId, pendingInsertionFromDrag); + pendingInsertionFromDrag = null; + }, 100); + } + + // Clear the draggedNodeType and highlights draggedNodeType = null; + clearConnectionHighlights(); }); // Connection event handlers @@ -6905,7 +7072,10 @@ function nodeEditor() { }, 50); }); - // Track node drag start for undo/redo + // Track which node is being dragged + let draggingNodeId = null; + + // Track node drag start for undo/redo and connection insertion drawflowDiv.addEventListener('mousedown', (e) => { const nodeElement = e.target.closest('.drawflow-node'); if (nodeElement && !e.target.closest('.input') && !e.target.closest('.output')) { @@ -6913,39 +7083,61 @@ function nodeEditor() { const node = editor.getNodeFromId(nodeId); if (node) { nodeMoveTracker.set(nodeId, { x: node.pos_x, y: node.pos_y }); + draggingNodeId = nodeId; } } }); - // Node moved - resize parent VoiceAllocator + // Check for connection insertion while dragging existing nodes + drawflowDiv.addEventListener('mousemove', (e) => { + if (draggingNodeId !== null) { + checkConnectionInsertion(draggingNodeId); + } + }); + + // Node moved - resize parent VoiceAllocator and check for connection insertion editor.on("nodeMoved", (nodeId) => { const node = editor.getNodeFromId(nodeId); if (node && node.data.parentNodeId) { resizeVoiceAllocatorToFit(node.data.parentNodeId); } + + // Check if node should be inserted into a connection + checkConnectionInsertion(nodeId); }); - // Track node drag end for undo/redo + // Track node drag end for undo/redo and handle connection insertion 
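// Drag-insertion lifecycle, as wired in the handlers above and below (annotation
// only, not part of the patch):
//   mousedown -> remember the start position in nodeMoveTracker, set draggingNodeId
//   mousemove -> checkConnectionInsertion() finds the nearest type-compatible
//                connection within the 30px threshold and highlights it
//   mouseup   -> perform the pending insertion if one was found, otherwise push a
//                graphMoveNode entry onto the undo stack when the position changed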
drawflowDiv.addEventListener('mouseup', (e) => { - // Check all tracked nodes for position changes + // Check all tracked nodes for position changes and pending insertions for (const [nodeId, oldPos] of nodeMoveTracker.entries()) { const node = editor.getNodeFromId(nodeId); - if (node && (node.pos_x !== oldPos.x || node.pos_y !== oldPos.y)) { - // Position changed - record action - redoStack.length = 0; - undoStack.push({ - name: "graphMoveNode", - action: { - nodeId: nodeId, - oldPosition: oldPos, - newPosition: { x: node.pos_x, y: node.pos_y } - } - }); - updateMenu(); + const hasPendingInsertion = pendingNodeInsertions.has(nodeId); + + if (node) { + // Check for pending insertion first + if (hasPendingInsertion) { + const insertionMatch = pendingNodeInsertions.get(nodeId); + performConnectionInsertion(nodeId, insertionMatch); + pendingNodeInsertions.delete(nodeId); + } else if (node.pos_x !== oldPos.x || node.pos_y !== oldPos.y) { + // Position changed - record action + redoStack.length = 0; + undoStack.push({ + name: "graphMoveNode", + action: { + nodeId: nodeId, + oldPosition: oldPos, + newPosition: { x: node.pos_x, y: node.pos_y } + } + }); + updateMenu(); + } } } - // Clear tracker + // Clear tracker, dragging state, and highlights nodeMoveTracker.clear(); + draggingNodeId = null; + clearConnectionHighlights(); }); // Node removed - prevent deletion of template nodes @@ -7171,6 +7363,39 @@ function nodeEditor() { } } + // If this is an AutomationInput node, create timeline curve + if (nodeType === "AutomationInput" && !parentNodeId) { + const currentTrackId = getCurrentMidiTrack(); + if (currentTrackId !== null) { + // Find the audio/MIDI track + const track = root.audioTracks?.find(t => t.audioTrackId === currentTrackId); + if (track) { + // Create curve parameter name: "automation.{nodeId}" + const curveName = `automation.${backendNodeId}`; + + // Check if curve already exists + if (!track.animationData.curves[curveName]) { + // Create the curve with a default keyframe at time 0, value 0 + const curve = track.animationData.getOrCreateCurve(curveName); + curve.addKeyframe({ + time: 0, + value: 0, + interpolation: 'linear', + easeIn: { x: 0.42, y: 0 }, + easeOut: { x: 0.58, y: 1 }, + idx: `${Date.now()}-${Math.random()}` + }); + console.log(`Initialized automation curve: ${curveName}`); + + // Redraw timeline if it's open + if (context.timeline?.requestRedraw) { + context.timeline.requestRedraw(); + } + } + } + } + } + // If this is an Oscilloscope node, start the visualization if (nodeType === "Oscilloscope") { const currentTrackId = getCurrentMidiTrack(); @@ -7200,6 +7425,8 @@ function nodeEditor() { console.error("Failed to add node to backend:", err); showError("Failed to add node: " + err); }); + + return drawflowNodeId; } // Auto-resize VoiceAllocator to fit its child nodes @@ -7812,6 +8039,461 @@ function nodeEditor() { } } + // Push nodes away from a point using gaussian falloff + function pushNodesAway(centerX, centerY, maxDistance, excludeNodeId) { + const module = editor.module; + const allNodes = editor.drawflow.drawflow[module]?.data || {}; + + // Gaussian parameters + const sigma = maxDistance / 3; // Standard deviation for falloff + const maxPush = 150; // Maximum push distance at the center + + for (const [id, node] of Object.entries(allNodes)) { + const nodeIdNum = parseInt(id); + if (nodeIdNum === excludeNodeId) continue; + + // Calculate distance from center + const dx = node.pos_x - centerX; + const dy = node.pos_y - centerY; + const distance = Math.sqrt(dx * dx 
+ dy * dy); + + if (distance < maxDistance && distance > 0) { + // Calculate push strength using gaussian falloff + const falloff = Math.exp(-(distance * distance) / (2 * sigma * sigma)); + const pushStrength = maxPush * falloff; + + // Calculate push direction (normalized) + const dirX = dx / distance; + const dirY = dy / distance; + + // Calculate new position + const newX = node.pos_x + dirX * pushStrength; + const newY = node.pos_y + dirY * pushStrength; + + // Update position in the data structure + node.pos_x = newX; + node.pos_y = newY; + + // Update the DOM element position + const nodeElement = document.getElementById(`node-${nodeIdNum}`); + if (nodeElement) { + nodeElement.style.left = newX + 'px'; + nodeElement.style.top = newY + 'px'; + } + + // Trigger connection redraw + editor.updateConnectionNodes(`node-${nodeIdNum}`); + } + } + } + + // Perform the actual connection insertion + function performConnectionInsertion(nodeId, match) { + + const node = editor.getNodeFromId(nodeId); + const sourceNode = editor.getNodeFromId(match.sourceNodeId); + const targetNode = editor.getNodeFromId(match.targetNodeId); + + if (!node || !sourceNode || !targetNode) { + console.error("Missing nodes for insertion"); + return; + } + + // Position the node between source and target + const sourceElement = document.getElementById(`node-${match.sourceNodeId}`); + const targetElement = document.getElementById(`node-${match.targetNodeId}`); + + if (sourceElement && targetElement) { + const sourceRect = sourceElement.getBoundingClientRect(); + const targetRect = targetElement.getBoundingClientRect(); + + // Calculate midpoint position + const newX = (sourceNode.pos_x + sourceRect.width + targetNode.pos_x) / 2 - 80; // Approximate node half-width + const newY = (sourceNode.pos_y + targetNode.pos_y) / 2 - 50; // Approximate node half-height + + // Update node position in data structure + node.pos_x = newX; + node.pos_y = newY; + + // Update the DOM element position + const nodeElement = document.getElementById(`node-${nodeId}`); + if (nodeElement) { + nodeElement.style.left = newX + 'px'; + nodeElement.style.top = newY + 'px'; + } + + // Trigger connection redraw for this node + editor.updateConnectionNodes(`node-${nodeId}`); + + // Push surrounding nodes away with gaussian falloff + pushNodesAway(newX, newY, 400, nodeId); // 400px influence radius + } + + // Remove the old connection + suppressActionRecording = true; + editor.removeSingleConnection( + match.sourceNodeId, + match.targetNodeId, + match.sourceOutputClass, + match.targetInputClass + ); + + // Create new connections: source -> node -> target + // Connection 1: source output -> node input + setTimeout(() => { + editor.addConnection( + match.sourceNodeId, + nodeId, + match.sourceOutputClass, + `input_${match.nodeInputPort + 1}` + ); + + // Connection 2: node output -> target input + setTimeout(() => { + editor.addConnection( + nodeId, + match.targetNodeId, + `output_${match.nodeOutputPort + 1}`, + match.targetInputClass + ); + + suppressActionRecording = false; + }, 50); + }, 50); + } + + // Check if cursor position during drag is near a connection + function checkConnectionInsertionDuringDrag(dragEvent, nodeDef) { + const drawflowDiv = container.querySelector("#drawflow"); + if (!drawflowDiv || !editor) return; + + const rect = drawflowDiv.getBoundingClientRect(); + const canvasX = editor.canvas_x || 0; + const canvasY = editor.canvas_y || 0; + const zoom = editor.zoom || 1; + + // Calculate cursor position in canvas coordinates + const 
cursorX = (dragEvent.clientX - rect.left - canvasX) / zoom; + const cursorY = (dragEvent.clientY - rect.top - canvasY) / zoom; + + // Get all connections in the current module + const module = editor.module; + const allNodes = editor.drawflow.drawflow[module]?.data || {}; + + // Distance threshold for insertion (in pixels) + const insertionThreshold = 30; + + let bestMatch = null; + let bestDistance = insertionThreshold; + + // Check each connection + for (const [sourceNodeId, sourceNode] of Object.entries(allNodes)) { + for (const [outputKey, outputData] of Object.entries(sourceNode.outputs)) { + for (const connection of outputData.connections) { + const targetNodeId = connection.node; + const targetNode = allNodes[targetNodeId]; + + if (!targetNode) continue; + + // Get source and target positions + const sourceElement = document.getElementById(`node-${sourceNodeId}`); + const targetElement = document.getElementById(`node-${targetNodeId}`); + + if (!sourceElement || !targetElement) continue; + + const sourceRect = sourceElement.getBoundingClientRect(); + const targetRect = targetElement.getBoundingClientRect(); + + // Calculate output port position (right side of source node) + const sourceX = sourceNode.pos_x + sourceRect.width; + const sourceY = sourceNode.pos_y + sourceRect.height / 2; + + // Calculate input port position (left side of target node) + const targetX = targetNode.pos_x; + const targetY = targetNode.pos_y + targetRect.height / 2; + + // Calculate distance from cursor to connection line + const distance = distanceToLineSegment( + cursorX, cursorY, + sourceX, sourceY, + targetX, targetY + ); + + // Check if this is the closest connection + if (distance < bestDistance) { + // Check port compatibility + const sourcePortIndex = parseInt(outputKey.replace('output_', '')) - 1; + const targetPortIndex = parseInt(connection.output.replace('input_', '')) - 1; + + const sourceDef = nodeTypes[sourceNode.name]; + const targetDef = nodeTypes[targetNode.name]; + + if (!sourceDef || !targetDef) continue; + + // Get the signal type of the connection + if (sourcePortIndex >= sourceDef.outputs.length || + targetPortIndex >= targetDef.inputs.length) continue; + + const connectionType = sourceDef.outputs[sourcePortIndex].type; + + // Check if the dragged node has compatible input and output + let compatibleInputIndex = -1; + let compatibleOutputIndex = -1; + + // Find first compatible input and output + for (let i = 0; i < nodeDef.inputs.length; i++) { + if (nodeDef.inputs[i].type === connectionType) { + compatibleInputIndex = i; + break; + } + } + + for (let i = 0; i < nodeDef.outputs.length; i++) { + if (nodeDef.outputs[i].type === connectionType) { + compatibleOutputIndex = i; + break; + } + } + + if (compatibleInputIndex !== -1 && compatibleOutputIndex !== -1) { + bestDistance = distance; + bestMatch = { + sourceNodeId: parseInt(sourceNodeId), + targetNodeId: parseInt(targetNodeId), + sourcePort: sourcePortIndex, + targetPort: targetPortIndex, + nodeInputPort: compatibleInputIndex, + nodeOutputPort: compatibleOutputIndex, + connectionType: connectionType, + sourceOutputClass: outputKey, + targetInputClass: connection.output, + insertX: cursorX, + insertY: cursorY + }; + } + } + } + } + } + + // If we found a match, highlight the connection and store it + if (bestMatch) { + highlightConnectionForInsertion(bestMatch); + pendingInsertionFromDrag = bestMatch; + } else { + clearConnectionHighlights(); + pendingInsertionFromDrag = null; + } + } + + // Check if a node can be inserted into a 
connection + function checkConnectionInsertion(nodeId) { + const node = editor.getNodeFromId(nodeId); + if (!node) return; + + const nodeDef = nodeTypes[node.name]; + if (!nodeDef) return; + + // Check if node has any connections - skip if it does + let hasConnections = false; + for (const [inputKey, inputData] of Object.entries(node.inputs)) { + if (inputData.connections && inputData.connections.length > 0) { + hasConnections = true; + break; + } + } + if (!hasConnections) { + for (const [outputKey, outputData] of Object.entries(node.outputs)) { + if (outputData.connections && outputData.connections.length > 0) { + hasConnections = true; + break; + } + } + } + + if (hasConnections) { + clearConnectionHighlights(); + pendingNodeInsertions.delete(nodeId); + return; + } + + // Get node center position + const nodeElement = document.getElementById(`node-${nodeId}`); + if (!nodeElement) return; + + const nodeRect = nodeElement.getBoundingClientRect(); + const nodeCenterX = node.pos_x + nodeRect.width / 2; + const nodeCenterY = node.pos_y + nodeRect.height / 2; + + // Get all connections in the current module + const module = editor.module; + const allNodes = editor.drawflow.drawflow[module]?.data || {}; + + // Distance threshold for insertion (in pixels) + const insertionThreshold = 30; + + let bestMatch = null; + let bestDistance = insertionThreshold; + + // Check each connection + for (const [sourceNodeId, sourceNode] of Object.entries(allNodes)) { + if (parseInt(sourceNodeId) === nodeId) continue; // Skip the node being dragged + + for (const [outputKey, outputData] of Object.entries(sourceNode.outputs)) { + for (const connection of outputData.connections) { + const targetNodeId = connection.node; + const targetNode = allNodes[targetNodeId]; + + if (!targetNode || parseInt(targetNodeId) === nodeId) continue; + + // Get source and target positions + const sourceElement = document.getElementById(`node-${sourceNodeId}`); + const targetElement = document.getElementById(`node-${targetNodeId}`); + + if (!sourceElement || !targetElement) continue; + + const sourceRect = sourceElement.getBoundingClientRect(); + const targetRect = targetElement.getBoundingClientRect(); + + // Calculate output port position (right side of source node) + const sourceX = sourceNode.pos_x + sourceRect.width; + const sourceY = sourceNode.pos_y + sourceRect.height / 2; + + // Calculate input port position (left side of target node) + const targetX = targetNode.pos_x; + const targetY = targetNode.pos_y + targetRect.height / 2; + + // Calculate distance from node center to connection line + const distance = distanceToLineSegment( + nodeCenterX, nodeCenterY, + sourceX, sourceY, + targetX, targetY + ); + + // Check if this is the closest connection + if (distance < bestDistance) { + // Check port compatibility + const sourcePortIndex = parseInt(outputKey.replace('output_', '')) - 1; + const targetPortIndex = parseInt(connection.output.replace('input_', '')) - 1; + + const sourceDef = nodeTypes[sourceNode.name]; + const targetDef = nodeTypes[targetNode.name]; + + if (!sourceDef || !targetDef) continue; + + // Get the signal type of the connection + if (sourcePortIndex >= sourceDef.outputs.length || + targetPortIndex >= targetDef.inputs.length) continue; + + const connectionType = sourceDef.outputs[sourcePortIndex].type; + + // Check if the dragged node has compatible input and output + let hasCompatibleInput = false; + let hasCompatibleOutput = false; + let compatibleInputIndex = -1; + let compatibleOutputIndex = -1; + + // 
Find first compatible input and output + for (let i = 0; i < nodeDef.inputs.length; i++) { + if (nodeDef.inputs[i].type === connectionType) { + hasCompatibleInput = true; + compatibleInputIndex = i; + break; + } + } + + for (let i = 0; i < nodeDef.outputs.length; i++) { + if (nodeDef.outputs[i].type === connectionType) { + hasCompatibleOutput = true; + compatibleOutputIndex = i; + break; + } + } + + if (hasCompatibleInput && hasCompatibleOutput) { + bestDistance = distance; + bestMatch = { + sourceNodeId: parseInt(sourceNodeId), + targetNodeId: parseInt(targetNodeId), + sourcePort: sourcePortIndex, + targetPort: targetPortIndex, + nodeInputPort: compatibleInputIndex, + nodeOutputPort: compatibleOutputIndex, + connectionType: connectionType, + sourceOutputClass: outputKey, + targetInputClass: connection.output + }; + } + } + } + } + } + + // If we found a match, highlight the connection + if (bestMatch) { + highlightConnectionForInsertion(bestMatch); + // Store the match in the Map for use on mouseup + pendingNodeInsertions.set(nodeId, bestMatch); + } else { + clearConnectionHighlights(); + pendingNodeInsertions.delete(nodeId); + } + } + + // Track which connection is highlighted for insertion + let highlightedConnection = null; + let highlightInterval = null; + let pendingInsertionFromDrag = null; + + // Track pending insertions for existing nodes being dragged + const pendingNodeInsertions = new Map(); // nodeId -> insertion match + + // Apply highlight to the tracked connection + function applyConnectionHighlight() { + if (!highlightedConnection) return; + + const connectionElement = document.querySelector( + `.connection.node_in_node-${highlightedConnection.targetNodeId}.node_out_node-${highlightedConnection.sourceNodeId}` + ); + + if (connectionElement && !connectionElement.classList.contains('connection-insertion-highlight')) { + connectionElement.classList.add('connection-insertion-highlight'); + } + } + + // Highlight a connection that can receive the node + function highlightConnectionForInsertion(match) { + // Store the connection to highlight + highlightedConnection = match; + + // Clear any existing interval + if (highlightInterval) { + clearInterval(highlightInterval); + } + + // Apply highlight immediately + applyConnectionHighlight(); + + // Keep re-applying in case Drawflow redraws + highlightInterval = setInterval(applyConnectionHighlight, 50); + } + + // Clear connection insertion highlights + function clearConnectionHighlights() { + // Stop the interval + if (highlightInterval) { + clearInterval(highlightInterval); + highlightInterval = null; + } + + highlightedConnection = null; + + // Remove all highlight classes + document.querySelectorAll('.connection-insertion-highlight').forEach(el => { + el.classList.remove('connection-insertion-highlight'); + }); + } + // Handle connection creation function handleConnectionCreated(connection) { console.log("handleConnectionCreated called:", connection); @@ -7973,7 +8655,7 @@ function nodeEditor() { fromPort: outputPort, toNode: inputNode.data.backendId, toPort: inputPort - }).then(() => { + }).then(async () => { console.log("Connection successful"); // Record action for undo @@ -7993,6 +8675,15 @@ function nodeEditor() { toPortClass: connection.input_class } }); + + // Auto-name AutomationInput nodes when connected + await updateAutomationName( + currentTrackId, + outputNode.data.backendId, + inputNode.data.backendId, + connection.input_class + ); + updateMenu(); }).catch(err => { console.error("Failed to connect nodes:", err); 
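// Aside: the two insertion checks above inline the same port-compatibility scan.
// A minimal standalone sketch of that logic (illustration only, not part of this
// patch; findCompatiblePorts is a hypothetical helper, and it assumes the nodeTypes
// shape used above, where ports are declared as { name, type, index }):
function findCompatiblePorts(nodeDef, connectionType) {
  // First input and first output on the candidate node that carry the connection's signal type
  const inputIndex = nodeDef.inputs.findIndex(port => port.type === connectionType);
  const outputIndex = nodeDef.outputs.findIndex(port => port.type === connectionType);
  // The node can only be spliced into a connection if it has both
  return (inputIndex !== -1 && outputIndex !== -1) ? { inputIndex, outputIndex } : null;
}
// e.g. a node with inputs [{ type: SignalType.CV }] and outputs [{ type: SignalType.CV }]
// yields { inputIndex: 0, outputIndex: 0 } for a CV connection, and null for an audio one.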
diff --git a/src/models/animation.js b/src/models/animation.js index 380beb4..9da13a0 100644 --- a/src/models/animation.js +++ b/src/models/animation.js @@ -2,6 +2,9 @@ import { context, config, pointerList, startProps } from '../state.js'; +// Get invoke from Tauri global +const { invoke } = window.__TAURI__.core; + // Helper function for UUID generation function uuidv4() { return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) => @@ -181,6 +184,9 @@ class AnimationCurve { existingKeyframe.interpolation = keyframe.interpolation; if (keyframe.easeIn) existingKeyframe.easeIn = keyframe.easeIn; if (keyframe.easeOut) existingKeyframe.easeOut = keyframe.easeOut; + + // Sync update to backend if this is an automation curve + this._syncAutomationKeyframeToBackend(existingKeyframe); } else { // Add new keyframe this.keyframes.push(keyframe); @@ -192,6 +198,9 @@ class AnimationCurve { if (this.parentAnimationData) { this.parentAnimationData.updateDuration(); } + + // Sync to backend if this is an automation curve + this._syncAutomationKeyframeToBackend(keyframe); } removeKeyframe(keyframe) { @@ -203,6 +212,9 @@ class AnimationCurve { if (this.parentAnimationData) { this.parentAnimationData.updateDuration(); } + + // Sync to backend if this is an automation curve + this._syncAutomationKeyframeRemovalToBackend(keyframe); } } @@ -389,6 +401,85 @@ class AnimationCurve { keyframes: this.keyframes.map(kf => kf.toJSON()) }; } + + // Helper method to sync keyframe additions to backend for automation curves + _syncAutomationKeyframeToBackend(keyframe) { + // Check if this is an automation curve (parameter starts with "automation.") + if (!this.parameter.startsWith('automation.')) { + return; // Not an automation curve, skip backend sync + } + + // Extract node ID from parameter (e.g., "automation.5" -> 5) + const nodeIdStr = this.parameter.split('.')[1]; + const nodeId = parseInt(nodeIdStr, 10); + if (isNaN(nodeId)) { + console.error(`Invalid automation node ID: ${nodeIdStr}`); + return; + } + + // Convert keyframe to backend format + const backendKeyframe = { + time: keyframe.time, + value: keyframe.value, + interpolation: keyframe.interpolation || 'linear', + ease_out: keyframe.easeOut ? [keyframe.easeOut.x, keyframe.easeOut.y] : [0.58, 1.0], + ease_in: keyframe.easeIn ? 
[keyframe.easeIn.x, keyframe.easeIn.y] : [0.42, 0.0] + }; + + // Call Tauri command (fire-and-forget) + // Note: Need to get track_id from context - for now, find it from the curve's parent + const track = window.root?.audioTracks?.find(t => + t.animationData && Object.values(t.animationData.curves).includes(this) + ); + + if (!track || track.audioTrackId === null) { + console.error('Could not find track for automation curve sync'); + return; + } + + invoke('automation_add_keyframe', { + trackId: track.audioTrackId, + nodeId: nodeId, + keyframe: backendKeyframe + }).catch(err => { + console.error(`Failed to sync automation keyframe to backend: ${err}`); + }); + } + + // Helper method to sync keyframe removals to backend for automation curves + _syncAutomationKeyframeRemovalToBackend(keyframe) { + // Check if this is an automation curve (parameter starts with "automation.") + if (!this.parameter.startsWith('automation.')) { + return; // Not an automation curve, skip backend sync + } + + // Extract node ID from parameter (e.g., "automation.5" -> 5) + const nodeIdStr = this.parameter.split('.')[1]; + const nodeId = parseInt(nodeIdStr, 10); + if (isNaN(nodeId)) { + console.error(`Invalid automation node ID: ${nodeIdStr}`); + return; + } + + // Call Tauri command (fire-and-forget) + // Note: Need to get track_id from context - for now, find it from the curve's parent + const track = window.root?.audioTracks?.find(t => + t.animationData && Object.values(t.animationData.curves).includes(this) + ); + + if (!track || track.audioTrackId === null) { + console.error('Could not find track for automation curve sync'); + return; + } + + invoke('automation_remove_keyframe', { + trackId: track.audioTrackId, + nodeId: nodeId, + time: keyframe.time + }).catch(err => { + console.error(`Failed to sync automation keyframe removal to backend: ${err}`); + }); + } } class AnimationData { diff --git a/src/nodeTypes.js b/src/nodeTypes.js index 16067ee..2dea0ed 100644 --- a/src/nodeTypes.js +++ b/src/nodeTypes.js @@ -1038,6 +1038,31 @@ export const nodeTypes = { ` }, + AutomationInput: { + name: 'AutomationInput', + category: NodeCategory.UTILITY, + description: 'Timeline automation - outputs CV signal controlled by timeline curves', + inputs: [], + outputs: [ + { name: 'CV Out', type: SignalType.CV, index: 0 } + ], + parameters: [], + getHTML: (nodeId) => ` +
+
Automation
+
+ Timeline-based automation +
+
+ Not connected +
+
+ Edit curves in timeline +
+
+ ` + }, + Math: { name: 'Math', category: NodeCategory.UTILITY, diff --git a/src/startscreen.js b/src/startscreen.js index aeee142..f35e939 100644 --- a/src/startscreen.js +++ b/src/startscreen.js @@ -94,19 +94,19 @@ function createRightPanel() { { name: 'Animation', value: 'animation', - iconSvg: '', + iconPath: '/assets/focus-animation.svg', description: 'Drawing tools and timeline' }, { name: 'Music', value: 'audioDaw', - iconSvg: '', + iconPath: '/assets/focus-music.svg', description: 'Audio tracks and mixer' }, { name: 'Video editing', value: 'videoEditing', - iconSvg: '', + iconPath: '/assets/focus-video.svg', description: 'Clip timeline and effects' } ]; @@ -121,6 +121,12 @@ function createRightPanel() { return rightPanel; } +async function loadSVG(url, targetElement) { + const response = await fetch(url); + const svgText = await response.text(); + targetElement.innerHTML = svgText; +} + function createFocusCard(focus) { const card = document.createElement('div'); card.className = 'focus-card'; @@ -131,7 +137,10 @@ function createFocusCard(focus) { const iconWrapper = document.createElement('div'); iconWrapper.className = 'focus-card-icon'; - iconWrapper.innerHTML = focus.iconSvg; + + // Load the SVG asynchronously + loadSVG(focus.iconPath, iconWrapper); + iconContainer.appendChild(iconWrapper); card.appendChild(iconContainer); diff --git a/src/state.js b/src/state.js index ff73e23..112fb94 100644 --- a/src/state.js +++ b/src/state.js @@ -77,6 +77,8 @@ export let config = { fileWidth: 800, fileHeight: 600, framerate: 24, + bpm: 120, + timeSignature: { numerator: 4, denominator: 4 }, recentFiles: [], scrollSpeed: 1, debug: false, diff --git a/src/styles.css b/src/styles.css index 039f40e..bb5b90d 100644 --- a/src/styles.css +++ b/src/styles.css @@ -1646,6 +1646,18 @@ button { stroke-width: 2px !important; } +/* Connection insertion highlight */ +.connection-insertion-highlight .main-path { + stroke: #FFD700 !important; + stroke-width: 8px !important; + stroke-dasharray: none !important; + filter: drop-shadow(0 0 12px #FFD700) !important; +} + +.connection-insertion-highlight { + z-index: 9999 !important; +} + /* Port label text styling - position labels away from connectors */ .drawflow .drawflow-node .input > span, .drawflow .drawflow-node .output > span { diff --git a/src/timeline.js b/src/timeline.js index bf8dca1..1b81101 100644 --- a/src/timeline.js +++ b/src/timeline.js @@ -6,10 +6,12 @@ import { backgroundColor, foregroundColor, shadow, labelColor, scrubberColor } f * TimelineState - Global state for timeline display and interaction */ class TimelineState { - constructor(framerate = 24) { + constructor(framerate = 24, bpm = 120, timeSignature = { numerator: 4, denominator: 4 }) { // Time format settings this.timeFormat = 'frames' // 'frames' | 'seconds' | 'measures' this.framerate = framerate + this.bpm = bpm // Beats per minute for measures mode + this.timeSignature = timeSignature // Time signature for measures mode (e.g., {numerator: 4, denominator: 4} or {numerator: 6, denominator: 8}) // Zoom and viewport this.pixelsPerSecond = 100 // Zoom level - how many pixels per second of animation @@ -53,6 +55,30 @@ class TimelineState { return frame / this.framerate } + /** + * Convert time (seconds) to measure position + * Returns {measure, beat, tick} where tick is subdivision of beat (0-999) + */ + timeToMeasure(time) { + const beatsPerSecond = this.bpm / 60 + const totalBeats = time * beatsPerSecond + const beatsPerMeasure = this.timeSignature.numerator + const measure = 
Math.floor(totalBeats / beatsPerMeasure) + 1 // Measures are 1-indexed + const beat = Math.floor(totalBeats % beatsPerMeasure) + 1 // Beats are 1-indexed + const tick = Math.floor((totalBeats % 1) * 1000) // Ticks are 0-999 + return { measure, beat, tick } + } + + /** + * Convert measure position to time (seconds) + */ + measureToTime(measure, beat = 1, tick = 0) { + const beatsPerMeasure = this.timeSignature.numerator + const totalBeats = (measure - 1) * beatsPerMeasure + (beat - 1) + (tick / 1000) + const beatsPerSecond = this.bpm / 60 + return totalBeats / beatsPerSecond + } + /** * Calculate appropriate ruler interval based on zoom level * Returns interval in seconds that gives ~50-100px spacing @@ -112,6 +138,35 @@ class TimelineState { return bestInterval } + /** + * Calculate appropriate ruler interval for measures mode + * Returns interval in beats that gives ~50-100px spacing + */ + getRulerIntervalBeats() { + const targetPixelSpacing = 75 + const beatsPerSecond = this.bpm / 60 + const pixelsPerBeat = this.pixelsPerSecond / beatsPerSecond + const beatSpacing = targetPixelSpacing / pixelsPerBeat + + const beatsPerMeasure = this.timeSignature.numerator + // Standard beat intervals: 1 beat, 2 beats, 1 measure, 2 measures, 4 measures, etc. + const intervals = [1, 2, beatsPerMeasure, beatsPerMeasure * 2, beatsPerMeasure * 4, beatsPerMeasure * 8, beatsPerMeasure * 16] + + // Find closest interval + let bestInterval = intervals[0] + let bestDiff = Math.abs(beatSpacing - bestInterval) + + for (let interval of intervals) { + const diff = Math.abs(beatSpacing - interval) + if (diff < bestDiff) { + bestDiff = diff + bestInterval = interval + } + } + + return bestInterval + } + /** * Format time for display based on current format setting */ @@ -128,8 +183,10 @@ class TimelineState { } else { return `${seconds}.${ms}s` } + } else if (this.timeFormat === 'measures') { + const { measure, beat } = this.timeToMeasure(time) + return `${measure}.${beat}` } - // measures format - TODO when DAW features added return `${time.toFixed(2)}` } @@ -182,24 +239,19 @@ class TimeRuler { ctx.fillStyle = backgroundColor ctx.fillRect(0, 0, width, this.height) - // Determine interval based on current zoom and format - let interval, isFrameMode - if (this.state.timeFormat === 'frames') { - interval = this.state.getRulerIntervalFrames() // In frames - isFrameMode = true - } else { - interval = this.state.getRulerInterval() // In seconds - isFrameMode = false - } - // Calculate visible time range const startTime = this.state.viewportStartTime const endTime = this.state.pixelToTime(width) - // Draw tick marks and labels - if (isFrameMode) { + // Draw tick marks and labels based on format + if (this.state.timeFormat === 'frames') { + const interval = this.state.getRulerIntervalFrames() // In frames this.drawFrameTicks(ctx, width, interval, startTime, endTime) + } else if (this.state.timeFormat === 'measures') { + const interval = this.state.getRulerIntervalBeats() // In beats + this.drawMeasureTicks(ctx, width, interval, startTime, endTime) } else { + const interval = this.state.getRulerInterval() // In seconds this.drawSecondTicks(ctx, width, interval, startTime, endTime) } @@ -303,6 +355,127 @@ class TimeRuler { } } + /** + * Draw tick marks for measures mode + */ + drawMeasureTicks(ctx, width, interval, startTime, endTime) { + const beatsPerSecond = this.state.bpm / 60 + const beatsPerMeasure = this.state.timeSignature.numerator + + // Always draw individual beats, regardless of interval + const startBeat = 
Math.floor(startTime * beatsPerSecond) + const endBeat = Math.ceil(endTime * beatsPerSecond) + + ctx.fillStyle = labelColor + ctx.font = '11px sans-serif' + ctx.textAlign = 'center' + ctx.textBaseline = 'top' + + // Draw all beats + for (let beat = startBeat; beat <= endBeat; beat++) { + const time = beat / beatsPerSecond + const x = this.state.timeToPixel(time) + + if (x < 0 || x > width) continue + + // Determine position within the measure + const beatInMeasure = beat % beatsPerMeasure + const isMeasureBoundary = beatInMeasure === 0 + const isEvenBeatInMeasure = (beatInMeasure % 2) === 0 + + // Determine tick style based on position + let opacity, tickHeight + if (isMeasureBoundary) { + // Measure boundary: full opacity, tallest + opacity = 1.0 + tickHeight = 12 + } else if (isEvenBeatInMeasure) { + // Even beat within measure: half opacity, medium height + opacity = 0.5 + tickHeight = 8 + } else { + // Odd beat within measure: quarter opacity, shortest + opacity = 0.25 + tickHeight = 5 + } + + // Draw tick with appropriate opacity + ctx.save() + ctx.globalAlpha = opacity + ctx.strokeStyle = foregroundColor + ctx.lineWidth = isMeasureBoundary ? 2 : 1 + ctx.beginPath() + ctx.moveTo(x, this.height - tickHeight) + ctx.lineTo(x, this.height) + ctx.stroke() + ctx.restore() + + // Determine if we're zoomed in enough to show individual beat labels + const pixelsPerBeat = this.state.pixelsPerSecond / beatsPerSecond + const beatFadeThreshold = 100 // Full opacity at 100px per beat + const beatFadeStart = 60 // Start fading in at 60px per beat + + // Calculate fade opacity for beat labels (0 to 1) + const beatLabelOpacity = Math.max(0, Math.min(1, (pixelsPerBeat - beatFadeStart) / (beatFadeThreshold - beatFadeStart))) + + // Calculate spacing-based fade for measure labels when zoomed out + const pixelsPerMeasure = pixelsPerBeat * beatsPerMeasure + + // Determine which measures to show based on spacing + const { measure: measureNumber } = this.state.timeToMeasure(time) + let showThisMeasure = false + let measureLabelOpacity = 1 + + const isEvery16th = (measureNumber - 1) % 16 === 0 + const isEvery4th = (measureNumber - 1) % 4 === 0 + + if (isEvery16th) { + // Always show every 16th measure when very zoomed out + showThisMeasure = true + if (pixelsPerMeasure < 20) { + // Fade in from 10-20px + measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 10) / 10)) + } else { + measureLabelOpacity = 1 + } + } else if (isEvery4th && pixelsPerMeasure >= 20) { + // Show every 4th measure when zoomed out but not too far + showThisMeasure = true + if (pixelsPerMeasure < 30) { + // Fade in from 20-30px + measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 20) / 10)) + } else { + measureLabelOpacity = 1 + } + } else if (pixelsPerMeasure >= 80) { + // Show all measures when zoomed in enough + showThisMeasure = true + if (pixelsPerMeasure < 100) { + // Fade in from 80-100px + measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 80) / 20)) + } else { + measureLabelOpacity = 1 + } + } + + // Label logic + if (isMeasureBoundary && showThisMeasure) { + // Measure boundaries: show just the measure number with fade + const { measure } = this.state.timeToMeasure(time) + ctx.save() + ctx.globalAlpha = measureLabelOpacity + ctx.fillText(measure.toString(), x, 2) + ctx.restore() + } else if (beatLabelOpacity > 0) { + // Zoomed in: show measure.beat for all beats with fade + ctx.save() + ctx.globalAlpha = beatLabelOpacity + ctx.fillText(this.state.formatTime(time), x, 2) + ctx.restore() 
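+ // Worked example of the fades above (annotation, not part of the patch): at
+ // 75 px per beat, beatLabelOpacity = clamp((75 - 60) / (100 - 60)) = 0.375, so
+ // per-beat "measure.beat" labels render faint; below 60 px/beat they disappear
+ // and at 100+ px/beat they are fully opaque. Measure-number labels fade in the
+ // same way over the 10-20px, 20-30px, and 80-100px pixelsPerMeasure bands for
+ // every-16th, every-4th, and every-measure labels respectively.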
+ } + } + } + /** * Draw playhead (current time indicator) */ diff --git a/src/utils.js b/src/utils.js index e5f79de..9b57766 100644 --- a/src/utils.js +++ b/src/utils.js @@ -925,6 +925,48 @@ function deeploop(obj, callback) { } } +/** + * Calculate the shortest distance from a point to a line segment + * @param {number} px - Point x coordinate + * @param {number} py - Point y coordinate + * @param {number} x1 - Line segment start x + * @param {number} y1 - Line segment start y + * @param {number} x2 - Line segment end x + * @param {number} y2 - Line segment end y + * @returns {number} Distance from point to line segment + */ +function distanceToLineSegment(px, py, x1, y1, x2, y2) { + const A = px - x1; + const B = py - y1; + const C = x2 - x1; + const D = y2 - y1; + + const dot = A * C + B * D; + const lenSq = C * C + D * D; + let param = -1; + + if (lenSq !== 0) { + param = dot / lenSq; + } + + let xx, yy; + + if (param < 0) { + xx = x1; + yy = y1; + } else if (param > 1) { + xx = x2; + yy = y2; + } else { + xx = x1 + param * C; + yy = y1 + param * D; + } + + const dx = px - xx; + const dy = py - yy; + return Math.sqrt(dx * dx + dy * dy); +} + export { titleCase, getMousePositionFraction, @@ -959,5 +1001,6 @@ export { arraysAreEqual, getFileExtension, createModal, - deeploop + deeploop, + distanceToLineSegment }; \ No newline at end of file diff --git a/src/widgets.js b/src/widgets.js index dc82ca2..fcf685f 100644 --- a/src/widgets.js +++ b/src/widgets.js @@ -535,7 +535,11 @@ class TimelineWindowV2 extends Widget { this.trackHeaderWidth = 150 // Create shared timeline state using config framerate - this.timelineState = new TimelineState(context.config?.framerate || 24) + this.timelineState = new TimelineState( + context.config?.framerate || 24, + context.config?.bpm || 120, + context.config?.timeSignature || { numerator: 4, denominator: 4 } + ) // Create time ruler widget this.ruler = new TimeRuler(this.timelineState) @@ -573,6 +577,9 @@ class TimelineWindowV2 extends Widget { // Selected audio track (for recording) this.selectedTrack = null + + // Cache for automation node names (maps "trackId:nodeId" -> friendly name) + this.automationNameCache = new Map() } draw(ctx) { @@ -792,8 +799,8 @@ class TimelineWindowV2 extends Widget { ctx.fillText(typeText, typeX, y + this.trackHierarchy.trackHeight / 2) } - // Draw toggle buttons for object/shape/audio tracks (Phase 3) - if (track.type === 'object' || track.type === 'shape' || track.type === 'audio') { + // Draw toggle buttons for object/shape/audio/midi tracks (Phase 3) + if (track.type === 'object' || track.type === 'shape' || track.type === 'audio' || track.type === 'midi') { const buttonSize = 14 const buttonY = y + (this.trackHierarchy.trackHeight - buttonSize) / 2 // Use base height for button position let buttonX = this.trackHeaderWidth - 10 // Start from right edge @@ -813,8 +820,8 @@ class TimelineWindowV2 extends Widget { track.object.curvesMode === 'keyframe' ? 
'≈' : '-' ctx.fillText(curveSymbol, buttonX + buttonSize / 2, buttonY + buttonSize / 2) - // Segment visibility button (only for object/shape tracks, not audio) - if (track.type !== 'audio') { + // Segment visibility button (only for object/shape tracks, not audio/midi) + if (track.type !== 'audio' && track.type !== 'midi') { buttonX -= (buttonSize + 4) ctx.strokeStyle = foregroundColor ctx.lineWidth = 1 @@ -835,7 +842,10 @@ class TimelineWindowV2 extends Widget { let animationData = null // Find the AnimationData for this track - if (track.type === 'object') { + if (track.type === 'audio' || track.type === 'midi') { + // For audio/MIDI tracks, animation data is directly on the track object + animationData = obj.animationData + } else if (track.type === 'object') { for (let layer of this.context.activeObject.allLayers) { if (layer.children && layer.children.includes(obj)) { animationData = layer.animationData @@ -852,11 +862,19 @@ class TimelineWindowV2 extends Widget { } if (animationData) { - const prefix = track.type === 'object' ? `child.${obj.idx}.` : `shape.${obj.shapeId}.` - for (let curveName in animationData.curves) { - if (curveName.startsWith(prefix)) { + if (track.type === 'audio' || track.type === 'midi') { + // For audio/MIDI tracks, include all automation curves + for (let curveName in animationData.curves) { curves.push(animationData.curves[curveName]) } + } else { + // For objects/shapes, filter by prefix + const prefix = track.type === 'object' ? `child.${obj.idx}.` : `shape.${obj.shapeId}.` + for (let curveName in animationData.curves) { + if (curveName.startsWith(prefix)) { + curves.push(animationData.curves[curveName]) + } + } } } @@ -883,9 +901,18 @@ class TimelineWindowV2 extends Widget { ctx.arc(10, itemY + 5, 3, 0, 2 * Math.PI) ctx.fill() - // Draw parameter name (extract last part after last dot) + // Draw parameter name ctx.fillStyle = isHidden ? foregroundColor : labelColor - const paramName = curve.parameter.split('.').pop() + let paramName = curve.parameter.split('.').pop() + + // For automation curves, fetch the friendly name from backend + if (curve.parameter.startsWith('automation.') && (track.type === 'audio' || track.type === 'midi')) { + const nodeId = parseInt(paramName, 10) + if (!isNaN(nodeId) && obj.audioTrackId !== null) { + paramName = this.getAutomationName(obj.audioTrackId, nodeId) + } + } + const truncatedName = paramName.length > 12 ? paramName.substring(0, 10) + '...' 
: paramName ctx.fillText(truncatedName, 18, itemY) @@ -974,6 +1001,42 @@ class TimelineWindowV2 extends Widget { } } } + } else if (this.timelineState.timeFormat === 'measures') { + // Measures mode: draw beats with varying opacity + const beatsPerSecond = this.timelineState.bpm / 60 + const beatsPerMeasure = this.timelineState.timeSignature.numerator + const startBeat = Math.floor(visibleStartTime * beatsPerSecond) + const endBeat = Math.ceil(visibleEndTime * beatsPerSecond) + + for (let beat = startBeat; beat <= endBeat; beat++) { + const time = beat / beatsPerSecond + const x = this.timelineState.timeToPixel(time) + + if (x >= 0 && x <= trackAreaWidth) { + // Determine position within the measure + const beatInMeasure = beat % beatsPerMeasure + const isMeasureBoundary = beatInMeasure === 0 + const isEvenBeatInMeasure = (beatInMeasure % 2) === 0 + + // Set opacity based on position + ctx.save() + if (isMeasureBoundary) { + ctx.globalAlpha = 1.0 // Full opacity for measure boundaries + } else if (isEvenBeatInMeasure) { + ctx.globalAlpha = 0.5 // Half opacity for even beats + } else { + ctx.globalAlpha = 0.25 // Quarter opacity for odd beats + } + + ctx.strokeStyle = shadow + ctx.lineWidth = 1 + ctx.beginPath() + ctx.moveTo(x, y) + ctx.lineTo(x, y + trackHeight) + ctx.stroke() + ctx.restore() + } + } } else { // Seconds mode: mark every second edge const startSecond = Math.floor(visibleStartTime) @@ -1427,8 +1490,8 @@ class TimelineWindowV2 extends Widget { for (let i = 0; i < this.trackHierarchy.tracks.length; i++) { const track = this.trackHierarchy.tracks[i] - // Only draw curves for objects, shapes, and audio tracks - if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio') continue + // Only draw curves for objects, shapes, audio tracks, and MIDI tracks + if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio' && track.type !== 'midi') continue const obj = track.object @@ -1439,8 +1502,8 @@ class TimelineWindowV2 extends Widget { // Find the layer containing this object/shape to get AnimationData let animationData = null - if (track.type === 'audio') { - // For audio tracks, animation data is directly on the track object + if (track.type === 'audio' || track.type === 'midi') { + // For audio/MIDI tracks, animation data is directly on the track object animationData = obj.animationData } else if (track.type === 'object') { // For objects, get curves from parent layer @@ -1476,9 +1539,9 @@ class TimelineWindowV2 extends Widget { for (let curveName in animationData.curves) { const curve = animationData.curves[curveName] - // Filter to only curves for this specific object/shape/audio - if (track.type === 'audio') { - // Audio tracks: include all curves (they're prefixed with 'track.' 
or 'clip.') + // Filter to only curves for this specific object/shape/audio/MIDI + if (track.type === 'audio' || track.type === 'midi') { + // Audio/MIDI tracks: include all automation curves curves.push(curve) } else if (track.type === 'object' && curveName.startsWith(`child.${obj.idx}.`)) { curves.push(curve) @@ -1858,7 +1921,7 @@ class TimelineWindowV2 extends Widget { } // Check if clicking on toggle buttons (Phase 3) - if (track.type === 'object' || track.type === 'shape') { + if (track.type === 'object' || track.type === 'shape' || track.type === 'audio' || track.type === 'midi') { const buttonSize = 14 const trackIndex = this.trackHierarchy.tracks.indexOf(track) const trackY = this.trackHierarchy.getTrackY(trackIndex) @@ -4032,10 +4095,60 @@ class TimelineWindowV2 extends Widget { toggleTimeFormat() { if (this.timelineState.timeFormat === 'frames') { this.timelineState.timeFormat = 'seconds' + } else if (this.timelineState.timeFormat === 'seconds') { + this.timelineState.timeFormat = 'measures' } else { this.timelineState.timeFormat = 'frames' } } + + // Fetch automation name from backend and cache it + async fetchAutomationName(trackId, nodeId) { + const cacheKey = `${trackId}:${nodeId}` + + // Return cached value if available + if (this.automationNameCache.has(cacheKey)) { + return this.automationNameCache.get(cacheKey) + } + + try { + const name = await invoke('automation_get_name', { + trackId: trackId, + nodeId: nodeId + }) + + // Cache the result + if (name && name !== '') { + this.automationNameCache.set(cacheKey, name) + return name + } + } catch (err) { + console.error(`Failed to fetch automation name for node ${nodeId}:`, err) + } + + // Fallback to node ID if fetch fails or returns empty + return `${nodeId}` + } + + // Get automation name synchronously from cache, trigger fetch if not cached + getAutomationName(trackId, nodeId) { + const cacheKey = `${trackId}:${nodeId}` + + if (this.automationNameCache.has(cacheKey)) { + return this.automationNameCache.get(cacheKey) + } + + // Trigger async fetch in background + this.fetchAutomationName(trackId, nodeId).then(() => { + // Redraw when name arrives + if (this.context.timelineWidget?.requestRedraw) { + this.context.timelineWidget.requestRedraw() + } + }) + + // Return node ID as placeholder while fetching + return `${nodeId}` + } } /**