Add automation and drag nodes into connections in the graph

Skyler Lehmkuhl 2025-11-02 01:27:22 -05:00
parent 0ae168cbca
commit 988bbfd1a9
62 changed files with 2622 additions and 82 deletions

View File

@ -729,6 +729,7 @@ impl Engine {
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())), "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
"AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())),
@ -803,6 +804,7 @@ impl Engine {
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())), "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
"AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())),
@ -1117,6 +1119,77 @@ impl Engine {
}
}
}
Command::AutomationAddKeyframe(track_id, node_id, time, value, interpolation_str, ease_out, ease_in) => {
use crate::audio::node_graph::nodes::{AutomationInputNode, AutomationKeyframe, InterpolationType};
// Parse interpolation type
let interpolation = match interpolation_str.to_lowercase().as_str() {
"linear" => InterpolationType::Linear,
"bezier" => InterpolationType::Bezier,
"step" => InterpolationType::Step,
"hold" => InterpolationType::Hold,
_ => {
eprintln!("Unknown interpolation type: {}, defaulting to Linear", interpolation_str);
InterpolationType::Linear
}
};
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
let graph = &mut track.instrument_graph;
let node_idx = NodeIndex::new(node_id as usize);
if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
// Downcast to AutomationInputNode using as_any_mut
if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() {
let keyframe = AutomationKeyframe {
time,
value,
interpolation,
ease_out,
ease_in,
};
auto_node.add_keyframe(keyframe);
} else {
eprintln!("Node {} is not an AutomationInputNode", node_id);
}
}
}
}
Command::AutomationRemoveKeyframe(track_id, node_id, time) => {
use crate::audio::node_graph::nodes::AutomationInputNode;
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
let graph = &mut track.instrument_graph;
let node_idx = NodeIndex::new(node_id as usize);
if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() {
auto_node.remove_keyframe_at_time(time, 0.001); // 1ms tolerance
} else {
eprintln!("Node {} is not an AutomationInputNode", node_id);
}
}
}
}
Command::AutomationSetName(track_id, node_id, name) => {
use crate::audio::node_graph::nodes::AutomationInputNode;
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
let graph = &mut track.instrument_graph;
let node_idx = NodeIndex::new(node_id as usize);
if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
if let Some(auto_node) = graph_node.node.as_any_mut().downcast_mut::<AutomationInputNode>() {
auto_node.set_display_name(name);
} else {
eprintln!("Node {} is not an AutomationInputNode", node_id);
}
}
}
}
}
}
@ -1185,6 +1258,71 @@ impl Engine {
QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id)))
}
}
Query::GetAutomationKeyframes(track_id, node_id) => {
use crate::audio::node_graph::nodes::{AutomationInputNode, InterpolationType};
use crate::command::types::AutomationKeyframeData;
if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
let graph = &track.instrument_graph;
let node_idx = NodeIndex::new(node_id as usize);
if let Some(graph_node) = graph.get_graph_node(node_idx) {
// Downcast to AutomationInputNode
if let Some(auto_node) = graph_node.node.as_any().downcast_ref::<AutomationInputNode>() {
let keyframes: Vec<AutomationKeyframeData> = auto_node.keyframes()
.iter()
.map(|kf| {
let interpolation_str = match kf.interpolation {
InterpolationType::Linear => "linear",
InterpolationType::Bezier => "bezier",
InterpolationType::Step => "step",
InterpolationType::Hold => "hold",
}.to_string();
AutomationKeyframeData {
time: kf.time,
value: kf.value,
interpolation: interpolation_str,
ease_out: kf.ease_out,
ease_in: kf.ease_in,
}
})
.collect();
QueryResponse::AutomationKeyframes(Ok(keyframes))
} else {
QueryResponse::AutomationKeyframes(Err(format!("Node {} is not an AutomationInputNode", node_id)))
}
} else {
QueryResponse::AutomationKeyframes(Err(format!("Node {} not found in track {}", node_id, track_id)))
}
} else {
QueryResponse::AutomationKeyframes(Err(format!("Track {} not found or is not a MIDI track", track_id)))
}
}
Query::GetAutomationName(track_id, node_id) => {
use crate::audio::node_graph::nodes::AutomationInputNode;
if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
let graph = &track.instrument_graph;
let node_idx = NodeIndex::new(node_id as usize);
if let Some(graph_node) = graph.get_graph_node(node_idx) {
// Downcast to AutomationInputNode
if let Some(auto_node) = graph_node.node.as_any().downcast_ref::<AutomationInputNode>() {
QueryResponse::AutomationName(Ok(auto_node.display_name().to_string()))
} else {
QueryResponse::AutomationName(Err(format!("Node {} is not an AutomationInputNode", node_id)))
}
} else {
QueryResponse::AutomationName(Err(format!("Node {} not found in track {}", node_id, track_id)))
}
} else {
QueryResponse::AutomationName(Err(format!("Track {} not found or is not a MIDI track", track_id)))
}
}
};
// Send response back
@ -1503,6 +1641,11 @@ impl EngineController {
let _ = self.command_tx.push(Command::MoveClip(track_id, clip_id, new_start_time));
}
/// Send a generic command to the audio thread
pub fn send_command(&mut self, command: Command) {
let _ = self.command_tx.push(command);
}
/// Get current playhead position in samples
pub fn get_playhead_samples(&self) -> u64 {
self.playhead.load(Ordering::Relaxed)
@ -1871,4 +2014,48 @@ impl EngineController {
Err("Query timeout".to_string()) Err("Query timeout".to_string())
} }
/// Query automation keyframes from an AutomationInput node
pub fn query_automation_keyframes(&mut self, track_id: TrackId, node_id: u32) -> Result<Vec<crate::command::types::AutomationKeyframeData>, String> {
// Send query
if let Err(_) = self.query_tx.push(Query::GetAutomationKeyframes(track_id, node_id)) {
return Err("Failed to send query - queue full".to_string());
}
// Wait for response (with timeout)
let start = std::time::Instant::now();
let timeout = std::time::Duration::from_millis(100);
while start.elapsed() < timeout {
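// Note: pop() consumes whatever response is at the head of the queue, so a
// response of a different variant would be dropped here rather than matched.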
if let Ok(QueryResponse::AutomationKeyframes(result)) = self.query_response_rx.pop() {
return result;
}
// Small sleep to avoid busy-waiting
std::thread::sleep(std::time::Duration::from_micros(50));
}
Err("Query timeout".to_string())
}
/// Query automation node display name
pub fn query_automation_name(&mut self, track_id: TrackId, node_id: u32) -> Result<String, String> {
// Send query
if let Err(_) = self.query_tx.push(Query::GetAutomationName(track_id, node_id)) {
return Err("Failed to send query - queue full".to_string());
}
// Wait for response (with timeout)
let start = std::time::Instant::now();
let timeout = std::time::Duration::from_millis(100);
while start.elapsed() < timeout {
if let Ok(QueryResponse::AutomationName(result)) = self.query_response_rx.pop() {
return result;
}
// Small sleep to avoid busy-waiting
std::thread::sleep(std::time::Duration::from_micros(50));
}
Err("Query timeout".to_string())
}
}

View File

@ -81,6 +81,9 @@ pub struct InstrumentGraph {
/// UI positions for nodes (node_index -> (x, y))
node_positions: std::collections::HashMap<u32, (f32, f32)>,
/// Current playback time (for automation nodes)
playback_time: f64,
}
impl InstrumentGraph {
@ -98,6 +101,7 @@ impl InstrumentGraph {
// Pre-allocate MIDI input buffers (max 128 events per port)
midi_input_buffers: (0..16).map(|_| Vec::with_capacity(128)).collect(),
node_positions: std::collections::HashMap::new(),
playback_time: 0.0,
}
}
@ -319,7 +323,19 @@ impl InstrumentGraph {
}
/// Process the graph and produce audio output
pub fn process(&mut self, output_buffer: &mut [f32], midi_events: &[MidiEvent], playback_time: f64) {
// Update playback time
self.playback_time = playback_time;
// Update playback time for all automation nodes before processing
use super::nodes::AutomationInputNode;
for node in self.graph.node_weights_mut() {
// Try to downcast to AutomationInputNode and update its playback time
if let Some(auto_node) = node.node.as_any_mut().downcast_mut::<AutomationInputNode>() {
auto_node.set_playback_time(playback_time);
}
}
// Use the requested output buffer size for processing
let process_size = output_buffer.len();
@ -504,6 +520,11 @@ impl InstrumentGraph {
self.get_node(idx).and_then(|node| node.get_oscilloscope_cv_data(sample_count))
}
/// Get node by index (read-only)
pub fn get_graph_node(&self, idx: NodeIndex) -> Option<&GraphNode> {
self.graph.node_weight(idx)
}
/// Get node mutably by index
/// Note: Due to lifetime constraints with trait objects, this returns a mutable reference
/// to the GraphNode, from which you can access the node
@ -816,6 +837,7 @@ impl InstrumentGraph {
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input")), "MidiInput" => Box::new(MidiInputNode::new("MIDI Input")),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV")), "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV")),
"AutomationInput" => Box::new(AutomationInputNode::new("Automation")),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope")), "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope")),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input")), "TemplateInput" => Box::new(TemplateInputNode::new("Template Input")),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output")), "TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output")),

View File

@ -70,4 +70,10 @@ pub trait AudioNode: Send {
fn get_oscilloscope_cv_data(&self, _sample_count: usize) -> Option<Vec<f32>> {
None
}
/// Downcast to `&mut dyn Any` for type-specific operations
fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
/// Downcast to `&dyn Any` for type-specific read-only operations
fn as_any(&self) -> &dyn std::any::Any;
}

View File

@ -0,0 +1,46 @@
#!/bin/bash
for file in *.rs; do
if [ "$file" = "mod.rs" ]; then
continue
fi
echo "Processing $file"
# Create a backup
cp "$file" "$file.bak"
# Add as_any() method right after as_any_mut()
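# (Assumes 4-space indentation, and that the first "    }" line after the
# as_any_mut() match is that method's closing brace.)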
awk '
{
lines[NR] = $0
if (/fn as_any_mut\(&mut self\)/) {
# Found as_any_mut, look for its closing brace
found_method = NR
}
if (found_method > 0 && /^ }$/ && !inserted) {
closing_brace = NR
inserted = 1
}
}
END {
for (i = 1; i <= NR; i++) {
print lines[i]
if (i == closing_brace) {
print ""
print " fn as_any(&self) -> &dyn std::any::Any {"
print " self"
print " }"
}
}
}
' "$file.bak" > "$file"
# Verify the change was made
if grep -q "fn as_any(&self)" "$file"; then
echo " ✓ Successfully added as_any() to $file"
rm "$file.bak"
else
echo " ✗ Failed to add as_any() to $file - restoring backup"
mv "$file.bak" "$file"
fi
done

View File

@ -212,4 +212,12 @@ impl AudioNode for ADSRNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -148,4 +148,12 @@ impl AudioNode for AudioToCVNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -0,0 +1,288 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
use serde::{Deserialize, Serialize};
use std::sync::{Arc, RwLock};
/// Interpolation type for automation curves
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum InterpolationType {
Linear,
Bezier,
Step,
Hold,
}
/// A single keyframe in an automation curve
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AutomationKeyframe {
/// Time in seconds (absolute project time)
pub time: f64,
/// CV output value
pub value: f32,
/// Interpolation type to next keyframe
pub interpolation: InterpolationType,
/// Bezier ease-out control point (for bezier interpolation)
pub ease_out: (f32, f32),
/// Bezier ease-in control point (for bezier interpolation)
pub ease_in: (f32, f32),
}
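// Illustrative serde JSON form of a keyframe (tuples serialize as arrays, and
// `rename_all = "lowercase"` lowercases the interpolation variant names):
//   {"time": 1.5, "value": 0.8, "interpolation": "linear",
//    "ease_out": [0.58, 1.0], "ease_in": [0.42, 0.0]}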
impl AutomationKeyframe {
pub fn new(time: f64, value: f32) -> Self {
Self {
time,
value,
interpolation: InterpolationType::Linear,
ease_out: (0.58, 1.0),
ease_in: (0.42, 0.0),
}
}
}
/// Automation Input Node - outputs CV signal controlled by timeline curves
pub struct AutomationInputNode {
name: String,
display_name: String, // User-editable name shown in UI
keyframes: Vec<AutomationKeyframe>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
/// Shared playback time (set by the graph before processing)
playback_time: Arc<RwLock<f64>>,
}
impl AutomationInputNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
let outputs = vec![
NodePort::new("CV Out", SignalType::CV, 0),
];
Self {
name: name.clone(),
display_name: "Automation".to_string(),
keyframes: Vec::new(),
outputs,
parameters: Vec::new(),
playback_time: Arc::new(RwLock::new(0.0)),
}
}
/// Set the playback time (called by graph before processing)
pub fn set_playback_time(&mut self, time: f64) {
if let Ok(mut playback) = self.playback_time.write() {
*playback = time;
}
}
/// Get the display name (shown in UI)
pub fn display_name(&self) -> &str {
&self.display_name
}
/// Set the display name
pub fn set_display_name(&mut self, name: String) {
self.display_name = name;
}
/// Add a keyframe to the curve (maintains sorted order by time)
pub fn add_keyframe(&mut self, keyframe: AutomationKeyframe) {
// Find insertion position to maintain sorted order
let pos = self.keyframes.binary_search_by(|kf| {
kf.time.partial_cmp(&keyframe.time).unwrap_or(std::cmp::Ordering::Equal)
});
match pos {
Ok(idx) => {
// Replace existing keyframe at same time
self.keyframes[idx] = keyframe;
}
Err(idx) => {
// Insert at correct position
self.keyframes.insert(idx, keyframe);
}
}
}
/// Remove keyframe at specific time (with tolerance)
pub fn remove_keyframe_at_time(&mut self, time: f64, tolerance: f64) -> bool {
if let Some(idx) = self.keyframes.iter().position(|kf| (kf.time - time).abs() < tolerance) {
self.keyframes.remove(idx);
true
} else {
false
}
}
/// Update an existing keyframe
pub fn update_keyframe(&mut self, keyframe: AutomationKeyframe) {
// Remove old keyframe at this time, then add new one
self.remove_keyframe_at_time(keyframe.time, 0.001);
self.add_keyframe(keyframe);
}
/// Get all keyframes
pub fn keyframes(&self) -> &[AutomationKeyframe] {
&self.keyframes
}
/// Clear all keyframes
pub fn clear_keyframes(&mut self) {
self.keyframes.clear();
}
/// Evaluate curve at a specific time
fn evaluate_at_time(&self, time: f64) -> f32 {
if self.keyframes.is_empty() {
return 0.0;
}
// Before first keyframe
if time <= self.keyframes[0].time {
return self.keyframes[0].value;
}
// After last keyframe
let last_idx = self.keyframes.len() - 1;
if time >= self.keyframes[last_idx].time {
return self.keyframes[last_idx].value;
}
// Find bracketing keyframes
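// (Linear scan per sample; since keyframes are kept sorted by time, a binary
// search would also work, but typical curves are small enough for O(n).)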
for i in 0..self.keyframes.len() - 1 {
let kf1 = &self.keyframes[i];
let kf2 = &self.keyframes[i + 1];
if time >= kf1.time && time <= kf2.time {
return self.interpolate(kf1, kf2, time);
}
}
0.0
}
/// Interpolate between two keyframes
fn interpolate(&self, kf1: &AutomationKeyframe, kf2: &AutomationKeyframe, time: f64) -> f32 {
// Calculate normalized position between keyframes (0.0 to 1.0)
let t = if kf2.time == kf1.time {
0.0
} else {
((time - kf1.time) / (kf2.time - kf1.time)) as f32
};
match kf1.interpolation {
InterpolationType::Linear => {
// Simple linear interpolation
kf1.value + (kf2.value - kf1.value) * t
}
InterpolationType::Bezier => {
// Cubic bezier interpolation using control points
let eased_t = self.cubic_bezier_ease(t, kf1.ease_out, kf2.ease_in);
kf1.value + (kf2.value - kf1.value) * eased_t
}
InterpolationType::Step | InterpolationType::Hold => {
// Hold value until next keyframe
kf1.value
}
}
}
/// Cubic bezier easing function
fn cubic_bezier_ease(&self, t: f32, ease_out: (f32, f32), ease_in: (f32, f32)) -> f32 {
// Simplified cubic bezier for 0,0 -> easeOut -> easeIn -> 1,1
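// Evaluates the y-component of the Bernstein form with endpoints fixed at
// (0,0) and (1,1):  B(t) = 3(1-t)^2 t * y1 + 3(1-t) t^2 * y2 + t^3.
// t is used as the curve parameter directly instead of solving x(t) = t
// first, a cheap approximation of CSS-style cubic-bezier easing.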
let u = 1.0 - t;
3.0 * u * u * t * ease_out.1 +
3.0 * u * t * t * ease_in.1 +
t * t * t
}
}
impl AudioNode for AutomationInputNode {
fn category(&self) -> NodeCategory {
NodeCategory::Input
}
fn inputs(&self) -> &[NodePort] {
&[] // No inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, _id: u32, _value: f32) {
// No parameters
}
fn get_parameter(&self, _id: u32) -> f32 {
0.0
}
fn process(
&mut self,
_inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
sample_rate: u32,
) {
if outputs.is_empty() {
return;
}
let output = &mut outputs[0];
let length = output.len();
// Get the starting playback time
let playhead = if let Ok(playback) = self.playback_time.read() {
*playback
} else {
0.0
};
// Calculate time per sample
let sample_duration = 1.0 / sample_rate as f64;
// Evaluate curve for each sample
for i in 0..length {
let time = playhead + (i as f64 * sample_duration);
output[i] = self.evaluate_at_time(time);
}
}
fn reset(&mut self) {
// No state to reset
}
fn node_type(&self) -> &str {
"AutomationInput"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
display_name: self.display_name.clone(),
keyframes: self.keyframes.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
playback_time: Arc::new(RwLock::new(0.0)),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -184,4 +184,12 @@ impl AudioNode for BitCrusherNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -231,4 +231,12 @@ impl AudioNode for ChorusNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -250,4 +250,12 @@ impl AudioNode for CompressorNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -110,4 +110,12 @@ impl AudioNode for ConstantNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -208,4 +208,12 @@ impl AudioNode for DelayNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -254,4 +254,12 @@ impl AudioNode for DistortionNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -155,4 +155,12 @@ impl AudioNode for EnvelopeFollowerNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -256,4 +256,12 @@ impl AudioNode for EQNode {
node.update_filters();
Box::new(node)
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -198,4 +198,12 @@ impl AudioNode for FilterNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -240,4 +240,12 @@ impl AudioNode for FlangerNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -300,4 +300,12 @@ impl AudioNode for FMSynthNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self::new(self.name.clone()))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -127,4 +127,12 @@ impl AudioNode for GainNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -219,4 +219,12 @@ impl AudioNode for LFONode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -212,4 +212,12 @@ impl AudioNode for LimiterNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -161,4 +161,12 @@ impl AudioNode for MathNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -102,4 +102,12 @@ impl AudioNode for MidiInputNode {
fn handle_midi(&mut self, event: &MidiEvent) {
self.pending_events.push(*event);
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -183,4 +183,12 @@ impl AudioNode for MidiToCVNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -142,4 +142,12 @@ impl AudioNode for MixerNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -1,5 +1,6 @@
mod adsr;
mod audio_to_cv;
mod automation_input;
mod bit_crusher;
mod chorus;
mod compressor;
@ -39,6 +40,7 @@ mod wavetable_oscillator;
pub use adsr::ADSRNode;
pub use audio_to_cv::AudioToCVNode;
pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType};
pub use bit_crusher::BitCrusherNode;
pub use chorus::ChorusNode;
pub use compressor::CompressorNode;

View File

@ -508,4 +508,12 @@ impl AudioNode for MultiSamplerNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self::new(self.name.clone()))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -194,4 +194,12 @@ impl AudioNode for NoiseGeneratorNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -194,4 +194,12 @@ impl AudioNode for OscillatorNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -299,4 +299,12 @@ impl AudioNode for OscilloscopeNode {
fn get_oscilloscope_cv_data(&self, sample_count: usize) -> Option<Vec<f32>> {
Some(self.read_cv_samples(sample_count))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -93,4 +93,12 @@ impl AudioNode for AudioOutputNode {
outputs: self.outputs.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -165,4 +165,12 @@ impl AudioNode for PanNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -286,4 +286,12 @@ impl AudioNode for PhaserNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -221,4 +221,12 @@ impl AudioNode for QuantizerNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -310,4 +310,12 @@ impl AudioNode for ReverbNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self::new(self.name.clone()))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -134,4 +134,12 @@ impl AudioNode for RingModulatorNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -134,4 +134,12 @@ impl AudioNode for SampleHoldNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -275,4 +275,12 @@ impl AudioNode for SimpleSamplerNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self::new(self.name.clone()))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -153,4 +153,12 @@ impl AudioNode for SlewLimiterNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -101,4 +101,12 @@ impl AudioNode for SplitterNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -85,6 +85,14 @@ impl AudioNode for TemplateInputNode {
fn handle_midi(&mut self, _event: &MidiEvent) {
// Pass through to connected nodes
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}
/// Template Output node - represents the audio output from one voice in a VoiceAllocator
@ -173,4 +181,12 @@ impl AudioNode for TemplateOutputNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -359,4 +359,12 @@ impl AudioNode for VocoderNode {
node.setup_bands();
Box::new(node)
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -288,7 +288,8 @@ impl AudioNode for VoiceAllocatorNode {
mix_slice.fill(0.0);
// Process this voice's graph with its MIDI events
// Note: playback_time is 0.0 since voice allocator doesn't track time
self.voice_instances[voice_idx].process(mix_slice, &midi_events, 0.0);
// Mix into output (accumulate)
for (i, sample) in mix_slice.iter().enumerate() {
@ -341,4 +342,12 @@ impl AudioNode for VoiceAllocatorNode {
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -283,4 +283,12 @@ impl AudioNode for WavetableOscillatorNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self::new(self.name.clone()))
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -383,7 +383,7 @@ impl MidiTrack {
// Create a silent buffer to process the note-offs
let buffer_size = 512 * 2; // stereo
let mut silent_buffer = vec![0.0f32; buffer_size];
self.instrument_graph.process(&mut silent_buffer, &note_offs, 0.0);
}
/// Queue a live MIDI event (from virtual keyboard or MIDI controller)
@ -405,7 +405,7 @@ impl MidiTrack {
_channels: u32,
) {
// Generate audio using instrument graph with live MIDI events
self.instrument_graph.process(output, &self.live_midi_queue, 0.0);
// Clear the queue after processing
self.live_midi_queue.clear();
@ -445,7 +445,7 @@ impl MidiTrack {
midi_events.extend(self.live_midi_queue.drain(..));
// Generate audio using instrument graph
self.instrument_graph.process(output, &midi_events, playhead_seconds);
// Evaluate and apply automation
let effective_volume = self.evaluate_automation_at_time(playhead_seconds);

View File

@ -149,6 +149,14 @@ pub enum Command {
MultiSamplerUpdateLayer(TrackId, u32, usize, u8, u8, u8, u8, u8),
/// Remove a layer from a MultiSampler node (track_id, node_id, layer_index)
MultiSamplerRemoveLayer(TrackId, u32, usize),
// Automation Input Node commands
/// Add or update a keyframe on an AutomationInput node (track_id, node_id, time, value, interpolation, ease_out, ease_in)
AutomationAddKeyframe(TrackId, u32, f64, f32, String, (f32, f32), (f32, f32)),
/// Remove a keyframe from an AutomationInput node (track_id, node_id, time)
AutomationRemoveKeyframe(TrackId, u32, f64),
/// Set the display name of an AutomationInput node (track_id, node_id, name)
AutomationSetName(TrackId, u32, String),
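// Illustrative: AutomationAddKeyframe(track, node, 1.5, 0.8, "bezier".to_string(), (0.58, 1.0), (0.42, 0.0))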
} }
/// Events sent from audio thread back to UI/control thread
@ -212,6 +220,10 @@ pub enum Query {
GetOscilloscopeData(TrackId, u32, usize),
/// Get MIDI clip data (track_id, clip_id)
GetMidiClip(TrackId, MidiClipId),
/// Get keyframes from an AutomationInput node (track_id, node_id)
GetAutomationKeyframes(TrackId, u32),
/// Get the display name of an AutomationInput node (track_id, node_id)
GetAutomationName(TrackId, u32),
} }
/// Oscilloscope data from a node
@ -230,6 +242,16 @@ pub struct MidiClipData {
pub events: Vec<crate::audio::midi::MidiEvent>,
}
/// Automation keyframe data for serialization
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct AutomationKeyframeData {
pub time: f64,
pub value: f32,
pub interpolation: String,
pub ease_out: (f32, f32),
pub ease_in: (f32, f32),
}
/// Responses to synchronous queries
#[derive(Debug)]
pub enum QueryResponse {
@ -239,4 +261,8 @@ pub enum QueryResponse {
OscilloscopeData(Result<OscilloscopeData, String>),
/// MIDI clip data
MidiClipData(Result<MidiClipData, String>),
/// Automation keyframes
AutomationKeyframes(Result<Vec<AutomationKeyframeData>, String>),
/// Automation node name
AutomationName(Result<String, String>),
}

View File

@ -19,6 +19,7 @@ pub use audio::{
};
pub use audio::node_graph::{GraphPreset, InstrumentGraph, PresetMetadata, SerializedConnection, SerializedNode};
pub use command::{AudioEvent, Command, OscilloscopeData};
pub use command::types::AutomationKeyframeData;
pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

View File

@ -1208,6 +1208,105 @@ pub async fn get_oscilloscope_data(
}
}
// ===== Automation Input Node Commands =====
#[tauri::command]
pub async fn automation_add_keyframe(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
node_id: u32,
keyframe: daw_backend::AutomationKeyframeData,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.send_command(daw_backend::Command::AutomationAddKeyframe(
track_id,
node_id,
keyframe.time,
keyframe.value,
keyframe.interpolation,
keyframe.ease_out,
keyframe.ease_in,
));
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
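// Illustrative frontend call (Tauri camel-cases argument names by default;
// the keyframe fields keep their Rust names via serde):
//   await invoke('automation_add_keyframe', {
//     trackId, nodeId,
//     keyframe: { time: 0.0, value: 0.5, interpolation: 'linear',
//                 ease_out: [0.58, 1.0], ease_in: [0.42, 0.0] },
//   });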
#[tauri::command]
pub async fn automation_remove_keyframe(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
node_id: u32,
time: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.send_command(daw_backend::Command::AutomationRemoveKeyframe(
track_id,
node_id,
time,
));
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn automation_get_keyframes(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
node_id: u32,
) -> Result<Vec<daw_backend::AutomationKeyframeData>, String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.query_automation_keyframes(track_id, node_id)
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn automation_set_name(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
node_id: u32,
name: String,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.send_command(daw_backend::Command::AutomationSetName(
track_id,
node_id,
name,
));
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn automation_get_name(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
node_id: u32,
) -> Result<String, String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.query_automation_name(track_id, node_id)
} else {
Err("Audio not initialized".to_string())
}
}
#[derive(serde::Serialize, Clone)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {

View File

@ -236,6 +236,11 @@ pub fn run() {
audio::multi_sampler_update_layer,
audio::multi_sampler_remove_layer,
audio::get_oscilloscope_data,
audio::automation_add_keyframe,
audio::automation_remove_keyframe,
audio::automation_get_keyframes,
audio::automation_set_name,
audio::automation_get_name,
])
// .manage(window_counter)
.build(tauri::generate_context!())

View File

@ -34,6 +34,127 @@ function uuidv4() {
);
}
/**
* Initialize a timeline curve for an AutomationInput node
* Creates the curve with a default keyframe at time 0
* @param {number} trackId - Track ID
* @param {number} nodeId - Backend node ID
*/
async function initializeAutomationCurve(trackId, nodeId) {
try {
// Find the audio/MIDI track
const track = context.activeObject.audioTracks?.find(t => t.audioTrackId === trackId);
if (!track) {
console.error(`Track ${trackId} not found`);
return;
}
// Create curve parameter name: "automation.{nodeId}"
const curveName = `automation.${nodeId}`;
// Check if curve already exists
if (track.animationData.curves[curveName]) {
console.log(`Curve ${curveName} already exists`);
return;
}
// Create the curve with a default keyframe at time 0, value 0
const curve = track.animationData.getOrCreateCurve(curveName);
curve.addKeyframe({
time: 0,
value: 0,
interpolation: 'linear',
easeIn: { x: 0.42, y: 0 },
easeOut: { x: 0.58, y: 1 },
idx: `${Date.now()}-${Math.random()}`
});
console.log(`Initialized automation curve: ${curveName}`);
// Redraw timeline if it's open
if (context.timeline?.requestRedraw) {
context.timeline.requestRedraw();
}
} catch (err) {
console.error('Failed to initialize automation curve:', err);
}
}
/**
* Update automation node name based on its connection
* If the source node is an AutomationInput, generate a friendly name from the target
* @param {number} trackId - Track ID
* @param {number} fromNode - Source node ID
* @param {number} toNode - Target node ID
* @param {string} toPortClass - Target port name (frontend)
*/
async function updateAutomationName(trackId, fromNode, toNode, toPortClass) {
try {
// Get the full graph state to find node types and port information
const graphStateJson = await invoke('graph_get_state', { trackId });
const graphState = JSON.parse(graphStateJson);
// Find the source node
const sourceNode = graphState.nodes.find(n => n.id === fromNode);
if (!sourceNode || sourceNode.node_type !== 'AutomationInput') {
return; // Not an AutomationInput, nothing to do
}
// Find the target node
const targetNode = graphState.nodes.find(n => n.id === toNode);
if (!targetNode) {
return;
}
// Find the connection from this AutomationInput to the target node
const connection = graphState.connections.find(c =>
c.from_node === fromNode && c.to_node === toNode
);
if (!connection) {
return;
}
// Use the backend port name from the connection
// This will be something like "cutoff", "frequency", etc.
const portName = connection.to_port;
// Generate a friendly name: "{TargetType} {PortName}"
// e.g., "Filter cutoff" or "Oscillator frequency"
const name = `${targetNode.node_type} ${portName}`;
// Set the automation name in the backend
await invoke('automation_set_name', {
trackId: trackId,
nodeId: fromNode,
name
});
// Update the node UI display if the node editor is open
if (context.nodeEditor) {
const nameElement = document.getElementById(`automation-name-${fromNode}`);
if (nameElement) {
nameElement.textContent = name;
}
}
// Invalidate the timeline cache for this automation node
if (context.timelineWidget) {
const cacheKey = `${trackId}:${fromNode}`;
context.timelineWidget.automationNameCache.delete(cacheKey);
// Trigger a redraw to fetch and display the new name
if (context.timelineWidget.requestRedraw) {
context.timelineWidget.requestRedraw();
}
}
console.log(`Auto-named automation node ${fromNode}: "${name}"`);
} catch (err) {
console.error('Failed to update automation name:', err);
}
}
// Dependencies that will be injected
let undoStack = null;
let redoStack = null;
@ -56,6 +177,9 @@ let config = null;
* @param {Function} deps.invoke - Tauri invoke function
* @param {Object} deps.config - Application config object
*/
// Export the auto-naming function for use in main.js
export { updateAutomationName };
export function initializeActions(deps) {
undoStack = deps.undoStack;
redoStack = deps.redoStack;
@ -1977,6 +2101,12 @@ export const actions = {
posX: action.position.x,
posY: action.position.y
});
// If this is an AutomationInput node, create a timeline curve for it
if (action.nodeType === 'AutomationInput') {
await initializeAutomationCurve(action.trackId, result);
}
// Reload the entire graph to show the restored node
if (context.reloadNodeEditor) {
await context.reloadNodeEditor();
@ -2083,6 +2213,9 @@ export const actions = {
);
}
}
// Auto-name AutomationInput nodes when connected
await updateAutomationName(action.trackId, action.fromNode, action.toNode, action.toPortClass);
} finally {
if (context.nodeEditorState) {
context.nodeEditorState.suppressActionRecording = false;

View File

@ -1,5 +1,48 @@
<svg width="100" height="100" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="80"
height="80"
viewBox="0 0 100 100"
version="1.1"
id="svg1"
sodipodi:docname="focus-animation.svg"
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs1" />
<sodipodi:namedview
id="namedview1"
pagecolor="#505050"
bordercolor="#ffffff"
borderopacity="1"
inkscape:showpageshadow="0"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="1"
inkscape:deskcolor="#505050"
inkscape:zoom="14.7625"
inkscape:cx="39.96613"
inkscape:cy="40"
inkscape:window-width="2256"
inkscape:window-height="1432"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg1" />
<!-- Placeholder animation/drawing icon -->
<path
d="M20,80 Q30,60 50,50 T80,20"
stroke="currentColor"
stroke-width="4"
fill="none"
stroke-linecap="round"
id="path1" />
<circle
cx="30"
cy="65"
r="8"
fill="currentColor"
id="circle1" />
</svg>

Before: 310 B · After: 1.3 KiB

View File

@ -1,10 +1,93 @@
<svg width="100" height="100" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="80"
height="80"
viewBox="0 0 100 100"
version="1.1"
id="svg7"
sodipodi:docname="focus-music.svg"
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs7" />
<sodipodi:namedview
id="namedview7"
pagecolor="#505050"
bordercolor="#ffffff"
borderopacity="1"
inkscape:showpageshadow="0"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="1"
inkscape:deskcolor="#505050"
inkscape:zoom="14.7625"
inkscape:cx="39.96613"
inkscape:cy="40"
inkscape:window-width="2256"
inkscape:window-height="1432"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg7" />
<!-- Placeholder music/piano icon -->
<rect
x="10"
y="20"
width="18"
height="60"
fill="none"
stroke="currentColor"
stroke-width="3"
id="rect1" />
<rect
x="30"
y="20"
width="18"
height="60"
fill="none"
stroke="currentColor"
stroke-width="3"
id="rect2" />
<rect
x="50"
y="20"
width="18"
height="60"
fill="none"
stroke="currentColor"
stroke-width="3"
id="rect3" />
<rect
x="70"
y="20"
width="18"
height="60"
fill="none"
stroke="currentColor"
stroke-width="3"
id="rect4" />
<rect
x="23.237934"
y="20"
width="12"
height="35"
fill="currentColor"
id="rect5" />
<rect
x="43.237934"
y="20"
width="12"
height="35"
fill="currentColor"
id="rect6" />
<rect
x="78.447502"
y="20"
width="10.052498"
height="35"
fill="currentColor"
id="rect7"
style="stroke-width:0.915264" />
</svg>

Before: 728 B · After: 2.0 KiB

View File

@ -1,9 +1,136 @@
<svg width="100" height="100" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="80"
height="80"
viewBox="0 0 100 100"
version="1.1"
id="svg6"
sodipodi:docname="focus-video.svg"
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs6" />
<sodipodi:namedview
id="namedview6"
pagecolor="#505050"
bordercolor="#ffffff"
borderopacity="1"
inkscape:showpageshadow="0"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="1"
inkscape:deskcolor="#505050"
inkscape:zoom="14.7625"
inkscape:cx="39.491956"
inkscape:cy="43.793395"
inkscape:window-width="2256"
inkscape:window-height="1432"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg6" />
<!-- Placeholder video/clapperboard icon -->
<rect
x="15"
y="52.5"
width="70"
height="45"
fill="currentColor"
rx="4"
id="rect1" />
<rect
x="14.910591"
y="39.358093"
width="70.178818"
height="13.231316"
fill="none"
stroke="currentColor"
stroke-width="2.82118"
rx="4.0102181"
id="rect2" />
<rect
x="-2.9535398"
y="44.279011"
width="7"
height="17.320509"
fill="currentColor"
id="rect3"
transform="matrix(1,0,0.5,0.8660254,0,0)"
style="stroke-width:1.07457" />
<rect
x="12.046458"
y="44.279011"
width="7"
height="17.320509"
fill="currentColor"
id="rect4"
transform="matrix(1,0,0.5,0.8660254,0,0)"
style="stroke-width:1.07457" />
<rect
x="27.046455"
y="44.279011"
width="7"
height="17.320509"
fill="currentColor"
id="rect5"
transform="matrix(1,0,0.5,0.8660254,0,0)"
style="stroke-width:1.07457" />
<rect
x="42.046463"
y="44.279011"
width="7"
height="17.320509"
fill="currentColor"
id="rect6"
transform="matrix(1,0,0.5,0.8660254,0,0)"
style="stroke-width:1.07457" />
<rect
x="-0.77646101"
y="29.648504"
width="70.178818"
height="13.231316"
fill="none"
stroke="currentColor"
stroke-width="2.82118"
rx="4.0102181"
id="rect2-2"
transform="rotate(-20)" />
<rect
x="-13.034761"
y="33.067337"
width="7"
height="17.320509"
fill="currentColor"
id="rect3-0"
transform="matrix(0.93969262,-0.34202014,0.76604444,0.64278761,0,0)"
style="stroke-width:1.07457" />
<rect
x="1.9652379"
y="33.067337"
width="7"
height="17.320509"
fill="currentColor"
id="rect4-2"
transform="matrix(0.93969262,-0.34202014,0.76604444,0.64278761,0,0)"
style="stroke-width:1.07457" />
<rect
x="16.96524"
y="33.067337"
width="7"
height="17.320509"
fill="currentColor"
id="rect5-3"
transform="matrix(0.93969262,-0.34202014,0.76604444,0.64278761,0,0)"
style="stroke-width:1.07457" />
<rect
x="31.965237"
y="33.067337"
width="7"
height="17.320509"
fill="currentColor"
id="rect6-7"
transform="matrix(0.93969262,-0.34202014,0.76604444,0.64278761,0,0)"
style="stroke-width:1.07457" />
</svg>

Before: 585 B · After: 3.3 KiB

View File

@ -49,6 +49,7 @@ import {
multiplyMatrices,
growBoundingBox,
createMissingTexturePattern,
distanceToLineSegment,
} from "./utils.js"; } from "./utils.js";
import { import {
backgroundColor, backgroundColor,
@ -107,7 +108,7 @@ import {
initializeGraphicsObjectDependencies
} from "./models/graphics-object.js";
import { createRoot } from "./models/root.js";
import { actions, initializeActions, updateAutomationName } from "./actions/index.js";
// Layout system
import { defaultLayouts, getLayout, getLayoutNames } from "./layouts.js";
@ -1540,6 +1541,11 @@ function _newFile(width, height, fps, layoutKey) {
config.defaultLayout = layoutKey;
console.log('[_newFile] Switching to layout:', layoutKey);
switchLayout(layoutKey);
// Set default time format to measures for music mode
if (layoutKey === 'audioDaw' && context.timelineWidget?.timelineState) {
context.timelineWidget.timelineState.timeFormat = 'measures';
}
}
// Define frameRate as a non-configurable property with a backing variable
@ -4160,6 +4166,8 @@ function timelineV2() {
const currentTime = context.activeObject?.currentTime || 0;
const timeFormat = timelineWidget.timelineState.timeFormat;
const framerate = timelineWidget.timelineState.framerate;
const bpm = timelineWidget.timelineState.bpm;
const timeSignature = timelineWidget.timelineState.timeSignature;
if (timeFormat === 'frames') {
// Frames mode: show frame number and framerate
@ -4173,6 +4181,22 @@ function timelineV2() {
<div class="time-label">FPS</div> <div class="time-label">FPS</div>
</div> </div>
`; `;
} else if (timeFormat === 'measures') {
// Measures mode: show measure.beat, BPM, and time signature
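// timeToMeasure is assumed to convert seconds to beats (time * bpm / 60) and
// split that into 1-based { measure, beat } using the current time signature.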
const { measure, beat } = timelineWidget.timelineState.timeToMeasure(currentTime);
timeDisplay.innerHTML = `
<div class="time-value time-frame-clickable" data-action="toggle-format">${measure}.${beat}</div>
<div class="time-label">BAR</div>
<div class="time-fps-group time-fps-clickable" data-action="edit-bpm">
<div class="time-value">${bpm}</div>
<div class="time-label">BPM</div>
</div>
<div class="time-fps-group time-fps-clickable" data-action="edit-time-signature">
<div class="time-value">${timeSignature.numerator}/${timeSignature.denominator}</div>
<div class="time-label">TIME</div>
</div>
`;
} else {
// Seconds mode: show MM:SS.mmm or HH:MM:SS.mmm
const totalSeconds = Math.floor(currentTime);
@ -4243,6 +4267,129 @@ function timelineV2() {
}
console.log('[FPS Edit] Done');
}
} else if (action === 'edit-bpm') {
// Clicked on BPM - show input to edit BPM
const currentBpm = timelineWidget.timelineState.bpm;
const newBpm = prompt('Enter BPM (Beats Per Minute):', currentBpm);
if (newBpm !== null && !isNaN(newBpm) && newBpm > 0) {
const bpm = parseFloat(newBpm);
timelineWidget.timelineState.bpm = bpm;
context.config.bpm = bpm;
updateTimeDisplay();
if (timelineWidget.requestRedraw) {
timelineWidget.requestRedraw();
}
}
} else if (action === 'edit-time-signature') {
// Clicked on time signature - show custom dropdown with common options
const currentTimeSig = timelineWidget.timelineState.timeSignature;
const currentValue = `${currentTimeSig.numerator}/${currentTimeSig.denominator}`;
// Create a custom dropdown list
const dropdown = document.createElement('div');
dropdown.className = 'time-signature-dropdown';
dropdown.style.position = 'absolute';
dropdown.style.left = e.clientX + 'px';
dropdown.style.top = e.clientY + 'px';
dropdown.style.fontSize = '14px';
dropdown.style.backgroundColor = 'var(--background-color)';
dropdown.style.color = 'var(--label-color)';
dropdown.style.border = '1px solid var(--shadow)';
dropdown.style.borderRadius = '4px';
dropdown.style.zIndex = '10000';
dropdown.style.maxHeight = '300px';
dropdown.style.overflowY = 'auto';
dropdown.style.boxShadow = '0 4px 8px rgba(0,0,0,0.3)';
// Common time signatures
const commonTimeSigs = ['2/4', '3/4', '4/4', '5/4', '6/8', '7/8', '9/8', '12/8', 'Other...'];
commonTimeSigs.forEach(sig => {
const item = document.createElement('div');
item.textContent = sig;
item.style.padding = '8px 12px';
item.style.cursor = 'pointer';
item.style.backgroundColor = 'var(--background-color)';
item.style.color = 'var(--label-color)';
if (sig === currentValue) {
item.style.backgroundColor = 'var(--foreground-color)';
}
item.addEventListener('mouseenter', () => {
item.style.backgroundColor = 'var(--foreground-color)';
});
item.addEventListener('mouseleave', () => {
if (sig !== currentValue) {
item.style.backgroundColor = 'var(--background-color)';
}
});
item.addEventListener('click', () => {
document.body.removeChild(dropdown);
if (sig === 'Other...') {
// Show prompt for custom time signature
const newTimeSig = prompt(
'Enter time signature (e.g., "4/4", "3/4", "6/8"):',
currentValue
);
if (newTimeSig !== null) {
const match = newTimeSig.match(/^(\d+)\/(\d+)$/);
if (match) {
const numerator = parseInt(match[1]);
const denominator = parseInt(match[2]);
if (numerator > 0 && denominator > 0) {
timelineWidget.timelineState.timeSignature = { numerator, denominator };
context.config.timeSignature = { numerator, denominator };
updateTimeDisplay();
if (timelineWidget.requestRedraw) {
timelineWidget.requestRedraw();
}
}
} else {
alert('Invalid time signature format. Please use format like "4/4" or "6/8".');
}
}
} else {
// Parse the selected common time signature
const match = sig.match(/^(\d+)\/(\d+)$/);
if (match) {
const numerator = parseInt(match[1]);
const denominator = parseInt(match[2]);
timelineWidget.timelineState.timeSignature = { numerator, denominator };
context.config.timeSignature = { numerator, denominator };
updateTimeDisplay();
if (timelineWidget.requestRedraw) {
timelineWidget.requestRedraw();
}
}
}
});
dropdown.appendChild(item);
});
document.body.appendChild(dropdown);
dropdown.focus();
// Close dropdown when clicking outside
const closeDropdown = (event) => {
if (!dropdown.contains(event.target)) {
if (document.body.contains(dropdown)) {
document.body.removeChild(dropdown);
}
document.removeEventListener('click', closeDropdown);
}
};
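// Register on the next tick: adding the document listener synchronously
// would catch the very click that opened the dropdown and dismiss it immediately.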
setTimeout(() => {
document.addEventListener('click', closeDropdown);
}, 0);
} }
}); });
@ -6814,6 +6961,15 @@ function nodeEditor() {
drawflowDiv.addEventListener('dragover', (e) => { drawflowDiv.addEventListener('dragover', (e) => {
e.preventDefault(); e.preventDefault();
e.dataTransfer.dropEffect = 'copy'; e.dataTransfer.dropEffect = 'copy';
// Check if dragging over a connection for insertion
const nodeType = e.dataTransfer.getData('text/plain') || draggedNodeType;
if (nodeType) {
const nodeDef = nodeTypes[nodeType];
if (nodeDef) {
checkConnectionInsertionDuringDrag(e, nodeDef);
}
}
}); });
drawflowDiv.addEventListener('drop', (e) => { drawflowDiv.addEventListener('drop', (e) => {
@ -6873,10 +7029,21 @@ function nodeEditor() {
// Add the node // Add the node
console.log(`Adding node ${nodeType} at (${x}, ${y}) with parent ${parentNodeId}`); console.log(`Adding node ${nodeType} at (${x}, ${y}) with parent ${parentNodeId}`);
addNode(nodeType, x, y, parentNodeId); const newNodeId = addNode(nodeType, x, y, parentNodeId);
// Clear the draggedNodeType // Check if we should insert into a connection
if (pendingInsertionFromDrag && newNodeId) {
console.log('Pending insertion detected, will insert node into connection');
// Defer insertion until after node is fully created
setTimeout(() => {
performConnectionInsertion(newNodeId, pendingInsertionFromDrag);
pendingInsertionFromDrag = null;
}, 100);
}
// Clear the draggedNodeType and highlights
draggedNodeType = null; draggedNodeType = null;
clearConnectionHighlights();
}); });
// Connection event handlers // Connection event handlers
@ -6905,7 +7072,10 @@ function nodeEditor() {
}, 50); }, 50);
}); });
// Track node drag start for undo/redo // Track which node is being dragged
let draggingNodeId = null;
// Track node drag start for undo/redo and connection insertion
drawflowDiv.addEventListener('mousedown', (e) => { drawflowDiv.addEventListener('mousedown', (e) => {
const nodeElement = e.target.closest('.drawflow-node'); const nodeElement = e.target.closest('.drawflow-node');
if (nodeElement && !e.target.closest('.input') && !e.target.closest('.output')) { if (nodeElement && !e.target.closest('.input') && !e.target.closest('.output')) {
@ -6913,39 +7083,61 @@ function nodeEditor() {
const node = editor.getNodeFromId(nodeId); const node = editor.getNodeFromId(nodeId);
if (node) { if (node) {
nodeMoveTracker.set(nodeId, { x: node.pos_x, y: node.pos_y }); nodeMoveTracker.set(nodeId, { x: node.pos_x, y: node.pos_y });
draggingNodeId = nodeId;
} }
} }
}); });
// Node moved - resize parent VoiceAllocator // Check for connection insertion while dragging existing nodes
drawflowDiv.addEventListener('mousemove', (e) => {
if (draggingNodeId !== null) {
checkConnectionInsertion(draggingNodeId);
}
});
// Node moved - resize parent VoiceAllocator and check for connection insertion
editor.on("nodeMoved", (nodeId) => { editor.on("nodeMoved", (nodeId) => {
const node = editor.getNodeFromId(nodeId); const node = editor.getNodeFromId(nodeId);
if (node && node.data.parentNodeId) { if (node && node.data.parentNodeId) {
resizeVoiceAllocatorToFit(node.data.parentNodeId); resizeVoiceAllocatorToFit(node.data.parentNodeId);
} }
// Check if node should be inserted into a connection
checkConnectionInsertion(nodeId);
}); });
// Track node drag end for undo/redo // Track node drag end for undo/redo and handle connection insertion
drawflowDiv.addEventListener('mouseup', (e) => { drawflowDiv.addEventListener('mouseup', (e) => {
// Check all tracked nodes for position changes // Check all tracked nodes for position changes and pending insertions
for (const [nodeId, oldPos] of nodeMoveTracker.entries()) { for (const [nodeId, oldPos] of nodeMoveTracker.entries()) {
const node = editor.getNodeFromId(nodeId); const node = editor.getNodeFromId(nodeId);
if (node && (node.pos_x !== oldPos.x || node.pos_y !== oldPos.y)) { const hasPendingInsertion = pendingNodeInsertions.has(nodeId);
// Position changed - record action
redoStack.length = 0; if (node) {
undoStack.push({ // Check for pending insertion first
name: "graphMoveNode", if (hasPendingInsertion) {
action: { const insertionMatch = pendingNodeInsertions.get(nodeId);
nodeId: nodeId, performConnectionInsertion(nodeId, insertionMatch);
oldPosition: oldPos, pendingNodeInsertions.delete(nodeId);
newPosition: { x: node.pos_x, y: node.pos_y } } else if (node.pos_x !== oldPos.x || node.pos_y !== oldPos.y) {
} // Position changed - record action
}); redoStack.length = 0;
updateMenu(); undoStack.push({
name: "graphMoveNode",
action: {
nodeId: nodeId,
oldPosition: oldPos,
newPosition: { x: node.pos_x, y: node.pos_y }
}
});
updateMenu();
}
} }
} }
// Clear tracker // Clear tracker, dragging state, and highlights
nodeMoveTracker.clear(); nodeMoveTracker.clear();
draggingNodeId = null;
clearConnectionHighlights();
}); });
// Node removed - prevent deletion of template nodes // Node removed - prevent deletion of template nodes
@ -7171,6 +7363,39 @@ function nodeEditor() {
} }
} }
// If this is an AutomationInput node, create timeline curve
if (nodeType === "AutomationInput" && !parentNodeId) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
// Find the audio/MIDI track
const track = root.audioTracks?.find(t => t.audioTrackId === currentTrackId);
if (track) {
// Create curve parameter name: "automation.{nodeId}"
const curveName = `automation.${backendNodeId}`;
// Check if curve already exists
if (!track.animationData.curves[curveName]) {
// Create the curve with a default keyframe at time 0, value 0
const curve = track.animationData.getOrCreateCurve(curveName);
curve.addKeyframe({
time: 0,
value: 0,
interpolation: 'linear',
easeIn: { x: 0.42, y: 0 },
easeOut: { x: 0.58, y: 1 },
idx: `${Date.now()}-${Math.random()}`
});
console.log(`Initialized automation curve: ${curveName}`);
// Redraw timeline if it's open
if (context.timeline?.requestRedraw) {
context.timeline.requestRedraw();
}
}
}
}
}
// If this is an Oscilloscope node, start the visualization // If this is an Oscilloscope node, start the visualization
if (nodeType === "Oscilloscope") { if (nodeType === "Oscilloscope") {
const currentTrackId = getCurrentMidiTrack(); const currentTrackId = getCurrentMidiTrack();
@ -7200,6 +7425,8 @@ function nodeEditor() {
console.error("Failed to add node to backend:", err); console.error("Failed to add node to backend:", err);
showError("Failed to add node: " + err); showError("Failed to add node: " + err);
}); });
return drawflowNodeId;
} }
// Auto-resize VoiceAllocator to fit its child nodes // Auto-resize VoiceAllocator to fit its child nodes
@ -7812,6 +8039,461 @@ function nodeEditor() {
} }
} }
// Push nodes away from a point using gaussian falloff
function pushNodesAway(centerX, centerY, maxDistance, excludeNodeId) {
const module = editor.module;
const allNodes = editor.drawflow.drawflow[module]?.data || {};
// Gaussian parameters
const sigma = maxDistance / 3; // Standard deviation for falloff
const maxPush = 150; // Maximum push distance at the center
for (const [id, node] of Object.entries(allNodes)) {
const nodeIdNum = parseInt(id);
if (nodeIdNum === excludeNodeId) continue;
// Calculate distance from center
const dx = node.pos_x - centerX;
const dy = node.pos_y - centerY;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance < maxDistance && distance > 0) {
// Calculate push strength using gaussian falloff
const falloff = Math.exp(-(distance * distance) / (2 * sigma * sigma));
const pushStrength = maxPush * falloff;
// Calculate push direction (normalized)
const dirX = dx / distance;
const dirY = dy / distance;
// Calculate new position
const newX = node.pos_x + dirX * pushStrength;
const newY = node.pos_y + dirY * pushStrength;
// Update position in the data structure
node.pos_x = newX;
node.pos_y = newY;
// Update the DOM element position
const nodeElement = document.getElementById(`node-${nodeIdNum}`);
if (nodeElement) {
nodeElement.style.left = newX + 'px';
nodeElement.style.top = newY + 'px';
}
// Trigger connection redraw
editor.updateConnectionNodes(`node-${nodeIdNum}`);
}
}
}
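A quick worked example of the falloff above (a sketch, assuming the 400px radius that performConnectionInsertion passes in, so sigma = 400/3 ≈ 133px and maxPush = 150px):
// push = 150 * exp(-d^2 / (2 * sigma^2))
//   d = 133px -> exp(-0.5) ≈ 0.61 -> node pushed ~91px
//   d = 267px -> exp(-2.0) ≈ 0.14 -> node pushed ~20px
//   d = 399px -> exp(-4.5) ≈ 0.01 -> node pushed ~2px
// (at d >= maxDistance the `distance < maxDistance` guard skips the node entirely)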
// Perform the actual connection insertion
function performConnectionInsertion(nodeId, match) {
const node = editor.getNodeFromId(nodeId);
const sourceNode = editor.getNodeFromId(match.sourceNodeId);
const targetNode = editor.getNodeFromId(match.targetNodeId);
if (!node || !sourceNode || !targetNode) {
console.error("Missing nodes for insertion");
return;
}
// Position the node between source and target
const sourceElement = document.getElementById(`node-${match.sourceNodeId}`);
const targetElement = document.getElementById(`node-${match.targetNodeId}`);
if (sourceElement && targetElement) {
const sourceRect = sourceElement.getBoundingClientRect();
const targetRect = targetElement.getBoundingClientRect();
// Calculate midpoint position
const newX = (sourceNode.pos_x + sourceRect.width + targetNode.pos_x) / 2 - 80; // Approximate node half-width
const newY = (sourceNode.pos_y + targetNode.pos_y) / 2 - 50; // Approximate node half-height
// Update node position in data structure
node.pos_x = newX;
node.pos_y = newY;
// Update the DOM element position
const nodeElement = document.getElementById(`node-${nodeId}`);
if (nodeElement) {
nodeElement.style.left = newX + 'px';
nodeElement.style.top = newY + 'px';
}
// Trigger connection redraw for this node
editor.updateConnectionNodes(`node-${nodeId}`);
// Push surrounding nodes away with gaussian falloff
pushNodesAway(newX, newY, 400, nodeId); // 400px influence radius
}
// Remove the old connection
suppressActionRecording = true;
editor.removeSingleConnection(
match.sourceNodeId,
match.targetNodeId,
match.sourceOutputClass,
match.targetInputClass
);
// Create new connections: source -> node -> target
// Connection 1: source output -> node input
setTimeout(() => {
editor.addConnection(
match.sourceNodeId,
nodeId,
match.sourceOutputClass,
`input_${match.nodeInputPort + 1}`
);
// Connection 2: node output -> target input
setTimeout(() => {
editor.addConnection(
nodeId,
match.targetNodeId,
`output_${match.nodeOutputPort + 1}`,
match.targetInputClass
);
suppressActionRecording = false;
}, 50);
}, 50);
}
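For reference, the rewiring above lands in this order; the 50ms staggers give Drawflow time to settle between steps:
// t = 0ms    removeSingleConnection(source -> target), action recording suppressed
// t = 50ms   addConnection(source -> inserted node input)
// t = 100ms  addConnection(inserted node output -> target), recording re-enabled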
// Check if cursor position during drag is near a connection
function checkConnectionInsertionDuringDrag(dragEvent, nodeDef) {
const drawflowDiv = container.querySelector("#drawflow");
if (!drawflowDiv || !editor) return;
const rect = drawflowDiv.getBoundingClientRect();
const canvasX = editor.canvas_x || 0;
const canvasY = editor.canvas_y || 0;
const zoom = editor.zoom || 1;
// Calculate cursor position in canvas coordinates
const cursorX = (dragEvent.clientX - rect.left - canvasX) / zoom;
const cursorY = (dragEvent.clientY - rect.top - canvasY) / zoom;
// Get all connections in the current module
const module = editor.module;
const allNodes = editor.drawflow.drawflow[module]?.data || {};
// Distance threshold for insertion (in pixels)
const insertionThreshold = 30;
let bestMatch = null;
let bestDistance = insertionThreshold;
// Check each connection
for (const [sourceNodeId, sourceNode] of Object.entries(allNodes)) {
for (const [outputKey, outputData] of Object.entries(sourceNode.outputs)) {
for (const connection of outputData.connections) {
const targetNodeId = connection.node;
const targetNode = allNodes[targetNodeId];
if (!targetNode) continue;
// Get source and target positions
const sourceElement = document.getElementById(`node-${sourceNodeId}`);
const targetElement = document.getElementById(`node-${targetNodeId}`);
if (!sourceElement || !targetElement) continue;
const sourceRect = sourceElement.getBoundingClientRect();
const targetRect = targetElement.getBoundingClientRect();
// Calculate output port position (right side of source node)
const sourceX = sourceNode.pos_x + sourceRect.width;
const sourceY = sourceNode.pos_y + sourceRect.height / 2;
// Calculate input port position (left side of target node)
const targetX = targetNode.pos_x;
const targetY = targetNode.pos_y + targetRect.height / 2;
// Calculate distance from cursor to connection line
const distance = distanceToLineSegment(
cursorX, cursorY,
sourceX, sourceY,
targetX, targetY
);
// Check if this is the closest connection
if (distance < bestDistance) {
// Check port compatibility
const sourcePortIndex = parseInt(outputKey.replace('output_', '')) - 1;
const targetPortIndex = parseInt(connection.output.replace('input_', '')) - 1;
const sourceDef = nodeTypes[sourceNode.name];
const targetDef = nodeTypes[targetNode.name];
if (!sourceDef || !targetDef) continue;
// Get the signal type of the connection
if (sourcePortIndex >= sourceDef.outputs.length ||
targetPortIndex >= targetDef.inputs.length) continue;
const connectionType = sourceDef.outputs[sourcePortIndex].type;
// Check if the dragged node has compatible input and output
let compatibleInputIndex = -1;
let compatibleOutputIndex = -1;
// Find first compatible input and output
for (let i = 0; i < nodeDef.inputs.length; i++) {
if (nodeDef.inputs[i].type === connectionType) {
compatibleInputIndex = i;
break;
}
}
for (let i = 0; i < nodeDef.outputs.length; i++) {
if (nodeDef.outputs[i].type === connectionType) {
compatibleOutputIndex = i;
break;
}
}
if (compatibleInputIndex !== -1 && compatibleOutputIndex !== -1) {
bestDistance = distance;
bestMatch = {
sourceNodeId: parseInt(sourceNodeId),
targetNodeId: parseInt(targetNodeId),
sourcePort: sourcePortIndex,
targetPort: targetPortIndex,
nodeInputPort: compatibleInputIndex,
nodeOutputPort: compatibleOutputIndex,
connectionType: connectionType,
sourceOutputClass: outputKey,
targetInputClass: connection.output,
insertX: cursorX,
insertY: cursorY
};
}
}
}
}
}
// If we found a match, highlight the connection and store it
if (bestMatch) {
highlightConnectionForInsertion(bestMatch);
pendingInsertionFromDrag = bestMatch;
} else {
clearConnectionHighlights();
pendingInsertionFromDrag = null;
}
}
// Check if a node can be inserted into a connection
function checkConnectionInsertion(nodeId) {
const node = editor.getNodeFromId(nodeId);
if (!node) return;
const nodeDef = nodeTypes[node.name];
if (!nodeDef) return;
// Check if node has any connections - skip if it does
let hasConnections = false;
for (const [inputKey, inputData] of Object.entries(node.inputs)) {
if (inputData.connections && inputData.connections.length > 0) {
hasConnections = true;
break;
}
}
if (!hasConnections) {
for (const [outputKey, outputData] of Object.entries(node.outputs)) {
if (outputData.connections && outputData.connections.length > 0) {
hasConnections = true;
break;
}
}
}
if (hasConnections) {
clearConnectionHighlights();
pendingNodeInsertions.delete(nodeId);
return;
}
// Get node center position
const nodeElement = document.getElementById(`node-${nodeId}`);
if (!nodeElement) return;
const nodeRect = nodeElement.getBoundingClientRect();
const nodeCenterX = node.pos_x + nodeRect.width / 2;
const nodeCenterY = node.pos_y + nodeRect.height / 2;
// Get all connections in the current module
const module = editor.module;
const allNodes = editor.drawflow.drawflow[module]?.data || {};
// Distance threshold for insertion (in pixels)
const insertionThreshold = 30;
let bestMatch = null;
let bestDistance = insertionThreshold;
// Check each connection
for (const [sourceNodeId, sourceNode] of Object.entries(allNodes)) {
if (parseInt(sourceNodeId) === nodeId) continue; // Skip the node being dragged
for (const [outputKey, outputData] of Object.entries(sourceNode.outputs)) {
for (const connection of outputData.connections) {
const targetNodeId = connection.node;
const targetNode = allNodes[targetNodeId];
if (!targetNode || parseInt(targetNodeId) === nodeId) continue;
// Get source and target positions
const sourceElement = document.getElementById(`node-${sourceNodeId}`);
const targetElement = document.getElementById(`node-${targetNodeId}`);
if (!sourceElement || !targetElement) continue;
const sourceRect = sourceElement.getBoundingClientRect();
const targetRect = targetElement.getBoundingClientRect();
// Calculate output port position (right side of source node)
const sourceX = sourceNode.pos_x + sourceRect.width;
const sourceY = sourceNode.pos_y + sourceRect.height / 2;
// Calculate input port position (left side of target node)
const targetX = targetNode.pos_x;
const targetY = targetNode.pos_y + targetRect.height / 2;
// Calculate distance from node center to connection line
const distance = distanceToLineSegment(
nodeCenterX, nodeCenterY,
sourceX, sourceY,
targetX, targetY
);
// Check if this is the closest connection
if (distance < bestDistance) {
// Check port compatibility
const sourcePortIndex = parseInt(outputKey.replace('output_', '')) - 1;
const targetPortIndex = parseInt(connection.output.replace('input_', '')) - 1;
const sourceDef = nodeTypes[sourceNode.name];
const targetDef = nodeTypes[targetNode.name];
if (!sourceDef || !targetDef) continue;
// Get the signal type of the connection
if (sourcePortIndex >= sourceDef.outputs.length ||
targetPortIndex >= targetDef.inputs.length) continue;
const connectionType = sourceDef.outputs[sourcePortIndex].type;
// Check if the dragged node has compatible input and output
let hasCompatibleInput = false;
let hasCompatibleOutput = false;
let compatibleInputIndex = -1;
let compatibleOutputIndex = -1;
// Find first compatible input and output
for (let i = 0; i < nodeDef.inputs.length; i++) {
if (nodeDef.inputs[i].type === connectionType) {
hasCompatibleInput = true;
compatibleInputIndex = i;
break;
}
}
for (let i = 0; i < nodeDef.outputs.length; i++) {
if (nodeDef.outputs[i].type === connectionType) {
hasCompatibleOutput = true;
compatibleOutputIndex = i;
break;
}
}
if (hasCompatibleInput && hasCompatibleOutput) {
bestDistance = distance;
bestMatch = {
sourceNodeId: parseInt(sourceNodeId),
targetNodeId: parseInt(targetNodeId),
sourcePort: sourcePortIndex,
targetPort: targetPortIndex,
nodeInputPort: compatibleInputIndex,
nodeOutputPort: compatibleOutputIndex,
connectionType: connectionType,
sourceOutputClass: outputKey,
targetInputClass: connection.output
};
}
}
}
}
}
// If we found a match, highlight the connection
if (bestMatch) {
highlightConnectionForInsertion(bestMatch);
// Store the match in the Map for use on mouseup
pendingNodeInsertions.set(nodeId, bestMatch);
} else {
clearConnectionHighlights();
pendingNodeInsertions.delete(nodeId);
}
}
// Track which connection is highlighted for insertion
let highlightedConnection = null;
let highlightInterval = null;
let pendingInsertionFromDrag = null;
// Track pending insertions for existing nodes being dragged
const pendingNodeInsertions = new Map(); // nodeId -> insertion match
// Apply highlight to the tracked connection
function applyConnectionHighlight() {
if (!highlightedConnection) return;
const connectionElement = document.querySelector(
`.connection.node_in_node-${highlightedConnection.targetNodeId}.node_out_node-${highlightedConnection.sourceNodeId}`
);
if (connectionElement && !connectionElement.classList.contains('connection-insertion-highlight')) {
connectionElement.classList.add('connection-insertion-highlight');
}
}
// Highlight a connection that can receive the node
function highlightConnectionForInsertion(match) {
// Store the connection to highlight
highlightedConnection = match;
// Clear any existing interval
if (highlightInterval) {
clearInterval(highlightInterval);
}
// Apply highlight immediately
applyConnectionHighlight();
// Keep re-applying in case Drawflow redraws
highlightInterval = setInterval(applyConnectionHighlight, 50);
}
// Clear connection insertion highlights
function clearConnectionHighlights() {
// Stop the interval
if (highlightInterval) {
clearInterval(highlightInterval);
highlightInterval = null;
}
highlightedConnection = null;
// Remove all highlight classes
document.querySelectorAll('.connection-insertion-highlight').forEach(el => {
el.classList.remove('connection-insertion-highlight');
});
}
// Handle connection creation // Handle connection creation
function handleConnectionCreated(connection) { function handleConnectionCreated(connection) {
console.log("handleConnectionCreated called:", connection); console.log("handleConnectionCreated called:", connection);
@ -7973,7 +8655,7 @@ function nodeEditor() {
fromPort: outputPort, fromPort: outputPort,
toNode: inputNode.data.backendId, toNode: inputNode.data.backendId,
toPort: inputPort toPort: inputPort
}).then(() => { }).then(async () => {
console.log("Connection successful"); console.log("Connection successful");
// Record action for undo // Record action for undo
@ -7993,6 +8675,15 @@ function nodeEditor() {
toPortClass: connection.input_class toPortClass: connection.input_class
} }
}); });
// Auto-name AutomationInput nodes when connected
await updateAutomationName(
currentTrackId,
outputNode.data.backendId,
inputNode.data.backendId,
connection.input_class
);
updateMenu(); updateMenu();
}).catch(err => { }).catch(err => {
console.error("Failed to connect nodes:", err); console.error("Failed to connect nodes:", err);

View File

@ -2,6 +2,9 @@
import { context, config, pointerList, startProps } from '../state.js'; import { context, config, pointerList, startProps } from '../state.js';
// Get invoke from Tauri global
const { invoke } = window.__TAURI__.core;
// Helper function for UUID generation // Helper function for UUID generation
function uuidv4() { function uuidv4() {
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) => return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) =>
@ -181,6 +184,9 @@ class AnimationCurve {
existingKeyframe.interpolation = keyframe.interpolation; existingKeyframe.interpolation = keyframe.interpolation;
if (keyframe.easeIn) existingKeyframe.easeIn = keyframe.easeIn; if (keyframe.easeIn) existingKeyframe.easeIn = keyframe.easeIn;
if (keyframe.easeOut) existingKeyframe.easeOut = keyframe.easeOut; if (keyframe.easeOut) existingKeyframe.easeOut = keyframe.easeOut;
// Sync update to backend if this is an automation curve
this._syncAutomationKeyframeToBackend(existingKeyframe);
} else { } else {
// Add new keyframe // Add new keyframe
this.keyframes.push(keyframe); this.keyframes.push(keyframe);
@ -192,6 +198,9 @@ class AnimationCurve {
if (this.parentAnimationData) { if (this.parentAnimationData) {
this.parentAnimationData.updateDuration(); this.parentAnimationData.updateDuration();
} }
// Sync to backend if this is an automation curve
this._syncAutomationKeyframeToBackend(keyframe);
} }
removeKeyframe(keyframe) { removeKeyframe(keyframe) {
@ -203,6 +212,9 @@ class AnimationCurve {
if (this.parentAnimationData) { if (this.parentAnimationData) {
this.parentAnimationData.updateDuration(); this.parentAnimationData.updateDuration();
} }
// Sync to backend if this is an automation curve
this._syncAutomationKeyframeRemovalToBackend(keyframe);
} }
} }
@ -389,6 +401,85 @@ class AnimationCurve {
keyframes: this.keyframes.map(kf => kf.toJSON()) keyframes: this.keyframes.map(kf => kf.toJSON())
}; };
} }
// Helper method to sync keyframe additions to backend for automation curves
_syncAutomationKeyframeToBackend(keyframe) {
// Check if this is an automation curve (parameter starts with "automation.")
if (!this.parameter.startsWith('automation.')) {
return; // Not an automation curve, skip backend sync
}
// Extract node ID from parameter (e.g., "automation.5" -> 5)
const nodeIdStr = this.parameter.split('.')[1];
const nodeId = parseInt(nodeIdStr, 10);
if (isNaN(nodeId)) {
console.error(`Invalid automation node ID: ${nodeIdStr}`);
return;
}
// Convert keyframe to backend format
const backendKeyframe = {
time: keyframe.time,
value: keyframe.value,
interpolation: keyframe.interpolation || 'linear',
ease_out: keyframe.easeOut ? [keyframe.easeOut.x, keyframe.easeOut.y] : [0.58, 1.0],
ease_in: keyframe.easeIn ? [keyframe.easeIn.x, keyframe.easeIn.y] : [0.42, 0.0]
};
// Call Tauri command (fire-and-forget)
// Note: Need to get track_id from context - for now, find it from the curve's parent
const track = window.root?.audioTracks?.find(t =>
t.animationData && Object.values(t.animationData.curves).includes(this)
);
if (!track || track.audioTrackId === null) {
console.error('Could not find track for automation curve sync');
return;
}
invoke('automation_add_keyframe', {
trackId: track.audioTrackId,
nodeId: nodeId,
keyframe: backendKeyframe
}).catch(err => {
console.error(`Failed to sync automation keyframe to backend: ${err}`);
});
}
// Helper method to sync keyframe removals to backend for automation curves
_syncAutomationKeyframeRemovalToBackend(keyframe) {
// Check if this is an automation curve (parameter starts with "automation.")
if (!this.parameter.startsWith('automation.')) {
return; // Not an automation curve, skip backend sync
}
// Extract node ID from parameter (e.g., "automation.5" -> 5)
const nodeIdStr = this.parameter.split('.')[1];
const nodeId = parseInt(nodeIdStr, 10);
if (isNaN(nodeId)) {
console.error(`Invalid automation node ID: ${nodeIdStr}`);
return;
}
// Call Tauri command (fire-and-forget)
// Note: Need to get track_id from context - for now, find it from the curve's parent
const track = window.root?.audioTracks?.find(t =>
t.animationData && Object.values(t.animationData.curves).includes(this)
);
if (!track || track.audioTrackId === null) {
console.error('Could not find track for automation curve sync');
return;
}
invoke('automation_remove_keyframe', {
trackId: track.audioTrackId,
nodeId: nodeId,
time: keyframe.time
}).catch(err => {
console.error(`Failed to sync automation keyframe removal to backend: ${err}`);
});
}
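A minimal usage sketch of the sync path, assuming a curve created with the "automation.<nodeId>" naming convention used elsewhere in this commit (the node ID 5 is hypothetical):
const curve = track.animationData.getOrCreateCurve('automation.5');
curve.addKeyframe({
  time: 1.0,
  value: 0.5,
  interpolation: 'linear',
  easeIn: { x: 0.42, y: 0 },
  easeOut: { x: 0.58, y: 1 },
  idx: `${Date.now()}-${Math.random()}`
});
// addKeyframe() calls _syncAutomationKeyframeToBackend(), which converts the
// easeIn/easeOut objects to [x, y] pairs and invokes 'automation_add_keyframe';
// curve.removeKeyframe(kf) mirrors this through 'automation_remove_keyframe'.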
} }
class AnimationData { class AnimationData {

View File

@ -1038,6 +1038,31 @@ export const nodeTypes = {
` `
}, },
AutomationInput: {
name: 'AutomationInput',
category: NodeCategory.UTILITY,
description: 'Timeline automation - outputs a CV signal controlled by timeline curves',
inputs: [],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Automation</div>
<div class="node-info" style="font-size: 10px; padding: 8px; color: #888;">
Timeline-based automation
</div>
<div id="automation-name-${nodeId}" style="font-size: 9px; color: #aaa; text-align: center; padding: 4px;">
Not connected
</div>
<div style="font-size: 9px; color: #666; text-align: center; padding: 4px;">
Edit curves in timeline
</div>
</div>
`
},
Math: { Math: {
name: 'Math', name: 'Math',
category: NodeCategory.UTILITY, category: NodeCategory.UTILITY,

View File

@ -94,19 +94,19 @@ function createRightPanel() {
{ {
name: 'Animation', name: 'Animation',
value: 'animation', value: 'animation',
iconSvg: '<svg width="80" height="80" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"><path d="M20,80 Q30,60 50,50 T80,20" stroke="currentColor" stroke-width="4" fill="none" stroke-linecap="round"/><circle cx="30" cy="70" r="8" fill="currentColor"/></svg>', iconPath: '/assets/focus-animation.svg',
description: 'Drawing tools and timeline' description: 'Drawing tools and timeline'
}, },
{ {
name: 'Music', name: 'Music',
value: 'audioDaw', value: 'audioDaw',
iconSvg: '<svg width="80" height="80" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"><rect x="10" y="20" width="18" height="60" fill="none" stroke="currentColor" stroke-width="3"/><rect x="30" y="20" width="18" height="60" fill="none" stroke="currentColor" stroke-width="3"/><rect x="50" y="20" width="18" height="60" fill="none" stroke="currentColor" stroke-width="3"/><rect x="70" y="20" width="18" height="60" fill="none" stroke="currentColor" stroke-width="3"/><rect x="24" y="20" width="12" height="35" fill="currentColor"/><rect x="44" y="20" width="12" height="35" fill="currentColor"/><rect x="74" y="20" width="12" height="35" fill="currentColor"/></svg>', iconPath: '/assets/focus-music.svg',
description: 'Audio tracks and mixer' description: 'Audio tracks and mixer'
}, },
{ {
name: 'Video editing', name: 'Video editing',
value: 'videoEditing', value: 'videoEditing',
iconSvg: '<svg width="80" height="80" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg"><rect x="15" y="40" width="70" height="45" fill="currentColor" rx="4"/><rect x="15" y="25" width="70" height="15" fill="none" stroke="currentColor" stroke-width="3" rx="4"/><rect x="20" y="25" width="7" height="15" fill="currentColor"/><rect x="35" y="25" width="7" height="15" fill="currentColor"/><rect x="50" y="25" width="7" height="15" fill="currentColor"/><rect x="65" y="25" width="7" height="15" fill="currentColor"/></svg>', iconPath: '/assets/focus-video.svg',
description: 'Clip timeline and effects' description: 'Clip timeline and effects'
} }
]; ];
@ -121,6 +121,12 @@ function createRightPanel() {
return rightPanel; return rightPanel;
} }
async function loadSVG(url, targetElement) {
const response = await fetch(url);
const svgText = await response.text();
targetElement.innerHTML = svgText;
}
function createFocusCard(focus) { function createFocusCard(focus) {
const card = document.createElement('div'); const card = document.createElement('div');
card.className = 'focus-card'; card.className = 'focus-card';
@ -131,7 +137,10 @@ function createFocusCard(focus) {
const iconWrapper = document.createElement('div'); const iconWrapper = document.createElement('div');
iconWrapper.className = 'focus-card-icon'; iconWrapper.className = 'focus-card-icon';
iconWrapper.innerHTML = focus.iconSvg;
// Load the SVG asynchronously
loadSVG(focus.iconPath, iconWrapper);
iconContainer.appendChild(iconWrapper); iconContainer.appendChild(iconWrapper);
card.appendChild(iconContainer); card.appendChild(iconContainer);

View File

@ -77,6 +77,8 @@ export let config = {
fileWidth: 800, fileWidth: 800,
fileHeight: 600, fileHeight: 600,
framerate: 24, framerate: 24,
bpm: 120,
timeSignature: { numerator: 4, denominator: 4 },
recentFiles: [], recentFiles: [],
scrollSpeed: 1, scrollSpeed: 1,
debug: false, debug: false,

View File

@ -1646,6 +1646,18 @@ button {
stroke-width: 2px !important; stroke-width: 2px !important;
} }
/* Connection insertion highlight */
.connection-insertion-highlight .main-path {
stroke: #FFD700 !important;
stroke-width: 8px !important;
stroke-dasharray: none !important;
filter: drop-shadow(0 0 12px #FFD700) !important;
}
.connection-insertion-highlight {
z-index: 9999 !important;
}
/* Port label text styling - position labels away from connectors */ /* Port label text styling - position labels away from connectors */
.drawflow .drawflow-node .input > span, .drawflow .drawflow-node .input > span,
.drawflow .drawflow-node .output > span { .drawflow .drawflow-node .output > span {

View File

@ -6,10 +6,12 @@ import { backgroundColor, foregroundColor, shadow, labelColor, scrubberColor } f
* TimelineState - Global state for timeline display and interaction * TimelineState - Global state for timeline display and interaction
*/ */
class TimelineState { class TimelineState {
constructor(framerate = 24) { constructor(framerate = 24, bpm = 120, timeSignature = { numerator: 4, denominator: 4 }) {
// Time format settings // Time format settings
this.timeFormat = 'frames' // 'frames' | 'seconds' | 'measures' this.timeFormat = 'frames' // 'frames' | 'seconds' | 'measures'
this.framerate = framerate this.framerate = framerate
this.bpm = bpm // Beats per minute for measures mode
this.timeSignature = timeSignature // Time signature for measures mode (e.g., {numerator: 4, denominator: 4} or {numerator: 6, denominator: 8})
// Zoom and viewport // Zoom and viewport
this.pixelsPerSecond = 100 // Zoom level - how many pixels per second of animation this.pixelsPerSecond = 100 // Zoom level - how many pixels per second of animation
@ -53,6 +55,30 @@ class TimelineState {
return frame / this.framerate return frame / this.framerate
} }
/**
* Convert time (seconds) to measure position
* Returns {measure, beat, tick} where tick is a 0-999 subdivision of the beat

*/
timeToMeasure(time) {
const beatsPerSecond = this.bpm / 60
const totalBeats = time * beatsPerSecond
const beatsPerMeasure = this.timeSignature.numerator
const measure = Math.floor(totalBeats / beatsPerMeasure) + 1 // Measures are 1-indexed
const beat = Math.floor(totalBeats % beatsPerMeasure) + 1 // Beats are 1-indexed
const tick = Math.floor((totalBeats % 1) * 1000) // Ticks are 0-999
return { measure, beat, tick }
}
/**
* Convert measure position to time (seconds)
*/
measureToTime(measure, beat = 1, tick = 0) {
const beatsPerMeasure = this.timeSignature.numerator
const totalBeats = (measure - 1) * beatsPerMeasure + (beat - 1) + (tick / 1000)
const beatsPerSecond = this.bpm / 60
return totalBeats / beatsPerSecond
}
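Sanity-checking the conversions with the defaults added in this commit (120 BPM in 4/4, i.e. 2 beats per second):
const state = new TimelineState(24, 120, { numerator: 4, denominator: 4 });
state.timeToMeasure(0);    // { measure: 1, beat: 1, tick: 0 } - both 1-indexed
state.timeToMeasure(2.5);  // 5 beats in -> { measure: 2, beat: 2, tick: 0 }
state.measureToTime(2, 2); // ((2-1)*4 + (2-1)) / 2 = 2.5 seconds - a clean round trip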
/** /**
* Calculate appropriate ruler interval based on zoom level * Calculate appropriate ruler interval based on zoom level
* Returns interval in seconds that gives ~50-100px spacing * Returns interval in seconds that gives ~50-100px spacing
@ -112,6 +138,35 @@ class TimelineState {
return bestInterval return bestInterval
} }
/**
* Calculate appropriate ruler interval for measures mode
* Returns interval in beats that gives ~50-100px spacing
*/
getRulerIntervalBeats() {
const targetPixelSpacing = 75
const beatsPerSecond = this.bpm / 60
const pixelsPerBeat = this.pixelsPerSecond / beatsPerSecond
const beatSpacing = targetPixelSpacing / pixelsPerBeat
const beatsPerMeasure = this.timeSignature.numerator
// Standard beat intervals: 1 beat, 2 beats, 1 measure, 2 measures, 4 measures, etc.
const intervals = [1, 2, beatsPerMeasure, beatsPerMeasure * 2, beatsPerMeasure * 4, beatsPerMeasure * 8, beatsPerMeasure * 16]
// Find closest interval
let bestInterval = intervals[0]
let bestDiff = Math.abs(beatSpacing - bestInterval)
for (let interval of intervals) {
const diff = Math.abs(beatSpacing - interval)
if (diff < bestDiff) {
bestDiff = diff
bestInterval = interval
}
}
return bestInterval
}
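Worked example (a sketch, assuming the default 120 BPM in 4/4 at a zoom of 100 pixels per second):
// pixelsPerBeat = 100 / (120 / 60) = 50px
// beatSpacing   = 75 / 50          = 1.5 beats
// candidates in 4/4: [1, 2, 4, 8, 16, 32, 64] beats
// |1.5 - 1| equals |1.5 - 2|, and ties keep the earlier candidate,
// so the ruler picks a 1-beat interval at this zoom.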
/** /**
* Format time for display based on current format setting * Format time for display based on current format setting
*/ */
@ -128,8 +183,10 @@ class TimelineState {
} else { } else {
return `${seconds}.${ms}s` return `${seconds}.${ms}s`
} }
} else if (this.timeFormat === 'measures') {
const { measure, beat } = this.timeToMeasure(time)
return `${measure}.${beat}`
} }
// measures format - TODO when DAW features added
return `${time.toFixed(2)}` return `${time.toFixed(2)}`
} }
@ -182,24 +239,19 @@ class TimeRuler {
ctx.fillStyle = backgroundColor ctx.fillStyle = backgroundColor
ctx.fillRect(0, 0, width, this.height) ctx.fillRect(0, 0, width, this.height)
// Determine interval based on current zoom and format
let interval, isFrameMode
if (this.state.timeFormat === 'frames') {
interval = this.state.getRulerIntervalFrames() // In frames
isFrameMode = true
} else {
interval = this.state.getRulerInterval() // In seconds
isFrameMode = false
}
// Calculate visible time range // Calculate visible time range
const startTime = this.state.viewportStartTime const startTime = this.state.viewportStartTime
const endTime = this.state.pixelToTime(width) const endTime = this.state.pixelToTime(width)
// Draw tick marks and labels // Draw tick marks and labels based on format
if (isFrameMode) { if (this.state.timeFormat === 'frames') {
const interval = this.state.getRulerIntervalFrames() // In frames
this.drawFrameTicks(ctx, width, interval, startTime, endTime) this.drawFrameTicks(ctx, width, interval, startTime, endTime)
} else if (this.state.timeFormat === 'measures') {
const interval = this.state.getRulerIntervalBeats() // In beats
this.drawMeasureTicks(ctx, width, interval, startTime, endTime)
} else { } else {
const interval = this.state.getRulerInterval() // In seconds
this.drawSecondTicks(ctx, width, interval, startTime, endTime) this.drawSecondTicks(ctx, width, interval, startTime, endTime)
} }
@ -303,6 +355,127 @@ class TimeRuler {
} }
} }
/**
* Draw tick marks for measures mode
*/
drawMeasureTicks(ctx, width, interval, startTime, endTime) {
const beatsPerSecond = this.state.bpm / 60
const beatsPerMeasure = this.state.timeSignature.numerator
// Always draw individual beats, regardless of interval
const startBeat = Math.floor(startTime * beatsPerSecond)
const endBeat = Math.ceil(endTime * beatsPerSecond)
ctx.fillStyle = labelColor
ctx.font = '11px sans-serif'
ctx.textAlign = 'center'
ctx.textBaseline = 'top'
// Draw all beats
for (let beat = startBeat; beat <= endBeat; beat++) {
const time = beat / beatsPerSecond
const x = this.state.timeToPixel(time)
if (x < 0 || x > width) continue
// Determine position within the measure
const beatInMeasure = beat % beatsPerMeasure
const isMeasureBoundary = beatInMeasure === 0
const isEvenBeatInMeasure = (beatInMeasure % 2) === 0
// Determine tick style based on position
let opacity, tickHeight
if (isMeasureBoundary) {
// Measure boundary: full opacity, tallest
opacity = 1.0
tickHeight = 12
} else if (isEvenBeatInMeasure) {
// Even beat within measure: half opacity, medium height
opacity = 0.5
tickHeight = 8
} else {
// Odd beat within measure: quarter opacity, shortest
opacity = 0.25
tickHeight = 5
}
// Draw tick with appropriate opacity
ctx.save()
ctx.globalAlpha = opacity
ctx.strokeStyle = foregroundColor
ctx.lineWidth = isMeasureBoundary ? 2 : 1
ctx.beginPath()
ctx.moveTo(x, this.height - tickHeight)
ctx.lineTo(x, this.height)
ctx.stroke()
ctx.restore()
// Determine if we're zoomed in enough to show individual beat labels
const pixelsPerBeat = this.state.pixelsPerSecond / beatsPerSecond
const beatFadeThreshold = 100 // Full opacity at 100px per beat
const beatFadeStart = 60 // Start fading in at 60px per beat
// Calculate fade opacity for beat labels (0 to 1)
const beatLabelOpacity = Math.max(0, Math.min(1, (pixelsPerBeat - beatFadeStart) / (beatFadeThreshold - beatFadeStart)))
// Calculate spacing-based fade for measure labels when zoomed out
const pixelsPerMeasure = pixelsPerBeat * beatsPerMeasure
// Determine which measures to show based on spacing
const { measure: measureNumber } = this.state.timeToMeasure(time)
let showThisMeasure = false
let measureLabelOpacity = 1
const isEvery16th = (measureNumber - 1) % 16 === 0
const isEvery4th = (measureNumber - 1) % 4 === 0
if (isEvery16th) {
// Always show every 16th measure when very zoomed out
showThisMeasure = true
if (pixelsPerMeasure < 20) {
// Fade in from 10-20px
measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 10) / 10))
} else {
measureLabelOpacity = 1
}
} else if (isEvery4th && pixelsPerMeasure >= 20) {
// Show every 4th measure when zoomed out but not too far
showThisMeasure = true
if (pixelsPerMeasure < 30) {
// Fade in from 20-30px
measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 20) / 10))
} else {
measureLabelOpacity = 1
}
} else if (pixelsPerMeasure >= 80) {
// Show all measures when zoomed in enough
showThisMeasure = true
if (pixelsPerMeasure < 100) {
// Fade in from 80-100px
measureLabelOpacity = Math.max(0, Math.min(1, (pixelsPerMeasure - 80) / 20))
} else {
measureLabelOpacity = 1
}
}
// Label logic
if (isMeasureBoundary && showThisMeasure) {
// Measure boundaries: show just the measure number with fade
const { measure } = this.state.timeToMeasure(time)
ctx.save()
ctx.globalAlpha = measureLabelOpacity
ctx.fillText(measure.toString(), x, 2)
ctx.restore()
} else if (beatLabelOpacity > 0) {
// Zoomed in: show measure.beat for all beats with fade
ctx.save()
ctx.globalAlpha = beatLabelOpacity
ctx.fillText(this.state.formatTime(time), x, 2)
ctx.restore()
}
}
}
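Plugging in a zoom that yields 80 pixels per beat in 4/4 (an assumed mid-zoom case) shows how the two fades interact:
// beatLabelOpacity    = clamp((80 - 60) / (100 - 60)) = 0.5  // beat labels half faded in
// pixelsPerMeasure    = 80 * 4 = 320                         // past both the 80px and 100px thresholds
// measureLabelOpacity = 1                                    // every measure number fully shown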
/** /**
* Draw playhead (current time indicator) * Draw playhead (current time indicator)
*/ */

View File

@ -925,6 +925,48 @@ function deeploop(obj, callback) {
} }
} }
/**
* Calculate the shortest distance from a point to a line segment
* @param {number} px - Point x coordinate
* @param {number} py - Point y coordinate
* @param {number} x1 - Line segment start x
* @param {number} y1 - Line segment start y
* @param {number} x2 - Line segment end x
* @param {number} y2 - Line segment end y
* @returns {number} Distance from point to line segment
*/
function distanceToLineSegment(px, py, x1, y1, x2, y2) {
const A = px - x1;
const B = py - y1;
const C = x2 - x1;
const D = y2 - y1;
const dot = A * C + B * D;
const lenSq = C * C + D * D;
let param = -1;
if (lenSq !== 0) {
param = dot / lenSq;
}
let xx, yy;
if (param < 0) {
xx = x1;
yy = y1;
} else if (param > 1) {
xx = x2;
yy = y2;
} else {
xx = x1 + param * C;
yy = y1 + param * D;
}
const dx = px - xx;
const dy = py - yy;
return Math.sqrt(dx * dx + dy * dy);
}
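Two illustrative calls (values chosen by hand):
distanceToLineSegment(5, 5, 0, 0, 10, 0);  // param = 50/100 = 0.5, projects to (5, 0) -> 5
distanceToLineSegment(15, 0, 0, 0, 10, 0); // param = 1.5 > 1, clamps to (10, 0) -> 5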
export { export {
titleCase, titleCase,
getMousePositionFraction, getMousePositionFraction,
@ -959,5 +1001,6 @@ export {
arraysAreEqual, arraysAreEqual,
getFileExtension, getFileExtension,
createModal, createModal,
deeploop deeploop,
distanceToLineSegment
}; };

View File

@ -535,7 +535,11 @@ class TimelineWindowV2 extends Widget {
this.trackHeaderWidth = 150 this.trackHeaderWidth = 150
// Create shared timeline state using config framerate // Create shared timeline state using config framerate
this.timelineState = new TimelineState(context.config?.framerate || 24) this.timelineState = new TimelineState(
context.config?.framerate || 24,
context.config?.bpm || 120,
context.config?.timeSignature || { numerator: 4, denominator: 4 }
)
// Create time ruler widget // Create time ruler widget
this.ruler = new TimeRuler(this.timelineState) this.ruler = new TimeRuler(this.timelineState)
@ -573,6 +577,9 @@ class TimelineWindowV2 extends Widget {
// Selected audio track (for recording) // Selected audio track (for recording)
this.selectedTrack = null this.selectedTrack = null
// Cache for automation node names (maps "trackId:nodeId" -> friendly name)
this.automationNameCache = new Map()
} }
draw(ctx) { draw(ctx) {
@ -792,8 +799,8 @@ class TimelineWindowV2 extends Widget {
ctx.fillText(typeText, typeX, y + this.trackHierarchy.trackHeight / 2) ctx.fillText(typeText, typeX, y + this.trackHierarchy.trackHeight / 2)
} }
// Draw toggle buttons for object/shape/audio tracks (Phase 3) // Draw toggle buttons for object/shape/audio/midi tracks (Phase 3)
if (track.type === 'object' || track.type === 'shape' || track.type === 'audio') { if (track.type === 'object' || track.type === 'shape' || track.type === 'audio' || track.type === 'midi') {
const buttonSize = 14 const buttonSize = 14
const buttonY = y + (this.trackHierarchy.trackHeight - buttonSize) / 2 // Use base height for button position const buttonY = y + (this.trackHierarchy.trackHeight - buttonSize) / 2 // Use base height for button position
let buttonX = this.trackHeaderWidth - 10 // Start from right edge let buttonX = this.trackHeaderWidth - 10 // Start from right edge
@ -813,8 +820,8 @@ class TimelineWindowV2 extends Widget {
track.object.curvesMode === 'keyframe' ? '≈' : '-' track.object.curvesMode === 'keyframe' ? '≈' : '-'
ctx.fillText(curveSymbol, buttonX + buttonSize / 2, buttonY + buttonSize / 2) ctx.fillText(curveSymbol, buttonX + buttonSize / 2, buttonY + buttonSize / 2)
// Segment visibility button (only for object/shape tracks, not audio) // Segment visibility button (only for object/shape tracks, not audio/midi)
if (track.type !== 'audio') { if (track.type !== 'audio' && track.type !== 'midi') {
buttonX -= (buttonSize + 4) buttonX -= (buttonSize + 4)
ctx.strokeStyle = foregroundColor ctx.strokeStyle = foregroundColor
ctx.lineWidth = 1 ctx.lineWidth = 1
@ -835,7 +842,10 @@ class TimelineWindowV2 extends Widget {
let animationData = null let animationData = null
// Find the AnimationData for this track // Find the AnimationData for this track
if (track.type === 'object') { if (track.type === 'audio' || track.type === 'midi') {
// For audio/MIDI tracks, animation data is directly on the track object
animationData = obj.animationData
} else if (track.type === 'object') {
for (let layer of this.context.activeObject.allLayers) { for (let layer of this.context.activeObject.allLayers) {
if (layer.children && layer.children.includes(obj)) { if (layer.children && layer.children.includes(obj)) {
animationData = layer.animationData animationData = layer.animationData
@ -852,11 +862,19 @@ class TimelineWindowV2 extends Widget {
} }
if (animationData) { if (animationData) {
const prefix = track.type === 'object' ? `child.${obj.idx}.` : `shape.${obj.shapeId}.` if (track.type === 'audio' || track.type === 'midi') {
for (let curveName in animationData.curves) { // For audio/MIDI tracks, include all automation curves
if (curveName.startsWith(prefix)) { for (let curveName in animationData.curves) {
curves.push(animationData.curves[curveName]) curves.push(animationData.curves[curveName])
} }
} else {
// For objects/shapes, filter by prefix
const prefix = track.type === 'object' ? `child.${obj.idx}.` : `shape.${obj.shapeId}.`
for (let curveName in animationData.curves) {
if (curveName.startsWith(prefix)) {
curves.push(animationData.curves[curveName])
}
}
} }
} }
@ -883,9 +901,18 @@ class TimelineWindowV2 extends Widget {
ctx.arc(10, itemY + 5, 3, 0, 2 * Math.PI) ctx.arc(10, itemY + 5, 3, 0, 2 * Math.PI)
ctx.fill() ctx.fill()
// Draw parameter name (extract last part after last dot) // Draw parameter name
ctx.fillStyle = isHidden ? foregroundColor : labelColor ctx.fillStyle = isHidden ? foregroundColor : labelColor
const paramName = curve.parameter.split('.').pop() let paramName = curve.parameter.split('.').pop()
// For automation curves, fetch the friendly name from backend
if (curve.parameter.startsWith('automation.') && (track.type === 'audio' || track.type === 'midi')) {
const nodeId = parseInt(paramName, 10)
if (!isNaN(nodeId) && obj.audioTrackId !== null) {
paramName = this.getAutomationName(obj.audioTrackId, nodeId)
}
}
const truncatedName = paramName.length > 12 ? paramName.substring(0, 10) + '...' : paramName const truncatedName = paramName.length > 12 ? paramName.substring(0, 10) + '...' : paramName
ctx.fillText(truncatedName, 18, itemY) ctx.fillText(truncatedName, 18, itemY)
@ -974,6 +1001,42 @@ class TimelineWindowV2 extends Widget {
} }
} }
} }
} else if (this.timelineState.timeFormat === 'measures') {
// Measures mode: draw beats with varying opacity
const beatsPerSecond = this.timelineState.bpm / 60
const beatsPerMeasure = this.timelineState.timeSignature.numerator
const startBeat = Math.floor(visibleStartTime * beatsPerSecond)
const endBeat = Math.ceil(visibleEndTime * beatsPerSecond)
for (let beat = startBeat; beat <= endBeat; beat++) {
const time = beat / beatsPerSecond
const x = this.timelineState.timeToPixel(time)
if (x >= 0 && x <= trackAreaWidth) {
// Determine position within the measure
const beatInMeasure = beat % beatsPerMeasure
const isMeasureBoundary = beatInMeasure === 0
const isEvenBeatInMeasure = (beatInMeasure % 2) === 0
// Set opacity based on position
ctx.save()
if (isMeasureBoundary) {
ctx.globalAlpha = 1.0 // Full opacity for measure boundaries
} else if (isEvenBeatInMeasure) {
ctx.globalAlpha = 0.5 // Half opacity for even beats
} else {
ctx.globalAlpha = 0.25 // Quarter opacity for odd beats
}
ctx.strokeStyle = shadow
ctx.lineWidth = 1
ctx.beginPath()
ctx.moveTo(x, y)
ctx.lineTo(x, y + trackHeight)
ctx.stroke()
ctx.restore()
}
}
} else { } else {
// Seconds mode: mark every second edge // Seconds mode: mark every second edge
const startSecond = Math.floor(visibleStartTime) const startSecond = Math.floor(visibleStartTime)
@ -1427,8 +1490,8 @@ class TimelineWindowV2 extends Widget {
for (let i = 0; i < this.trackHierarchy.tracks.length; i++) { for (let i = 0; i < this.trackHierarchy.tracks.length; i++) {
const track = this.trackHierarchy.tracks[i] const track = this.trackHierarchy.tracks[i]
// Only draw curves for objects, shapes, and audio tracks // Only draw curves for objects, shapes, audio tracks, and MIDI tracks
if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio') continue if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio' && track.type !== 'midi') continue
const obj = track.object const obj = track.object
@ -1439,8 +1502,8 @@ class TimelineWindowV2 extends Widget {
// Find the layer containing this object/shape to get AnimationData // Find the layer containing this object/shape to get AnimationData
let animationData = null let animationData = null
if (track.type === 'audio') { if (track.type === 'audio' || track.type === 'midi') {
// For audio tracks, animation data is directly on the track object // For audio/MIDI tracks, animation data is directly on the track object
animationData = obj.animationData animationData = obj.animationData
} else if (track.type === 'object') { } else if (track.type === 'object') {
// For objects, get curves from parent layer // For objects, get curves from parent layer
@ -1476,9 +1539,9 @@ class TimelineWindowV2 extends Widget {
for (let curveName in animationData.curves) { for (let curveName in animationData.curves) {
const curve = animationData.curves[curveName] const curve = animationData.curves[curveName]
// Filter to only curves for this specific object/shape/audio // Filter to only curves for this specific object/shape/audio/MIDI
if (track.type === 'audio') { if (track.type === 'audio' || track.type === 'midi') {
// Audio tracks: include all curves (they're prefixed with 'track.' or 'clip.') // Audio/MIDI tracks: include all automation curves
curves.push(curve) curves.push(curve)
} else if (track.type === 'object' && curveName.startsWith(`child.${obj.idx}.`)) { } else if (track.type === 'object' && curveName.startsWith(`child.${obj.idx}.`)) {
curves.push(curve) curves.push(curve)
@ -1858,7 +1921,7 @@ class TimelineWindowV2 extends Widget {
} }
// Check if clicking on toggle buttons (Phase 3) // Check if clicking on toggle buttons (Phase 3)
if (track.type === 'object' || track.type === 'shape') { if (track.type === 'object' || track.type === 'shape' || track.type === 'audio' || track.type === 'midi') {
const buttonSize = 14 const buttonSize = 14
const trackIndex = this.trackHierarchy.tracks.indexOf(track) const trackIndex = this.trackHierarchy.tracks.indexOf(track)
const trackY = this.trackHierarchy.getTrackY(trackIndex) const trackY = this.trackHierarchy.getTrackY(trackIndex)
@ -4032,10 +4095,60 @@ class TimelineWindowV2 extends Widget {
toggleTimeFormat() { toggleTimeFormat() {
if (this.timelineState.timeFormat === 'frames') { if (this.timelineState.timeFormat === 'frames') {
this.timelineState.timeFormat = 'seconds' this.timelineState.timeFormat = 'seconds'
} else if (this.timelineState.timeFormat === 'seconds') {
this.timelineState.timeFormat = 'measures'
} else { } else {
this.timelineState.timeFormat = 'frames' this.timelineState.timeFormat = 'frames'
} }
} }
// Fetch automation name from backend and cache it
async fetchAutomationName(trackId, nodeId) {
const cacheKey = `${trackId}:${nodeId}`
// Return cached value if available
if (this.automationNameCache.has(cacheKey)) {
return this.automationNameCache.get(cacheKey)
}
try {
const name = await invoke('automation_get_name', {
trackId: trackId,
nodeId: nodeId
})
// Cache the result
if (name && name !== '') {
this.automationNameCache.set(cacheKey, name)
return name
}
} catch (err) {
console.error(`Failed to fetch automation name for node ${nodeId}:`, err)
}
// Fallback to node ID if fetch fails or returns empty
return `${nodeId}`
}
// Get automation name synchronously from cache, trigger fetch if not cached
getAutomationName(trackId, nodeId) {
const cacheKey = `${trackId}:${nodeId}`
if (this.automationNameCache.has(cacheKey)) {
return this.automationNameCache.get(cacheKey)
}
// Trigger async fetch in background
this.fetchAutomationName(trackId, nodeId).then(() => {
// Redraw when name arrives
if (this.context.timelineWidget?.requestRedraw) {
this.context.timelineWidget.requestRedraw()
}
})
// Return node ID as placeholder while fetching
return `${nodeId}`
}
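The cache makes this safe to call synchronously from the draw loop; a sketch of the two-pass behaviour (the track/node IDs and the resolved name are hypothetical):
timelineWidget.getAutomationName(3, 7); // "7" - cache miss, fetch kicked off in background
// ...invoke('automation_get_name') resolves, caches the name, and requests a redraw...
timelineWidget.getAutomationName(3, 7); // e.g. "Filter Cutoff" - served from the cache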
} }
/** /**