use nodes for audio tracks

This commit is contained in:
Skyler Lehmkuhl 2025-11-02 06:33:10 -05:00
parent 988bbfd1a9
commit 66c4746767
11 changed files with 643 additions and 187 deletions

View File

@@ -1,7 +1,7 @@
 use crate::audio::buffer_pool::BufferPool;
 use crate::audio::clip::ClipId;
 use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
-use crate::audio::node_graph::{nodes::*, InstrumentGraph};
+use crate::audio::node_graph::{nodes::*, AudioGraph};
 use crate::audio::pool::AudioPool;
 use crate::audio::project::Project;
 use crate::audio::recording::{MidiRecordingState, RecordingState};
@@ -689,12 +689,27 @@ impl Engine {
             // Node graph commands
             Command::GraphAddNode(track_id, node_type, x, y) => {
-                // Get MIDI track (graphs are only for MIDI tracks currently)
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        // Create the node based on type
-                        let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
+                eprintln!("[DEBUG] GraphAddNode received: track_id={}, node_type={}, x={}, y={}", track_id, node_type, x, y);
+                // Get the track's graph (works for both MIDI and Audio tracks)
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        eprintln!("[DEBUG] Found MIDI track, using instrument_graph");
+                        Some(&mut track.instrument_graph)
+                    },
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[DEBUG] Found Audio track, using effects_graph");
+                        Some(&mut track.effects_graph)
+                    },
+                    _ => {
+                        eprintln!("[DEBUG] Track not found or invalid type!");
+                        None
+                    }
+                };
+                if let Some(graph) = graph {
+                    // Create the node based on type
+                    let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
                         "Oscillator" => Box::new(OscillatorNode::new("Oscillator".to_string())),
                         "Gain" => Box::new(GainNode::new("Gain".to_string())),
                         "Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
@@ -729,6 +744,7 @@ impl Engine {
                         "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
                         "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
                         "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
+                        "AudioInput" => Box::new(AudioInputNode::new("Audio Input".to_string())),
                         "AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())),
                         "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
                         "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
@@ -744,21 +760,29 @@ impl Engine {
                        }
                    };
                    // Add node to graph
                    let node_idx = graph.add_node(node);
                    let node_id = node_idx.index() as u32;
+                    eprintln!("[DEBUG] Node added with index: {:?}, converted to u32 id: {}", node_idx, node_id);
                    // Save position
                    graph.set_node_position(node_idx, x, y);
                    // Automatically set MIDI-receiving nodes as MIDI targets
                    if node_type == "MidiInput" || node_type == "VoiceAllocator" {
                        graph.set_midi_target(node_idx, true);
                    }
+                    // Automatically set AudioOutput nodes as the graph output
+                    if node_type == "AudioOutput" {
+                        graph.set_output_node(Some(node_idx));
+                    }
+                    eprintln!("[DEBUG] Emitting GraphNodeAdded event: track_id={}, node_id={}, node_type={}", track_id, node_id, node_type);
                    // Emit success event
                    let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
-                    }
-                }
+                } else {
+                    eprintln!("[DEBUG] Graph was None, node not added!");
+                }
            }
@@ -836,35 +860,55 @@ impl Engine {
            }
            Command::GraphRemoveNode(track_id, node_index) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        graph.remove_node(node_idx);
-                        let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                    }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    graph.remove_node(node_idx);
+                    let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
                }
            }
            Command::GraphConnect(track_id, from, from_port, to, to_port) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let from_idx = NodeIndex::new(from as usize);
-                        let to_idx = NodeIndex::new(to as usize);
-                        match graph.connect(from_idx, from_port, to_idx, to_port) {
-                            Ok(()) => {
-                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                            }
-                            Err(e) => {
-                                let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
-                                    track_id,
-                                    format!("{:?}", e)
-                                ));
-                            }
-                        }
-                    }
-                }
+                eprintln!("[DEBUG] GraphConnect received: track_id={}, from={}, from_port={}, to={}, to_port={}", track_id, from, from_port, to, to_port);
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        eprintln!("[DEBUG] Found MIDI track for connection");
+                        Some(&mut track.instrument_graph)
+                    },
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[DEBUG] Found Audio track for connection");
+                        Some(&mut track.effects_graph)
+                    },
+                    _ => {
+                        eprintln!("[DEBUG] Track not found for connection!");
+                        None
+                    }
+                };
+                if let Some(graph) = graph {
+                    let from_idx = NodeIndex::new(from as usize);
+                    let to_idx = NodeIndex::new(to as usize);
+                    eprintln!("[DEBUG] Attempting to connect nodes: {:?} port {} -> {:?} port {}", from_idx, from_port, to_idx, to_port);
+                    match graph.connect(from_idx, from_port, to_idx, to_port) {
+                        Ok(()) => {
+                            eprintln!("[DEBUG] Connection successful!");
+                            let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                        }
+                        Err(e) => {
+                            eprintln!("[DEBUG] Connection failed: {:?}", e);
+                            let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
+                                track_id,
+                                format!("{:?}", e)
+                            ));
+                        }
+                    }
+                } else {
+                    eprintln!("[DEBUG] No graph found, connection not made");
+                }
            }
@@ -891,25 +935,37 @@ impl Engine {
            }
            Command::GraphDisconnect(track_id, from, from_port, to, to_port) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let from_idx = NodeIndex::new(from as usize);
-                        let to_idx = NodeIndex::new(to as usize);
-                        graph.disconnect(from_idx, from_port, to_idx, to_port);
-                        let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                    }
+                eprintln!("[AUDIO ENGINE] GraphDisconnect: track={}, from={}, from_port={}, to={}, to_port={}", track_id, from, from_port, to, to_port);
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[AUDIO ENGINE] Found audio track, disconnecting in effects_graph");
+                        Some(&mut track.effects_graph)
+                    }
+                    _ => {
+                        eprintln!("[AUDIO ENGINE] Track not found!");
+                        None
+                    }
+                };
+                if let Some(graph) = graph {
+                    let from_idx = NodeIndex::new(from as usize);
+                    let to_idx = NodeIndex::new(to as usize);
+                    graph.disconnect(from_idx, from_port, to_idx, to_port);
+                    eprintln!("[AUDIO ENGINE] Disconnect completed");
+                    let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
                }
            }
            Command::GraphSetParameter(track_id, node_index, param_id, value) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
-                            graph_node.node.set_parameter(param_id, value);
-                        }
-                    }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
+                        graph_node.node.set_parameter(param_id, value);
+                    }
                }
            }
@@ -925,18 +981,24 @@ impl Engine {
            }
            Command::GraphSetOutputNode(track_id, node_index) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        graph.set_output_node(Some(node_idx));
-                    }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    graph.set_output_node(Some(node_idx));
                }
            }
            Command::GraphSavePreset(track_id, preset_path, preset_name, description, tags) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &track.instrument_graph;
+                let graph = match self.project.get_track(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
                    // Serialize the graph to a preset
                    let mut preset = graph.to_preset(&preset_name);
                    preset.metadata.description = description;
@@ -969,14 +1031,21 @@ impl Engine {
                // Extract the directory path from the preset path for resolving relative sample paths
                let preset_base_path = std::path::Path::new(&preset_path).parent();
-                match InstrumentGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path) {
+                match AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path) {
                    Ok(graph) => {
                        // Replace the track's graph
-                        if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                            track.instrument_graph = graph;
-                            let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                            // Emit preset loaded event after everything is loaded
-                            let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                        match self.project.get_track_mut(track_id) {
+                            Some(TrackNode::Midi(track)) => {
+                                track.instrument_graph = graph;
+                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                                let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                            }
+                            Some(TrackNode::Audio(track)) => {
+                                track.effects_graph = graph;
+                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                                let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                            }
+                            _ => {}
                        }
                    }
                    Err(e) => {
@@ -1197,15 +1266,26 @@ impl Engine {
    fn handle_query(&mut self, query: Query) {
        let response = match query {
            Query::GetGraphState(track_id) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
-                    let graph = &track.instrument_graph;
-                    let preset = graph.to_preset("temp");
-                    match preset.to_json() {
-                        Ok(json) => QueryResponse::GraphState(Ok(json)),
-                        Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
+                match self.project.get_track(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        let graph = &track.instrument_graph;
+                        let preset = graph.to_preset("temp");
+                        match preset.to_json() {
+                            Ok(json) => QueryResponse::GraphState(Ok(json)),
+                            Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
+                        }
+                    }
+                    Some(TrackNode::Audio(track)) => {
+                        let graph = &track.effects_graph;
+                        let preset = graph.to_preset("temp");
+                        match preset.to_json() {
+                            Ok(json) => QueryResponse::GraphState(Ok(json)),
+                            Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
+                        }
+                    }
+                    _ => {
+                        QueryResponse::GraphState(Err(format!("Track {} not found", track_id)))
                    }
-                } else {
-                    QueryResponse::GraphState(Err(format!("Track {} not found or is not a MIDI track", track_id)))
                }
            }
            Query::GetTemplateState(track_id, voice_allocator_id) => {

View File

@@ -57,7 +57,7 @@ impl GraphNode {
 }
 /// Audio processing graph for instruments/effects
-pub struct InstrumentGraph {
+pub struct AudioGraph {
     /// The audio graph (StableGraph allows node removal without index invalidation)
     graph: StableGraph<GraphNode, Connection>,
@@ -86,8 +86,8 @@ pub struct InstrumentGraph {
     playback_time: f64,
 }
-impl InstrumentGraph {
-    /// Create a new empty instrument graph
+impl AudioGraph {
+    /// Create a new empty audio graph
     pub fn new(sample_rate: u32, buffer_size: usize) -> Self {
         Self {
             graph: StableGraph::new(),

View File

@@ -4,7 +4,7 @@ mod types;
 pub mod nodes;
 pub mod preset;
-pub use graph::{Connection, GraphNode, InstrumentGraph};
+pub use graph::{Connection, GraphNode, AudioGraph};
 pub use node_trait::AudioNode;
 pub use preset::{GraphPreset, PresetMetadata, SerializedConnection, SerializedNode};
 pub use types::{ConnectionError, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};

View File

@@ -0,0 +1,127 @@
+use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
+use crate::audio::midi::MidiEvent;
+
+/// Audio input node - receives audio from audio track clip playback
+/// This node acts as the entry point for audio tracks, injecting clip audio into the effects graph
+pub struct AudioInputNode {
+    name: String,
+    inputs: Vec<NodePort>,
+    outputs: Vec<NodePort>,
+    /// Internal buffer to hold injected audio from clips
+    /// This is filled externally by AudioTrack::render() before graph processing
+    audio_buffer: Vec<f32>,
+}
+
+impl AudioInputNode {
+    pub fn new(name: impl Into<String>) -> Self {
+        let name = name.into();
+        // Audio input node has no inputs - audio is injected externally
+        let inputs = vec![];
+        // Outputs stereo audio
+        let outputs = vec![
+            NodePort::new("Audio Out", SignalType::Audio, 0),
+        ];
+        Self {
+            name,
+            inputs,
+            outputs,
+            audio_buffer: Vec::new(),
+        }
+    }
+
+    /// Inject audio from clip playback into this node
+    /// Should be called by AudioTrack::render() before processing the graph
+    pub fn inject_audio(&mut self, audio: &[f32]) {
+        self.audio_buffer.clear();
+        self.audio_buffer.extend_from_slice(audio);
+    }
+
+    /// Clear the internal audio buffer
+    pub fn clear_buffer(&mut self) {
+        self.audio_buffer.clear();
+    }
+}
+
+impl AudioNode for AudioInputNode {
+    fn category(&self) -> NodeCategory {
+        NodeCategory::Input
+    }
+
+    fn inputs(&self) -> &[NodePort] {
+        &self.inputs
+    }
+
+    fn outputs(&self) -> &[NodePort] {
+        &self.outputs
+    }
+
+    fn parameters(&self) -> &[Parameter] {
+        &[] // No parameters
+    }
+
+    fn set_parameter(&mut self, _id: u32, _value: f32) {
+        // No parameters
+    }
+
+    fn get_parameter(&self, _id: u32) -> f32 {
+        0.0
+    }
+
+    fn process(
+        &mut self,
+        _inputs: &[&[f32]],
+        outputs: &mut [&mut [f32]],
+        _midi_inputs: &[&[MidiEvent]],
+        _midi_outputs: &mut [&mut Vec<MidiEvent>],
+        _sample_rate: u32,
+    ) {
+        if outputs.is_empty() {
+            return;
+        }
+        let output = &mut outputs[0];
+        let len = output.len().min(self.audio_buffer.len());
+        // Copy audio from internal buffer to output
+        if len > 0 {
+            output[..len].copy_from_slice(&self.audio_buffer[..len]);
+        }
+        // Clear any remaining samples in output
+        if output.len() > len {
+            output[len..].fill(0.0);
+        }
+    }
+
+    fn reset(&mut self) {
+        self.audio_buffer.clear();
+    }
+
+    fn node_type(&self) -> &str {
+        "AudioInput"
+    }
+
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    fn clone_node(&self) -> Box<dyn AudioNode> {
+        Box::new(Self {
+            name: self.name.clone(),
+            inputs: self.inputs.clone(),
+            outputs: self.outputs.clone(),
+            audio_buffer: Vec::new(), // Don't clone the buffer, start fresh
+        })
+    }
+
+    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
+        self
+    }
+
+    fn as_any(&self) -> &dyn std::any::Any {
+        self
+    }
+}
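
The contract this node defines is worth spelling out: whatever was injected is re-emitted on every process() call until the buffer is cleared or replaced, and any tail beyond the injected length is zero-filled. A hypothetical harness (not part of this commit) illustrating that contract:

    // Assumes only the AudioInputNode API added above.
    let mut node = AudioInputNode::new("Audio Input");
    node.inject_audio(&[0.1, 0.2, 0.3, 0.4]);

    let mut out_buf = vec![0.0f32; 8];
    let mut outputs: Vec<&mut [f32]> = vec![&mut out_buf[..]];
    node.process(&[], &mut outputs, &[], &mut [], 48_000);
    // out_buf[..4] now holds the injected samples; out_buf[4..] is zero-filled.
    // The internal buffer persists until clear_buffer()/reset() or the next inject_audio().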

View File

@ -1,4 +1,5 @@
mod adsr; mod adsr;
mod audio_input;
mod audio_to_cv; mod audio_to_cv;
mod automation_input; mod automation_input;
mod bit_crusher; mod bit_crusher;
@ -39,6 +40,7 @@ mod voice_allocator;
mod wavetable_oscillator; mod wavetable_oscillator;
pub use adsr::ADSRNode; pub use adsr::ADSRNode;
pub use audio_input::AudioInputNode;
pub use audio_to_cv::AudioToCVNode; pub use audio_to_cv::AudioToCVNode;
pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType}; pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType};
pub use bit_crusher::BitCrusherNode; pub use bit_crusher::BitCrusherNode;

View File

@@ -1,5 +1,5 @@
 use crate::audio::midi::MidiEvent;
-use crate::audio::node_graph::{AudioNode, InstrumentGraph, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
+use crate::audio::node_graph::{AudioNode, AudioGraph, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
 const PARAM_VOICE_COUNT: u32 = 0;
 const MAX_VOICES: usize = 16; // Maximum allowed voices
@@ -34,10 +34,10 @@ pub struct VoiceAllocatorNode {
     name: String,
     /// The template graph (edited by user via UI)
-    template_graph: InstrumentGraph,
+    template_graph: AudioGraph,
     /// Runtime voice instances (clones of template)
-    voice_instances: Vec<InstrumentGraph>,
+    voice_instances: Vec<AudioGraph>,
     /// Voice allocation state
     voices: [VoiceState; MAX_VOICES],
@@ -73,11 +73,11 @@ impl VoiceAllocatorNode {
         ];
         // Create empty template graph
-        let template_graph = InstrumentGraph::new(sample_rate, buffer_size);
+        let template_graph = AudioGraph::new(sample_rate, buffer_size);
         // Create voice instances (initially empty clones of template)
-        let voice_instances: Vec<InstrumentGraph> = (0..MAX_VOICES)
-            .map(|_| InstrumentGraph::new(sample_rate, buffer_size))
+        let voice_instances: Vec<AudioGraph> = (0..MAX_VOICES)
+            .map(|_| AudioGraph::new(sample_rate, buffer_size))
             .collect();
         Self {
@@ -94,12 +94,12 @@ impl VoiceAllocatorNode {
     }
     /// Get mutable reference to template graph (for UI editing)
-    pub fn template_graph_mut(&mut self) -> &mut InstrumentGraph {
+    pub fn template_graph_mut(&mut self) -> &mut AudioGraph {
         &mut self.template_graph
     }
     /// Get reference to template graph (for serialization)
-    pub fn template_graph(&self) -> &InstrumentGraph {
+    pub fn template_graph(&self) -> &AudioGraph {
         &self.template_graph
     }

View File

@@ -1,7 +1,8 @@
 use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
 use super::clip::Clip;
 use super::midi::{MidiClip, MidiEvent};
-use super::node_graph::InstrumentGraph;
+use super::node_graph::AudioGraph;
+use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
 use super::pool::AudioPool;
 use std::collections::HashMap;
@@ -289,7 +290,7 @@ pub struct MidiTrack {
     pub id: TrackId,
     pub name: String,
     pub clips: Vec<MidiClip>,
-    pub instrument_graph: InstrumentGraph,
+    pub instrument_graph: AudioGraph,
     pub volume: f32,
     pub muted: bool,
     pub solo: bool,
@@ -311,7 +312,7 @@ impl MidiTrack {
             id,
             name,
             clips: Vec::new(),
-            instrument_graph: InstrumentGraph::new(default_sample_rate, default_buffer_size),
+            instrument_graph: AudioGraph::new(default_sample_rate, default_buffer_size),
             volume: 1.0,
             muted: false,
             solo: false,
@@ -491,11 +492,34 @@ pub struct AudioTrack {
     /// Automation lanes for this track
     pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
     next_automation_id: AutomationLaneId,
+    /// Effects processing graph for this audio track
+    pub effects_graph: AudioGraph,
 }
 impl AudioTrack {
     /// Create a new audio track with default settings
     pub fn new(id: TrackId, name: String) -> Self {
+        // Use default sample rate and a large buffer size that can accommodate any callback
+        let default_sample_rate = 48000;
+        let default_buffer_size = 8192;
+        // Create the effects graph with default AudioInput -> AudioOutput chain
+        let mut effects_graph = AudioGraph::new(default_sample_rate, default_buffer_size);
+        // Add AudioInput node
+        let input_node = Box::new(AudioInputNode::new("Audio Input"));
+        let input_id = effects_graph.add_node(input_node);
+        // Add AudioOutput node
+        let output_node = Box::new(AudioOutputNode::new("Audio Output"));
+        let output_id = effects_graph.add_node(output_node);
+        // Connect AudioInput -> AudioOutput
+        let _ = effects_graph.connect(input_id, 0, output_id, 0);
+        // Set the AudioOutput node as the graph's output
+        effects_graph.set_output_node(Some(output_id));
         Self {
             id,
             name,
@@ -505,6 +529,7 @@ impl AudioTrack {
             solo: false,
             automation_lanes: HashMap::new(),
             next_automation_id: 0,
+            effects_graph,
         }
     }
@@ -571,15 +596,17 @@ impl AudioTrack {
         let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
         let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;
+        // Create a temporary buffer for clip rendering
+        let mut clip_buffer = vec![0.0f32; output.len()];
         let mut rendered = 0;
-        // Render all active clips
+        // Render all active clips into the temporary buffer
         for clip in &self.clips {
             // Check if clip overlaps with current buffer time range
             if clip.start_time < buffer_end_seconds && clip.end_time() > playhead_seconds {
                 rendered += self.render_clip(
                     clip,
-                    output,
+                    &mut clip_buffer,
                     pool,
                     playhead_seconds,
                     sample_rate,
@@ -588,6 +615,25 @@ impl AudioTrack {
             }
         }
+        // Clear output buffer before graph processing to ensure clean output
+        output.fill(0.0);
+        // Find and inject audio into the AudioInputNode
+        let node_indices: Vec<_> = self.effects_graph.node_indices().collect();
+        for node_idx in node_indices {
+            if let Some(graph_node) = self.effects_graph.get_graph_node_mut(node_idx) {
+                if graph_node.node.node_type() == "AudioInput" {
+                    if let Some(input_node) = graph_node.node.as_any_mut().downcast_mut::<AudioInputNode>() {
+                        input_node.inject_audio(&clip_buffer);
+                        break;
+                    }
+                }
+            }
+        }
+        // Process through the effects graph (this will write to output buffer)
+        self.effects_graph.process(output, &[], playhead_seconds);
         // Evaluate and apply automation
         let effective_volume = self.evaluate_automation_at_time(playhead_seconds);
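
Combined with the default AudioInput -> AudioOutput chain built in AudioTrack::new() above, this render path makes a freshly created audio track a pass-through: clips render into clip_buffer, the buffer is injected, and the graph writes it back out. A hypothetical end-to-end sketch (not in this commit) using only APIs shown in this diff:

    // Build the same default chain AudioTrack::new() creates.
    let mut graph = AudioGraph::new(48_000, 8_192);
    let input_id = graph.add_node(Box::new(AudioInputNode::new("Audio Input")));
    let output_id = graph.add_node(Box::new(AudioOutputNode::new("Audio Output")));
    let _ = graph.connect(input_id, 0, output_id, 0);
    graph.set_output_node(Some(output_id));

    // Mirror render(): inject one buffer of clip audio, then process.
    let clip_audio = vec![0.5f32; 512];
    if let Some(graph_node) = graph.get_graph_node_mut(input_id) {
        if let Some(input) = graph_node.node.as_any_mut().downcast_mut::<AudioInputNode>() {
            input.inject_audio(&clip_audio);
        }
    }
    let mut out = vec![0.0f32; 512];
    graph.process(&mut out, &[], 0.0);
    // `out` should now carry the injected samples, before track volume/automation.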

View File

@@ -17,7 +17,7 @@ pub use audio::{
     Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
     TrackNode,
 };
-pub use audio::node_graph::{GraphPreset, InstrumentGraph, PresetMetadata, SerializedConnection, SerializedNode};
+pub use audio::node_graph::{GraphPreset, AudioGraph, PresetMetadata, SerializedConnection, SerializedNode};
 pub use command::{AudioEvent, Command, OscilloscopeData};
 pub use command::types::AutomationKeyframeData;
 pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter};

View File

@@ -1161,6 +1161,19 @@ async function handleAudioEvent(event) {
             updateRecordingClipDuration(event.clip_id, event.duration);
             break;
+        case 'GraphNodeAdded':
+            console.log('[FRONTEND] GraphNodeAdded event - track:', event.track_id, 'node_id:', event.node_id, 'node_type:', event.node_type);
+            // Resolve the pending promise with the correct backend ID
+            if (window.pendingNodeUpdate) {
+                const { drawflowNodeId, nodeType, resolve } = window.pendingNodeUpdate;
+                if (nodeType === event.node_type && resolve) {
+                    console.log('[FRONTEND] Resolving promise for node', drawflowNodeId, 'with backend ID:', event.node_id);
+                    resolve(event.node_id);
+                    window.pendingNodeUpdate = null;
+                }
+            }
+            break;
         case 'RecordingStopped':
             console.log('[FRONTEND] RecordingStopped event - clip:', event.clip_id, 'pool_index:', event.pool_index, 'waveform peaks:', event.waveform?.length);
             console.log('[FRONTEND] Current recording state - isRecording:', context.isRecording, 'recordingClipId:', context.recordingClipId);
@@ -6367,16 +6380,29 @@ async function renderMenu() {
    }
    updateMenu();
-    // Helper function to get the current MIDI track
-    function getCurrentMidiTrack() {
+    // Helper function to get the current track (MIDI or Audio) for node graph editing
+    function getCurrentTrack() {
        const activeLayer = context.activeObject?.activeLayer;
-        if (!activeLayer || !(activeLayer instanceof AudioTrack) || activeLayer.type !== 'midi') {
+        if (!activeLayer || !(activeLayer instanceof AudioTrack)) {
            return null;
        }
        if (activeLayer.audioTrackId === null) {
            return null;
        }
-        return activeLayer.audioTrackId;
+        // Return both track ID and track type
+        return {
+            trackId: activeLayer.audioTrackId,
+            trackType: activeLayer.type // 'midi' or 'audio'
+        };
+    }
+    // Backwards compatibility: function to get just the MIDI track ID
+    function getCurrentMidiTrack() {
+        const trackInfo = getCurrentTrack();
+        if (trackInfo && trackInfo.trackType === 'midi') {
+            return trackInfo.trackId;
+        }
+        return null;
    }
    function nodeEditor() {
@@ -6413,7 +6439,8 @@ function nodeEditor() {
        // Create breadcrumb/context header
        const header = document.createElement("div");
        header.className = "node-editor-header";
-        header.innerHTML = '<div class="context-breadcrumb">Main Graph</div>';
+        // Initial header will be updated by updateBreadcrumb() after track info is available
+        header.innerHTML = '<div class="context-breadcrumb">Node Graph</div>';
        container.appendChild(header);
        // Create the Drawflow canvas
@@ -6490,6 +6517,9 @@ function nodeEditor() {
        // Function to update palette based on context and selected category
        function updatePalette() {
            const isTemplate = editingContext !== null;
+            const trackInfo = getCurrentTrack();
+            const isMIDI = trackInfo?.trackType === 'midi';
+            const isAudio = trackInfo?.trackType === 'audio';
            if (selectedCategory === null && !searchQuery) {
                // Show categories when no search query
@ -6527,8 +6557,15 @@ function nodeEditor() {
if (isTemplate) { if (isTemplate) {
// In template: hide VoiceAllocator, AudioOutput, MidiInput // In template: hide VoiceAllocator, AudioOutput, MidiInput
return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput'; return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput';
} else if (isMIDI) {
// MIDI track: hide AudioInput, show synth nodes
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && node.type !== 'AudioInput';
} else if (isAudio) {
// Audio track: hide synth/MIDI nodes, show AudioInput
const synthNodes = ['Oscillator', 'FMSynth', 'WavetableOscillator', 'SimpleSampler', 'MultiSampler', 'VoiceAllocator', 'MidiInput', 'MidiToCV'];
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && !synthNodes.includes(node.type);
} else { } else {
// In main graph: hide TemplateInput/TemplateOutput // Fallback: hide TemplateInput/TemplateOutput
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput'; return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput';
} }
}); });
@ -6563,8 +6600,15 @@ function nodeEditor() {
if (isTemplate) { if (isTemplate) {
// In template: hide VoiceAllocator, AudioOutput, MidiInput // In template: hide VoiceAllocator, AudioOutput, MidiInput
return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput'; return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput';
} else if (isMIDI) {
// MIDI track: hide AudioInput, show synth nodes
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && node.type !== 'AudioInput';
} else if (isAudio) {
// Audio track: hide synth/MIDI nodes, show AudioInput
const synthNodes = ['Oscillator', 'FMSynth', 'WavetableOscillator', 'SimpleSampler', 'MultiSampler', 'VoiceAllocator', 'MidiInput', 'MidiToCV'];
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && !synthNodes.includes(node.type);
} else { } else {
// In main graph: hide TemplateInput/TemplateOutput // Fallback: hide TemplateInput/TemplateOutput
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput'; return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput';
} }
}); });
@@ -7275,13 +7319,14 @@ function nodeEditor() {
            // Send command to backend
            // Check editing context first (dedicated template view), then parent node (inline editing)
-            const trackId = getCurrentMidiTrack();
-            if (trackId === null) {
-                console.error('No MIDI track selected');
-                showNodeEditorError(container, 'Please select a MIDI track first');
+            const trackInfo = getCurrentTrack();
+            if (trackInfo === null) {
+                console.error('No track selected');
+                alert('Please select a track first');
                editor.removeNodeId(`node-${drawflowNodeId}`);
                return;
            }
+            const trackId = trackInfo.trackId;
            // Determine if we're adding to a template or main graph
            let commandName, commandArgs;
@@ -7316,7 +7361,29 @@ function nodeEditor() {
                };
            }
-            invoke(commandName, commandArgs).then(backendNodeId => {
+            console.log(`[DEBUG] Invoking ${commandName} with args:`, commandArgs);
+            // Create a promise that resolves when the GraphNodeAdded event arrives
+            const eventPromise = new Promise((resolve) => {
+                window.pendingNodeUpdate = {
+                    drawflowNodeId,
+                    nodeType,
+                    resolve: (backendNodeId) => {
+                        console.log(`[DEBUG] Event promise resolved with backend ID: ${backendNodeId}`);
+                        resolve(backendNodeId);
+                    }
+                };
+            });
+            // Wait for both the invoke response and the event
+            Promise.all([
+                invoke(commandName, commandArgs),
+                eventPromise
+            ]).then(([invokeReturnedId, eventBackendId]) => {
+                console.log(`[DEBUG] Both returned - invoke: ${invokeReturnedId}, event: ${eventBackendId}`);
+                // Use the event's backend ID as it's the authoritative source
+                const backendNodeId = eventBackendId;
                console.log(`Node ${nodeType} added with backend ID: ${backendNodeId} (parent: ${parentNodeId})`);
                // Store backend node ID using Drawflow's update method
@@ -7325,12 +7392,13 @@ function nodeEditor() {
                console.log("Verifying stored backend ID:", editor.getNodeFromId(drawflowNodeId).data.backendId);
                // Cache node data for undo/redo
+                const trackInfo = getCurrentTrack();
                nodeDataCache.set(drawflowNodeId, {
                    nodeType: nodeType,
                    backendId: backendNodeId,
                    position: { x, y },
                    parentNodeId: parentNodeId,
-                    trackId: getCurrentMidiTrack()
+                    trackId: trackInfo ? trackInfo.trackId : null
                });
                // Record action for undo (node is already added to frontend and backend)
@@ -7350,10 +7418,10 @@ function nodeEditor() {
                // If this is an AudioOutput node, automatically set it as the graph output
                if (nodeType === "AudioOutput") {
                    console.log(`Setting node ${backendNodeId} as graph output`);
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                        invoke("graph_set_output_node", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                            nodeId: backendNodeId
                        }).then(() => {
                            console.log("Output node set successfully");
@@ -7365,8 +7433,9 @@ function nodeEditor() {
                // If this is an AutomationInput node, create timeline curve
                if (nodeType === "AutomationInput" && !parentNodeId) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
+                        const currentTrackId = trackInfo.trackId;
                        // Find the audio/MIDI track
                        const track = root.audioTracks?.find(t => t.audioTrackId === currentTrackId);
                        if (track) {
@@ -7398,8 +7467,9 @@ function nodeEditor() {
                // If this is an Oscilloscope node, start the visualization
                if (nodeType === "Oscilloscope") {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
+                        const currentTrackId = trackInfo.trackId;
                        console.log(`Starting oscilloscope visualization for node ${drawflowNodeId} (backend ID: ${backendNodeId})`);
                        // Wait for DOM to update before starting visualization
                        setTimeout(() => {
@@ -7579,7 +7649,21 @@ function nodeEditor() {
                if (param.name === 'trigger_mode') {
                    const modes = ['Free', 'Rising', 'Falling', 'V/oct'];
                    displaySpan.textContent = modes[Math.round(value)] || 'Free';
-                } else {
+                }
+                // Special formatting for Phaser rate in sync mode
+                else if (param.name === 'rate' && nodeData.name === 'Phaser') {
+                    const syncCheckbox = nodeElement.querySelector(`#sync-${nodeId}`);
+                    if (syncCheckbox && syncCheckbox.checked) {
+                        const beatDivisions = [
+                            '4 bars', '2 bars', '1 bar', '1/2', '1/4', '1/8', '1/16', '1/32', '1/2T', '1/4T', '1/8T'
+                        ];
+                        const idx = Math.round(value);
+                        displaySpan.textContent = beatDivisions[Math.min(10, Math.max(0, idx))];
+                    } else {
+                        displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2);
+                    }
+                }
+                else {
                    displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2);
                }
@@ -7593,10 +7677,10 @@ function nodeEditor() {
                // Send to backend in real-time
                if (nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                        invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                            nodeId: nodeData.data.backendId,
                            paramId: paramId,
                            value: value
@@ -7672,10 +7756,10 @@ function nodeEditor() {
                // Send to backend
                if (nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                        invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                            nodeId: nodeData.data.backendId,
                            paramId: paramId,
                            value: value
@@ -7750,10 +7834,10 @@ function nodeEditor() {
                // Send to backend
                const nodeData = editor.getNodeFromId(nodeId);
                if (nodeData && nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                        invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                            nodeId: nodeData.data.backendId,
                            paramId: paramId,
                            value: value
@@ -7775,6 +7859,78 @@ function nodeEditor() {
                });
            });
+            // Handle checkboxes
+            const checkboxes = nodeElement.querySelectorAll('input[type="checkbox"][data-param]');
+            checkboxes.forEach(checkbox => {
+                checkbox.addEventListener("change", (e) => {
+                    const paramId = parseInt(e.target.getAttribute("data-param"));
+                    const value = e.target.checked ? 1.0 : 0.0;
+                    console.log(`[setupNodeParameters] Checkbox change - nodeId: ${nodeId}, paramId: ${paramId}, value: ${value}`);
+                    // Send to backend
+                    const nodeData = editor.getNodeFromId(nodeId);
+                    if (nodeData && nodeData.data.backendId !== null) {
+                        const trackInfo = getCurrentTrack();
+                        if (trackInfo !== null) {
+                            invoke("graph_set_parameter", {
+                                trackId: trackInfo.trackId,
+                                nodeId: nodeData.data.backendId,
+                                paramId: paramId,
+                                value: value
+                            }).then(() => {
+                                console.log(`Parameter ${paramId} set to ${value}`);
+                            }).catch(err => {
+                                console.error("Failed to set parameter:", err);
+                            });
+                        }
+                    }
+                    // Special handling for Phaser sync checkbox
+                    if (checkbox.id.startsWith('sync-')) {
+                        const rateSlider = nodeElement.querySelector(`#rate-slider-${nodeId}`);
+                        const rateDisplay = nodeElement.querySelector(`#rate-${nodeId}`);
+                        const rateUnit = nodeElement.querySelector(`#rate-unit-${nodeId}`);
+                        if (rateSlider && rateDisplay && rateUnit) {
+                            if (e.target.checked) {
+                                // Sync mode: Use beat divisions
+                                // Map slider 0-10 to different note divisions
+                                // 0: 4 bars, 1: 2 bars, 2: 1 bar, 3: 1/2, 4: 1/4, 5: 1/8, 6: 1/16, 7: 1/32, 8: 1/2T, 9: 1/4T, 10: 1/8T
+                                const beatDivisions = [
+                                    { label: '4 bars', multiplier: 16.0 },
+                                    { label: '2 bars', multiplier: 8.0 },
+                                    { label: '1 bar', multiplier: 4.0 },
+                                    { label: '1/2', multiplier: 2.0 },
+                                    { label: '1/4', multiplier: 1.0 },
+                                    { label: '1/8', multiplier: 0.5 },
+                                    { label: '1/16', multiplier: 0.25 },
+                                    { label: '1/32', multiplier: 0.125 },
+                                    { label: '1/2T', multiplier: 2.0/3.0 },
+                                    { label: '1/4T', multiplier: 1.0/3.0 },
+                                    { label: '1/8T', multiplier: 0.5/3.0 }
+                                ];
+                                rateSlider.min = '0';
+                                rateSlider.max = '10';
+                                rateSlider.step = '1';
+                                const idx = Math.round(parseFloat(rateSlider.value) * 10 / 10);
+                                rateSlider.value = Math.min(10, Math.max(0, idx));
+                                rateDisplay.textContent = beatDivisions[parseInt(rateSlider.value)].label;
+                                rateUnit.textContent = '';
+                            } else {
+                                // Free mode: Hz
+                                rateSlider.min = '0.1';
+                                rateSlider.max = '10.0';
+                                rateSlider.step = '0.1';
+                                rateDisplay.textContent = parseFloat(rateSlider.value).toFixed(1);
+                                rateUnit.textContent = ' Hz';
+                            }
+                        }
+                    }
+                });
+            });
            // Handle Load Sample button for SimpleSampler
            const loadSampleBtn = nodeElement.querySelector(".load-sample-btn");
            if (loadSampleBtn) {
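
The sync mapping above only changes what the slider sends (an index 0-10) and how it is displayed; the backend still has to turn that index into an LFO rate. That conversion is not part of this diff; a plausible sketch (all names hypothetical, assuming the Phaser node is told the host BPM):

    // Quarter-note lengths per slider index, mirroring the frontend table:
    // 4 bars .. 1/32, then 1/2T, 1/4T, 1/8T.
    const DIVISION_BEATS: [f32; 11] = [
        16.0, 8.0, 4.0, 2.0, 1.0, 0.5, 0.25, 0.125,
        2.0 / 3.0, 1.0 / 3.0, 0.5 / 3.0,
    ];

    fn synced_rate_hz(slider_index: usize, bpm: f32) -> f32 {
        let beats = DIVISION_BEATS[slider_index.min(10)];
        // One LFO cycle per `beats` quarter notes: cycles/s = (bpm / 60) / beats.
        (bpm / 60.0) / beats
    }
    // e.g. at 120 BPM, "1/4" (index 4) gives 2.0 Hz and "1 bar" (index 2) gives 0.5 Hz.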
@@ -8583,11 +8739,12 @@ function nodeEditor() {
            }
        }, 10);
-        // Send to backend (skip if action is handling it)
+        // Send to backend
        console.log("Backend IDs - output:", outputNode.data.backendId, "input:", inputNode.data.backendId);
-        if (!suppressActionRecording && outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
-            const currentTrackId = getCurrentMidiTrack();
-            if (currentTrackId === null) return;
+        if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
+            const trackInfo = getCurrentTrack();
+            if (trackInfo === null) return;
+            const currentTrackId = trackInfo.trackId;
            // Check if we're in template editing mode (dedicated view)
            if (editingContext) {
@@ -8658,23 +8815,25 @@ function nodeEditor() {
            }).then(async () => {
                console.log("Connection successful");
-                // Record action for undo
-                redoStack.length = 0;
-                undoStack.push({
-                    name: "graphAddConnection",
-                    action: {
-                        trackId: currentTrackId,
-                        fromNode: outputNode.data.backendId,
-                        fromPort: outputPort,
-                        toNode: inputNode.data.backendId,
-                        toPort: inputPort,
-                        // Store frontend IDs for disconnection
-                        frontendFromId: connection.output_id,
-                        frontendToId: connection.input_id,
-                        fromPortClass: connection.output_class,
-                        toPortClass: connection.input_class
-                    }
-                });
+                // Record action for undo (only if not suppressing)
+                if (!suppressActionRecording) {
+                    redoStack.length = 0;
+                    undoStack.push({
+                        name: "graphAddConnection",
+                        action: {
+                            trackId: currentTrackId,
+                            fromNode: outputNode.data.backendId,
+                            fromPort: outputPort,
+                            toNode: inputNode.data.backendId,
+                            toPort: inputPort,
+                            // Store frontend IDs for disconnection
+                            frontendFromId: connection.output_id,
+                            frontendToId: connection.input_id,
+                            fromPortClass: connection.output_class,
+                            toPortClass: connection.input_class
+                        }
+                    });
+                }
                // Auto-name AutomationInput nodes when connected
                await updateAutomationName(
@@ -8741,35 +8900,37 @@ function nodeEditor() {
            }
        }
-        // Send to backend (skip if action is handling it)
-        if (!suppressActionRecording && outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
-            const currentTrackId = getCurrentMidiTrack();
-            if (currentTrackId !== null) {
+        // Send to backend
+        if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
+            const trackInfo = getCurrentTrack();
+            if (trackInfo !== null) {
                invoke("graph_disconnect", {
-                    trackId: currentTrackId,
+                    trackId: trackInfo.trackId,
                    fromNode: outputNode.data.backendId,
                    fromPort: outputPort,
                    toNode: inputNode.data.backendId,
                    toPort: inputPort
                }).then(() => {
-                    // Record action for undo
-                    redoStack.length = 0;
-                    undoStack.push({
-                        name: "graphRemoveConnection",
-                        action: {
-                            trackId: currentTrackId,
-                            fromNode: outputNode.data.backendId,
-                            fromPort: outputPort,
-                            toNode: inputNode.data.backendId,
-                            toPort: inputPort,
-                            // Store frontend IDs for reconnection
-                            frontendFromId: connection.output_id,
-                            frontendToId: connection.input_id,
-                            fromPortClass: connection.output_class,
-                            toPortClass: connection.input_class
-                        }
-                    });
-                    updateMenu();
+                    // Record action for undo (only if not suppressing)
+                    if (!suppressActionRecording) {
+                        redoStack.length = 0;
+                        undoStack.push({
+                            name: "graphRemoveConnection",
+                            action: {
+                                trackId: trackInfo.trackId,
+                                fromNode: outputNode.data.backendId,
+                                fromPort: outputPort,
+                                toNode: inputNode.data.backendId,
+                                toPort: inputPort,
+                                // Store frontend IDs for reconnection
+                                frontendFromId: connection.output_id,
+                                frontendToId: connection.input_id,
+                                fromPortClass: connection.output_class,
+                                toPortClass: connection.input_class
+                            }
+                        });
+                        updateMenu();
+                    }
                }).catch(err => {
                    console.error("Failed to disconnect nodes:", err);
                });
@@ -8793,15 +8954,24 @@ function nodeEditor() {
    function updateBreadcrumb() {
        const breadcrumb = header.querySelector('.context-breadcrumb');
        if (editingContext) {
+            // Determine main graph name based on track type
+            const trackInfo = getCurrentTrack();
+            const mainGraphName = trackInfo?.trackType === 'audio' ? 'Effects Graph' : 'Instrument Graph';
            breadcrumb.innerHTML = `
-                Main Graph &gt;
+                ${mainGraphName} &gt;
                <span class="template-name">${editingContext.voiceAllocatorName} Template</span>
                <button class="exit-template-btn">Exit Template</button>
            `;
            const exitBtn = breadcrumb.querySelector('.exit-template-btn');
            exitBtn.addEventListener('click', exitTemplate);
        } else {
-            breadcrumb.textContent = 'Main Graph';
+            // Not in template mode - show main graph name based on track type
+            const trackInfo = getCurrentTrack();
+            const graphName = trackInfo?.trackType === 'audio' ? 'Effects Graph' :
+                trackInfo?.trackType === 'midi' ? 'Instrument Graph' :
+                'Node Graph';
+            breadcrumb.textContent = graphName;
        }
    }
@@ -8825,18 +8995,24 @@ function nodeEditor() {
    async function reloadGraph() {
        if (!editor) return;
-        const trackId = getCurrentMidiTrack();
+        const trackInfo = getCurrentTrack();
        // Clear editor first
        editor.clearModuleSelected();
        editor.clear();
-        // If no MIDI track selected, just leave it cleared
-        if (trackId === null) {
-            console.log('No MIDI track selected, editor cleared');
+        // Update UI based on track type
+        updateBreadcrumb();
+        updatePalette();
+        // If no track selected, just leave it cleared
+        if (trackInfo === null) {
+            console.log('No track selected, editor cleared');
            return;
        }
+        const trackId = trackInfo.trackId;
        try {
            // Get graph based on editing context
            let graphJson;
@@ -9545,11 +9721,12 @@ function addPresetItemHandlers(listElement) {
 }
 async function loadPreset(presetPath) {
-    const trackId = getCurrentMidiTrack();
-    if (trackId === null) {
-        alert('Please select a MIDI track first');
+    const trackInfo = getCurrentTrack();
+    if (trackInfo === null) {
+        alert('Please select a track first');
         return;
     }
+    const trackId = trackInfo.trackId;
     try {
         await invoke('graph_load_preset', {
@@ -9567,9 +9744,9 @@ async function loadPreset(presetPath) {
 }
 function showSavePresetDialog(container) {
-    const currentTrackId = getCurrentMidiTrack();
-    if (currentTrackId === null) {
-        alert('Please select a MIDI track first');
+    const trackInfo = getCurrentTrack();
+    if (trackInfo === null) {
+        alert('Please select a track first');
         return;
     }
@@ -9626,7 +9803,7 @@ function showSavePresetDialog(container) {
    try {
        await invoke('graph_save_preset', {
-            trackId: currentTrackId,
+            trackId: trackInfo.trackId,
            presetName: name,
            description,
            tags

View File

@@ -373,6 +373,23 @@ export const nodeTypes = {
        `
    },
+    AudioInput: {
+        name: 'AudioInput',
+        category: NodeCategory.INPUT,
+        description: 'Audio track clip input - receives audio from timeline clips',
+        inputs: [],
+        outputs: [
+            { name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
+        ],
+        parameters: [],
+        getHTML: (nodeId) => `
+            <div class="node-content">
+                <div class="node-title">Audio Input</div>
+                <div class="node-info">Audio from clips</div>
+            </div>
+        `
+    },
    AudioOutput: {
        name: 'AudioOutput',
        category: NodeCategory.OUTPUT,
@ -1335,14 +1352,21 @@ export const nodeTypes = {
{ id: 1, name: 'depth', label: 'Depth', min: 0.0, max: 1.0, default: 0.7, unit: '' }, { id: 1, name: 'depth', label: 'Depth', min: 0.0, max: 1.0, default: 0.7, unit: '' },
{ id: 2, name: 'stages', label: 'Stages', min: 2, max: 8, default: 6, unit: '' }, { id: 2, name: 'stages', label: 'Stages', min: 2, max: 8, default: 6, unit: '' },
{ id: 3, name: 'feedback', label: 'Feedback', min: -0.95, max: 0.95, default: 0.5, unit: '' }, { id: 3, name: 'feedback', label: 'Feedback', min: -0.95, max: 0.95, default: 0.5, unit: '' },
{ id: 4, name: 'wetdry', label: 'Wet/Dry', min: 0.0, max: 1.0, default: 0.5, unit: '' } { id: 4, name: 'wetdry', label: 'Wet/Dry', min: 0.0, max: 1.0, default: 0.5, unit: '' },
{ id: 5, name: 'sync', label: 'Sync to BPM', min: 0, max: 1, default: 0, unit: '' }
], ],
getHTML: (nodeId) => ` getHTML: (nodeId) => `
<div class="node-content"> <div class="node-content">
<div class="node-title">Phaser</div> <div class="node-title">Phaser</div>
<div class="node-param"> <div class="node-param">
<label>Rate: <span id="rate-${nodeId}">0.5</span> Hz</label> <label>
<input type="range" data-node="${nodeId}" data-param="0" min="0.1" max="10.0" value="0.5" step="0.1"> <input type="checkbox" id="sync-${nodeId}" data-node="${nodeId}" data-param="5">
Sync to BPM
</label>
</div>
<div class="node-param">
<label>Rate: <span id="rate-${nodeId}">0.5</span><span id="rate-unit-${nodeId}"> Hz</span></label>
<input type="range" id="rate-slider-${nodeId}" data-node="${nodeId}" data-param="0" min="0.1" max="10.0" value="0.5" step="0.1">
</div> </div>
<div class="node-param"> <div class="node-param">
<label>Depth: <span id="depth-${nodeId}">0.7</span></label> <label>Depth: <span id="depth-${nodeId}">0.7</span></label>

View File

@@ -2929,8 +2929,8 @@ class TimelineWindowV2 extends Widget {
        this.context.selection = []
        this.context.shapeselection = []
-        // If this is a MIDI track, reload the node editor
-        if (track.object.type === 'midi') {
+        // Reload the node editor for both MIDI and audio tracks
+        if (track.object.type === 'midi' || track.object.type === 'audio') {
            setTimeout(() => this.context.reloadNodeEditor?.(), 50);
        }
    }