use nodes for audio tracks
This commit is contained in:
parent 988bbfd1a9
commit 66c4746767
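Summary of the change, drawn from the diff below: `InstrumentGraph` is renamed to the track-agnostic `AudioGraph`, audio tracks gain an `effects_graph` alongside the MIDI tracks' `instrument_graph`, and a new `AudioInputNode` feeds clip playback into an audio track's graph before processing. A minimal self-contained sketch of the resulting per-buffer render flow (illustration only; the real types live in `crate::audio`):

    // Minimal stand-in model of the new audio-track signal flow.
    struct EffectsGraph {
        input_buffer: Vec<f32>, // stands in for AudioInputNode's internal buffer
    }

    impl EffectsGraph {
        fn inject_audio(&mut self, clip_audio: &[f32]) {
            self.input_buffer.clear();
            self.input_buffer.extend_from_slice(clip_audio);
        }
        fn process(&mut self, output: &mut [f32]) {
            // A real graph runs every node; here the "effect" is a pass-through.
            let n = output.len().min(self.input_buffer.len());
            output[..n].copy_from_slice(&self.input_buffer[..n]);
            output[n..].fill(0.0);
        }
    }

    fn main() {
        let mut graph = EffectsGraph { input_buffer: Vec::new() };
        let clip_buffer = vec![0.25f32; 4];   // 1. clips rendered into a scratch buffer
        let mut output = vec![0.0f32; 8];
        graph.inject_audio(&clip_buffer);     // 2. scratch buffer fed to the AudioInputNode
        graph.process(&mut output);           // 3. graph writes the processed audio
        assert_eq!(output[..4], [0.25; 4]);
    }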
@@ -1,7 +1,7 @@
 use crate::audio::buffer_pool::BufferPool;
 use crate::audio::clip::ClipId;
 use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
-use crate::audio::node_graph::{nodes::*, InstrumentGraph};
+use crate::audio::node_graph::{nodes::*, AudioGraph};
 use crate::audio::pool::AudioPool;
 use crate::audio::project::Project;
 use crate::audio::recording::{MidiRecordingState, RecordingState};
@@ -689,12 +689,27 @@ impl Engine {

             // Node graph commands
             Command::GraphAddNode(track_id, node_type, x, y) => {
-                // Get MIDI track (graphs are only for MIDI tracks currently)
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        // Create the node based on type
-                        let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
+                eprintln!("[DEBUG] GraphAddNode received: track_id={}, node_type={}, x={}, y={}", track_id, node_type, x, y);
+
+                // Get the track's graph (works for both MIDI and Audio tracks)
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        eprintln!("[DEBUG] Found MIDI track, using instrument_graph");
+                        Some(&mut track.instrument_graph)
+                    },
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[DEBUG] Found Audio track, using effects_graph");
+                        Some(&mut track.effects_graph)
+                    },
+                    _ => {
+                        eprintln!("[DEBUG] Track not found or invalid type!");
+                        None
+                    }
+                };
+
+                if let Some(graph) = graph {
+                    // Create the node based on type
+                    let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
                         "Oscillator" => Box::new(OscillatorNode::new("Oscillator".to_string())),
                         "Gain" => Box::new(GainNode::new("Gain".to_string())),
                         "Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
@@ -729,6 +744,7 @@ impl Engine {
                         "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
                         "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
                         "AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
+                        "AudioInput" => Box::new(AudioInputNode::new("Audio Input".to_string())),
                         "AutomationInput" => Box::new(AutomationInputNode::new("Automation".to_string())),
                         "Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
                         "TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
@@ -744,21 +760,29 @@ impl Engine {
                         }
                     };

-                        // Add node to graph
-                        let node_idx = graph.add_node(node);
-                        let node_id = node_idx.index() as u32;
+                    // Add node to graph
+                    let node_idx = graph.add_node(node);
+                    let node_id = node_idx.index() as u32;
+                    eprintln!("[DEBUG] Node added with index: {:?}, converted to u32 id: {}", node_idx, node_id);

-                        // Save position
-                        graph.set_node_position(node_idx, x, y);
+                    // Save position
+                    graph.set_node_position(node_idx, x, y);

-                        // Automatically set MIDI-receiving nodes as MIDI targets
-                        if node_type == "MidiInput" || node_type == "VoiceAllocator" {
-                            graph.set_midi_target(node_idx, true);
-                        }
-
-                        // Emit success event
-                        let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
+                    // Automatically set MIDI-receiving nodes as MIDI targets
+                    if node_type == "MidiInput" || node_type == "VoiceAllocator" {
+                        graph.set_midi_target(node_idx, true);
+                    }
+
+                    // Automatically set AudioOutput nodes as the graph output
+                    if node_type == "AudioOutput" {
+                        graph.set_output_node(Some(node_idx));
+                    }
+
+                    eprintln!("[DEBUG] Emitting GraphNodeAdded event: track_id={}, node_id={}, node_type={}", track_id, node_id, node_type);
+                    // Emit success event
+                    let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
+                } else {
+                    eprintln!("[DEBUG] Graph was None, node not added!");
+                }
             }

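Note how the same track-to-graph `match` recurs in every graph command handler below (remove, connect, disconnect, set-parameter, set-output, save-preset). A helper along the following lines would collapse that repetition; `get_graph_mut` is a hypothetical name and is not part of this commit:

    // Hypothetical helper (not in this commit): resolve a track's graph once,
    // so each handler can do `if let Some(graph) = self.get_graph_mut(track_id)`.
    impl Engine {
        fn get_graph_mut(&mut self, track_id: TrackId) -> Option<&mut AudioGraph> {
            match self.project.get_track_mut(track_id) {
                Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
                Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
                _ => None,
            }
        }
    }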
@@ -836,35 +860,55 @@ impl Engine {
             }

             Command::GraphRemoveNode(track_id, node_index) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        graph.remove_node(node_idx);
-                        let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                    }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    graph.remove_node(node_idx);
+                    let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                }
             }

             Command::GraphConnect(track_id, from, from_port, to, to_port) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let from_idx = NodeIndex::new(from as usize);
-                        let to_idx = NodeIndex::new(to as usize);
-
-                        match graph.connect(from_idx, from_port, to_idx, to_port) {
-                            Ok(()) => {
-                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                            }
-                            Err(e) => {
-                                let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
-                                    track_id,
-                                    format!("{:?}", e)
-                                ));
-                            }
+                eprintln!("[DEBUG] GraphConnect received: track_id={}, from={}, from_port={}, to={}, to_port={}", track_id, from, from_port, to, to_port);
+
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        eprintln!("[DEBUG] Found MIDI track for connection");
+                        Some(&mut track.instrument_graph)
+                    },
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[DEBUG] Found Audio track for connection");
+                        Some(&mut track.effects_graph)
+                    },
+                    _ => {
+                        eprintln!("[DEBUG] Track not found for connection!");
+                        None
+                    }
+                };
+                if let Some(graph) = graph {
+                    let from_idx = NodeIndex::new(from as usize);
+                    let to_idx = NodeIndex::new(to as usize);
+                    eprintln!("[DEBUG] Attempting to connect nodes: {:?} port {} -> {:?} port {}", from_idx, from_port, to_idx, to_port);

+                    match graph.connect(from_idx, from_port, to_idx, to_port) {
+                        Ok(()) => {
+                            eprintln!("[DEBUG] Connection successful!");
+                            let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                        }
+                        Err(e) => {
+                            eprintln!("[DEBUG] Connection failed: {:?}", e);
+                            let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
+                                track_id,
+                                format!("{:?}", e)
+                            ));
+                        }
+                    }
+                } else {
+                    eprintln!("[DEBUG] No graph found, connection not made");
+                }
             }

@@ -891,25 +935,37 @@ impl Engine {
             }

             Command::GraphDisconnect(track_id, from, from_port, to, to_port) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let from_idx = NodeIndex::new(from as usize);
-                        let to_idx = NodeIndex::new(to as usize);
-                        graph.disconnect(from_idx, from_port, to_idx, to_port);
-                        let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                    }
+                eprintln!("[AUDIO ENGINE] GraphDisconnect: track={}, from={}, from_port={}, to={}, to_port={}", track_id, from, from_port, to, to_port);
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => {
+                        eprintln!("[AUDIO ENGINE] Found audio track, disconnecting in effects_graph");
+                        Some(&mut track.effects_graph)
+                    }
+                    _ => {
+                        eprintln!("[AUDIO ENGINE] Track not found!");
+                        None
+                    }
+                };
+                if let Some(graph) = graph {
+                    let from_idx = NodeIndex::new(from as usize);
+                    let to_idx = NodeIndex::new(to as usize);
+                    graph.disconnect(from_idx, from_port, to_idx, to_port);
+                    eprintln!("[AUDIO ENGINE] Disconnect completed");
+                    let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                }
             }

             Command::GraphSetParameter(track_id, node_index, param_id, value) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
-                            graph_node.node.set_parameter(param_id, value);
-                        }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
+                        graph_node.node.set_parameter(param_id, value);
+                    }
+                }
             }

@@ -925,18 +981,24 @@ impl Engine {
             }

             Command::GraphSetOutputNode(track_id, node_index) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &mut track.instrument_graph;
-                    {
-                        let node_idx = NodeIndex::new(node_index as usize);
-                        graph.set_output_node(Some(node_idx));
-                    }
+                let graph = match self.project.get_track_mut(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
+                    let node_idx = NodeIndex::new(node_index as usize);
+                    graph.set_output_node(Some(node_idx));
+                }
             }

             Command::GraphSavePreset(track_id, preset_path, preset_name, description, tags) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    let graph = &track.instrument_graph;
+                let graph = match self.project.get_track(track_id) {
+                    Some(TrackNode::Midi(track)) => Some(&track.instrument_graph),
+                    Some(TrackNode::Audio(track)) => Some(&track.effects_graph),
+                    _ => None,
+                };
+                if let Some(graph) = graph {
                     // Serialize the graph to a preset
                     let mut preset = graph.to_preset(&preset_name);
                     preset.metadata.description = description;
@@ -969,14 +1031,21 @@ impl Engine {
                 // Extract the directory path from the preset path for resolving relative sample paths
                 let preset_base_path = std::path::Path::new(&preset_path).parent();

-                match InstrumentGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path) {
+                match AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path) {
                     Ok(graph) => {
                         // Replace the track's graph
-                        if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                            track.instrument_graph = graph;
-                            let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
-                            // Emit preset loaded event after everything is loaded
-                            let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                        match self.project.get_track_mut(track_id) {
+                            Some(TrackNode::Midi(track)) => {
+                                track.instrument_graph = graph;
+                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                                let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                            }
+                            Some(TrackNode::Audio(track)) => {
+                                track.effects_graph = graph;
+                                let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                                let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
+                            }
+                            _ => {}
+                        }
                     }
                     Err(e) => {
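The save and load paths above form a round trip. A sketch of that flow, using only the calls visible in this diff (`to_preset`, `from_preset`, and the base-path handling); the function name, the `String` field type, and the error handling are illustrative:

    // Sketch of the preset round trip (signatures as used in the hunks above).
    fn save_and_reload(graph: &AudioGraph, sample_rate: u32, preset_path: &str) {
        // Serialize the live graph to a named preset...
        let mut preset = graph.to_preset("My Patch");
        preset.metadata.description = "example".to_string(); // field type assumed
        // ...and rebuild a graph from it; relative sample paths resolve
        // against the preset file's directory.
        let base = std::path::Path::new(preset_path).parent();
        match AudioGraph::from_preset(&preset, sample_rate, 8192, base) {
            Ok(_new_graph) => { /* replace track.instrument_graph / effects_graph */ }
            Err(e) => eprintln!("preset load failed: {:?}", e),
        }
    }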
@@ -1197,15 +1266,26 @@ impl Engine {
     fn handle_query(&mut self, query: Query) {
         let response = match query {
             Query::GetGraphState(track_id) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
-                    let graph = &track.instrument_graph;
-                    let preset = graph.to_preset("temp");
-                    match preset.to_json() {
-                        Ok(json) => QueryResponse::GraphState(Ok(json)),
-                        Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
-                    }
-                } else {
-                    QueryResponse::GraphState(Err(format!("Track {} not found or is not a MIDI track", track_id)))
+                match self.project.get_track(track_id) {
+                    Some(TrackNode::Midi(track)) => {
+                        let graph = &track.instrument_graph;
+                        let preset = graph.to_preset("temp");
+                        match preset.to_json() {
+                            Ok(json) => QueryResponse::GraphState(Ok(json)),
+                            Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
+                        }
+                    }
+                    Some(TrackNode::Audio(track)) => {
+                        let graph = &track.effects_graph;
+                        let preset = graph.to_preset("temp");
+                        match preset.to_json() {
+                            Ok(json) => QueryResponse::GraphState(Ok(json)),
+                            Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
+                        }
+                    }
+                    _ => {
+                        QueryResponse::GraphState(Err(format!("Track {} not found", track_id)))
+                    }
                 }
             }
             Query::GetTemplateState(track_id, voice_allocator_id) => {
@@ -57,7 +57,7 @@ impl GraphNode {
 }

 /// Audio processing graph for instruments/effects
-pub struct InstrumentGraph {
+pub struct AudioGraph {
     /// The audio graph (StableGraph allows node removal without index invalidation)
     graph: StableGraph<GraphNode, Connection>,

@@ -86,8 +86,8 @@ pub struct InstrumentGraph {
     playback_time: f64,
 }

-impl InstrumentGraph {
-    /// Create a new empty instrument graph
+impl AudioGraph {
+    /// Create a new empty audio graph
     pub fn new(sample_rate: u32, buffer_size: usize) -> Self {
         Self {
             graph: StableGraph::new(),
@@ -4,7 +4,7 @@ mod types;
 pub mod nodes;
 pub mod preset;

-pub use graph::{Connection, GraphNode, InstrumentGraph};
+pub use graph::{Connection, GraphNode, AudioGraph};
 pub use node_trait::AudioNode;
 pub use preset::{GraphPreset, PresetMetadata, SerializedConnection, SerializedNode};
 pub use types::{ConnectionError, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
@@ -0,0 +1,127 @@
+use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
+use crate::audio::midi::MidiEvent;
+
+/// Audio input node - receives audio from audio track clip playback
+/// This node acts as the entry point for audio tracks, injecting clip audio into the effects graph
+pub struct AudioInputNode {
+    name: String,
+    inputs: Vec<NodePort>,
+    outputs: Vec<NodePort>,
+    /// Internal buffer to hold injected audio from clips
+    /// This is filled externally by AudioTrack::render() before graph processing
+    audio_buffer: Vec<f32>,
+}
+
+impl AudioInputNode {
+    pub fn new(name: impl Into<String>) -> Self {
+        let name = name.into();
+
+        // Audio input node has no inputs - audio is injected externally
+        let inputs = vec![];
+
+        // Outputs stereo audio
+        let outputs = vec![
+            NodePort::new("Audio Out", SignalType::Audio, 0),
+        ];
+
+        Self {
+            name,
+            inputs,
+            outputs,
+            audio_buffer: Vec::new(),
+        }
+    }
+
+    /// Inject audio from clip playback into this node
+    /// Should be called by AudioTrack::render() before processing the graph
+    pub fn inject_audio(&mut self, audio: &[f32]) {
+        self.audio_buffer.clear();
+        self.audio_buffer.extend_from_slice(audio);
+    }
+
+    /// Clear the internal audio buffer
+    pub fn clear_buffer(&mut self) {
+        self.audio_buffer.clear();
+    }
+}
+
+impl AudioNode for AudioInputNode {
+    fn category(&self) -> NodeCategory {
+        NodeCategory::Input
+    }
+
+    fn inputs(&self) -> &[NodePort] {
+        &self.inputs
+    }
+
+    fn outputs(&self) -> &[NodePort] {
+        &self.outputs
+    }
+
+    fn parameters(&self) -> &[Parameter] {
+        &[] // No parameters
+    }
+
+    fn set_parameter(&mut self, _id: u32, _value: f32) {
+        // No parameters
+    }
+
+    fn get_parameter(&self, _id: u32) -> f32 {
+        0.0
+    }
+
+    fn process(
+        &mut self,
+        _inputs: &[&[f32]],
+        outputs: &mut [&mut [f32]],
+        _midi_inputs: &[&[MidiEvent]],
+        _midi_outputs: &mut [&mut Vec<MidiEvent>],
+        _sample_rate: u32,
+    ) {
+        if outputs.is_empty() {
+            return;
+        }
+
+        let output = &mut outputs[0];
+        let len = output.len().min(self.audio_buffer.len());
+
+        // Copy audio from internal buffer to output
+        if len > 0 {
+            output[..len].copy_from_slice(&self.audio_buffer[..len]);
+        }
+
+        // Clear any remaining samples in output
+        if output.len() > len {
+            output[len..].fill(0.0);
+        }
+    }
+
+    fn reset(&mut self) {
+        self.audio_buffer.clear();
+    }
+
+    fn node_type(&self) -> &str {
+        "AudioInput"
+    }
+
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    fn clone_node(&self) -> Box<dyn AudioNode> {
+        Box::new(Self {
+            name: self.name.clone(),
+            inputs: self.inputs.clone(),
+            outputs: self.outputs.clone(),
+            audio_buffer: Vec::new(), // Don't clone the buffer, start fresh
+        })
+    }
+
+    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
+        self
+    }
+
+    fn as_any(&self) -> &dyn std::any::Any {
+        self
+    }
+}
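A usage sketch for the new node, mirroring what `AudioTrack::render` does later in this commit (buffer contents and sizes are arbitrary):

    // Sketch: driving an AudioInputNode by hand.
    let mut node = AudioInputNode::new("Audio Input");
    node.inject_audio(&[0.5, 0.5, 0.5, 0.5]);

    let mut out = vec![0.0f32; 8];
    let mut outputs: Vec<&mut [f32]> = vec![&mut out[..]];
    // No audio inputs and no MIDI: process() copies the injected buffer to
    // output 0 and zero-fills the samples the buffer does not cover.
    node.process(&[], &mut outputs, &[], &mut [], 48000);
    assert_eq!(out[..4], [0.5; 4]);
    assert_eq!(out[4..], [0.0; 4]);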
@@ -1,4 +1,5 @@
 mod adsr;
+mod audio_input;
 mod audio_to_cv;
 mod automation_input;
 mod bit_crusher;
@@ -39,6 +40,7 @@ mod voice_allocator;
 mod wavetable_oscillator;

 pub use adsr::ADSRNode;
+pub use audio_input::AudioInputNode;
 pub use audio_to_cv::AudioToCVNode;
 pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType};
 pub use bit_crusher::BitCrusherNode;
@@ -1,5 +1,5 @@
 use crate::audio::midi::MidiEvent;
-use crate::audio::node_graph::{AudioNode, InstrumentGraph, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
+use crate::audio::node_graph::{AudioNode, AudioGraph, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};

 const PARAM_VOICE_COUNT: u32 = 0;
 const MAX_VOICES: usize = 16; // Maximum allowed voices
@@ -34,10 +34,10 @@ pub struct VoiceAllocatorNode {
     name: String,

     /// The template graph (edited by user via UI)
-    template_graph: InstrumentGraph,
+    template_graph: AudioGraph,

     /// Runtime voice instances (clones of template)
-    voice_instances: Vec<InstrumentGraph>,
+    voice_instances: Vec<AudioGraph>,

     /// Voice allocation state
     voices: [VoiceState; MAX_VOICES],
@@ -73,11 +73,11 @@ impl VoiceAllocatorNode {
         ];

         // Create empty template graph
-        let template_graph = InstrumentGraph::new(sample_rate, buffer_size);
+        let template_graph = AudioGraph::new(sample_rate, buffer_size);

         // Create voice instances (initially empty clones of template)
-        let voice_instances: Vec<InstrumentGraph> = (0..MAX_VOICES)
-            .map(|_| InstrumentGraph::new(sample_rate, buffer_size))
+        let voice_instances: Vec<AudioGraph> = (0..MAX_VOICES)
+            .map(|_| AudioGraph::new(sample_rate, buffer_size))
             .collect();

         Self {
@@ -94,12 +94,12 @@ impl VoiceAllocatorNode {
     }

     /// Get mutable reference to template graph (for UI editing)
-    pub fn template_graph_mut(&mut self) -> &mut InstrumentGraph {
+    pub fn template_graph_mut(&mut self) -> &mut AudioGraph {
         &mut self.template_graph
     }

     /// Get reference to template graph (for serialization)
-    pub fn template_graph(&self) -> &InstrumentGraph {
+    pub fn template_graph(&self) -> &AudioGraph {
         &self.template_graph
     }

@@ -1,7 +1,8 @@
 use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
 use super::clip::Clip;
 use super::midi::{MidiClip, MidiEvent};
-use super::node_graph::InstrumentGraph;
+use super::node_graph::AudioGraph;
+use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
 use super::pool::AudioPool;
 use std::collections::HashMap;

@@ -289,7 +290,7 @@ pub struct MidiTrack {
     pub id: TrackId,
     pub name: String,
     pub clips: Vec<MidiClip>,
-    pub instrument_graph: InstrumentGraph,
+    pub instrument_graph: AudioGraph,
     pub volume: f32,
     pub muted: bool,
     pub solo: bool,
@@ -311,7 +312,7 @@ impl MidiTrack {
             id,
             name,
             clips: Vec::new(),
-            instrument_graph: InstrumentGraph::new(default_sample_rate, default_buffer_size),
+            instrument_graph: AudioGraph::new(default_sample_rate, default_buffer_size),
             volume: 1.0,
             muted: false,
             solo: false,
@@ -491,11 +492,34 @@ pub struct AudioTrack {
     /// Automation lanes for this track
     pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
     next_automation_id: AutomationLaneId,
+    /// Effects processing graph for this audio track
+    pub effects_graph: AudioGraph,
 }

 impl AudioTrack {
     /// Create a new audio track with default settings
     pub fn new(id: TrackId, name: String) -> Self {
+        // Use default sample rate and a large buffer size that can accommodate any callback
+        let default_sample_rate = 48000;
+        let default_buffer_size = 8192;
+
+        // Create the effects graph with default AudioInput -> AudioOutput chain
+        let mut effects_graph = AudioGraph::new(default_sample_rate, default_buffer_size);
+
+        // Add AudioInput node
+        let input_node = Box::new(AudioInputNode::new("Audio Input"));
+        let input_id = effects_graph.add_node(input_node);
+
+        // Add AudioOutput node
+        let output_node = Box::new(AudioOutputNode::new("Audio Output"));
+        let output_id = effects_graph.add_node(output_node);
+
+        // Connect AudioInput -> AudioOutput
+        let _ = effects_graph.connect(input_id, 0, output_id, 0);
+
+        // Set the AudioOutput node as the graph's output
+        effects_graph.set_output_node(Some(output_id));
+
         Self {
             id,
             name,
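Every new audio track now starts with the two-node chain built above. Extending that chain uses the same API; a sketch of building the chain with a gain stage inserted in the middle (illustrative only; in the constructor above the default `AudioInput -> AudioOutput` edge is created instead):

    // Sketch: AudioInput --> Gain --> AudioOutput
    let mut g = AudioGraph::new(48000, 8192);
    let input  = g.add_node(Box::new(AudioInputNode::new("Audio Input")));
    let gain   = g.add_node(Box::new(GainNode::new("Gain".to_string())));
    let output = g.add_node(Box::new(AudioOutputNode::new("Audio Output")));
    let _ = g.connect(input, 0, gain, 0);
    let _ = g.connect(gain, 0, output, 0);
    g.set_output_node(Some(output));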
@@ -505,6 +529,7 @@ impl AudioTrack {
             solo: false,
             automation_lanes: HashMap::new(),
             next_automation_id: 0,
+            effects_graph,
         }
     }

@@ -571,15 +596,17 @@ impl AudioTrack {
         let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
         let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;

+        // Create a temporary buffer for clip rendering
+        let mut clip_buffer = vec![0.0f32; output.len()];
         let mut rendered = 0;

-        // Render all active clips
+        // Render all active clips into the temporary buffer
         for clip in &self.clips {
             // Check if clip overlaps with current buffer time range
             if clip.start_time < buffer_end_seconds && clip.end_time() > playhead_seconds {
                 rendered += self.render_clip(
                     clip,
-                    output,
+                    &mut clip_buffer,
                     pool,
                     playhead_seconds,
                     sample_rate,
@@ -588,6 +615,25 @@ impl AudioTrack {
             }
         }

+        // Clear output buffer before graph processing to ensure clean output
+        output.fill(0.0);
+
+        // Find and inject audio into the AudioInputNode
+        let node_indices: Vec<_> = self.effects_graph.node_indices().collect();
+        for node_idx in node_indices {
+            if let Some(graph_node) = self.effects_graph.get_graph_node_mut(node_idx) {
+                if graph_node.node.node_type() == "AudioInput" {
+                    if let Some(input_node) = graph_node.node.as_any_mut().downcast_mut::<AudioInputNode>() {
+                        input_node.inject_audio(&clip_buffer);
+                        break;
+                    }
+                }
+            }
+        }
+
+        // Process through the effects graph (this will write to output buffer)
+        self.effects_graph.process(output, &[], playhead_seconds);
+
         // Evaluate and apply automation
         let effective_volume = self.evaluate_automation_at_time(playhead_seconds);
@@ -17,7 +17,7 @@ pub use audio::{
     Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
     TrackNode,
 };
-pub use audio::node_graph::{GraphPreset, InstrumentGraph, PresetMetadata, SerializedConnection, SerializedNode};
+pub use audio::node_graph::{GraphPreset, AudioGraph, PresetMetadata, SerializedConnection, SerializedNode};
 pub use command::{AudioEvent, Command, OscilloscopeData};
 pub use command::types::AutomationKeyframeData;
 pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter};

src/main.js
@@ -1161,6 +1161,19 @@ async function handleAudioEvent(event) {
             updateRecordingClipDuration(event.clip_id, event.duration);
             break;

+        case 'GraphNodeAdded':
+            console.log('[FRONTEND] GraphNodeAdded event - track:', event.track_id, 'node_id:', event.node_id, 'node_type:', event.node_type);
+            // Resolve the pending promise with the correct backend ID
+            if (window.pendingNodeUpdate) {
+                const { drawflowNodeId, nodeType, resolve } = window.pendingNodeUpdate;
+                if (nodeType === event.node_type && resolve) {
+                    console.log('[FRONTEND] Resolving promise for node', drawflowNodeId, 'with backend ID:', event.node_id);
+                    resolve(event.node_id);
+                    window.pendingNodeUpdate = null;
+                }
+            }
+            break;
+
         case 'RecordingStopped':
             console.log('[FRONTEND] RecordingStopped event - clip:', event.clip_id, 'pool_index:', event.pool_index, 'waveform peaks:', event.waveform?.length);
             console.log('[FRONTEND] Current recording state - isRecording:', context.isRecording, 'recordingClipId:', context.recordingClipId);
@@ -6367,16 +6380,29 @@ async function renderMenu() {
     }
     updateMenu();

-    // Helper function to get the current MIDI track
-    function getCurrentMidiTrack() {
+    // Helper function to get the current track (MIDI or Audio) for node graph editing
+    function getCurrentTrack() {
         const activeLayer = context.activeObject?.activeLayer;
-        if (!activeLayer || !(activeLayer instanceof AudioTrack) || activeLayer.type !== 'midi') {
+        if (!activeLayer || !(activeLayer instanceof AudioTrack)) {
             return null;
         }
         if (activeLayer.audioTrackId === null) {
             return null;
         }
-        return activeLayer.audioTrackId;
+        // Return both track ID and track type
+        return {
+            trackId: activeLayer.audioTrackId,
+            trackType: activeLayer.type // 'midi' or 'audio'
+        };
+    }
+
+    // Backwards compatibility: function to get just the MIDI track ID
+    function getCurrentMidiTrack() {
+        const trackInfo = getCurrentTrack();
+        if (trackInfo && trackInfo.trackType === 'midi') {
+            return trackInfo.trackId;
+        }
+        return null;
     }

     function nodeEditor() {
@@ -6413,7 +6439,8 @@ function nodeEditor() {
     // Create breadcrumb/context header
     const header = document.createElement("div");
     header.className = "node-editor-header";
-    header.innerHTML = '<div class="context-breadcrumb">Main Graph</div>';
+    // Initial header will be updated by updateBreadcrumb() after track info is available
+    header.innerHTML = '<div class="context-breadcrumb">Node Graph</div>';
     container.appendChild(header);

     // Create the Drawflow canvas
@@ -6490,6 +6517,9 @@ function nodeEditor() {
     // Function to update palette based on context and selected category
     function updatePalette() {
         const isTemplate = editingContext !== null;
+        const trackInfo = getCurrentTrack();
+        const isMIDI = trackInfo?.trackType === 'midi';
+        const isAudio = trackInfo?.trackType === 'audio';

         if (selectedCategory === null && !searchQuery) {
             // Show categories when no search query
@@ -6527,8 +6557,15 @@ function nodeEditor() {
                 if (isTemplate) {
                     // In template: hide VoiceAllocator, AudioOutput, MidiInput
                     return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput';
+                } else if (isMIDI) {
+                    // MIDI track: hide AudioInput, show synth nodes
+                    return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && node.type !== 'AudioInput';
+                } else if (isAudio) {
+                    // Audio track: hide synth/MIDI nodes, show AudioInput
+                    const synthNodes = ['Oscillator', 'FMSynth', 'WavetableOscillator', 'SimpleSampler', 'MultiSampler', 'VoiceAllocator', 'MidiInput', 'MidiToCV'];
+                    return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && !synthNodes.includes(node.type);
                 } else {
-                    // In main graph: hide TemplateInput/TemplateOutput
+                    // Fallback: hide TemplateInput/TemplateOutput
                     return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput';
                 }
             });
@@ -6563,8 +6600,15 @@ function nodeEditor() {
                 if (isTemplate) {
                     // In template: hide VoiceAllocator, AudioOutput, MidiInput
                     return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput';
+                } else if (isMIDI) {
+                    // MIDI track: hide AudioInput, show synth nodes
+                    return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && node.type !== 'AudioInput';
+                } else if (isAudio) {
+                    // Audio track: hide synth/MIDI nodes, show AudioInput
+                    const synthNodes = ['Oscillator', 'FMSynth', 'WavetableOscillator', 'SimpleSampler', 'MultiSampler', 'VoiceAllocator', 'MidiInput', 'MidiToCV'];
+                    return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput' && !synthNodes.includes(node.type);
                 } else {
-                    // In main graph: hide TemplateInput/TemplateOutput
+                    // Fallback: hide TemplateInput/TemplateOutput
                     return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput';
                 }
             });
@@ -7275,13 +7319,14 @@ function nodeEditor() {

         // Send command to backend
         // Check editing context first (dedicated template view), then parent node (inline editing)
-        const trackId = getCurrentMidiTrack();
-        if (trackId === null) {
-            console.error('No MIDI track selected');
-            showNodeEditorError(container, 'Please select a MIDI track first');
+        const trackInfo = getCurrentTrack();
+        if (trackInfo === null) {
+            console.error('No track selected');
+            alert('Please select a track first');
             editor.removeNodeId(`node-${drawflowNodeId}`);
             return;
         }
+        const trackId = trackInfo.trackId;

         // Determine if we're adding to a template or main graph
         let commandName, commandArgs;
@@ -7316,7 +7361,29 @@ function nodeEditor() {
             };
         }

-        invoke(commandName, commandArgs).then(backendNodeId => {
+        console.log(`[DEBUG] Invoking ${commandName} with args:`, commandArgs);
+
+        // Create a promise that resolves when the GraphNodeAdded event arrives
+        const eventPromise = new Promise((resolve) => {
+            window.pendingNodeUpdate = {
+                drawflowNodeId,
+                nodeType,
+                resolve: (backendNodeId) => {
+                    console.log(`[DEBUG] Event promise resolved with backend ID: ${backendNodeId}`);
+                    resolve(backendNodeId);
+                }
+            };
+        });
+
+        // Wait for both the invoke response and the event
+        Promise.all([
+            invoke(commandName, commandArgs),
+            eventPromise
+        ]).then(([invokeReturnedId, eventBackendId]) => {
+            console.log(`[DEBUG] Both returned - invoke: ${invokeReturnedId}, event: ${eventBackendId}`);
+
+            // Use the event's backend ID as it's the authoritative source
+            const backendNodeId = eventBackendId;
             console.log(`Node ${nodeType} added with backend ID: ${backendNodeId} (parent: ${parentNodeId})`);

             // Store backend node ID using Drawflow's update method
@@ -7325,12 +7392,13 @@ function nodeEditor() {
             console.log("Verifying stored backend ID:", editor.getNodeFromId(drawflowNodeId).data.backendId);

             // Cache node data for undo/redo
+            const trackInfo = getCurrentTrack();
             nodeDataCache.set(drawflowNodeId, {
                 nodeType: nodeType,
                 backendId: backendNodeId,
                 position: { x, y },
                 parentNodeId: parentNodeId,
-                trackId: getCurrentMidiTrack()
+                trackId: trackInfo ? trackInfo.trackId : null
             });

             // Record action for undo (node is already added to frontend and backend)
@@ -7350,10 +7418,10 @@ function nodeEditor() {
             // If this is an AudioOutput node, automatically set it as the graph output
             if (nodeType === "AudioOutput") {
                 console.log(`Setting node ${backendNodeId} as graph output`);
-                const currentTrackId = getCurrentMidiTrack();
-                if (currentTrackId !== null) {
+                const trackInfo = getCurrentTrack();
+                if (trackInfo !== null) {
                     invoke("graph_set_output_node", {
-                        trackId: currentTrackId,
+                        trackId: trackInfo.trackId,
                         nodeId: backendNodeId
                     }).then(() => {
                         console.log("Output node set successfully");
@@ -7365,8 +7433,9 @@ function nodeEditor() {

             // If this is an AutomationInput node, create timeline curve
             if (nodeType === "AutomationInput" && !parentNodeId) {
-                const currentTrackId = getCurrentMidiTrack();
-                if (currentTrackId !== null) {
+                const trackInfo = getCurrentTrack();
+                if (trackInfo !== null) {
+                    const currentTrackId = trackInfo.trackId;
                     // Find the audio/MIDI track
                     const track = root.audioTracks?.find(t => t.audioTrackId === currentTrackId);
                     if (track) {
@@ -7398,8 +7467,9 @@ function nodeEditor() {

             // If this is an Oscilloscope node, start the visualization
             if (nodeType === "Oscilloscope") {
-                const currentTrackId = getCurrentMidiTrack();
-                if (currentTrackId !== null) {
+                const trackInfo = getCurrentTrack();
+                if (trackInfo !== null) {
+                    const currentTrackId = trackInfo.trackId;
                     console.log(`Starting oscilloscope visualization for node ${drawflowNodeId} (backend ID: ${backendNodeId})`);
                     // Wait for DOM to update before starting visualization
                     setTimeout(() => {
@@ -7579,7 +7649,21 @@ function nodeEditor() {
                 if (param.name === 'trigger_mode') {
                     const modes = ['Free', 'Rising', 'Falling', 'V/oct'];
                     displaySpan.textContent = modes[Math.round(value)] || 'Free';
-                } else {
+                }
+                // Special formatting for Phaser rate in sync mode
+                else if (param.name === 'rate' && nodeData.name === 'Phaser') {
+                    const syncCheckbox = nodeElement.querySelector(`#sync-${nodeId}`);
+                    if (syncCheckbox && syncCheckbox.checked) {
+                        const beatDivisions = [
+                            '4 bars', '2 bars', '1 bar', '1/2', '1/4', '1/8', '1/16', '1/32', '1/2T', '1/4T', '1/8T'
+                        ];
+                        const idx = Math.round(value);
+                        displaySpan.textContent = beatDivisions[Math.min(10, Math.max(0, idx))];
+                    } else {
+                        displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2);
+                    }
+                }
+                else {
                     displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2);
                 }
             }
@@ -7593,10 +7677,10 @@ function nodeEditor() {

                 // Send to backend in real-time
                 if (nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                         invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                             nodeId: nodeData.data.backendId,
                             paramId: paramId,
                             value: value
@@ -7672,10 +7756,10 @@ function nodeEditor() {

                 // Send to backend
                 if (nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                         invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                             nodeId: nodeData.data.backendId,
                             paramId: paramId,
                             value: value
@@ -7750,10 +7834,10 @@ function nodeEditor() {
                 // Send to backend
                 const nodeData = editor.getNodeFromId(nodeId);
                 if (nodeData && nodeData.data.backendId !== null) {
-                    const currentTrackId = getCurrentMidiTrack();
-                    if (currentTrackId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
                         invoke("graph_set_parameter", {
-                            trackId: currentTrackId,
+                            trackId: trackInfo.trackId,
                             nodeId: nodeData.data.backendId,
                             paramId: paramId,
                             value: value
@@ -7775,6 +7859,78 @@ function nodeEditor() {
             });
         });

+        // Handle checkboxes
+        const checkboxes = nodeElement.querySelectorAll('input[type="checkbox"][data-param]');
+        checkboxes.forEach(checkbox => {
+            checkbox.addEventListener("change", (e) => {
+                const paramId = parseInt(e.target.getAttribute("data-param"));
+                const value = e.target.checked ? 1.0 : 0.0;
+
+                console.log(`[setupNodeParameters] Checkbox change - nodeId: ${nodeId}, paramId: ${paramId}, value: ${value}`);
+
+                // Send to backend
+                const nodeData = editor.getNodeFromId(nodeId);
+                if (nodeData && nodeData.data.backendId !== null) {
+                    const trackInfo = getCurrentTrack();
+                    if (trackInfo !== null) {
+                        invoke("graph_set_parameter", {
+                            trackId: trackInfo.trackId,
+                            nodeId: nodeData.data.backendId,
+                            paramId: paramId,
+                            value: value
+                        }).then(() => {
+                            console.log(`Parameter ${paramId} set to ${value}`);
+                        }).catch(err => {
+                            console.error("Failed to set parameter:", err);
+                        });
+                    }
+                }
+
+                // Special handling for Phaser sync checkbox
+                if (checkbox.id.startsWith('sync-')) {
+                    const rateSlider = nodeElement.querySelector(`#rate-slider-${nodeId}`);
+                    const rateDisplay = nodeElement.querySelector(`#rate-${nodeId}`);
+                    const rateUnit = nodeElement.querySelector(`#rate-unit-${nodeId}`);
+
+                    if (rateSlider && rateDisplay && rateUnit) {
+                        if (e.target.checked) {
+                            // Sync mode: Use beat divisions
+                            // Map slider 0-10 to different note divisions
+                            // 0: 4 bars, 1: 2 bars, 2: 1 bar, 3: 1/2, 4: 1/4, 5: 1/8, 6: 1/16, 7: 1/32, 8: 1/2T, 9: 1/4T, 10: 1/8T
+                            const beatDivisions = [
+                                { label: '4 bars', multiplier: 16.0 },
+                                { label: '2 bars', multiplier: 8.0 },
+                                { label: '1 bar', multiplier: 4.0 },
+                                { label: '1/2', multiplier: 2.0 },
+                                { label: '1/4', multiplier: 1.0 },
+                                { label: '1/8', multiplier: 0.5 },
+                                { label: '1/16', multiplier: 0.25 },
+                                { label: '1/32', multiplier: 0.125 },
+                                { label: '1/2T', multiplier: 2.0/3.0 },
+                                { label: '1/4T', multiplier: 1.0/3.0 },
+                                { label: '1/8T', multiplier: 0.5/3.0 }
+                            ];
+
+                            rateSlider.min = '0';
+                            rateSlider.max = '10';
+                            rateSlider.step = '1';
+                            const idx = Math.round(parseFloat(rateSlider.value) * 10 / 10);
+                            rateSlider.value = Math.min(10, Math.max(0, idx));
+                            rateDisplay.textContent = beatDivisions[parseInt(rateSlider.value)].label;
+                            rateUnit.textContent = '';
+                        } else {
+                            // Free mode: Hz
+                            rateSlider.min = '0.1';
+                            rateSlider.max = '10.0';
+                            rateSlider.step = '0.1';
+                            rateDisplay.textContent = parseFloat(rateSlider.value).toFixed(1);
+                            rateUnit.textContent = ' Hz';
+                        }
+                    }
+                }
+            });
+        });
+
         // Handle Load Sample button for SimpleSampler
         const loadSampleBtn = nodeElement.querySelector(".load-sample-btn");
         if (loadSampleBtn) {
@@ -8583,11 +8739,12 @@ function nodeEditor() {
             }
         }, 10);

-        // Send to backend (skip if action is handling it)
+        // Send to backend
         console.log("Backend IDs - output:", outputNode.data.backendId, "input:", inputNode.data.backendId);
-        if (!suppressActionRecording && outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
-            const currentTrackId = getCurrentMidiTrack();
-            if (currentTrackId === null) return;
+        if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
+            const trackInfo = getCurrentTrack();
+            if (trackInfo === null) return;
+            const currentTrackId = trackInfo.trackId;

             // Check if we're in template editing mode (dedicated view)
             if (editingContext) {
@@ -8658,23 +8815,25 @@ function nodeEditor() {
                 }).then(async () => {
                     console.log("Connection successful");

-                    // Record action for undo
-                    redoStack.length = 0;
-                    undoStack.push({
-                        name: "graphAddConnection",
-                        action: {
-                            trackId: currentTrackId,
-                            fromNode: outputNode.data.backendId,
-                            fromPort: outputPort,
-                            toNode: inputNode.data.backendId,
-                            toPort: inputPort,
-                            // Store frontend IDs for disconnection
-                            frontendFromId: connection.output_id,
-                            frontendToId: connection.input_id,
-                            fromPortClass: connection.output_class,
-                            toPortClass: connection.input_class
-                        }
-                    });
+                    // Record action for undo (only if not suppressing)
+                    if (!suppressActionRecording) {
+                        redoStack.length = 0;
+                        undoStack.push({
+                            name: "graphAddConnection",
+                            action: {
+                                trackId: currentTrackId,
+                                fromNode: outputNode.data.backendId,
+                                fromPort: outputPort,
+                                toNode: inputNode.data.backendId,
+                                toPort: inputPort,
+                                // Store frontend IDs for disconnection
+                                frontendFromId: connection.output_id,
+                                frontendToId: connection.input_id,
+                                fromPortClass: connection.output_class,
+                                toPortClass: connection.input_class
+                            }
+                        });
+                    }

                     // Auto-name AutomationInput nodes when connected
                     await updateAutomationName(
@@ -8741,35 +8900,37 @@ function nodeEditor() {
            }
        }

-        // Send to backend (skip if action is handling it)
-        if (!suppressActionRecording && outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
-            const currentTrackId = getCurrentMidiTrack();
-            if (currentTrackId !== null) {
+        // Send to backend
+        if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
+            const trackInfo = getCurrentTrack();
+            if (trackInfo !== null) {
                invoke("graph_disconnect", {
-                    trackId: currentTrackId,
+                    trackId: trackInfo.trackId,
                    fromNode: outputNode.data.backendId,
                    fromPort: outputPort,
                    toNode: inputNode.data.backendId,
                    toPort: inputPort
                }).then(() => {
-                    // Record action for undo
-                    redoStack.length = 0;
-                    undoStack.push({
-                        name: "graphRemoveConnection",
-                        action: {
-                            trackId: currentTrackId,
-                            fromNode: outputNode.data.backendId,
-                            fromPort: outputPort,
-                            toNode: inputNode.data.backendId,
-                            toPort: inputPort,
-                            // Store frontend IDs for reconnection
-                            frontendFromId: connection.output_id,
-                            frontendToId: connection.input_id,
-                            fromPortClass: connection.output_class,
-                            toPortClass: connection.input_class
-                        }
-                    });
-                    updateMenu();
+                    // Record action for undo (only if not suppressing)
+                    if (!suppressActionRecording) {
+                        redoStack.length = 0;
+                        undoStack.push({
+                            name: "graphRemoveConnection",
+                            action: {
+                                trackId: trackInfo.trackId,
+                                fromNode: outputNode.data.backendId,
+                                fromPort: outputPort,
+                                toNode: inputNode.data.backendId,
+                                toPort: inputPort,
+                                // Store frontend IDs for reconnection
+                                frontendFromId: connection.output_id,
+                                frontendToId: connection.input_id,
+                                fromPortClass: connection.output_class,
+                                toPortClass: connection.input_class
+                            }
+                        });
+                        updateMenu();
+                    }
                }).catch(err => {
                    console.error("Failed to disconnect nodes:", err);
                });
@@ -8793,15 +8954,24 @@ function nodeEditor() {
    function updateBreadcrumb() {
        const breadcrumb = header.querySelector('.context-breadcrumb');
        if (editingContext) {
+            // Determine main graph name based on track type
+            const trackInfo = getCurrentTrack();
+            const mainGraphName = trackInfo?.trackType === 'audio' ? 'Effects Graph' : 'Instrument Graph';
+
            breadcrumb.innerHTML = `
-                Main Graph >
+                ${mainGraphName} >
                <span class="template-name">${editingContext.voiceAllocatorName} Template</span>
                <button class="exit-template-btn">← Exit Template</button>
            `;
            const exitBtn = breadcrumb.querySelector('.exit-template-btn');
            exitBtn.addEventListener('click', exitTemplate);
        } else {
-            breadcrumb.textContent = 'Main Graph';
+            // Not in template mode - show main graph name based on track type
+            const trackInfo = getCurrentTrack();
+            const graphName = trackInfo?.trackType === 'audio' ? 'Effects Graph' :
+                              trackInfo?.trackType === 'midi' ? 'Instrument Graph' :
+                              'Node Graph';
+            breadcrumb.textContent = graphName;
        }
    }

@@ -8825,18 +8995,24 @@ function nodeEditor() {
    async function reloadGraph() {
        if (!editor) return;

-        const trackId = getCurrentMidiTrack();
+        const trackInfo = getCurrentTrack();

        // Clear editor first
        editor.clearModuleSelected();
        editor.clear();

-        // If no MIDI track selected, just leave it cleared
-        if (trackId === null) {
-            console.log('No MIDI track selected, editor cleared');
+        // Update UI based on track type
+        updateBreadcrumb();
+        updatePalette();
+
+        // If no track selected, just leave it cleared
+        if (trackInfo === null) {
+            console.log('No track selected, editor cleared');
            return;
        }

+        const trackId = trackInfo.trackId;
+
        try {
            // Get graph based on editing context
            let graphJson;
@@ -9545,11 +9721,12 @@ function addPresetItemHandlers(listElement) {
 }

 async function loadPreset(presetPath) {
-    const trackId = getCurrentMidiTrack();
-    if (trackId === null) {
-        alert('Please select a MIDI track first');
+    const trackInfo = getCurrentTrack();
+    if (trackInfo === null) {
+        alert('Please select a track first');
         return;
     }
+    const trackId = trackInfo.trackId;

     try {
         await invoke('graph_load_preset', {
|
@ -9567,9 +9744,9 @@ async function loadPreset(presetPath) {
|
|||
}
|
||||
|
||||
function showSavePresetDialog(container) {
|
||||
const currentTrackId = getCurrentMidiTrack();
|
||||
if (currentTrackId === null) {
|
||||
alert('Please select a MIDI track first');
|
||||
const trackInfo = getCurrentTrack();
|
||||
if (trackInfo === null) {
|
||||
alert('Please select a track first');
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@@ -9626,7 +9803,7 @@ function showSavePresetDialog(container) {

     try {
         await invoke('graph_save_preset', {
-            trackId: currentTrackId,
+            trackId: trackInfo.trackId,
             presetName: name,
             description,
             tags
@@ -373,6 +373,23 @@ export const nodeTypes = {
        `
    },

+    AudioInput: {
+        name: 'AudioInput',
+        category: NodeCategory.INPUT,
+        description: 'Audio track clip input - receives audio from timeline clips',
+        inputs: [],
+        outputs: [
+            { name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
+        ],
+        parameters: [],
+        getHTML: (nodeId) => `
+            <div class="node-content">
+                <div class="node-title">Audio Input</div>
+                <div class="node-info">Audio from clips</div>
+            </div>
+        `
+    },
+
    AudioOutput: {
        name: 'AudioOutput',
        category: NodeCategory.OUTPUT,
@@ -1335,14 +1352,21 @@ export const nodeTypes = {
            { id: 1, name: 'depth', label: 'Depth', min: 0.0, max: 1.0, default: 0.7, unit: '' },
            { id: 2, name: 'stages', label: 'Stages', min: 2, max: 8, default: 6, unit: '' },
            { id: 3, name: 'feedback', label: 'Feedback', min: -0.95, max: 0.95, default: 0.5, unit: '' },
-            { id: 4, name: 'wetdry', label: 'Wet/Dry', min: 0.0, max: 1.0, default: 0.5, unit: '' }
+            { id: 4, name: 'wetdry', label: 'Wet/Dry', min: 0.0, max: 1.0, default: 0.5, unit: '' },
+            { id: 5, name: 'sync', label: 'Sync to BPM', min: 0, max: 1, default: 0, unit: '' }
        ],
        getHTML: (nodeId) => `
            <div class="node-content">
                <div class="node-title">Phaser</div>
                <div class="node-param">
-                    <label>Rate: <span id="rate-${nodeId}">0.5</span> Hz</label>
-                    <input type="range" data-node="${nodeId}" data-param="0" min="0.1" max="10.0" value="0.5" step="0.1">
+                    <label>
+                        <input type="checkbox" id="sync-${nodeId}" data-node="${nodeId}" data-param="5">
+                        Sync to BPM
+                    </label>
+                </div>
+                <div class="node-param">
+                    <label>Rate: <span id="rate-${nodeId}">0.5</span><span id="rate-unit-${nodeId}"> Hz</span></label>
+                    <input type="range" id="rate-slider-${nodeId}" data-node="${nodeId}" data-param="0" min="0.1" max="10.0" value="0.5" step="0.1">
                </div>
                <div class="node-param">
                    <label>Depth: <span id="depth-${nodeId}">0.7</span></label>
@@ -2929,8 +2929,8 @@ class TimelineWindowV2 extends Widget {
        this.context.selection = []
        this.context.shapeselection = []

-        // If this is a MIDI track, reload the node editor
-        if (track.object.type === 'midi') {
+        // Reload the node editor for both MIDI and audio tracks
+        if (track.object.type === 'midi' || track.object.type === 'audio') {
            setTimeout(() => this.context.reloadNodeEditor?.(), 50);
        }
    }