hook graph up to audio backend

This commit is contained in:
Skyler Lehmkuhl 2025-12-16 13:22:28 -05:00
parent d7176a13b7
commit 88dc60f036
4 changed files with 691 additions and 24 deletions

View File

@ -120,12 +120,32 @@ impl AddNodeAction {
.get(&self.layer_id)
.ok_or("Track not found")?;
// Add node to backend (using async API for now - TODO: use sync query)
// Get graph state before adding node to see what nodes exist
let before_json = controller.query_graph_state(*track_id)?;
let before_state: daw_backend::audio::node_graph::GraphPreset =
serde_json::from_str(&before_json)
.map_err(|e| format!("Failed to parse before graph state: {}", e))?;
// Add node to backend (using async API)
controller.graph_add_node(*track_id, self.node_type.clone(), self.position.0, self.position.1);
// TODO: Get actual node ID from synchronous query
// For now, we can't track the backend ID properly with async API
// This will be fixed when we add synchronous query methods
// Query graph state after to find the new node ID
let after_json = controller.query_graph_state(*track_id)?;
let after_state: daw_backend::audio::node_graph::GraphPreset =
serde_json::from_str(&after_json)
.map_err(|e| format!("Failed to parse after graph state: {}", e))?;
// Find the new node by comparing before and after states
// The new node should have an ID that wasn't in the before state
let before_ids: std::collections::HashSet<_> = before_state.nodes.iter().map(|n| n.id).collect();
let new_node = after_state.nodes.iter()
.find(|n| !before_ids.contains(&n.id))
.ok_or("Failed to find newly added node in graph state")?;
// Store the backend node ID
self.backend_node_id = Some(BackendNodeId::Audio(
petgraph::stable_graph::NodeIndex::new(new_node.id as usize)
));
Ok(())
}

View File

@ -138,12 +138,34 @@ impl GraphBackend for AudioGraphBackend {
}
/// Query the audio backend for this track's node graph and convert it into
/// the frontend-facing `GraphState`.
///
/// # Errors
/// Returns `Err` if the backend query fails or the returned JSON cannot be
/// parsed as a `GraphPreset`.
fn get_state(&self) -> Result<GraphState, String> {
    // NOTE(review): lock().unwrap() panics if the mutex is poisoned — assumed
    // acceptable here since a poisoned audio controller is unrecoverable.
    let mut controller = self.audio_controller.lock().unwrap();
    let json = controller.query_graph_state(self.track_id)?;

    // Parse the GraphPreset JSON produced by the backend.
    let preset: daw_backend::audio::node_graph::GraphPreset =
        serde_json::from_str(&json)
            .map_err(|e| format!("Failed to parse graph state: {}", e))?;

    // Convert backend nodes to our SerializedNode format; parameter values
    // are widened from f32 to f64 for the frontend.
    let nodes = preset.nodes.iter().map(|n| {
        super::backend::SerializedNode {
            id: n.id,
            node_type: n.node_type.clone(),
            position: n.position,
            parameters: n.parameters.iter().map(|(&k, &v)| (k, v as f64)).collect(),
        }
    }).collect();

    // Connections carry node ids plus port indices on each side.
    let connections = preset.connections.iter().map(|c| {
        super::backend::SerializedConnection {
            from_node: c.from_node,
            from_port: c.from_port,
            to_node: c.to_node,
            to_port: c.to_port,
        }
    }).collect();

    Ok(GraphState { nodes, connections })
}
fn load_state(&mut self, _state: &GraphState) -> Result<(), String> {

View File

@ -20,15 +20,32 @@ pub enum NodeTemplate {
// Inputs
MidiInput,
AudioInput,
AutomationInput,
// Generators
Oscillator,
WavetableOscillator,
FmSynth,
Noise,
SimpleSampler,
MultiSampler,
// Effects
Filter,
Gain,
Delay,
Reverb,
Chorus,
Flanger,
Phaser,
Distortion,
BitCrusher,
Compressor,
Limiter,
Eq,
Pan,
RingModulator,
Vocoder,
// Utilities
Adsr,
@ -36,6 +53,21 @@ pub enum NodeTemplate {
Mixer,
Splitter,
Constant,
MidiToCv,
AudioToCv,
Math,
SampleHold,
SlewLimiter,
Quantizer,
EnvelopeFollower,
BpmDetector,
Mod,
// Analysis
Oscilloscope,
// Advanced
VoiceAllocator,
// Outputs
AudioOutput,
@ -99,29 +131,72 @@ impl NodeTemplateTrait for NodeTemplate {
fn node_finder_label(&self, _user_state: &mut Self::UserState) -> std::borrow::Cow<'_, str> {
match self {
// Inputs
NodeTemplate::MidiInput => "MIDI Input".into(),
NodeTemplate::AudioInput => "Audio Input".into(),
NodeTemplate::AutomationInput => "Automation Input".into(),
// Generators
NodeTemplate::Oscillator => "Oscillator".into(),
NodeTemplate::Noise => "Noise".into(),
NodeTemplate::WavetableOscillator => "Wavetable Oscillator".into(),
NodeTemplate::FmSynth => "FM Synth".into(),
NodeTemplate::Noise => "Noise Generator".into(),
NodeTemplate::SimpleSampler => "Simple Sampler".into(),
NodeTemplate::MultiSampler => "Multi Sampler".into(),
// Effects
NodeTemplate::Filter => "Filter".into(),
NodeTemplate::Gain => "Gain".into(),
NodeTemplate::Delay => "Delay".into(),
NodeTemplate::Adsr => "ADSR".into(),
NodeTemplate::Reverb => "Reverb".into(),
NodeTemplate::Chorus => "Chorus".into(),
NodeTemplate::Flanger => "Flanger".into(),
NodeTemplate::Phaser => "Phaser".into(),
NodeTemplate::Distortion => "Distortion".into(),
NodeTemplate::BitCrusher => "Bit Crusher".into(),
NodeTemplate::Compressor => "Compressor".into(),
NodeTemplate::Limiter => "Limiter".into(),
NodeTemplate::Eq => "EQ".into(),
NodeTemplate::Pan => "Pan".into(),
NodeTemplate::RingModulator => "Ring Modulator".into(),
NodeTemplate::Vocoder => "Vocoder".into(),
// Utilities
NodeTemplate::Adsr => "ADSR Envelope".into(),
NodeTemplate::Lfo => "LFO".into(),
NodeTemplate::Mixer => "Mixer".into(),
NodeTemplate::Splitter => "Splitter".into(),
NodeTemplate::Constant => "Constant".into(),
NodeTemplate::MidiToCv => "MIDI to CV".into(),
NodeTemplate::AudioToCv => "Audio to CV".into(),
NodeTemplate::Math => "Math".into(),
NodeTemplate::SampleHold => "Sample & Hold".into(),
NodeTemplate::SlewLimiter => "Slew Limiter".into(),
NodeTemplate::Quantizer => "Quantizer".into(),
NodeTemplate::EnvelopeFollower => "Envelope Follower".into(),
NodeTemplate::BpmDetector => "BPM Detector".into(),
NodeTemplate::Mod => "Modulator".into(),
// Analysis
NodeTemplate::Oscilloscope => "Oscilloscope".into(),
// Advanced
NodeTemplate::VoiceAllocator => "Voice Allocator".into(),
// Outputs
NodeTemplate::AudioOutput => "Audio Output".into(),
}
}
/// Category grouping used by the node-finder popup.
///
/// Every variant maps to exactly one category; the old pre-expansion arms
/// (which only covered the original handful of templates) are removed so the
/// updated arms below are the single source of truth.
fn node_finder_categories(&self, _user_state: &mut Self::UserState) -> Vec<&'static str> {
    match self {
        NodeTemplate::MidiInput | NodeTemplate::AudioInput | NodeTemplate::AutomationInput => vec!["Inputs"],
        NodeTemplate::Oscillator | NodeTemplate::WavetableOscillator | NodeTemplate::FmSynth
            | NodeTemplate::Noise | NodeTemplate::SimpleSampler | NodeTemplate::MultiSampler => vec!["Generators"],
        NodeTemplate::Filter | NodeTemplate::Gain | NodeTemplate::Delay | NodeTemplate::Reverb
            | NodeTemplate::Chorus | NodeTemplate::Flanger | NodeTemplate::Phaser | NodeTemplate::Distortion
            | NodeTemplate::BitCrusher | NodeTemplate::Compressor | NodeTemplate::Limiter | NodeTemplate::Eq
            | NodeTemplate::Pan | NodeTemplate::RingModulator | NodeTemplate::Vocoder => vec!["Effects"],
        NodeTemplate::Adsr | NodeTemplate::Lfo | NodeTemplate::Mixer | NodeTemplate::Splitter
            | NodeTemplate::Constant | NodeTemplate::MidiToCv | NodeTemplate::AudioToCv | NodeTemplate::Math
            | NodeTemplate::SampleHold | NodeTemplate::SlewLimiter | NodeTemplate::Quantizer
            | NodeTemplate::EnvelopeFollower | NodeTemplate::BpmDetector | NodeTemplate::Mod => vec!["Utilities"],
        NodeTemplate::Oscilloscope => vec!["Analysis"],
        NodeTemplate::VoiceAllocator => vec!["Advanced"],
        NodeTemplate::AudioOutput => vec!["Outputs"],
    }
}
@ -364,6 +439,69 @@ impl NodeTemplateTrait for NodeTemplate {
);
graph.add_output_param(node_id, "CV Out".into(), DataType::CV);
}
NodeTemplate::MidiToCv => {
graph.add_input_param(node_id, "MIDI In".into(), DataType::Midi, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "V/Oct".into(), DataType::CV);
graph.add_output_param(node_id, "Gate".into(), DataType::CV);
graph.add_output_param(node_id, "Velocity".into(), DataType::CV);
}
// Stub implementations for all other nodes - add proper ports as needed
NodeTemplate::AutomationInput => {
graph.add_output_param(node_id, "CV Out".into(), DataType::CV);
}
NodeTemplate::WavetableOscillator => {
graph.add_input_param(node_id, "V/Oct".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::FmSynth => {
graph.add_input_param(node_id, "V/Oct".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::SimpleSampler => {
graph.add_input_param(node_id, "Gate".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::MultiSampler => {
graph.add_input_param(node_id, "MIDI In".into(), DataType::Midi, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::Reverb | NodeTemplate::Chorus | NodeTemplate::Flanger | NodeTemplate::Phaser
| NodeTemplate::Distortion | NodeTemplate::BitCrusher | NodeTemplate::Compressor
| NodeTemplate::Limiter | NodeTemplate::Eq | NodeTemplate::Pan | NodeTemplate::RingModulator
| NodeTemplate::Vocoder => {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::AudioToCv => {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "CV Out".into(), DataType::CV);
}
NodeTemplate::Math => {
graph.add_input_param(node_id, "A".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOrConstant, true);
graph.add_input_param(node_id, "B".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOrConstant, true);
graph.add_output_param(node_id, "Out".into(), DataType::CV);
}
NodeTemplate::SampleHold | NodeTemplate::SlewLimiter | NodeTemplate::Quantizer | NodeTemplate::EnvelopeFollower => {
graph.add_input_param(node_id, "In".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Out".into(), DataType::CV);
}
NodeTemplate::BpmDetector => {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "BPM".into(), DataType::CV);
}
NodeTemplate::Mod => {
graph.add_input_param(node_id, "Carrier".into(), DataType::Audio, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_input_param(node_id, "Modulator".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Out".into(), DataType::Audio);
}
NodeTemplate::Oscilloscope => {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_input_param(node_id, "CV In".into(), DataType::CV, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
}
NodeTemplate::VoiceAllocator => {
graph.add_input_param(node_id, "MIDI In".into(), DataType::Midi, ValueType::Float { value: 0.0 }, InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
}
}
}
@ -431,18 +569,53 @@ impl NodeTemplateIter for AllNodeTemplates {
fn all_kinds(&self) -> Vec<Self::Item> {
vec![
// Inputs
NodeTemplate::MidiInput,
NodeTemplate::AudioInput,
NodeTemplate::AutomationInput,
// Generators
NodeTemplate::Oscillator,
NodeTemplate::WavetableOscillator,
NodeTemplate::FmSynth,
NodeTemplate::Noise,
NodeTemplate::SimpleSampler,
NodeTemplate::MultiSampler,
// Effects
NodeTemplate::Filter,
NodeTemplate::Gain,
NodeTemplate::Delay,
NodeTemplate::Reverb,
NodeTemplate::Chorus,
NodeTemplate::Flanger,
NodeTemplate::Phaser,
NodeTemplate::Distortion,
NodeTemplate::BitCrusher,
NodeTemplate::Compressor,
NodeTemplate::Limiter,
NodeTemplate::Eq,
NodeTemplate::Pan,
NodeTemplate::RingModulator,
NodeTemplate::Vocoder,
// Utilities
NodeTemplate::Adsr,
NodeTemplate::Lfo,
NodeTemplate::Mixer,
NodeTemplate::Splitter,
NodeTemplate::Constant,
NodeTemplate::MidiToCv,
NodeTemplate::AudioToCv,
NodeTemplate::Math,
NodeTemplate::SampleHold,
NodeTemplate::SlewLimiter,
NodeTemplate::Quantizer,
NodeTemplate::EnvelopeFollower,
NodeTemplate::BpmDetector,
NodeTemplate::Mod,
// Analysis
NodeTemplate::Oscilloscope,
// Advanced
NodeTemplate::VoiceAllocator,
// Outputs
NodeTemplate::AudioOutput,
]
}

View File

@ -29,16 +29,25 @@ pub struct NodeGraphPane {
backend: Option<Box<dyn GraphBackend>>,
/// Maps frontend node IDs to backend node IDs
#[allow(dead_code)]
node_id_map: HashMap<NodeId, BackendNodeId>,
/// Maps backend node IDs to frontend node IDs (reverse mapping)
backend_to_frontend_map: HashMap<BackendNodeId, NodeId>,
/// Track ID this graph belongs to
#[allow(dead_code)]
track_id: Option<Uuid>,
/// Pending action to execute
#[allow(dead_code)]
pending_action: Option<Box<dyn lightningbeam_core::action::Action>>,
/// Track newly added nodes to update ID mappings after action execution
/// (frontend_id, node_type, position)
pending_node_addition: Option<(NodeId, String, (f32, f32))>,
/// Track parameter values to detect changes
/// Maps InputId -> last known value
parameter_values: HashMap<InputId, f32>,
}
impl NodeGraphPane {
@ -50,30 +59,412 @@ impl NodeGraphPane {
user_state: GraphState::default(),
backend: None,
node_id_map: HashMap::new(),
backend_to_frontend_map: HashMap::new(),
track_id: None,
pending_action: None,
pending_node_addition: None,
parameter_values: HashMap::new(),
}
}
pub fn with_track_id(
track_id: Uuid,
audio_controller: std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>,
backend_track_id: u32,
) -> Self {
// Get backend track ID (placeholder - would need actual mapping)
let backend_track_id = 0;
let backend = Box::new(audio_backend::AudioGraphBackend::new(
backend_track_id,
audio_controller,
));
Self {
let mut pane = Self {
state: GraphEditorState::new(1.0),
user_state: GraphState::default(),
backend: Some(backend),
node_id_map: HashMap::new(),
backend_to_frontend_map: HashMap::new(),
track_id: Some(track_id),
pending_action: None,
pending_node_addition: None,
parameter_values: HashMap::new(),
};
// Load existing graph from backend
if let Err(e) = pane.load_graph_from_backend() {
eprintln!("Failed to load graph from backend: {}", e);
}
pane
}
/// Load the graph state from the backend and populate the frontend
///
/// Replaces ALL editor state (nodes, ports, connections, positions,
/// selection, id maps) with a fresh copy built from the backend's
/// serialized graph. Returns an error if no backend is attached or the
/// backend query fails; unknown node types are skipped with a warning.
fn load_graph_from_backend(&mut self) -> Result<(), String> {
// Fetch the serialized graph; bail out early if this pane has no backend.
let graph_state = if let Some(backend) = &self.backend {
backend.get_state()?
} else {
return Err("No backend available".to_string());
};
// Clear existing graph, including transient UI state (selection,
// in-progress connection drag, box selection) so nothing dangles.
self.state.graph.nodes.clear();
self.state.graph.inputs.clear();
self.state.graph.outputs.clear();
self.state.graph.connections.clear();
self.state.node_order.clear();
self.state.node_positions.clear();
self.state.selected_nodes.clear();
self.state.connection_in_progress = None;
self.state.ongoing_box_selection = None;
self.node_id_map.clear();
self.backend_to_frontend_map.clear();
// Create nodes in frontend
for node in &graph_state.nodes {
// Parse node type from string (e.g., "Oscillator" -> NodeTemplate::Oscillator)
// NOTE(review): these strings must match the backend's serialized
// node_type names exactly (e.g. "FMSynth", "MidiToCV") — keep in sync.
let node_template = match node.node_type.as_str() {
// Inputs
"MidiInput" => graph_data::NodeTemplate::MidiInput,
"AudioInput" => graph_data::NodeTemplate::AudioInput,
"AutomationInput" => graph_data::NodeTemplate::AutomationInput,
// Generators
"Oscillator" => graph_data::NodeTemplate::Oscillator,
"WavetableOscillator" => graph_data::NodeTemplate::WavetableOscillator,
"FMSynth" => graph_data::NodeTemplate::FmSynth,
"NoiseGenerator" => graph_data::NodeTemplate::Noise,
"SimpleSampler" => graph_data::NodeTemplate::SimpleSampler,
"MultiSampler" => graph_data::NodeTemplate::MultiSampler,
// Effects
"Filter" => graph_data::NodeTemplate::Filter,
"Gain" => graph_data::NodeTemplate::Gain,
"Delay" => graph_data::NodeTemplate::Delay,
"Reverb" => graph_data::NodeTemplate::Reverb,
"Chorus" => graph_data::NodeTemplate::Chorus,
"Flanger" => graph_data::NodeTemplate::Flanger,
"Phaser" => graph_data::NodeTemplate::Phaser,
"Distortion" => graph_data::NodeTemplate::Distortion,
"BitCrusher" => graph_data::NodeTemplate::BitCrusher,
"Compressor" => graph_data::NodeTemplate::Compressor,
"Limiter" => graph_data::NodeTemplate::Limiter,
"EQ" => graph_data::NodeTemplate::Eq,
"Pan" => graph_data::NodeTemplate::Pan,
"RingModulator" => graph_data::NodeTemplate::RingModulator,
"Vocoder" => graph_data::NodeTemplate::Vocoder,
// Utilities
"ADSR" => graph_data::NodeTemplate::Adsr,
"LFO" => graph_data::NodeTemplate::Lfo,
"Mixer" => graph_data::NodeTemplate::Mixer,
"Splitter" => graph_data::NodeTemplate::Splitter,
"Constant" => graph_data::NodeTemplate::Constant,
"MidiToCV" => graph_data::NodeTemplate::MidiToCv,
"AudioToCV" => graph_data::NodeTemplate::AudioToCv,
"Math" => graph_data::NodeTemplate::Math,
"SampleHold" => graph_data::NodeTemplate::SampleHold,
"SlewLimiter" => graph_data::NodeTemplate::SlewLimiter,
"Quantizer" => graph_data::NodeTemplate::Quantizer,
"EnvelopeFollower" => graph_data::NodeTemplate::EnvelopeFollower,
"BPMDetector" => graph_data::NodeTemplate::BpmDetector,
"Mod" => graph_data::NodeTemplate::Mod,
// Analysis
"Oscilloscope" => graph_data::NodeTemplate::Oscilloscope,
// Advanced
"VoiceAllocator" => graph_data::NodeTemplate::VoiceAllocator,
// Outputs
"AudioOutput" => graph_data::NodeTemplate::AudioOutput,
_ => {
eprintln!("Unknown node type: {}", node.node_type);
continue;
}
};
// Create node directly in the graph
use egui_node_graph2::Node;
let frontend_id = self.state.graph.nodes.insert(Node {
// NOTE(review): assumes the slotmap key returned by insert — not this
// id field — is what identifies the node; confirm egui_node_graph2
// ignores/overwrites Node.id on insert.
id: egui_node_graph2::NodeId::default(), // Will be replaced by insert
label: node.node_type.clone(),
inputs: vec![],
outputs: vec![],
user_data: graph_data::NodeData,
});
// Build the node's inputs and outputs (this adds them to graph.inputs and graph.outputs)
// build_node() automatically populates the node's inputs/outputs vectors with correct names and order
node_template.build_node(&mut self.state.graph, &mut self.user_state, frontend_id);
// Set position
self.state.node_positions.insert(
frontend_id,
egui::pos2(node.position.0, node.position.1),
);
// Add to node order for rendering
self.state.node_order.push(frontend_id);
// Map frontend ID to backend ID
// Backend node ids are petgraph StableGraph indices serialized as u32.
let backend_id = BackendNodeId::Audio(petgraph::stable_graph::NodeIndex::new(node.id as usize));
self.node_id_map.insert(frontend_id, backend_id);
self.backend_to_frontend_map.insert(backend_id, frontend_id);
// Set parameter values
for (&param_id, &value) in &node.parameters {
// Find the input param in the graph and set its value
if let Some(node_data) = self.state.graph.nodes.get_mut(frontend_id) {
// TODO: Set parameter values on the node's input params
// This requires matching param_id to the input param by index
let _ = (param_id, value); // Silence unused warning for now
}
}
}
// Create connections in frontend
// Ports are identified positionally: from_port / to_port index into the
// node's outputs / inputs vectors built by build_node().
for conn in &graph_state.connections {
let from_backend = BackendNodeId::Audio(petgraph::stable_graph::NodeIndex::new(conn.from_node as usize));
let to_backend = BackendNodeId::Audio(petgraph::stable_graph::NodeIndex::new(conn.to_node as usize));
if let (Some(&from_id), Some(&to_id)) = (
self.backend_to_frontend_map.get(&from_backend),
self.backend_to_frontend_map.get(&to_backend),
) {
// Find output param on from_node
if let Some(from_node) = self.state.graph.nodes.get(from_id) {
if let Some((_name, output_id)) = from_node.outputs.get(conn.from_port) {
// Find input param on to_node
if let Some(to_node) = self.state.graph.nodes.get(to_id) {
if let Some((_name, input_id)) = to_node.inputs.get(conn.to_port) {
// Add connection to graph - connections map is InputId -> Vec<OutputId>
if let Some(connections) = self.state.graph.connections.get_mut(*input_id) {
connections.push(*output_id);
} else {
self.state.graph.connections.insert(*input_id, vec![*output_id]);
}
}
}
}
}
}
}
Ok(())
}
/// Translate editor events (node created/deleted, connect/disconnect, move)
/// into undoable actions and execute them against the audio backend.
///
/// NOTE(review): only one `pending_action` slot exists — if several node
/// responses in a single frame each set an action, earlier ones are
/// silently overwritten. Confirm at most one graph-mutating response is
/// expected per frame.
fn handle_graph_response(
&mut self,
response: egui_node_graph2::GraphResponse<
graph_data::UserResponse,
graph_data::NodeData,
>,
shared: &mut crate::panes::SharedPaneState,
) {
use egui_node_graph2::NodeResponse;
for node_response in response.node_responses {
match node_response {
NodeResponse::CreatedNode(node_id) => {
// Node was created from the node finder
// Get node label which is the node type string
if let Some(node) = self.state.graph.nodes.get(node_id) {
let node_type = node.label.clone();
let position = self.state.node_positions.get(node_id)
.map(|pos| (pos.x, pos.y))
.unwrap_or((0.0, 0.0));
if let Some(track_id) = self.track_id {
let action = Box::new(actions::NodeGraphAction::AddNode(
actions::AddNodeAction::new(track_id, node_type.clone(), position)
));
self.pending_action = Some(action);
// Track this addition so we can update ID mappings after execution
self.pending_node_addition = Some((node_id, node_type, position));
}
}
}
NodeResponse::ConnectEventEnded { output, input, .. } => {
// Connection was made between output and input
if let Some(track_id) = self.track_id {
// Get the nodes that own these params
let from_node = self.state.graph.outputs.get(output).map(|o| o.node);
let to_node = self.state.graph.inputs.get(input).map(|i| i.node);
if let (Some(from_node_id), Some(to_node_id)) = (from_node, to_node) {
// Find port indices
// Ports are positional: index of the param id within the
// node's outputs/inputs vector.
let from_port = self.state.graph.nodes.get(from_node_id)
.and_then(|n| n.outputs.iter().position(|(_, id)| *id == output))
.unwrap_or(0);
let to_port = self.state.graph.nodes.get(to_node_id)
.and_then(|n| n.inputs.iter().position(|(_, id)| *id == input))
.unwrap_or(0);
// Map frontend IDs to backend IDs
// Nodes not yet in the map (e.g. just created, mapping not
// resolved) make this silently do nothing.
let from_backend = self.node_id_map.get(&from_node_id);
let to_backend = self.node_id_map.get(&to_node_id);
if let (Some(&from_id), Some(&to_id)) = (from_backend, to_backend) {
let action = Box::new(actions::NodeGraphAction::Connect(
actions::ConnectAction::new(
track_id,
from_id,
from_port,
to_id,
to_port,
)
));
self.pending_action = Some(action);
}
}
}
}
NodeResponse::DisconnectEvent { output, input } => {
// Connection was removed
if let Some(track_id) = self.track_id {
// Get the nodes that own these params
let from_node = self.state.graph.outputs.get(output).map(|o| o.node);
let to_node = self.state.graph.inputs.get(input).map(|i| i.node);
if let (Some(from_node_id), Some(to_node_id)) = (from_node, to_node) {
// Find port indices
let from_port = self.state.graph.nodes.get(from_node_id)
.and_then(|n| n.outputs.iter().position(|(_, id)| *id == output))
.unwrap_or(0);
let to_port = self.state.graph.nodes.get(to_node_id)
.and_then(|n| n.inputs.iter().position(|(_, id)| *id == input))
.unwrap_or(0);
// Map frontend IDs to backend IDs
let from_backend = self.node_id_map.get(&from_node_id);
let to_backend = self.node_id_map.get(&to_node_id);
if let (Some(&from_id), Some(&to_id)) = (from_backend, to_backend) {
let action = Box::new(actions::NodeGraphAction::Disconnect(
actions::DisconnectAction::new(
track_id,
from_id,
from_port,
to_id,
to_port,
)
));
self.pending_action = Some(action);
}
}
}
}
NodeResponse::DeleteNodeFull { node_id, .. } => {
// Node was deleted
if let Some(track_id) = self.track_id {
if let Some(&backend_id) = self.node_id_map.get(&node_id) {
let action = Box::new(actions::NodeGraphAction::RemoveNode(
actions::RemoveNodeAction::new(track_id, backend_id)
));
self.pending_action = Some(action);
// Remove from ID map
// NOTE(review): mappings are dropped before the action runs;
// if execution fails below, the maps and backend diverge.
self.node_id_map.remove(&node_id);
self.backend_to_frontend_map.remove(&backend_id);
}
}
}
NodeResponse::MoveNode { node, drag_delta: _ } => {
// Node was moved - we'll handle this on drag end
// For now, just update the position (no action needed during drag)
self.user_state.active_node = Some(node);
}
_ => {
// Ignore other events (SelectNode, RaiseNode, etc.)
}
}
}
// Execute pending action if any
if let Some(action) = self.pending_action.take() {
// Node graph actions need to update the backend, so use execute_with_backend
if let Some(ref audio_controller) = shared.audio_controller {
let mut controller = audio_controller.lock().unwrap();
// Node graph actions don't use clip instances, so we use an empty map
let mut empty_clip_map = std::collections::HashMap::new();
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut *controller),
layer_to_track_map: shared.layer_to_track_map,
clip_instance_to_backend_map: &mut empty_clip_map,
};
if let Err(e) = shared.action_executor.execute_with_backend(action, &mut backend_context) {
eprintln!("Failed to execute node graph action: {}", e);
} else {
// If this was a node addition, query backend to get the new node's ID
if let Some((frontend_id, node_type, position)) = self.pending_node_addition.take() {
if let Some(track_id) = self.track_id {
if let Some(&backend_track_id) = shared.layer_to_track_map.get(&track_id) {
// Query graph state to find the new node
if let Ok(json) = controller.query_graph_state(backend_track_id) {
if let Ok(state) = serde_json::from_str::<daw_backend::audio::node_graph::GraphPreset>(&json) {
// Find node by type and position (approximate match for position)
// NOTE(review): heuristic — ambiguous if two nodes of the
// same type sit within 1.0 units of each other; a backend
// id returned synchronously would be robust.
if let Some(backend_node) = state.nodes.iter().find(|n| {
n.node_type == node_type &&
(n.position.0 - position.0).abs() < 1.0 &&
(n.position.1 - position.1).abs() < 1.0
}) {
let backend_id = BackendNodeId::Audio(
petgraph::stable_graph::NodeIndex::new(backend_node.id as usize)
);
self.node_id_map.insert(frontend_id, backend_id);
self.backend_to_frontend_map.insert(backend_id, frontend_id);
eprintln!("[DEBUG] Mapped new node: frontend {:?} -> backend {:?}", frontend_id, backend_id);
}
}
}
}
}
}
}
} else {
eprintln!("Cannot execute node graph action: no audio controller");
}
}
}
/// Diff all editable input-parameter values against their last-seen values
/// and queue a SetParameterAction for any that changed beyond a small
/// epsilon (0.0001).
///
/// NOTE(review): this sets `self.pending_action`, but the pending action is
/// executed inside handle_graph_response — presumably on the next frame;
/// confirm the intended execution path. Also, if several parameters change
/// in one pass, only the LAST change survives (single pending_action slot).
fn check_parameter_changes(&mut self) {
// Check all input parameters for value changes
for (input_id, input_param) in &self.state.graph.inputs {
// Only check parameters that can have constant values (not ConnectionOnly)
if matches!(input_param.kind, InputParamKind::ConnectionOnly) {
continue;
}
// Get current value
let current_value = match &input_param.value {
ValueType::Float { value } => *value,
_ => continue, // Skip non-float values for now
};
// Check if value has changed
// First sighting (previous is None) also counts as a change so the
// initial value is pushed to the backend.
let previous_value = self.parameter_values.get(&input_id).copied();
if previous_value.is_none() || (previous_value.unwrap() - current_value).abs() > 0.0001 {
// Value has changed, create SetParameterAction
if let Some(track_id) = self.track_id {
let node_id = input_param.node;
// Get backend node ID
if let Some(&backend_id) = self.node_id_map.get(&node_id) {
// Get parameter index (position in node's inputs array)
// Parameter identity is positional: index within the node's
// inputs vector as built by build_node().
if let Some(node) = self.state.graph.nodes.get(node_id) {
if let Some(param_index) = node.inputs.iter().position(|(_, id)| *id == input_id) {
// Create action to update backend
let action = Box::new(actions::NodeGraphAction::SetParameter(
actions::SetParameterAction::new(
track_id,
backend_id,
param_index as u32,
current_value as f64,
)
));
self.pending_action = Some(action);
}
}
}
}
// Update stored value
self.parameter_values.insert(input_id, current_value);
}
}
}
@ -142,6 +533,61 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
_path: &NodePath,
shared: &mut crate::panes::SharedPaneState,
) {
// Check if we need to reload for a different track
let current_track = *shared.active_layer_id;
// If selected track changed, reload the graph
if self.track_id != current_track {
if let Some(new_track_id) = current_track {
// Get backend track ID
if let Some(&backend_track_id) = shared.layer_to_track_map.get(&new_track_id) {
// Check if track is MIDI or Audio
if let Some(audio_controller) = &shared.audio_controller {
let is_valid_track = {
let controller = audio_controller.lock().unwrap();
// TODO: Query track type from backend
// For now, assume it's valid if we have a track ID mapping
true
};
if is_valid_track {
// Reload graph for new track
self.track_id = Some(new_track_id);
// Recreate backend
self.backend = Some(Box::new(audio_backend::AudioGraphBackend::new(
backend_track_id,
(*audio_controller).clone(),
)));
// Load graph from backend
if let Err(e) = self.load_graph_from_backend() {
eprintln!("Failed to load graph from backend: {}", e);
}
}
}
}
} else {
self.track_id = None;
}
}
// Check if we have a valid track
if self.track_id.is_none() || self.backend.is_none() {
// Show message that no valid track is selected
let painter = ui.painter();
let bg_color = egui::Color32::from_gray(30);
painter.rect_filled(rect, 0.0, bg_color);
let text = "Select a MIDI or Audio track to view its node graph";
let font_id = egui::FontId::proportional(16.0);
let text_color = egui::Color32::from_gray(150);
let galley = painter.layout_no_wrap(text.to_string(), font_id, text_color);
let text_pos = rect.center() - galley.size() / 2.0;
painter.galley(text_pos, galley, text_color);
return;
}
// Get colors from theme
let bg_style = shared.theme.style(".node-graph-background", ui.ctx());
let grid_style = shared.theme.style(".node-graph-grid", ui.ctx());
@ -185,13 +631,19 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
Self::draw_dot_grid_background(ui, rect, bg_color, grid_color, pan_zoom);
// Draw the graph editor (library will process scroll as zoom by default)
let _graph_response = self.state.draw_graph_editor(
let graph_response = self.state.draw_graph_editor(
ui,
AllNodeTemplates,
&mut self.user_state,
Vec::default(),
);
// Handle graph events and create actions
self.handle_graph_response(graph_response, shared);
// Check for parameter value changes and send updates to backend
self.check_parameter_changes();
// Override library's default scroll behavior:
// - Library uses scroll for zoom
// - We want: scroll = pan, ctrl+scroll = zoom