Fix preset loading, add LFO, noise, pan and splitter nodes

This commit is contained in:
Skyler Lehmkuhl 2025-10-25 07:29:14 -04:00
parent 139946fb75
commit e57ae51397
16 changed files with 1466 additions and 128 deletions

73
daw-backend/Cargo.lock generated
View File

@ -410,6 +410,7 @@ dependencies = [
"dasp_signal",
"midly",
"petgraph 0.6.5",
"rand",
"ratatui",
"rtrb",
"serde",
@ -468,6 +469,17 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "getrandom"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "getrandom"
version = "0.3.4"
@ -587,7 +599,7 @@ version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom",
"getrandom 0.3.4",
"libc",
]
@ -854,6 +866,15 @@ version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "ppv-lite86"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
dependencies = [
"zerocopy",
]
[[package]]
name = "proc-macro-crate"
version = "3.4.0"
@ -887,6 +908,36 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.16",
]
[[package]]
name = "ratatui"
version = "0.26.3"
@ -1804,3 +1855,23 @@ name = "wit-bindgen"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "zerocopy"
version = "0.8.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View File

@ -11,6 +11,7 @@ midly = "0.5"
serde = { version = "1.0", features = ["derive"] }
ratatui = "0.26"
crossterm = "0.27"
rand = "0.8"
# Node-based audio graph dependencies
dasp_graph = "0.11"

View File

@ -739,6 +739,10 @@ impl Engine {
"Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
"Filter" => Box::new(FilterNode::new("Filter".to_string())),
"ADSR" => Box::new(ADSRNode::new("ADSR".to_string())),
"LFO" => Box::new(LFONode::new("LFO".to_string())),
"NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())),
"Splitter" => Box::new(SplitterNode::new("Splitter".to_string())),
"Pan" => Box::new(PanNode::new("Pan".to_string())),
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
@ -786,6 +790,10 @@ impl Engine {
"Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
"Filter" => Box::new(FilterNode::new("Filter".to_string())),
"ADSR" => Box::new(ADSRNode::new("ADSR".to_string())),
"LFO" => Box::new(LFONode::new("LFO".to_string())),
"NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())),
"Splitter" => Box::new(SplitterNode::new("Splitter".to_string())),
"Pan" => Box::new(PanNode::new("Pan".to_string())),
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
@ -976,6 +984,35 @@ impl Engine {
}
}
}
Command::GraphSaveTemplatePreset(track_id, voice_allocator_id, preset_path, preset_name) => {
use crate::audio::node_graph::nodes::VoiceAllocatorNode;
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(ref graph) = track.instrument_graph {
let va_idx = NodeIndex::new(voice_allocator_id as usize);
// Get the VoiceAllocator node and serialize its template
if let Some(node) = graph.get_node(va_idx) {
// Downcast to VoiceAllocatorNode
let node_ptr = node as *const dyn crate::audio::node_graph::AudioNode;
let node_ptr = node_ptr as *const VoiceAllocatorNode;
unsafe {
let va_node = &*node_ptr;
let template_preset = va_node.template_graph().to_preset(&preset_name);
// Write to file
if let Ok(json) = template_preset.to_json() {
if let Err(e) = std::fs::write(&preset_path, json) {
eprintln!("Failed to save template preset: {}", e);
}
}
}
}
}
}
}
}
}
@ -1456,4 +1493,9 @@ impl EngineController {
/// Ask the audio thread to load a graph preset file into the given track.
/// NOTE(review): if the command ring buffer is full the push fails and the
/// request is silently dropped (`let _ =`) — confirm that is acceptable.
pub fn graph_load_preset(&mut self, track_id: TrackId, preset_path: String) {
let _ = self.command_tx.push(Command::GraphLoadPreset(track_id, preset_path));
}
/// Save a VoiceAllocator's template graph as a preset.
/// Enqueues `Command::GraphSaveTemplatePreset` for the audio thread, which
/// performs the actual file write at `preset_path`. As with the other graph
/// commands, a full command queue drops the request silently (`let _ =`).
pub fn graph_save_template_preset(&mut self, track_id: TrackId, voice_allocator_id: u32, preset_path: String, preset_name: String) {
let _ = self.command_tx.push(Command::GraphSaveTemplatePreset(track_id, voice_allocator_id, preset_path, preset_name));
}
}

View File

@ -125,6 +125,17 @@ impl InstrumentGraph {
to: NodeIndex,
to_port: usize,
) -> Result<(), ConnectionError> {
eprintln!("[GRAPH] connect() called: {:?} port {} -> {:?} port {}", from, from_port, to, to_port);
// Check if this exact connection already exists
if let Some(edge_idx) = self.graph.find_edge(from, to) {
let existing_conn = &self.graph[edge_idx];
if existing_conn.from_port == from_port && existing_conn.to_port == to_port {
eprintln!("[GRAPH] Connection already exists, skipping duplicate");
return Ok(()); // Connection already exists, don't create duplicate
}
}
// Validate the connection
self.validate_connection(from, from_port, to, to_port)?;
@ -310,6 +321,11 @@ impl InstrumentGraph {
// Use the requested output buffer size for processing
let process_size = output_buffer.len();
if process_size > self.buffer_size * 2 {
eprintln!("[GRAPH] WARNING: process_size {} > allocated buffer_size {} * 2",
process_size, self.buffer_size);
}
// Clear all output buffers (audio/CV and MIDI)
for node in self.graph.node_weights_mut() {
for buffer in &mut node.output_buffers {
@ -670,8 +686,10 @@ impl InstrumentGraph {
index_map.insert(serialized_node.id, node_idx);
// Set parameters
eprintln!("[PRESET] Node {}: type={}, params={:?}", serialized_node.id, serialized_node.node_type, serialized_node.parameters);
for (&param_id, &value) in &serialized_node.parameters {
if let Some(graph_node) = graph.graph.node_weight_mut(node_idx) {
eprintln!("[PRESET] Setting param {} = {}", param_id, value);
graph_node.node.set_parameter(param_id, value);
}
}
@ -681,26 +699,32 @@ impl InstrumentGraph {
}
// Create connections
eprintln!("[PRESET] Creating {} connections", preset.connections.len());
for conn in &preset.connections {
let from_idx = index_map.get(&conn.from_node)
.ok_or_else(|| format!("Connection from unknown node {}", conn.from_node))?;
let to_idx = index_map.get(&conn.to_node)
.ok_or_else(|| format!("Connection to unknown node {}", conn.to_node))?;
eprintln!("[PRESET] Connecting: node {} port {} -> node {} port {}", conn.from_node, conn.from_port, conn.to_node, conn.to_port);
graph.connect(*from_idx, conn.from_port, *to_idx, conn.to_port)
.map_err(|e| format!("Failed to connect nodes: {:?}", e))?;
}
// Set MIDI targets
eprintln!("[PRESET] Setting MIDI targets: {:?}", preset.midi_targets);
for &target_id in &preset.midi_targets {
if let Some(&target_idx) = index_map.get(&target_id) {
eprintln!("[PRESET] MIDI target: node {} -> index {:?}", target_id, target_idx);
graph.set_midi_target(target_idx, true);
}
}
// Set output node
eprintln!("[PRESET] Setting output node: {:?}", preset.output_node);
if let Some(output_id) = preset.output_node {
if let Some(&output_idx) = index_map.get(&output_id) {
eprintln!("[PRESET] Output node: {} -> index {:?}", output_id, output_idx);
graph.output_node = Some(output_idx);
}
}

View File

@ -0,0 +1,222 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
use std::f32::consts::PI;
use rand::Rng;
const PARAM_FREQUENCY: u32 = 0;
const PARAM_AMPLITUDE: u32 = 1;
const PARAM_WAVEFORM: u32 = 2;
const PARAM_PHASE_OFFSET: u32 = 3;
/// Waveform shapes available to the LFO. Discriminants match the values
/// accepted by the `Waveform` parameter (0.0..=4.0).
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum LFOWaveform {
    Sine = 0,
    Triangle = 1,
    Saw = 2,
    Square = 3,
    Random = 4,
}

impl LFOWaveform {
    /// Interpret a parameter value as a waveform: the value is rounded and,
    /// if it lands on a valid discriminant (0..=4), mapped onto the matching
    /// variant. Anything else (including NaN) falls back to Sine.
    fn from_f32(value: f32) -> Self {
        const LOOKUP: [LFOWaveform; 5] = [
            LFOWaveform::Sine,
            LFOWaveform::Triangle,
            LFOWaveform::Saw,
            LFOWaveform::Square,
            LFOWaveform::Random,
        ];
        let index = value.round() as i32;
        if (0..=4).contains(&index) {
            LOOKUP[index as usize]
        } else {
            LFOWaveform::Sine
        }
    }
}
/// Low Frequency Oscillator node for modulation.
/// Pure generator: no inputs, one mono CV output in the range 0..=amplitude.
pub struct LFONode {
// Display name reported via `name()`.
name: String,
// Oscillation rate in Hz; clamped to 0.01..=20.0 by `set_parameter`.
frequency: f32,
// Output scale factor; clamped to 0.0..=1.0 by `set_parameter`.
amplitude: f32,
// Currently selected waveform shape.
waveform: LFOWaveform,
// Start-phase offset in cycles (0..=1), added to `phase` each sample.
phase_offset: f32,
// Current oscillator phase in cycles; wraps at 1.0.
phase: f32,
// Sample-and-hold endpoints the Random waveform interpolates between.
last_random_value: f32,
next_random_value: f32,
// Interpolation position (0..1) between the two random endpoints.
random_phase: f32,
// Port/parameter descriptors returned by the AudioNode getters.
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl LFONode {
    /// Build an LFO with its defaults: 1 Hz sine, full amplitude, zero phase.
    pub fn new(name: impl Into<String>) -> Self {
        let mut rng = rand::thread_rng();
        // Seed the sample-and-hold pair so Random mode has two values to
        // interpolate between before the first full cycle completes.
        let first_random = rng.gen_range(-1.0..1.0);
        let second_random = rng.gen_range(-1.0..1.0);
        Self {
            name: name.into(),
            frequency: 1.0,
            amplitude: 1.0,
            waveform: LFOWaveform::Sine,
            phase_offset: 0.0,
            phase: 0.0,
            last_random_value: first_random,
            next_random_value: second_random,
            random_phase: 0.0,
            // Pure generator: no inputs, a single mono CV output.
            inputs: vec![],
            outputs: vec![NodePort::new("CV Out", SignalType::CV, 0)],
            parameters: vec![
                Parameter::new(PARAM_FREQUENCY, "Frequency", 0.01, 20.0, 1.0, ParameterUnit::Frequency),
                Parameter::new(PARAM_AMPLITUDE, "Amplitude", 0.0, 1.0, 1.0, ParameterUnit::Generic),
                Parameter::new(PARAM_WAVEFORM, "Waveform", 0.0, 4.0, 0.0, ParameterUnit::Generic),
                Parameter::new(PARAM_PHASE_OFFSET, "Phase", 0.0, 1.0, 0.0, ParameterUnit::Generic),
            ],
        }
    }
}
impl AudioNode for LFONode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
// Clamp every incoming value to the parameter's declared range.
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_FREQUENCY => self.frequency = value.clamp(0.01, 20.0),
PARAM_AMPLITUDE => self.amplitude = value.clamp(0.0, 1.0),
PARAM_WAVEFORM => self.waveform = LFOWaveform::from_f32(value),
PARAM_PHASE_OFFSET => self.phase_offset = value.clamp(0.0, 1.0),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_FREQUENCY => self.frequency,
PARAM_AMPLITUDE => self.amplitude,
// Report the waveform as its discriminant so it round-trips via from_f32.
PARAM_WAVEFORM => self.waveform as i32 as f32,
PARAM_PHASE_OFFSET => self.phase_offset,
_ => 0.0,
}
}
// Generate one block of unipolar CV: the raw bipolar waveform (-1..1) is
// remapped to 0..1 and scaled by `amplitude` before being written out.
fn process(
&mut self,
_inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
sample_rate: u32,
) {
if outputs.is_empty() {
return;
}
let output = &mut outputs[0];
let sample_rate_f32 = sample_rate as f32;
// CV signals are mono
for sample_idx in 0..output.len() {
// Effective phase = running phase plus the static offset, in cycles.
let current_phase = (self.phase + self.phase_offset) % 1.0;
// Generate waveform sample based on waveform type
let raw_sample = match self.waveform {
LFOWaveform::Sine => (current_phase * 2.0 * PI).sin(),
LFOWaveform::Triangle => {
// Triangle: rises from -1 to 1, falls back to -1
4.0 * (current_phase - 0.5).abs() - 1.0
}
LFOWaveform::Saw => {
// Sawtooth: ramp from -1 to 1
2.0 * current_phase - 1.0
}
LFOWaveform::Square => {
if current_phase < 0.5 { 1.0 } else { -1.0 }
}
LFOWaveform::Random => {
// Sample & hold random values with smooth interpolation
// Interpolate between last and next random value
let t = self.random_phase;
self.last_random_value * (1.0 - t) + self.next_random_value * t
}
};
// Scale to 0-1 range and apply amplitude
let sample = (raw_sample * 0.5 + 0.5) * self.amplitude;
output[sample_idx] = sample;
// Update phase
self.phase += self.frequency / sample_rate_f32;
if self.phase >= 1.0 {
self.phase -= 1.0;
// For random waveform, generate new random value at each cycle
if self.waveform == LFOWaveform::Random {
self.last_random_value = self.next_random_value;
let mut rng = rand::thread_rng();
self.next_random_value = rng.gen_range(-1.0..1.0);
self.random_phase = 0.0;
}
}
// Update random interpolation phase
// NOTE(review): random_phase advances at the same rate as `phase` but
// wraps independently and is only re-zeroed when `phase` wraps; with a
// nonzero phase_offset the two counters can drift — confirm intended.
if self.waveform == LFOWaveform::Random {
self.random_phase += self.frequency / sample_rate_f32;
if self.random_phase >= 1.0 {
self.random_phase -= 1.0;
}
}
}
}
// Restart the cycle and reseed the random sample-and-hold endpoints.
fn reset(&mut self) {
self.phase = 0.0;
self.random_phase = 0.0;
let mut rng = rand::thread_rng();
self.last_random_value = rng.gen_range(-1.0..1.0);
self.next_random_value = rng.gen_range(-1.0..1.0);
}
fn node_type(&self) -> &str {
"LFO"
}
fn name(&self) -> &str {
&self.name
}
// Copy all settings into a fresh node; phase state restarts at zero so the
// clone begins a new cycle rather than continuing this one.
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
frequency: self.frequency,
amplitude: self.amplitude,
waveform: self.waveform,
phase_offset: self.phase_offset,
phase: 0.0, // Reset phase for new instance
last_random_value: self.last_random_value,
next_random_value: self.next_random_value,
random_phase: 0.0,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}

View File

@ -2,12 +2,16 @@ mod adsr;
mod audio_to_cv;
mod filter;
mod gain;
mod lfo;
mod midi_input;
mod midi_to_cv;
mod mixer;
mod noise;
mod oscillator;
mod oscilloscope;
mod output;
mod pan;
mod splitter;
mod template_io;
mod voice_allocator;
@ -15,11 +19,15 @@ pub use adsr::ADSRNode;
pub use audio_to_cv::AudioToCVNode;
pub use filter::FilterNode;
pub use gain::GainNode;
pub use lfo::LFONode;
pub use midi_input::MidiInputNode;
pub use midi_to_cv::MidiToCVNode;
pub use mixer::MixerNode;
pub use noise::NoiseGeneratorNode;
pub use oscillator::OscillatorNode;
pub use oscilloscope::OscilloscopeNode;
pub use output::AudioOutputNode;
pub use pan::PanNode;
pub use splitter::SplitterNode;
pub use template_io::{TemplateInputNode, TemplateOutputNode};
pub use voice_allocator::VoiceAllocatorNode;

View File

@ -0,0 +1,197 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
use rand::Rng;
const PARAM_AMPLITUDE: u32 = 0;
const PARAM_COLOR: u32 = 1;
/// Spectral color of the generated noise.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum NoiseColor {
    White = 0,
    Pink = 1,
}

impl NoiseColor {
    /// Interpret a parameter value as a noise color: only a value that
    /// rounds to exactly 1 selects Pink; everything else is White.
    fn from_f32(value: f32) -> Self {
        if value.round() as i32 == 1 {
            NoiseColor::Pink
        } else {
            NoiseColor::White
        }
    }
}
/// Noise generator node with white and pink noise.
/// Pure generator: no inputs, one stereo (interleaved L/R) audio output.
pub struct NoiseGeneratorNode {
// Display name reported via `name()`.
name: String,
// Output level; clamped to 0.0..=1.0 by `set_parameter`.
amplitude: f32,
// White or pink spectrum selection.
color: NoiseColor,
// Pink noise state (Paul Kellet's pink noise algorithm)
// Seven filter accumulators; cleared by `reset()` and in `clone_node()`.
pink_b0: f32,
pink_b1: f32,
pink_b2: f32,
pink_b3: f32,
pink_b4: f32,
pink_b5: f32,
pink_b6: f32,
// Port/parameter descriptors returned by the AudioNode getters.
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl NoiseGeneratorNode {
    /// Create a noise generator defaulting to white noise at half amplitude.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            amplitude: 0.5,
            color: NoiseColor::White,
            // The pink-filter bank starts silent.
            pink_b0: 0.0,
            pink_b1: 0.0,
            pink_b2: 0.0,
            pink_b3: 0.0,
            pink_b4: 0.0,
            pink_b5: 0.0,
            pink_b6: 0.0,
            // Pure generator: no inputs, a single stereo audio output.
            inputs: vec![],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_AMPLITUDE, "Amplitude", 0.0, 1.0, 0.5, ParameterUnit::Generic),
                Parameter::new(PARAM_COLOR, "Color", 0.0, 1.0, 0.0, ParameterUnit::Generic),
            ],
        }
    }

    /// One uniformly distributed white-noise sample in [-1, 1).
    fn generate_white(&self) -> f32 {
        rand::thread_rng().gen_range(-1.0..1.0)
    }

    /// One pink-noise sample via Paul Kellet's filter: several first-order
    /// lowpass stages fed by the same white sample and summed.
    fn generate_pink(&mut self) -> f32 {
        let white: f32 = rand::thread_rng().gen_range(-1.0..1.0);
        self.pink_b0 = 0.99886 * self.pink_b0 + white * 0.0555179;
        self.pink_b1 = 0.99332 * self.pink_b1 + white * 0.0750759;
        self.pink_b2 = 0.96900 * self.pink_b2 + white * 0.1538520;
        self.pink_b3 = 0.86650 * self.pink_b3 + white * 0.3104856;
        self.pink_b4 = 0.55000 * self.pink_b4 + white * 0.5329522;
        self.pink_b5 = -0.7616 * self.pink_b5 - white * 0.0168980;
        let pink = self.pink_b0 + self.pink_b1 + self.pink_b2 + self.pink_b3 + self.pink_b4 + self.pink_b5 + self.pink_b6 + white * 0.5362;
        // b6 is a one-sample delay of the white input, applied on the next call.
        self.pink_b6 = white * 0.115926;
        // Scale to approximately -1 to 1
        pink * 0.11
    }
}
impl AudioNode for NoiseGeneratorNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Generator
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    /// Clamp/convert incoming values to the parameter's valid range.
    fn set_parameter(&mut self, id: u32, value: f32) {
        match id {
            PARAM_AMPLITUDE => self.amplitude = value.clamp(0.0, 1.0),
            PARAM_COLOR => self.color = NoiseColor::from_f32(value),
            _ => {}
        }
    }

    fn get_parameter(&self, id: u32) -> f32 {
        match id {
            PARAM_AMPLITUDE => self.amplitude,
            // Report the color as its discriminant so it round-trips.
            PARAM_COLOR => self.color as i32 as f32,
            _ => 0.0,
        }
    }

    /// Fill the stereo output with noise; each frame writes the same mono
    /// sample to both interleaved channels.
    fn process(
        &mut self,
        _inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        let Some(output) = outputs.first_mut() else {
            return;
        };
        // Audio buffers are interleaved L/R, so walk them two samples at a time.
        for frame in output.chunks_exact_mut(2) {
            let sample = match self.color {
                NoiseColor::White => self.generate_white(),
                NoiseColor::Pink => self.generate_pink(),
            } * self.amplitude;
            frame[0] = sample; // Left
            frame[1] = sample; // Right
        }
    }

    /// Clear the pink-noise filter state; white noise is stateless.
    fn reset(&mut self) {
        self.pink_b0 = 0.0;
        self.pink_b1 = 0.0;
        self.pink_b2 = 0.0;
        self.pink_b3 = 0.0;
        self.pink_b4 = 0.0;
        self.pink_b5 = 0.0;
        self.pink_b6 = 0.0;
    }

    fn node_type(&self) -> &str {
        "NoiseGenerator"
    }

    fn name(&self) -> &str {
        &self.name
    }

    /// Copy the settings into a fresh node; the filter state intentionally
    /// starts clean in the copy.
    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            amplitude: self.amplitude,
            color: self.color,
            pink_b0: 0.0,
            pink_b1: 0.0,
            pink_b2: 0.0,
            pink_b3: 0.0,
            pink_b4: 0.0,
            pink_b5: 0.0,
            pink_b6: 0.0,
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,168 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
use std::f32::consts::PI;
const PARAM_PAN: u32 = 0;
/// Stereo panning node using constant-power panning law
/// Converts mono audio to stereo with controllable pan position
pub struct PanNode {
// Display name reported via `name()`.
name: String,
// Pan position: -1.0 (full left) ..= 1.0 (full right); 0.0 is center.
pan: f32,
// Cached constant-power channel gains derived from `pan` by `update_gains()`.
left_gain: f32,
right_gain: f32,
// Port/parameter descriptors returned by the AudioNode getters.
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl PanNode {
    /// Create a pan node centered (pan = 0.0) with its gains precomputed.
    pub fn new(name: impl Into<String>) -> Self {
        let mut node = Self {
            name: name.into(),
            pan: 0.0,
            // Placeholder gains; replaced immediately by update_gains().
            left_gain: 1.0,
            right_gain: 1.0,
            inputs: vec![
                NodePort::new("Audio In", SignalType::Audio, 0),
                NodePort::new("Pan CV", SignalType::CV, 1),
            ],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_PAN, "Pan", -1.0, 1.0, 0.0, ParameterUnit::Generic),
            ],
        };
        node.update_gains();
        node
    }

    /// Recompute the cached channel gains from `self.pan` with the
    /// constant-power law: pan in [-1, 1] maps to an angle in [0, PI/2],
    /// where left = cos(angle) and right = sin(angle).
    fn update_gains(&mut self) {
        let angle = (self.pan + 1.0) * 0.5 * PI / 2.0;
        self.left_gain = angle.cos();
        self.right_gain = angle.sin();
    }
}
impl AudioNode for PanNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_PAN => {
self.pan = value.clamp(-1.0, 1.0);
self.update_gains();
}
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_PAN => self.pan,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
}
let audio_input = inputs[0];
let output = &mut outputs[0];
// Audio signals are stereo (interleaved L/R)
// Process by frames, not samples
let frames = audio_input.len() / 2;
let output_frames = output.len() / 2;
let frames_to_process = frames.min(output_frames);
for frame in 0..frames_to_process {
// Get base pan position
let mut pan = self.pan;
// Add CV modulation if connected
if inputs.len() > 1 && frame < inputs[1].len() {
let cv = inputs[1][frame]; // CV is mono
// CV is 0-1, map to -1 to +1 range
pan += (cv * 2.0 - 1.0);
pan = pan.clamp(-1.0, 1.0);
}
// Update gains if pan changed from CV
let angle = (pan + 1.0) * 0.5 * PI / 2.0;
let left_gain = angle.cos();
let right_gain = angle.sin();
// Read stereo input
let left_in = audio_input[frame * 2];
let right_in = audio_input[frame * 2 + 1];
// Mix both input channels with panning
// When pan is -1 (full left), left gets full signal, right gets nothing
// When pan is 0 (center), both get equal signal
// When pan is +1 (full right), right gets full signal, left gets nothing
output[frame * 2] = (left_in + right_in) * left_gain; // Left
output[frame * 2 + 1] = (left_in + right_in) * right_gain; // Right
}
}
fn reset(&mut self) {
// No state to reset
}
fn node_type(&self) -> &str {
"Pan"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
pan: self.pan,
left_gain: self.left_gain,
right_gain: self.right_gain,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}

View File

@ -0,0 +1,104 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
/// Splitter node - copies input to multiple outputs for parallel routing
pub struct SplitterNode {
// Display name reported via `name()`.
name: String,
// One audio input fanned out to four audio outputs; no parameters.
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl SplitterNode {
    /// Create a splitter: one audio input fanned out to four audio outputs.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            inputs: vec![NodePort::new("Audio In", SignalType::Audio, 0)],
            outputs: vec![
                NodePort::new("Out 1", SignalType::Audio, 0),
                NodePort::new("Out 2", SignalType::Audio, 1),
                NodePort::new("Out 3", SignalType::Audio, 2),
                NodePort::new("Out 4", SignalType::Audio, 3),
            ],
            // Pure router: exposes no parameters.
            parameters: vec![],
        }
    }
}
impl AudioNode for SplitterNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Utility
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    /// The splitter has no parameters; setting is a no-op.
    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    /// Duplicate the single input buffer into every output buffer,
    /// copying up to the shorter of the two lengths for each pair.
    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        let Some(&source) = inputs.first() else {
            return;
        };
        for destination in outputs.iter_mut() {
            let count = source.len().min(destination.len());
            destination[..count].copy_from_slice(&source[..count]);
        }
    }

    /// Stateless node; nothing to reset.
    fn reset(&mut self) {}

    fn node_type(&self) -> &str {
        "Splitter"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -142,6 +142,8 @@ pub enum Command {
GraphSavePreset(TrackId, String, String, String, Vec<String>),
/// Load a preset into a track's graph (track_id, preset_path)
GraphLoadPreset(TrackId, String),
/// Save a VoiceAllocator's template graph as a preset (track_id, voice_allocator_id, preset_path, preset_name)
GraphSaveTemplatePreset(TrackId, u32, String, String),
}
/// Events sent from audio thread back to UI/control thread

1
src-tauri/Cargo.lock generated
View File

@ -1025,6 +1025,7 @@ dependencies = [
"dasp_signal",
"midly",
"petgraph 0.6.5",
"rand 0.8.5",
"ratatui",
"rtrb",
"serde",

View File

@ -891,6 +891,51 @@ pub async fn graph_get_state(
}
}
#[tauri::command]
/// Fetch the serialized state of a VoiceAllocator's template graph.
///
/// Asks the audio thread to dump the template to a temp JSON file, waits
/// briefly, then reads the file back. A missing file is treated as an empty
/// template. Returns the preset JSON, or an error string if audio is not
/// initialized.
pub async fn graph_get_template_state(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    voice_allocator_id: u32,
) -> Result<String, String> {
    use daw_backend::GraphPreset;
    // Unique temp path per (track, voice allocator) so concurrent requests
    // for different nodes don't collide.
    let temp_path = std::env::temp_dir().join(format!("temp_template_state_{}_{}.json", track_id, voice_allocator_id));
    let temp_path_str = temp_path.to_string_lossy().to_string();
    // Hold the state lock only long enough to enqueue the save command;
    // keeping it across the sleep below would block every other audio
    // command for the full wait.
    {
        let mut audio_state = state.lock().unwrap();
        let controller = audio_state
            .controller
            .as_mut()
            .ok_or_else(|| "Audio not initialized".to_string())?;
        controller.graph_save_template_preset(
            track_id,
            voice_allocator_id,
            temp_path_str.clone(),
            "temp_template".to_string(),
        );
    }
    // Give the audio thread time to process the command and write the file.
    // NOTE(review): this is a fixed-delay handshake (and a blocking sleep in
    // an async command), not real synchronization — a slow audio thread can
    // still miss the window. Consider an ack event or async sleep instead.
    std::thread::sleep(std::time::Duration::from_millis(50));
    // Read the temp file; if it doesn't exist, the template is likely empty.
    let json = match std::fs::read_to_string(&temp_path) {
        Ok(json) => json,
        Err(_) => {
            let empty_preset = GraphPreset::new("empty_template");
            empty_preset.to_json().unwrap_or_else(|_| "{}".to_string())
        }
    };
    // Clean up temp file (best-effort).
    let _ = std::fs::remove_file(&temp_path);
    Ok(json)
}
#[derive(serde::Serialize, Clone)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {

View File

@ -227,6 +227,7 @@ pub fn run() {
audio::graph_list_presets,
audio::graph_delete_preset,
audio::graph_get_state,
audio::graph_get_template_state,
])
// .manage(window_counter)
.build(tauri::generate_context!())

View File

@ -62,7 +62,7 @@ import {
} from "./styles.js";
import { Icon } from "./icon.js";
import { AlphaSelectionBar, ColorSelectorWidget, ColorWidget, HueSelectionBar, SaturationValueSelectionGradient, TimelineWindow, TimelineWindowV2, VirtualPiano, PianoRollEditor, Widget } from "./widgets.js";
import { nodeTypes, SignalType, getPortClass } from "./nodeTypes.js";
import { nodeTypes, SignalType, getPortClass, NodeCategory, getCategories, getNodesByCategory } from "./nodeTypes.js";
// State management
import {
@ -6059,28 +6059,95 @@ function nodeEditor() {
const container = document.createElement("div");
container.id = "node-editor-container";
// Track editing context: null = main graph, {voiceAllocatorId, voiceAllocatorName} = editing template
let editingContext = null;
// Track palette navigation: null = showing categories, string = showing nodes in that category
let selectedCategory = null;
// Create breadcrumb/context header
const header = document.createElement("div");
header.className = "node-editor-header";
header.innerHTML = '<div class="context-breadcrumb">Main Graph</div>';
container.appendChild(header);
// Create the Drawflow canvas
const editorDiv = document.createElement("div");
editorDiv.id = "drawflow";
editorDiv.style.width = "100%";
editorDiv.style.height = "100%";
editorDiv.style.height = "calc(100% - 40px)"; // Account for header
editorDiv.style.position = "relative";
container.appendChild(editorDiv);
// Create node palette
const palette = document.createElement("div");
palette.className = "node-palette";
container.appendChild(palette);
// Category display names
const categoryNames = {
[NodeCategory.INPUT]: 'Inputs',
[NodeCategory.GENERATOR]: 'Generators',
[NodeCategory.EFFECT]: 'Effects',
[NodeCategory.UTILITY]: 'Utilities',
[NodeCategory.OUTPUT]: 'Outputs'
};
// Function to update palette based on context and selected category
function updatePalette() {
const isTemplate = editingContext !== null;
if (selectedCategory === null) {
// Show categories
const categories = getCategories().filter(category => {
// Filter categories based on context
if (isTemplate) {
// In template: show all categories
return true;
} else {
// In main graph: hide INPUT/OUTPUT categories that contain template nodes
return true; // We'll filter nodes instead
}
});
palette.innerHTML = `
<h3>Nodes</h3>
${Object.entries(nodeTypes)
.filter(([type, def]) => type !== 'TemplateInput' && type !== 'TemplateOutput') // Hide template nodes
.map(([type, def]) => `
<div class="node-palette-item" data-node-type="${type}">
${def.name}
<h3>Node Categories</h3>
${categories.map(category => `
<div class="node-category-item" data-category="${category}">
${categoryNames[category] || category}
</div>
`).join('')}
`;
container.appendChild(palette);
} else {
// Show nodes in selected category
const nodesInCategory = getNodesByCategory(selectedCategory);
// Filter based on context
const filteredNodes = nodesInCategory.filter(node => {
if (isTemplate) {
// In template: hide VoiceAllocator, AudioOutput, MidiInput
return node.type !== 'VoiceAllocator' && node.type !== 'AudioOutput' && node.type !== 'MidiInput';
} else {
// In main graph: hide TemplateInput/TemplateOutput
return node.type !== 'TemplateInput' && node.type !== 'TemplateOutput';
}
});
palette.innerHTML = `
<div class="palette-header">
<button class="palette-back-btn"> Back</button>
<h3>${categoryNames[selectedCategory] || selectedCategory}</h3>
</div>
${filteredNodes.map(node => `
<div class="node-palette-item" data-node-type="${node.type}" draggable="true" title="${node.description}">
${node.name}
</div>
`).join('')}
`;
}
}
updatePalette();
// Initialize Drawflow editor (will be set up after DOM insertion)
let editor = null;
@ -6104,33 +6171,88 @@ function nodeEditor() {
// Store editor reference in context
context.nodeEditor = editor;
// Add palette item drag-and-drop handlers
const paletteItems = container.querySelectorAll(".node-palette-item");
// Add trackpad/mousewheel scrolling support for panning
drawflowDiv.addEventListener('wheel', (e) => {
// Don't scroll if hovering over palette or other UI elements
if (e.target.closest('.node-palette')) {
return;
}
// Don't interfere with zoom (Ctrl+wheel)
if (e.ctrlKey) return;
// Prevent default scrolling behavior
e.preventDefault();
// Pan the canvas based on scroll direction
const deltaX = e.deltaX;
const deltaY = e.deltaY;
// Update Drawflow's canvas position
if (typeof editor.canvas_x === 'undefined') {
editor.canvas_x = 0;
}
if (typeof editor.canvas_y === 'undefined') {
editor.canvas_y = 0;
}
editor.canvas_x -= deltaX;
editor.canvas_y -= deltaY;
// Update the canvas transform
const precanvas = drawflowDiv.querySelector('.drawflow');
if (precanvas) {
const zoom = editor.zoom || 1;
precanvas.style.transform = `translate(${editor.canvas_x}px, ${editor.canvas_y}px) scale(${zoom})`;
}
}, { passive: false });
// Add palette item drag-and-drop handlers using event delegation
let draggedNodeType = null;
paletteItems.forEach(item => {
// Make items draggable
item.setAttribute('draggable', 'true');
// Use event delegation for click on palette items, categories, and back button
palette.addEventListener("click", (e) => {
// Handle back button
const backBtn = e.target.closest(".palette-back-btn");
if (backBtn) {
selectedCategory = null;
updatePalette();
return;
}
// Click handler for quick add
item.addEventListener("click", () => {
// Handle category selection
const categoryItem = e.target.closest(".node-category-item");
if (categoryItem) {
selectedCategory = categoryItem.getAttribute("data-category");
updatePalette();
return;
}
// Handle node selection
const item = e.target.closest(".node-palette-item");
if (item) {
const nodeType = item.getAttribute("data-node-type");
addNode(nodeType, 100, 100, null);
}
});
// Drag start
item.addEventListener('dragstart', (e) => {
// Use event delegation for drag events
palette.addEventListener('dragstart', (e) => {
const item = e.target.closest(".node-palette-item");
if (item) {
draggedNodeType = item.getAttribute('data-node-type');
e.dataTransfer.effectAllowed = 'copy';
e.dataTransfer.setData('text/plain', draggedNodeType); // Required for drag to work
e.dataTransfer.setData('text/plain', draggedNodeType);
console.log('Drag started:', draggedNodeType);
}
});
// Drag end
item.addEventListener('dragend', () => {
palette.addEventListener('dragend', (e) => {
const item = e.target.closest(".node-palette-item");
if (item) {
console.log('Drag ended');
draggedNodeType = null;
});
}
});
// Add drop handler to drawflow canvas
@ -6302,7 +6424,7 @@ function nodeEditor() {
}, 10);
// Send command to backend
// If parent node exists, add to VoiceAllocator template; otherwise add to main graph
// Check editing context first (dedicated template view), then parent node (inline editing)
const trackId = getCurrentMidiTrack();
if (trackId === null) {
console.error('No MIDI track selected');
@ -6311,21 +6433,38 @@ function nodeEditor() {
return;
}
const commandName = parentNodeId ? "graph_add_node_to_template" : "graph_add_node";
const commandArgs = parentNodeId
? {
// Determine if we're adding to a template or main graph
let commandName, commandArgs;
if (editingContext) {
// Adding to template in dedicated view
commandName = "graph_add_node_to_template";
commandArgs = {
trackId: trackId,
voiceAllocatorId: editingContext.voiceAllocatorId,
nodeType: nodeType,
x: x,
y: y
};
} else if (parentNodeId) {
// Adding to template inline (old approach, still supported for backwards compat)
commandName = "graph_add_node_to_template";
commandArgs = {
trackId: trackId,
voiceAllocatorId: editor.getNodeFromId(parentNodeId).data.backendId,
nodeType: nodeType,
x: x,
y: y
}
: {
};
} else {
// Adding to main graph
commandName = "graph_add_node";
commandArgs = {
trackId: trackId,
nodeType: nodeType,
x: x,
y: y
};
}
invoke(commandName, commandArgs).then(backendNodeId => {
console.log(`Node ${nodeType} added with backend ID: ${backendNodeId} (parent: ${parentNodeId})`);
@ -6518,66 +6657,29 @@ function nodeEditor() {
}, 100);
}
// Handle double-click on nodes (for VoiceAllocator expansion)
// Handle double-click on nodes (for VoiceAllocator template editing)
function handleNodeDoubleClick(nodeId) {
const node = editor.getNodeFromId(nodeId);
if (!node) return;
// Only VoiceAllocator nodes can be expanded
// Only VoiceAllocator nodes can be opened for template editing
if (node.data.nodeType !== 'VoiceAllocator') return;
const nodeElement = document.getElementById(`node-${nodeId}`);
if (!nodeElement) return;
const contentsArea = document.getElementById(`voice-allocator-contents-${nodeId}`);
if (!contentsArea) return;
// Toggle expanded state
if (expandedNodes.has(nodeId)) {
// Collapse
expandedNodes.delete(nodeId);
nodeElement.classList.remove('expanded');
nodeElement.style.width = '';
nodeElement.style.height = '';
nodeElement.style.minWidth = '';
nodeElement.style.minHeight = '';
contentsArea.style.display = 'none';
// Hide all child nodes
for (const [childId, parentId] of nodeParents.entries()) {
if (parentId === nodeId) {
const childElement = document.getElementById(`node-${childId}`);
if (childElement) {
childElement.style.display = 'none';
}
}
// Don't allow entering templates when already editing a template
if (editingContext) {
showError("Cannot nest template editing - exit current template first");
return;
}
console.log('Collapsed VoiceAllocator node:', nodeId);
} else {
// Expand
expandedNodes.add(nodeId);
nodeElement.classList.add('expanded');
// Make the node larger to show contents
nodeElement.style.width = '600px';
nodeElement.style.height = '400px';
nodeElement.style.minWidth = '600px';
nodeElement.style.minHeight = '400px';
contentsArea.style.display = 'block';
// Show all child nodes
for (const [childId, parentId] of nodeParents.entries()) {
if (parentId === nodeId) {
const childElement = document.getElementById(`node-${childId}`);
if (childElement) {
childElement.style.display = 'block';
}
}
// Get the backend ID and node name
if (node.data.backendId === null) {
showError("VoiceAllocator not yet created on backend");
return;
}
console.log('Expanded VoiceAllocator node:', nodeId);
}
// Enter template editing mode
const nodeName = node.name || 'VoiceAllocator';
enterTemplate(node.data.backendId, nodeName);
}
// Handle connection creation
@ -6646,7 +6748,35 @@ function nodeEditor() {
// Send to backend
console.log("Backend IDs - output:", outputNode.data.backendId, "input:", inputNode.data.backendId);
if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
// Check if both nodes are inside the same VoiceAllocator
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId === null) return;
// Check if we're in template editing mode (dedicated view)
if (editingContext) {
// Connecting in template view
console.log(`Connecting in template ${editingContext.voiceAllocatorId}: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
invoke("graph_connect_in_template", {
trackId: currentTrackId,
voiceAllocatorId: editingContext.voiceAllocatorId,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).then(() => {
console.log("Template connection successful");
}).catch(err => {
console.error("Failed to connect nodes in template:", err);
showError("Template connection failed: " + err);
// Remove the connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
});
} else {
// Check if both nodes are inside the same VoiceAllocator (inline editing)
// Convert connection IDs to numbers to match Map keys
const outputId = parseInt(connection.output_id);
const inputId = parseInt(connection.input_id);
@ -6655,11 +6785,9 @@ function nodeEditor() {
console.log(`Parent detection - output node ${outputId} parent: ${outputParent}, input node ${inputId} parent: ${inputParent}`);
if (outputParent && inputParent && outputParent === inputParent) {
// Both nodes are inside the same VoiceAllocator - connect in template
// Both nodes are inside the same VoiceAllocator - connect in template (inline editing)
const parentNode = editor.getNodeFromId(outputParent);
console.log(`Connecting in VoiceAllocator template ${parentNode.data.backendId}: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_connect_in_template", {
trackId: currentTrackId,
voiceAllocatorId: parentNode.data.backendId,
@ -6680,12 +6808,9 @@ function nodeEditor() {
connection.input_class
);
});
}
} else {
// Normal connection in main graph
console.log(`Connecting: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_connect", {
trackId: currentTrackId,
fromNode: outputNode.data.backendId,
@ -6754,6 +6879,38 @@ function nodeEditor() {
}, 3000);
}
// Function to update breadcrumb display
// Sync the header breadcrumb with the current editing context.
// Outside a template it shows plain "Main Graph"; inside one it shows the
// path plus an Exit button wired to exitTemplate().
function updateBreadcrumb() {
    const breadcrumb = header.querySelector('.context-breadcrumb');
    // Guard clause: not editing a template -> plain-text breadcrumb.
    if (!editingContext) {
        breadcrumb.textContent = 'Main Graph';
        return;
    }
    // Template mode: render path + exit button, then hook up the button.
    breadcrumb.innerHTML = `
Main Graph &gt;
<span class="template-name">${editingContext.voiceAllocatorName} Template</span>
<button class="exit-template-btn"> Exit Template</button>
`;
    breadcrumb.querySelector('.exit-template-btn').addEventListener('click', exitTemplate);
}
// Function to enter template editing mode
// Switch the node editor into template-editing mode for one VoiceAllocator,
// then refresh breadcrumb, palette, and the displayed graph in that order.
async function enterTemplate(voiceAllocatorId, voiceAllocatorName) {
    const context = {
        voiceAllocatorId: voiceAllocatorId,
        voiceAllocatorName: voiceAllocatorName
    };
    editingContext = context;
    updateBreadcrumb();
    updatePalette();
    await reloadGraph();
}
// Function to exit template editing mode
// Leave template-editing mode and return to the track's main graph.
// Clears the context first so the subsequent UI refreshes (breadcrumb,
// palette, graph reload) all observe "not editing a template".
async function exitTemplate() {
editingContext = null;
updateBreadcrumb();
updatePalette();
await reloadGraph();
}
// Function to reload graph from backend
async function reloadGraph() {
if (!editor) return;
@ -6771,7 +6928,19 @@ function nodeEditor() {
}
try {
const graphJson = await invoke('graph_get_state', { trackId });
// Get graph based on editing context
let graphJson;
if (editingContext) {
// Loading template graph
graphJson = await invoke('graph_get_template_state', {
trackId,
voiceAllocatorId: editingContext.voiceAllocatorId
});
} else {
// Loading main graph
graphJson = await invoke('graph_get_state', { trackId });
}
const preset = JSON.parse(graphJson);
// If graph is empty (no nodes), just leave cleared
@ -7165,22 +7334,46 @@ function createPresetItem(preset) {
<div class="preset-item" data-preset-path="${preset.path}" data-preset-tags="${preset.tags.join(',')}">
<div class="preset-item-header">
<span class="preset-name">${preset.name}</span>
<button class="preset-load-btn" title="Load preset">Load</button>
${deleteBtn}
</div>
<div class="preset-details">
<div class="preset-description">${preset.description || 'No description'}</div>
<div class="preset-tags">${tags}</div>
<div class="preset-author">by ${preset.author || 'Unknown'}</div>
</div>
</div>
`;
}
function addPresetItemHandlers(listElement) {
// Load preset on click
// Toggle selection on preset item click
listElement.querySelectorAll('.preset-item').forEach(item => {
item.addEventListener('click', async (e) => {
// Don't trigger if clicking delete button
if (e.target.classList.contains('preset-delete-btn')) return;
item.addEventListener('click', (e) => {
// Don't trigger if clicking buttons
if (e.target.classList.contains('preset-load-btn') ||
e.target.classList.contains('preset-delete-btn')) {
return;
}
// Toggle selection
const wasSelected = item.classList.contains('selected');
// Deselect all presets
listElement.querySelectorAll('.preset-item').forEach(i => i.classList.remove('selected'));
// Select this preset if it wasn't selected
if (!wasSelected) {
item.classList.add('selected');
}
});
});
// Load preset on Load button click
listElement.querySelectorAll('.preset-load-btn').forEach(btn => {
btn.addEventListener('click', async (e) => {
e.stopPropagation();
const item = btn.closest('.preset-item');
const presetPath = item.dataset.presetPath;
await loadPreset(presetPath);
});

View File

@ -422,6 +422,113 @@ export const nodeTypes = {
<div class="node-info" style="font-size: 9px;">Audio to mixer</div>
</div>
`
},
// LFO: pure modulation source -- no inputs, one CV output.
// Parameter ids (0=frequency, 1=amplitude, 2=waveform, 3=phase) are the
// values placed in the sliders' data-param attributes below; presumably
// they mirror the backend node's parameter indices -- confirm against the
// Rust side when changing them.
LFO: {
name: 'LFO',
category: NodeCategory.UTILITY,
description: 'Low frequency oscillator for modulation',
inputs: [],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 }
],
parameters: [
{ id: 0, name: 'frequency', label: 'Frequency', min: 0.01, max: 20, default: 1.0, unit: 'Hz' },
{ id: 1, name: 'amplitude', label: 'Amplitude', min: 0, max: 1, default: 1.0, unit: '' },
// Integer waveform selector (0..4); the "Sine" span text below is only
// the initial label -- NOTE(review): assumes a shared slider handler
// rewrites the lfowave-* span, confirm.
{ id: 2, name: 'waveform', label: 'Waveform', min: 0, max: 4, default: 0, unit: '' },
// phase (id 3) intentionally has no slider in the HTML; it is only
// reachable programmatically (e.g. via presets).
{ id: 3, name: 'phase', label: 'Phase', min: 0, max: 1, default: 0, unit: '' }
],
// Inner HTML for the node body; data-node/data-param drive the shared
// slider handling, span ids give it somewhere to write current values.
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">LFO</div>
<div class="node-param">
<label>Wave: <span id="lfowave-${nodeId}">Sine</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="2" min="0" max="4" value="0" step="1">
</div>
<div class="node-param">
<label>Freq: <span id="lfofreq-${nodeId}">1.0</span> Hz</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="0.01" max="20" value="1.0" step="0.01">
</div>
<div class="node-param">
<label>Depth: <span id="lfoamp-${nodeId}">1.0</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0" max="1" value="1.0" step="0.01">
</div>
</div>
`
},
// NoiseGenerator: audio-rate generator, no inputs, one audio output.
// Two parameters: amplitude (0..1, continuous) and color. The color
// slider uses step="1" over [0,1], making it a binary White/Pink toggle
// per the description.
NoiseGenerator: {
name: 'NoiseGenerator',
category: NodeCategory.GENERATOR,
description: 'White and pink noise generator',
inputs: [],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'amplitude', label: 'Amplitude', min: 0, max: 1, default: 0.5, unit: '' },
{ id: 1, name: 'color', label: 'Color', min: 0, max: 1, default: 0, unit: '' }
],
// Node body HTML; "White" span text is the initial label only --
// NOTE(review): assumes the slider handler updates noisecolor-*.
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Noise</div>
<div class="node-param">
<label>Color: <span id="noisecolor-${nodeId}">White</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0" max="1" value="0" step="1">
</div>
<div class="node-param">
<label>Level: <span id="noiselevel-${nodeId}">0.5</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="0" max="1" value="0.5" step="0.01">
</div>
</div>
`
},
Splitter: {
name: 'Splitter',
category: NodeCategory.UTILITY,
description: 'Split audio signal to multiple outputs for parallel routing',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 }
],
outputs: [
{ name: 'Out 1', type: SignalType.AUDIO, index: 0 },
{ name: 'Out 2', type: SignalType.AUDIO, index: 1 },
{ name: 'Out 3', type: SignalType.AUDIO, index: 2 },
{ name: 'Out 4', type: SignalType.AUDIO, index: 3 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Splitter</div>
<div class="node-info" style="font-size: 10px;">14 split</div>
</div>
`
},
// Pan: stereo position control on an audio signal. Takes the audio on
// input 0 and an optional CV modulation signal on input 1; emits a single
// audio output. pan parameter range is -1..1 (negative left, positive
// right by convention -- NOTE(review): confirm polarity against backend).
Pan: {
name: 'Pan',
category: NodeCategory.UTILITY,
description: 'Stereo panning with CV modulation',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 },
{ name: 'Pan CV', type: SignalType.CV, index: 1 }
],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'pan', label: 'Pan', min: -1, max: 1, default: 0, unit: '' }
],
// Node body HTML; the single slider drives parameter id 0 via data-param.
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Pan</div>
<div class="node-param">
<label>Position: <span id="panpos-${nodeId}">0.0</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="-1" max="1" value="0" step="0.01">
</div>
</div>
`
}
};

View File

@ -1051,16 +1051,68 @@ button {
background: #1e1e1e;
}
/* Node editor header and breadcrumb */
/* Fixed 40px bar across the top of the editor; z-index 200 keeps it above
   the palette (z-index 100) and the canvas. */
.node-editor-header {
position: absolute;
top: 0;
left: 0;
right: 0;
height: 40px;
background: #2d2d2d;
border-bottom: 1px solid #3d3d3d;
display: flex;
align-items: center;
padding: 0 16px;
z-index: 200;
}
/* Breadcrumb text inside the header ("Main Graph" / template path). */
.context-breadcrumb {
color: #ddd;
font-size: 14px;
font-weight: 500;
display: flex;
align-items: center;
gap: 8px;
}
/* Highlighted template name segment of the breadcrumb. */
.template-name {
color: #7c7cff;
font-weight: bold;
}
/* "Exit Template" button rendered at the end of the breadcrumb. */
.exit-template-btn {
margin-left: 12px;
padding: 4px 12px;
background: #3d3d3d;
border: 1px solid #4d4d4d;
border-radius: 3px;
color: #ddd;
font-size: 12px;
cursor: pointer;
transition: background 0.2s;
}
.exit-template-btn:hover {
background: #4d4d4d;
border-color: #5d5d5d;
}
.exit-template-btn:active {
background: #5d5d5d;
}
/* Node palette */
/* Floating palette panel, anchored 50px down to clear the 40px editor
   header. Fix: removed the stale duplicate `top: 10px;` declaration that
   sat immediately before `top: 50px;` and was always shadowed by it
   (leftover from before the header bar existed). */
.node-palette {
position: absolute;
top: 50px;
left: 10px;
background: #2d2d2d;
border: 1px solid #3d3d3d;
border-radius: 4px;
padding: 8px;
max-width: 200px;
max-height: calc(100% - 100px);
overflow-y: auto;
z-index: 100;
}
@ -1071,6 +1123,54 @@ button {
text-transform: uppercase;
}
/* Palette header: stacks the Back button and category title vertically. */
.palette-header {
display: flex;
flex-direction: column;
gap: 8px;
margin-bottom: 8px;
}
/* Back button shown while a category is selected. */
.palette-back-btn {
padding: 6px 8px;
background: #3d3d3d;
border: 1px solid #4d4d4d;
border-radius: 3px;
color: #ddd;
font-size: 12px;
cursor: pointer;
transition: background 0.2s;
}
.palette-back-btn:hover {
background: #4d4d4d;
}
.palette-header h3 {
margin: 0;
}
/* Clickable category rows shown at the palette's top level; purple
   border on hover matches the template accent color. */
.node-category-item {
padding: 8px 10px;
margin: 4px 0;
background: #3d3d3d;
border: 1px solid #5d5d5d;
border-radius: 3px;
cursor: pointer;
color: #ddd;
font-size: 13px;
font-weight: 500;
transition: background 0.2s, border-color 0.2s;
}
.node-category-item:hover {
background: #4d4d4d;
border-color: #7c7cff;
}
.node-category-item:active {
background: #5d5d5d;
}
.node-palette-item {
padding: 6px 8px;
margin: 4px 0;
@ -1410,9 +1510,10 @@ button {
background: #252525;
border: 1px solid #3d3d3d;
border-radius: 4px;
padding: 10px 12px;
padding: 8px 10px;
cursor: pointer;
transition: all 0.2s;
margin-bottom: 4px;
}
.preset-item:hover {
@ -1420,17 +1521,52 @@ button {
border-color: #4CAF50;
}
/* Selected preset: purple accent and larger padding than the base item.
   Details, Load and Delete buttons only become visible in this state. */
.preset-item.selected {
background: #2d2d2d;
border-color: #7c7cff;
padding: 10px 12px;
}
/* Row holding the preset name plus the Load/Delete buttons. */
.preset-item-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 6px;
gap: 8px;
}
/* Preset name: single ellipsized line when collapsed.
   Fix: removed the stale duplicate `font-size: 14px;` declaration that
   preceded (and was always shadowed by) `font-size: 13px;`; the selected
   state below still bumps the size back to 14px. */
.preset-name {
font-size: 13px;
font-weight: 500;
color: #fff;
flex: 1;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
/* Selected preset name: larger and allowed to wrap. */
.preset-item.selected .preset-name {
font-size: 14px;
white-space: normal;
}
/* Load button: hidden until its preset item is selected. */
.preset-load-btn {
background: #4CAF50;
border: none;
color: #fff;
cursor: pointer;
font-size: 11px;
padding: 4px 8px;
border-radius: 3px;
transition: background 0.2s;
display: none;
}
.preset-item.selected .preset-load-btn {
display: block;
}
.preset-load-btn:hover {
background: #45a049;
}
.preset-delete-btn {
@ -1442,12 +1578,28 @@ button {
padding: 2px 6px;
border-radius: 3px;
transition: background 0.2s;
display: none;
}
/* Delete button is revealed only while its preset item is selected. */
.preset-item.selected .preset-delete-btn {
display: block;
}
.preset-delete-btn:hover {
background: rgba(244, 67, 54, 0.2);
}
/* Description/tags/author block: hidden by default, shown when selected. */
.preset-details {
display: none;
margin-top: 8px;
padding-top: 8px;
border-top: 1px solid #3d3d3d;
}
.preset-item.selected .preset-details {
display: block;
}
.preset-description {
font-size: 12px;
color: #999;