Add arpeggiator node

Skyler Lehmkuhl 2026-02-19 05:30:34 -05:00
parent 89bbd3614f
commit 0a18d28f98
10 changed files with 534 additions and 121 deletions

View File

@ -1167,6 +1167,7 @@ impl Engine {
"Constant" => Box::new(ConstantNode::new("Constant".to_string())),
"BpmDetector" => Box::new(BpmDetectorNode::new("BPM Detector".to_string())),
"Beat" => Box::new(BeatNode::new("Beat".to_string())),
"Arpeggiator" => Box::new(ArpeggiatorNode::new("Arpeggiator".to_string())),
"EnvelopeFollower" => Box::new(EnvelopeFollowerNode::new("Envelope Follower".to_string())),
"Limiter" => Box::new(LimiterNode::new("Limiter".to_string())),
"Math" => Box::new(MathNode::new("Math".to_string())),
@ -1255,6 +1256,7 @@ impl Engine {
"Constant" => Box::new(ConstantNode::new("Constant".to_string())),
"BpmDetector" => Box::new(BpmDetectorNode::new("BPM Detector".to_string())),
"Beat" => Box::new(BeatNode::new("Beat".to_string())),
"Arpeggiator" => Box::new(ArpeggiatorNode::new("Arpeggiator".to_string())),
"EnvelopeFollower" => Box::new(EnvelopeFollowerNode::new("Envelope Follower".to_string())),
"Limiter" => Box::new(LimiterNode::new("Limiter".to_string())),
"Math" => Box::new(MathNode::new("Math".to_string())),

View File

@ -456,7 +456,9 @@ impl AudioGraph {
}
// Use the requested output buffer size for processing
// process_size is stereo (interleaved L/R), frame_count is mono
let process_size = output_buffer.len();
let frame_count = process_size / 2;
// Clear all output buffers (audio/CV and MIDI)
for node in self.graph.node_weights_mut() {
@ -499,6 +501,11 @@ impl AudioGraph {
let inputs = self.graph[node_idx].node.inputs();
let num_audio_cv_inputs = inputs.iter().filter(|p| p.signal_type != SignalType::Midi).count();
let num_midi_inputs = inputs.iter().filter(|p| p.signal_type == SignalType::Midi).count();
// Collect audio/CV input signal types for correct buffer sizing
let audio_cv_input_types: Vec<SignalType> = inputs.iter()
.filter(|p| p.signal_type != SignalType::Midi)
.map(|p| p.signal_type)
.collect();
// Clear input buffers
// - Audio inputs: fill with 0.0 (silence) when unconnected
@ -545,11 +552,18 @@ impl AudioGraph {
match source_port_type {
SignalType::Audio | SignalType::CV => {
// Map from global port index to audio/CV-only port index
// (input_buffers only contains audio/CV entries, not MIDI)
let audio_cv_port_idx = inputs.iter()
.take(to_port + 1)
.filter(|p| p.signal_type != SignalType::Midi)
.count().saturating_sub(1);
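// e.g. with ports [Audio, Midi, CV]: to_port = 2 maps to audio_cv_port_idx = 1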
// Copy audio/CV data
if to_port < num_audio_cv_inputs && from_port < source_node.output_buffers.len() {
if audio_cv_port_idx < num_audio_cv_inputs && from_port < source_node.output_buffers.len() {
let source_buffer = &source_node.output_buffers[from_port];
if to_port < self.input_buffers.len() {
for (dst, src) in self.input_buffers[to_port].iter_mut().zip(source_buffer.iter()) {
if audio_cv_port_idx < self.input_buffers.len() {
for (dst, src) in self.input_buffers[audio_cv_port_idx].iter_mut().zip(source_buffer.iter()) {
// If dst is NaN (unconnected), replace it; otherwise add (for mixing)
if dst.is_nan() {
*dst = *src;
@ -583,11 +597,15 @@ impl AudioGraph {
}
}
// Prepare audio/CV input slices
// Prepare audio/CV input slices (Audio=stereo process_size, CV=mono frame_count)
let input_slices: Vec<&[f32]> = (0..num_audio_cv_inputs)
.map(|i| {
if i < self.input_buffers.len() {
&self.input_buffers[i][..process_size.min(self.input_buffers[i].len())]
let slice_size = match audio_cv_input_types.get(i) {
Some(&SignalType::Audio) => process_size,
_ => frame_count,
};
&self.input_buffers[i][..slice_size.min(self.input_buffers[i].len())]
} else {
&[][..]
}
@ -608,19 +626,22 @@ impl AudioGraph {
// Get mutable access to output buffers
let node = &mut self.graph[node_idx];
let outputs = node.node.outputs();
let num_audio_cv_outputs = outputs.iter().filter(|p| p.signal_type != SignalType::Midi).count();
let num_midi_outputs = outputs.iter().filter(|p| p.signal_type == SignalType::Midi).count();
// Collect output signal types for correct buffer sizing
let output_signal_types: Vec<SignalType> = outputs.iter().map(|p| p.signal_type).collect();
// Create mutable slices for audio/CV outputs
// Each buffer is independent, so this is safe
let mut output_slices: Vec<&mut [f32]> = node.output_buffers
.iter_mut()
.take(num_audio_cv_outputs)
.map(|buf| {
// Create mutable slices for audio/CV outputs (Audio=stereo, CV=mono)
let mut output_slices: Vec<&mut [f32]> = Vec::new();
for (i, buf) in node.output_buffers.iter_mut().enumerate() {
let signal_type = output_signal_types.get(i).copied().unwrap_or(SignalType::CV);
if signal_type == SignalType::Midi { continue; }
let slice_size = match signal_type {
SignalType::Audio => process_size,
_ => frame_count,
};
let len = buf.len();
&mut buf[..process_size.min(len)]
})
.collect();
output_slices.push(&mut buf[..slice_size.min(len)]);
}
// Create mutable references for MIDI outputs
let mut midi_output_refs: Vec<&mut Vec<MidiEvent>> = node.midi_output_buffers
@ -969,6 +990,7 @@ impl AudioGraph {
"Compressor" => Box::new(CompressorNode::new("Compressor")),
"Constant" => Box::new(ConstantNode::new("Constant")),
"Beat" => Box::new(BeatNode::new("Beat")),
"Arpeggiator" => Box::new(ArpeggiatorNode::new("Arpeggiator")),
"EnvelopeFollower" => Box::new(EnvelopeFollowerNode::new("Envelope Follower")),
"Limiter" => Box::new(LimiterNode::new("Limiter")),
"Math" => Box::new(MathNode::new("Math")),

View File

@ -0,0 +1,412 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType, cv_input_or_default};
use crate::audio::midi::MidiEvent;
const PARAM_MODE: u32 = 0;
const PARAM_DIRECTION: u32 = 1;
const PARAM_OCTAVES: u32 = 2;
const PARAM_RETRIGGER: u32 = 3;
/// ~1ms gate-off for re-triggering at 48kHz
const RETRIGGER_SAMPLES: u32 = 48;
#[derive(Debug, Clone, Copy, PartialEq)]
enum ArpMode {
OnePerCycle = 0,
AllPerCycle = 1,
}
impl ArpMode {
fn from_f32(v: f32) -> Self {
if v.round() as i32 >= 1 { ArpMode::AllPerCycle } else { ArpMode::OnePerCycle }
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
enum ArpDirection {
Up = 0,
Down = 1,
UpDown = 2,
Random = 3,
}
impl ArpDirection {
fn from_f32(v: f32) -> Self {
match v.round() as i32 {
1 => ArpDirection::Down,
2 => ArpDirection::UpDown,
3 => ArpDirection::Random,
_ => ArpDirection::Up,
}
}
}
/// Arpeggiator node — takes MIDI input (held chord) and a CV phase input,
/// outputs CV V/Oct + Gate stepping through the held notes.
pub struct ArpeggiatorNode {
name: String,
/// Currently held notes: (note, velocity), kept sorted by pitch
held_notes: Vec<(u8, u8)>,
/// Expanded sequence after applying direction + octaves
sequence: Vec<(u8, u8)>,
/// Current position in the sequence (for OnePerCycle mode)
current_step: usize,
/// Previous phase value for wraparound detection
prev_phase: f32,
/// Countdown for gate re-trigger gap
retrigger_countdown: u32,
/// Current output values
current_voct: f32,
current_gate: f32,
/// Parameters
mode: ArpMode,
direction: ArpDirection,
octaves: u32,
retrigger: bool,
/// For Up/Down direction tracking
going_up: bool,
/// Track whether sequence needs rebuilding
sequence_dirty: bool,
/// Stateful PRNG for random direction
rng_state: u32,
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl ArpeggiatorNode {
pub fn new(name: impl Into<String>) -> Self {
let inputs = vec![
NodePort::new("MIDI In", SignalType::Midi, 0),
NodePort::new("Phase", SignalType::CV, 0),
];
let outputs = vec![
NodePort::new("V/Oct", SignalType::CV, 0),
NodePort::new("Gate", SignalType::CV, 1),
];
let parameters = vec![
Parameter::new(PARAM_MODE, "Mode", 0.0, 1.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_DIRECTION, "Direction", 0.0, 3.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_OCTAVES, "Octaves", 1.0, 4.0, 1.0, ParameterUnit::Generic),
Parameter::new(PARAM_RETRIGGER, "Retrigger", 0.0, 1.0, 1.0, ParameterUnit::Generic),
];
Self {
name: name.into(),
held_notes: Vec::new(),
sequence: Vec::new(),
current_step: 0,
prev_phase: 0.0,
retrigger_countdown: 0,
current_voct: 0.0,
current_gate: 0.0,
mode: ArpMode::OnePerCycle,
direction: ArpDirection::Up,
octaves: 1,
retrigger: true,
going_up: true,
sequence_dirty: false,
rng_state: 12345,
inputs,
outputs,
parameters,
}
}
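/// Convert a MIDI note number to V/Oct CV: A4 (MIDI 69) maps to 0.0, one unit per octave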
fn midi_note_to_voct(note: u8) -> f32 {
(note as f32 - 69.0) / 12.0
}
fn rebuild_sequence(&mut self) {
self.sequence.clear();
if self.held_notes.is_empty() {
return;
}
// Build base sequence sorted by pitch (held_notes is already sorted)
let base: Vec<(u8, u8)> = self.held_notes.clone();
// Expand across octaves
let mut expanded = Vec::new();
for oct in 0..self.octaves {
for &(note, vel) in &base {
let transposed = note.saturating_add((oct * 12) as u8);
if transposed <= 127 {
expanded.push((transposed, vel));
}
}
}
// Apply direction
match self.direction {
ArpDirection::Up => {
self.sequence = expanded;
}
ArpDirection::Down => {
expanded.reverse();
self.sequence = expanded;
}
ArpDirection::UpDown => {
if expanded.len() > 1 {
let mut up_down = expanded.clone();
// Go back down, skipping the top and bottom notes to avoid doubles
for i in (1..expanded.len() - 1).rev() {
up_down.push(expanded[i]);
}
self.sequence = up_down;
} else {
self.sequence = expanded;
}
}
ArpDirection::Random => {
// For random, keep the expanded list; we'll pick randomly in process()
self.sequence = expanded;
}
}
// Clamp current_step to valid range and update V/Oct immediately
if !self.sequence.is_empty() {
self.current_step = self.current_step % self.sequence.len();
let (note, _vel) = self.sequence[self.current_step];
self.current_voct = Self::midi_note_to_voct(note);
} else {
self.current_step = 0;
}
self.sequence_dirty = false;
}
fn advance_step(&mut self) {
if self.sequence.is_empty() {
return;
}
if self.direction == ArpDirection::Random {
// Stateful xorshift32 PRNG — evolves independently of current_step
let mut x = self.rng_state;
x ^= x << 13;
x ^= x >> 17;
x ^= x << 5;
self.rng_state = x;
// Use upper bits (better distribution) and exclude current note
if self.sequence.len() > 1 {
let pick = ((x >> 16) as usize) % (self.sequence.len() - 1);
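// pick covers len - 1 slots; shifting picks >= current_step up by one skips the current note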
self.current_step = if pick >= self.current_step { pick + 1 } else { pick };
}
} else {
self.current_step = (self.current_step + 1) % self.sequence.len();
}
}
fn step_changed(&mut self, new_step: usize) {
let old_step = self.current_step;
self.current_step = new_step;
if !self.sequence.is_empty() {
let (note, _vel) = self.sequence[self.current_step];
self.current_voct = Self::midi_note_to_voct(note);
}
// Start retrigger gap if enabled and the step actually changed
if self.retrigger && old_step != new_step {
self.retrigger_countdown = RETRIGGER_SAMPLES;
}
}
}
impl AudioNode for ArpeggiatorNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_MODE => self.mode = ArpMode::from_f32(value),
PARAM_DIRECTION => {
let new_dir = ArpDirection::from_f32(value);
if new_dir != self.direction {
self.direction = new_dir;
self.going_up = true;
self.sequence_dirty = true;
}
}
PARAM_OCTAVES => {
// UI sends 0-3 (combo box index), map to 1-4 octaves
let new_oct = (value.round() as u32 + 1).clamp(1, 4);
if new_oct != self.octaves {
self.octaves = new_oct;
self.sequence_dirty = true;
}
}
PARAM_RETRIGGER => self.retrigger = value.round() as i32 >= 1,
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_MODE => self.mode as i32 as f32,
PARAM_DIRECTION => self.direction as i32 as f32,
PARAM_OCTAVES => (self.octaves - 1) as f32,
PARAM_RETRIGGER => if self.retrigger { 1.0 } else { 0.0 },
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
// Process incoming MIDI to build held_notes
if !midi_inputs.is_empty() {
for event in midi_inputs[0] {
let status = event.status & 0xF0;
match status {
0x90 if event.data2 > 0 => {
// Note on — add to held notes (sorted by pitch)
let note = event.data1;
let vel = event.data2;
// Remove if already held (avoid duplicates)
self.held_notes.retain(|&(n, _)| n != note);
// Insert sorted by pitch
let pos = self.held_notes.partition_point(|&(n, _)| n < note);
self.held_notes.insert(pos, (note, vel));
self.sequence_dirty = true;
}
0x80 | 0x90 => {
// Note off
let note = event.data1;
self.held_notes.retain(|&(n, _)| n != note);
self.sequence_dirty = true;
}
_ => {}
}
}
}
// Rebuild sequence if needed
if self.sequence_dirty {
self.rebuild_sequence();
}
if outputs.len() < 2 {
return;
}
let len = outputs[0].len();
// If no notes held, output silence
if self.sequence.is_empty() {
for i in 0..len {
outputs[0][i] = self.current_voct;
outputs[1][i] = 0.0;
}
self.current_gate = 0.0;
return;
}
for i in 0..len {
let phase = cv_input_or_default(inputs, 0, i, 0.0).clamp(0.0, 1.0);
match self.mode {
ArpMode::OnePerCycle => {
// Detect phase wraparound (high → low = new cycle)
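// (0.7/0.3 thresholds add hysteresis so small phase jitter near the wrap doesn't double-trigger)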
if self.prev_phase > 0.7 && phase < 0.3 {
// Advance while current_step still holds the old value, then re-apply
// via step_changed() so its old/new comparison can start the retrigger gap
let old_step = self.current_step;
self.advance_step();
let new_step = self.current_step;
self.current_step = old_step;
self.step_changed(new_step);
}
}
ArpMode::AllPerCycle => {
// Phase 0→1 maps across all sequence notes
let new_step = ((phase * self.sequence.len() as f32).floor() as usize)
.min(self.sequence.len() - 1);
if new_step != self.current_step {
self.step_changed(new_step);
}
}
}
self.prev_phase = phase;
// Gate: off if retriggering, on otherwise
if self.retrigger_countdown > 0 {
self.retrigger_countdown -= 1;
self.current_gate = 0.0;
} else {
self.current_gate = 1.0;
}
outputs[0][i] = self.current_voct;
outputs[1][i] = self.current_gate;
}
}
fn reset(&mut self) {
self.held_notes.clear();
self.sequence.clear();
self.current_step = 0;
self.prev_phase = 0.0;
self.retrigger_countdown = 0;
self.current_voct = 0.0;
self.current_gate = 0.0;
self.going_up = true;
}
fn node_type(&self) -> &str {
"Arpeggiator"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
held_notes: Vec::new(),
sequence: Vec::new(),
current_step: 0,
prev_phase: 0.0,
retrigger_countdown: 0,
current_voct: 0.0,
current_gate: 0.0,
mode: self.mode,
direction: self.direction,
octaves: self.octaves,
retrigger: self.retrigger,
going_up: true,
sequence_dirty: false,
rng_state: 12345,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
self
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
}

View File

@ -1,16 +1,10 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_ATTACK: u32 = 0;
const PARAM_RELEASE: u32 = 1;
/// Audio to CV converter (Envelope Follower)
/// Converts audio amplitude to control voltage
/// Audio to CV converter
/// Directly converts a stereo audio signal to mono CV (averages L+R channels)
pub struct AudioToCVNode {
name: String,
envelope: f32, // Current envelope value
attack: f32, // Attack time in seconds
release: f32, // Release time in seconds
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
@ -28,19 +22,11 @@ impl AudioToCVNode {
NodePort::new("CV Out", SignalType::CV, 0),
];
let parameters = vec![
Parameter::new(PARAM_ATTACK, "Attack", 0.001, 1.0, 0.01, ParameterUnit::Time),
Parameter::new(PARAM_RELEASE, "Release", 0.001, 1.0, 0.1, ParameterUnit::Time),
];
Self {
name,
envelope: 0.0,
attack: 0.01,
release: 0.1,
inputs,
outputs,
parameters,
parameters: Vec::new(),
}
}
}
@ -62,20 +48,10 @@ impl AudioNode for AudioToCVNode {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_ATTACK => self.attack = value.clamp(0.001, 1.0),
PARAM_RELEASE => self.release = value.clamp(0.001, 1.0),
_ => {}
}
}
fn set_parameter(&mut self, _id: u32, _value: f32) {}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_ATTACK => self.attack,
PARAM_RELEASE => self.release,
_ => 0.0,
}
fn get_parameter(&self, _id: u32) -> f32 {
0.0
}
fn process(
@ -84,7 +60,7 @@ impl AudioNode for AudioToCVNode {
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
sample_rate: u32,
_sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
@ -95,39 +71,16 @@ impl AudioNode for AudioToCVNode {
// Audio input is stereo (interleaved L/R), CV output is mono
let audio_frames = input.len() / 2;
let cv_frames = output.len();
let frames = audio_frames.min(cv_frames);
// Calculate attack and release coefficients
let sample_rate_f32 = sample_rate as f32;
let attack_coeff = (-1.0 / (self.attack * sample_rate_f32)).exp();
let release_coeff = (-1.0 / (self.release * sample_rate_f32)).exp();
let frames = audio_frames.min(output.len());
for frame in 0..frames {
// Get stereo samples
let left = input[frame * 2];
let right = input[frame * 2 + 1];
// Calculate RMS-like value (average of absolute values for simplicity)
let amplitude = (left.abs() + right.abs()) / 2.0;
// Envelope follower with attack/release
if amplitude > self.envelope {
// Attack: follow signal up quickly
self.envelope = amplitude * (1.0 - attack_coeff) + self.envelope * attack_coeff;
} else {
// Release: decay slowly
self.envelope = amplitude * (1.0 - release_coeff) + self.envelope * release_coeff;
}
// Output CV (mono)
output[frame] = self.envelope;
output[frame] = (left + right) * 0.5;
}
}
fn reset(&mut self) {
self.envelope = 0.0;
}
fn reset(&mut self) {}
fn node_type(&self) -> &str {
"AudioToCV"
@ -140,9 +93,6 @@ impl AudioNode for AudioToCVNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
envelope: 0.0, // Reset envelope
attack: self.attack,
release: self.release,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),

View File

@ -47,6 +47,9 @@ impl BeatResolution {
/// Beat clock node — generates tempo-synced CV signals.
///
/// When playing: synced to timeline position.
/// When stopped: free-runs continuously at the set BPM.
///
/// Outputs:
/// - BPM: constant CV proportional to tempo (bpm / 240)
/// - Beat Phase: sawtooth 0→1 per beat subdivision
@ -60,10 +63,8 @@ pub struct BeatNode {
playback_time: f64,
/// Previous playback_time to detect paused state
prev_playback_time: f64,
/// Cached output values held when paused
held_beat_phase: f32,
held_bar_phase: f32,
held_gate: f32,
/// Free-running time accumulator for when playback is stopped
free_run_time: f64,
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
@ -90,9 +91,7 @@ impl BeatNode {
resolution: BeatResolution::Quarter,
playback_time: 0.0,
prev_playback_time: -1.0,
held_beat_phase: 0.0,
held_bar_phase: 0.0,
held_gate: 0.0,
free_run_time: 0.0,
inputs,
outputs,
parameters,
@ -149,29 +148,20 @@ impl AudioNode for BeatNode {
let bpm_cv = (self.bpm / 240.0).clamp(0.0, 1.0);
let len = outputs[0].len();
let sample_period = 1.0 / sample_rate as f64;
// Detect paused: playback_time hasn't changed since last process()
let paused = self.playback_time == self.prev_playback_time;
self.prev_playback_time = self.playback_time;
if paused {
// Hold last values
for i in 0..len {
outputs[0][i] = bpm_cv;
outputs[1][i] = self.held_beat_phase;
outputs[2][i] = self.held_bar_phase;
outputs[3][i] = self.held_gate;
}
return;
}
let beats_per_second = self.bpm as f64 / 60.0;
let sample_period = 1.0 / sample_rate as f64;
let subs_per_beat = self.resolution.subdivisions_per_beat();
// Choose time source: timeline when playing, free-running when stopped
let base_time = if paused { self.free_run_time } else { self.playback_time };
for i in 0..len {
// Derive beat position from timeline playback time
let time = self.playback_time + i as f64 * sample_period;
let time = base_time + i as f64 * sample_period;
let beat_pos = time * beats_per_second;
// Beat subdivision phase: 0→1 sawtooth
@ -189,20 +179,14 @@ impl AudioNode for BeatNode {
outputs[3][i] = gate;
}
// Cache last sample's values for hold when paused
if len > 0 {
self.held_beat_phase = outputs[1][len - 1];
self.held_bar_phase = outputs[2][len - 1];
self.held_gate = outputs[3][len - 1];
}
// Advance free-run time (always ticks, so it's ready when playback stops)
self.free_run_time += len as f64 * sample_period;
}
fn reset(&mut self) {
self.playback_time = 0.0;
self.prev_playback_time = -1.0;
self.held_beat_phase = 0.0;
self.held_bar_phase = 0.0;
self.held_gate = 0.0;
self.free_run_time = 0.0;
}
fn node_type(&self) -> &str {
@ -220,9 +204,7 @@ impl AudioNode for BeatNode {
resolution: self.resolution,
playback_time: 0.0,
prev_playback_time: -1.0,
held_beat_phase: 0.0,
held_bar_phase: 0.0,
held_gate: 0.0,
free_run_time: 0.0,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),

View File

@ -1,4 +1,5 @@
mod adsr;
mod arpeggiator;
mod audio_input;
mod audio_to_cv;
mod automation_input;
@ -42,6 +43,7 @@ mod voice_allocator;
mod wavetable_oscillator;
pub use adsr::ADSRNode;
pub use arpeggiator::ArpeggiatorNode;
pub use audio_input::AudioInputNode;
pub use audio_to_cv::AudioToCVNode;
pub use automation_input::{AutomationInputNode, AutomationKeyframe, InterpolationType};

View File

@ -87,8 +87,9 @@ pub struct OscilloscopeNode {
trigger_period: usize, // Period in samples for V/oct triggering
// Shared buffers for reading from Tauri commands
buffer: Arc<Mutex<CircularBuffer>>, // Audio buffer
buffer: Arc<Mutex<CircularBuffer>>, // Audio buffer (mono downmix)
cv_buffer: Arc<Mutex<CircularBuffer>>, // CV buffer
mono_buf: Vec<f32>, // Scratch buffer for stereo-to-mono downmix
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
@ -125,6 +126,7 @@ impl OscilloscopeNode {
trigger_period: 480, // Default to ~100Hz at 48kHz
buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
cv_buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
mono_buf: vec![0.0; 2048],
inputs,
outputs,
parameters,
@ -220,12 +222,13 @@ impl AudioNode for OscilloscopeNode {
let input = inputs[0];
let output = &mut outputs[0];
let len = input.len().min(output.len());
let stereo_len = input.len().min(output.len());
let frame_count = stereo_len / 2;
// Read CV input if available (port 1) — used for both display and V/Oct triggering
if inputs.len() > 1 && !inputs[1].is_empty() {
let cv_input = inputs[1];
let cv_len = len.min(cv_input.len());
let cv_len = frame_count.min(cv_input.len());
// Check if connected (not NaN sentinel)
if cv_len > 0 && !cv_input[0].is_nan() {
@ -244,20 +247,25 @@ impl AudioNode for OscilloscopeNode {
// Update sample counter for V/oct triggering
if self.trigger_mode == TriggerMode::VoltPerOctave {
self.sample_counter = (self.sample_counter + len) % self.trigger_period;
self.sample_counter = (self.sample_counter + frame_count) % self.trigger_period;
}
// Pass through audio (copy input to output)
output[..len].copy_from_slice(&input[..len]);
output[..stereo_len].copy_from_slice(&input[..stereo_len]);
// Capture audio samples to buffer
// Capture audio as mono downmix to match CV time scale
if let Ok(mut buffer) = self.buffer.lock() {
buffer.write(&input[..len]);
for frame in 0..frame_count {
let left = input[frame * 2];
let right = input[frame * 2 + 1];
self.mono_buf[frame] = (left + right) * 0.5;
}
buffer.write(&self.mono_buf[..frame_count]);
}
// Update last sample for trigger detection (use left channel, frame 0)
if !input.is_empty() {
self.last_sample = input[0];
// Update last sample for trigger detection
if frame_count > 0 {
self.last_sample = (input[0] + input[1]) * 0.5;
}
}
@ -288,6 +296,7 @@ impl AudioNode for OscilloscopeNode {
trigger_period: 480,
buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
cv_buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
mono_buf: vec![0.0; 2048],
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),

View File

@ -58,6 +58,7 @@ pub enum NodeTemplate {
Constant,
MidiToCv,
AudioToCv,
Arpeggiator,
Math,
SampleHold,
SlewLimiter,
@ -116,6 +117,7 @@ impl NodeTemplate {
NodeTemplate::Constant => "Constant",
NodeTemplate::MidiToCv => "MidiToCV",
NodeTemplate::AudioToCv => "AudioToCV",
NodeTemplate::Arpeggiator => "Arpeggiator",
NodeTemplate::Math => "Math",
NodeTemplate::SampleHold => "SampleHold",
NodeTemplate::SlewLimiter => "SlewLimiter",
@ -356,6 +358,7 @@ impl NodeTemplateTrait for NodeTemplate {
NodeTemplate::Constant => "Constant".into(),
NodeTemplate::MidiToCv => "MIDI to CV".into(),
NodeTemplate::AudioToCv => "Audio to CV".into(),
NodeTemplate::Arpeggiator => "Arpeggiator".into(),
NodeTemplate::Math => "Math".into(),
NodeTemplate::SampleHold => "Sample & Hold".into(),
NodeTemplate::SlewLimiter => "Slew Limiter".into(),
@ -387,7 +390,7 @@ impl NodeTemplateTrait for NodeTemplate {
| NodeTemplate::BitCrusher | NodeTemplate::Compressor | NodeTemplate::Limiter | NodeTemplate::Eq
| NodeTemplate::Pan | NodeTemplate::RingModulator | NodeTemplate::Vocoder => vec!["Effects"],
NodeTemplate::Adsr | NodeTemplate::Lfo | NodeTemplate::Mixer | NodeTemplate::Splitter
| NodeTemplate::Constant | NodeTemplate::MidiToCv | NodeTemplate::AudioToCv | NodeTemplate::Math
| NodeTemplate::Constant | NodeTemplate::MidiToCv | NodeTemplate::AudioToCv | NodeTemplate::Arpeggiator | NodeTemplate::Math
| NodeTemplate::SampleHold | NodeTemplate::SlewLimiter | NodeTemplate::Quantizer
| NodeTemplate::EnvelopeFollower | NodeTemplate::BpmDetector | NodeTemplate::Mod => vec!["Utilities"],
NodeTemplate::Oscilloscope => vec!["Analysis"],
@ -719,6 +722,28 @@ impl NodeTemplateTrait for NodeTemplate {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::float(0.0), InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "CV Out".into(), DataType::CV);
}
NodeTemplate::Arpeggiator => {
graph.add_input_param(node_id, "MIDI In".into(), DataType::Midi, ValueType::float(0.0), InputParamKind::ConnectionOnly, true);
graph.add_input_param(node_id, "Phase".into(), DataType::CV, ValueType::float(0.0), InputParamKind::ConnectionOnly, true);
graph.add_input_param(node_id, "Mode".into(), DataType::CV,
ValueType::float_param(0.0, 0.0, 1.0, "", 0,
Some(&["One/Cycle", "All/Cycle"])),
InputParamKind::ConstantOnly, true);
graph.add_input_param(node_id, "Direction".into(), DataType::CV,
ValueType::float_param(0.0, 0.0, 3.0, "", 1,
Some(&["Up", "Down", "Up/Down", "Random"])),
InputParamKind::ConstantOnly, true);
graph.add_input_param(node_id, "Octaves".into(), DataType::CV,
ValueType::float_param(0.0, 0.0, 3.0, "", 2,
Some(&["1", "2", "3", "4"])),
InputParamKind::ConstantOnly, true);
graph.add_input_param(node_id, "Retrigger".into(), DataType::CV,
ValueType::float_param(1.0, 0.0, 1.0, "", 3,
Some(&["Off", "On"])),
InputParamKind::ConstantOnly, true);
graph.add_output_param(node_id, "V/Oct".into(), DataType::CV);
graph.add_output_param(node_id, "Gate".into(), DataType::CV);
}
NodeTemplate::Math => {
graph.add_input_param(node_id, "A".into(), DataType::CV, ValueType::float(0.0), InputParamKind::ConnectionOrConstant, true);
graph.add_input_param(node_id, "B".into(), DataType::CV, ValueType::float(0.0), InputParamKind::ConnectionOrConstant, true);
@ -1125,6 +1150,7 @@ impl NodeTemplateIter for AllNodeTemplates {
NodeTemplate::Constant,
NodeTemplate::MidiToCv,
NodeTemplate::AudioToCv,
NodeTemplate::Arpeggiator,
NodeTemplate::Math,
NodeTemplate::SampleHold,
NodeTemplate::SlewLimiter,

View File

@ -1963,6 +1963,8 @@ impl NodeGraphPane {
"BPMDetector" => Some(NodeTemplate::BpmDetector),
"Mod" => Some(NodeTemplate::Mod),
"Oscilloscope" => Some(NodeTemplate::Oscilloscope),
"Arpeggiator" => Some(NodeTemplate::Arpeggiator),
"Beat" => Some(NodeTemplate::Beat),
"VoiceAllocator" => Some(NodeTemplate::VoiceAllocator),
"Group" => Some(NodeTemplate::Group),
"TemplateInput" => Some(NodeTemplate::TemplateInput),

View File

@ -73,6 +73,12 @@ impl PresetBrowserPane {
self.scan_directory(&factory_dir, &factory_dir, true);
}
// User presets
let user_dir = user_presets_dir();
if user_dir.is_dir() {
self.scan_directory(&user_dir, &user_dir, false);
}
// Sort presets alphabetically by name within each category
self.presets.sort_by(|a, b| {
a.category.cmp(&b.category).then(a.name.cmp(&b.name))