MIDI recording

Skyler Lehmkuhl 2025-10-29 01:50:45 -04:00
parent 6e7e90fe57
commit dc32fc4200
19 changed files with 1470 additions and 132 deletions

View File

@@ -4,11 +4,11 @@ use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
 use crate::audio::node_graph::{nodes::*, InstrumentGraph};
 use crate::audio::pool::AudioPool;
 use crate::audio::project::Project;
-use crate::audio::recording::RecordingState;
+use crate::audio::recording::{MidiRecordingState, RecordingState};
 use crate::audio::track::{Track, TrackId, TrackNode};
 use crate::command::{AudioEvent, Command, Query, QueryResponse};
 use petgraph::stable_graph::NodeIndex;
-use std::sync::atomic::{AtomicU64, Ordering};
+use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};
 use std::sync::Arc;

 /// Audio engine for Phase 6: hierarchical tracks with groups
@@ -30,6 +30,9 @@ pub struct Engine {
     // Shared playhead for UI reads
     playhead_atomic: Arc<AtomicU64>,

+    // Shared MIDI clip ID counter for synchronous access
+    next_midi_clip_id_atomic: Arc<AtomicU32>,
+
     // Event counter for periodic position updates
     frames_since_last_event: usize,
     event_interval_frames: usize,
@@ -38,13 +41,15 @@ pub struct Engine {
     mix_buffer: Vec<f32>,

     // ID counters
-    next_midi_clip_id: MidiClipId,
     next_clip_id: ClipId,

     // Recording state
     recording_state: Option<RecordingState>,
     input_rx: Option<rtrb::Consumer<f32>>,
     recording_progress_counter: usize,
+
+    // MIDI recording state
+    midi_recording_state: Option<MidiRecordingState>,
 }

 impl Engine {
@@ -75,14 +80,15 @@ impl Engine {
             query_rx,
             query_response_tx,
             playhead_atomic: Arc::new(AtomicU64::new(0)),
+            next_midi_clip_id_atomic: Arc::new(AtomicU32::new(0)),
             frames_since_last_event: 0,
             event_interval_frames,
             mix_buffer: Vec::new(),
-            next_midi_clip_id: 0,
             next_clip_id: 0,
             recording_state: None,
             input_rx: None,
             recording_progress_counter: 0,
+            midi_recording_state: None,
         }
     }
@@ -157,6 +163,7 @@ impl Engine {
             query_tx,
             query_response_rx,
             playhead: Arc::clone(&self.playhead_atomic),
+            next_midi_clip_id: Arc::clone(&self.next_midi_clip_id_atomic),
             sample_rate: self.sample_rate,
             channels: self.channels,
         }
@@ -192,8 +199,8 @@ impl Engine {
             self.buffer_pool = BufferPool::new(8, output.len());
         }

-        // Convert playhead from samples to seconds for timeline-based rendering
-        let playhead_seconds = self.playhead as f64 / (self.sample_rate as f64 * self.channels as f64);
+        // Convert playhead from frames to seconds for timeline-based rendering
+        let playhead_seconds = self.playhead as f64 / self.sample_rate as f64;

         // Render the entire project hierarchy into the mix buffer
         self.project.render(
@@ -208,8 +215,8 @@ impl Engine {
         // Copy mix to output
         output.copy_from_slice(&self.mix_buffer);

-        // Update playhead
-        self.playhead += output.len() as u64;
+        // Update playhead (convert total samples to frames)
+        self.playhead += (output.len() / self.channels as usize) as u64;

         // Update atomic playhead for UI reads
         self.playhead_atomic
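The frame/sample distinction drives both hunks above: with interleaved stereo output, one frame is one sample per channel, so playhead seconds divide by the sample rate only. A minimal sketch of the two conversions involved (free functions for illustration, not part of this commit):

    // Seconds elapsed for a playhead counted in frames.
    fn frames_to_seconds(frames: u64, sample_rate: u32) -> f64 {
        frames as f64 / sample_rate as f64
    }

    // An interleaved buffer holds `channels` samples per frame.
    fn buffer_len_to_frames(buffer_len: usize, channels: u32) -> u64 {
        (buffer_len / channels as usize) as u64
    }

    fn main() {
        // 48_000 interleaved stereo samples = 24_000 frames = 0.5 s at 48 kHz.
        let frames = buffer_len_to_frames(48_000, 2);
        assert_eq!(frames, 24_000);
        assert_eq!(frames_to_seconds(frames, 48_000), 0.5);
    }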
@@ -219,12 +226,24 @@ impl Engine {
         self.frames_since_last_event += output.len() / self.channels as usize;
         if self.frames_since_last_event >= self.event_interval_frames / self.channels as usize
         {
-            let position_seconds =
-                self.playhead as f64 / (self.sample_rate as f64 * self.channels as f64);
+            let position_seconds = self.playhead as f64 / self.sample_rate as f64;
             let _ = self
                 .event_tx
                 .push(AudioEvent::PlaybackPosition(position_seconds));
             self.frames_since_last_event = 0;
+
+            // Send MIDI recording progress if active
+            if let Some(recording) = &self.midi_recording_state {
+                let current_time = self.playhead as f64 / self.sample_rate as f64;
+                let duration = current_time - recording.start_time;
+                let notes = recording.get_notes().to_vec();
+                let _ = self.event_tx.push(AudioEvent::MidiRecordingProgress(
+                    recording.track_id,
+                    recording.clip_id,
+                    duration,
+                    notes,
+                ));
+            }
         }
     } else {
         // Not playing, but process live MIDI input
@@ -296,10 +315,12 @@ impl Engine {
                 self.project.stop_all_notes();
             }
             Command::Seek(seconds) => {
-                let samples = (seconds * self.sample_rate as f64 * self.channels as f64) as u64;
-                self.playhead = samples;
+                let frames = (seconds * self.sample_rate as f64) as u64;
+                self.playhead = frames;
                 self.playhead_atomic
                     .store(self.playhead, Ordering::Relaxed);
+                // Stop all MIDI notes when seeking to prevent stuck notes
+                self.project.stop_all_notes();
             }
             Command::SetTrackVolume(track_id, volume) => {
                 if let Some(track) = self.project.get_track_mut(track_id) {
@@ -393,28 +414,28 @@ impl Engine {
                 let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
             }
             Command::CreateMidiClip(track_id, start_time, duration) => {
-                // Create a new MIDI clip with unique ID
-                let clip_id = self.next_midi_clip_id;
-                self.next_midi_clip_id += 1;
+                // Get the next MIDI clip ID from the atomic counter
+                let clip_id = self.next_midi_clip_id_atomic.fetch_add(1, Ordering::Relaxed);
                 let clip = MidiClip::new(clip_id, start_time, duration);
                 let _ = self.project.add_midi_clip(track_id, clip);
+
+                // Notify UI about the new clip with its ID
+                let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id));
             }
             Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration) => {
                 // Add a MIDI note event to the specified clip
                 if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
                     if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                        // Convert time to sample timestamp
-                        let timestamp = (time_offset * self.sample_rate as f64) as u64;
-                        let note_on = MidiEvent::note_on(timestamp, 0, note, velocity);
+                        // Timestamp is now in seconds (sample-rate independent)
+                        let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
                         clip.events.push(note_on);

                         // Add note off event
-                        let note_off_timestamp = ((time_offset + duration) * self.sample_rate as f64) as u64;
-                        let note_off = MidiEvent::note_off(note_off_timestamp, 0, note, 64);
+                        let note_off_time = time_offset + duration;
+                        let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
                         clip.events.push(note_off);

-                        // Sort events by timestamp
-                        clip.events.sort_by_key(|e| e.timestamp);
+                        // Sort events by timestamp (using partial_cmp for f64)
+                        clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
                     }
                 }
             }
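The `partial_cmp(...).unwrap()` comparator used here (and at the other sorting sites in this diff) panics if a timestamp is ever NaN. A sketch of the panic-free alternative using the standard library's `f64::total_cmp`, shown on a minimal stand-in type:

    struct Event {
        timestamp: f64,
    }

    fn sort_events_by_time(events: &mut [Event]) {
        // total_cmp imposes a total order on f64 (NaN sorts consistently),
        // so no unwrap is needed.
        events.sort_by(|a, b| a.timestamp.total_cmp(&b.timestamp));
    }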
@@ -430,20 +451,19 @@ impl Engine {
                         clip.events.clear();

                         // Add new events from the notes array
+                        // Timestamps are now stored in seconds (sample-rate independent)
                         for (start_time, note, velocity, duration) in notes {
-                            // Convert time to sample timestamp
-                            let timestamp = (start_time * self.sample_rate as f64) as u64;
-                            let note_on = MidiEvent::note_on(timestamp, 0, note, velocity);
+                            let note_on = MidiEvent::note_on(start_time, 0, note, velocity);
                             clip.events.push(note_on);

                             // Add note off event
-                            let note_off_timestamp = ((start_time + duration) * self.sample_rate as f64) as u64;
-                            let note_off = MidiEvent::note_off(note_off_timestamp, 0, note, 64);
+                            let note_off_time = start_time + duration;
+                            let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
                             clip.events.push(note_off);
                         }

-                        // Sort events by timestamp
-                        clip.events.sort_by_key(|e| e.timestamp);
+                        // Sort events by timestamp (using partial_cmp for f64)
+                        clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
                     }
                 }
             }
@@ -596,6 +616,16 @@ impl Engine {
                     recording.resume();
                 }
             }
+            Command::StartMidiRecording(track_id, clip_id, start_time) => {
+                // Start MIDI recording on the specified track
+                self.handle_start_midi_recording(track_id, clip_id, start_time);
+            }
+            Command::StopMidiRecording => {
+                eprintln!("[ENGINE] Received StopMidiRecording command");
+                // Stop the current MIDI recording
+                self.handle_stop_midi_recording();
+                eprintln!("[ENGINE] handle_stop_midi_recording() completed");
+            }
             Command::Reset => {
                 // Reset the entire project to initial state
                 // Stop playback
@@ -617,7 +647,7 @@ impl Engine {
                 self.buffer_pool = BufferPool::new(8, buffer_size);

                 // Reset ID counters
-                self.next_midi_clip_id = 0;
+                self.next_midi_clip_id_atomic.store(0, Ordering::Relaxed);
                 self.next_clip_id = 0;

                 // Clear mix buffer
@@ -630,11 +660,31 @@ impl Engine {
             Command::SendMidiNoteOn(track_id, note, velocity) => {
                 // Send a live MIDI note on event to the specified track's instrument
                 self.project.send_midi_note_on(track_id, note, velocity);
+
+                // If MIDI recording is active on this track, capture the event
+                if let Some(recording) = &mut self.midi_recording_state {
+                    if recording.track_id == track_id {
+                        let absolute_time = self.playhead as f64 / self.sample_rate as f64;
+                        eprintln!("[MIDI_RECORDING] NoteOn captured: note={}, velocity={}, absolute_time={:.3}s, playhead={}, sample_rate={}",
+                            note, velocity, absolute_time, self.playhead, self.sample_rate);
+                        recording.note_on(note, velocity, absolute_time);
+                    }
+                }
             }
             Command::SendMidiNoteOff(track_id, note) => {
                 // Send a live MIDI note off event to the specified track's instrument
                 self.project.send_midi_note_off(track_id, note);
+
+                // If MIDI recording is active on this track, capture the event
+                if let Some(recording) = &mut self.midi_recording_state {
+                    if recording.track_id == track_id {
+                        let absolute_time = self.playhead as f64 / self.sample_rate as f64;
+                        eprintln!("[MIDI_RECORDING] NoteOff captured: note={}, absolute_time={:.3}s, playhead={}, sample_rate={}",
+                            note, absolute_time, self.playhead, self.sample_rate);
+                        recording.note_off(note, absolute_time);
+                    }
+                }
             }

             // Node graph commands
@@ -654,17 +704,20 @@ impl Engine {
                     "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())),
                     "Splitter" => Box::new(SplitterNode::new("Splitter".to_string())),
                     "Pan" => Box::new(PanNode::new("Pan".to_string())),
+                    "Quantizer" => Box::new(QuantizerNode::new("Quantizer".to_string())),
                     "Delay" => Box::new(DelayNode::new("Delay".to_string())),
                     "Distortion" => Box::new(DistortionNode::new("Distortion".to_string())),
                     "Reverb" => Box::new(ReverbNode::new("Reverb".to_string())),
                     "Chorus" => Box::new(ChorusNode::new("Chorus".to_string())),
                     "Compressor" => Box::new(CompressorNode::new("Compressor".to_string())),
                     "Limiter" => Box::new(LimiterNode::new("Limiter".to_string())),
+                    "Math" => Box::new(MathNode::new("Math".to_string())),
                     "EQ" => Box::new(EQNode::new("EQ".to_string())),
                     "Flanger" => Box::new(FlangerNode::new("Flanger".to_string())),
                     "FMSynth" => Box::new(FMSynthNode::new("FM Synth".to_string())),
                     "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable".to_string())),
                     "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler".to_string())),
+                    "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter".to_string())),
                     "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler".to_string())),
                     "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
                     "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
@@ -718,17 +771,20 @@ impl Engine {
                     "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())),
                     "Splitter" => Box::new(SplitterNode::new("Splitter".to_string())),
                     "Pan" => Box::new(PanNode::new("Pan".to_string())),
+                    "Quantizer" => Box::new(QuantizerNode::new("Quantizer".to_string())),
                     "Delay" => Box::new(DelayNode::new("Delay".to_string())),
                     "Distortion" => Box::new(DistortionNode::new("Distortion".to_string())),
                     "Reverb" => Box::new(ReverbNode::new("Reverb".to_string())),
                     "Chorus" => Box::new(ChorusNode::new("Chorus".to_string())),
                     "Compressor" => Box::new(CompressorNode::new("Compressor".to_string())),
                     "Limiter" => Box::new(LimiterNode::new("Limiter".to_string())),
+                    "Math" => Box::new(MathNode::new("Math".to_string())),
                     "EQ" => Box::new(EQNode::new("EQ".to_string())),
                     "Flanger" => Box::new(FlangerNode::new("Flanger".to_string())),
                     "FMSynth" => Box::new(FMSynthNode::new("FM Synth".to_string())),
                     "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable".to_string())),
                     "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler".to_string())),
+                    "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter".to_string())),
                     "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler".to_string())),
                     "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
                     "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
@@ -1100,6 +1156,21 @@ impl Engine {
                 ))),
                 }
             }
+            Query::GetMidiClip(track_id, clip_id) => {
+                if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
+                    if let Some(clip) = track.clips.iter().find(|c| c.id == clip_id) {
+                        use crate::command::MidiClipData;
+                        QueryResponse::MidiClipData(Ok(MidiClipData {
+                            duration: clip.duration,
+                            events: clip.events.clone(),
+                        }))
+                    } else {
+                        QueryResponse::MidiClipData(Err(format!("Clip {} not found in track {}", clip_id, track_id)))
+                    }
+                } else {
+                    QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id)))
+                }
+            }
         };

         // Send response back
@@ -1193,6 +1264,15 @@ impl Engine {
     /// Handle stopping a recording
     fn handle_stop_recording(&mut self) {
         eprintln!("[STOP_RECORDING] handle_stop_recording called");
+
+        // Check if we have an active MIDI recording first
+        if self.midi_recording_state.is_some() {
+            eprintln!("[STOP_RECORDING] Detected active MIDI recording, delegating to handle_stop_midi_recording");
+            self.handle_stop_midi_recording();
+            return;
+        }
+
+        // Handle audio recording
         if let Some(recording) = self.recording_state.take() {
             let clip_id = recording.clip_id;
             let track_id = recording.track_id;
@@ -1248,6 +1328,90 @@ impl Engine {
         }
     }

+    /// Handle starting MIDI recording
+    fn handle_start_midi_recording(&mut self, track_id: TrackId, clip_id: MidiClipId, start_time: f64) {
+        // Check if track exists and is a MIDI track
+        if let Some(crate::audio::track::TrackNode::Midi(_)) = self.project.get_track_mut(track_id) {
+            // Create MIDI recording state
+            let recording_state = MidiRecordingState::new(track_id, clip_id, start_time);
+            self.midi_recording_state = Some(recording_state);
+            eprintln!("[MIDI_RECORDING] Started MIDI recording on track {} for clip {}", track_id, clip_id);
+        } else {
+            // Send error event if track not found or not a MIDI track
+            let _ = self.event_tx.push(AudioEvent::RecordingError(
+                format!("Track {} not found or is not a MIDI track", track_id)
+            ));
+        }
+    }
+
+    /// Handle stopping MIDI recording
+    fn handle_stop_midi_recording(&mut self) {
+        eprintln!("[MIDI_RECORDING] handle_stop_midi_recording called");
+        if let Some(mut recording) = self.midi_recording_state.take() {
+            // Close out any active notes at the current playhead position
+            let end_time = self.playhead as f64 / self.sample_rate as f64;
+            eprintln!("[MIDI_RECORDING] Closing active notes at time {}", end_time);
+            recording.close_active_notes(end_time);
+
+            let clip_id = recording.clip_id;
+            let track_id = recording.track_id;
+            let notes = recording.get_notes().to_vec();
+            let note_count = notes.len();
+            let recording_duration = end_time - recording.start_time;
+            eprintln!("[MIDI_RECORDING] Stopping MIDI recording for clip_id={}, track_id={}, captured {} notes, duration={:.3}s",
+                clip_id, track_id, note_count, recording_duration);
+
+            // Update the MIDI clip using the existing UpdateMidiClipNotes logic
+            eprintln!("[MIDI_RECORDING] Looking for track {} to update clip", track_id);
+            if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
+                eprintln!("[MIDI_RECORDING] Found MIDI track, looking for clip {}", clip_id);
+                if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
+                    eprintln!("[MIDI_RECORDING] Found clip, clearing and adding {} notes", note_count);
+                    // Clear existing events
+                    clip.events.clear();
+
+                    // Update clip duration to match the actual recording time
+                    clip.duration = recording_duration;
+
+                    // Add new events from the recorded notes
+                    // Timestamps are now stored in seconds (sample-rate independent)
+                    for (start_time, note, velocity, duration) in notes.iter() {
+                        let note_on = MidiEvent::note_on(*start_time, 0, *note, *velocity);
+                        eprintln!("[MIDI_RECORDING] Note {}: start_time={:.3}s, duration={:.3}s",
+                            note, start_time, duration);
+                        clip.events.push(note_on);

+                        // Add note off event
+                        let note_off_time = *start_time + *duration;
+                        let note_off = MidiEvent::note_off(note_off_time, 0, *note, 64);
+                        clip.events.push(note_off);
+                    }
+
+                    // Sort events by timestamp (using partial_cmp for f64)
+                    clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
+                    eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, note_count, clip.events.len());
+                } else {
+                    eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found on track!", clip_id);
+                }
+            } else {
+                eprintln!("[MIDI_RECORDING] ERROR: Track {} not found or not a MIDI track!", track_id);
+            }
+
+            // Send event to UI
+            eprintln!("[MIDI_RECORDING] Pushing MidiRecordingStopped event to event_tx...");
+            match self.event_tx.push(AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count)) {
+                Ok(_) => eprintln!("[MIDI_RECORDING] MidiRecordingStopped event pushed successfully"),
+                Err(e) => eprintln!("[MIDI_RECORDING] ERROR: Failed to push event: {:?}", e),
+            }
+        } else {
+            eprintln!("[MIDI_RECORDING] No active MIDI recording to stop");
+        }
+    }
+
     /// Get current sample rate
     pub fn sample_rate(&self) -> u32 {
         self.sample_rate
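The stop handler above expands each recorded (offset, note, velocity, duration) tuple into a note-on/note-off pair before sorting. A compact sketch of that expansion in isolation (the (time, is_on, note, velocity) tuple shape here is illustrative only):

    fn tuples_to_events(notes: &[(f64, u8, u8, f64)]) -> Vec<(f64, bool, u8, u8)> {
        let mut events = Vec::with_capacity(notes.len() * 2);
        for &(start, note, velocity, duration) in notes {
            events.push((start, true, note, velocity));       // note on
            events.push((start + duration, false, note, 64)); // note off, velocity 64 as above
        }
        // Keep the stream time-ordered, as the clip expects.
        events.sort_by(|a, b| a.0.total_cmp(&b.0));
        events
    }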
@@ -1270,6 +1434,7 @@ pub struct EngineController {
     query_tx: rtrb::Producer<Query>,
     query_response_rx: rtrb::Consumer<QueryResponse>,
     playhead: Arc<AtomicU64>,
+    next_midi_clip_id: Arc<AtomicU32>,
     sample_rate: u32,
     channels: u32,
 }
@@ -1331,8 +1496,8 @@ impl EngineController {
     /// Get current playhead position in seconds
     pub fn get_playhead_seconds(&self) -> f64 {
-        let samples = self.playhead.load(Ordering::Relaxed);
-        samples as f64 / (self.sample_rate as f64 * self.channels as f64)
+        let frames = self.playhead.load(Ordering::Relaxed);
+        frames as f64 / self.sample_rate as f64
     }

     /// Create a new metatrack
@@ -1388,8 +1553,11 @@ impl EngineController {
     }

     /// Create a new MIDI clip on a track
-    pub fn create_midi_clip(&mut self, track_id: TrackId, start_time: f64, duration: f64) {
+    pub fn create_midi_clip(&mut self, track_id: TrackId, start_time: f64, duration: f64) -> MidiClipId {
+        // Peek at the next clip ID that will be used
+        let clip_id = self.next_midi_clip_id.load(Ordering::Relaxed);
         let _ = self.command_tx.push(Command::CreateMidiClip(track_id, start_time, duration));
+        clip_id
    }

     /// Add a MIDI note to a clip
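Note on the peek above: `load` here and `fetch_add` in the engine are two separate atomic operations, so two `create_midi_clip` calls racing before the engine drains the command queue could observe the same ID. If that ever matters, one option (a sketch, not what this commit does) is to reserve the ID in the controller and carry it inside the command:

    use std::sync::atomic::{AtomicU32, Ordering};

    // fetch_add makes the reservation unique per caller; the engine would
    // then use the ID carried by the command instead of allocating its own.
    fn reserve_clip_id(counter: &AtomicU32) -> u32 {
        counter.fetch_add(1, Ordering::Relaxed)
    }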
@@ -1496,6 +1664,16 @@ impl EngineController {
         let _ = self.command_tx.push(Command::ResumeRecording);
     }

+    /// Start MIDI recording on a track
+    pub fn start_midi_recording(&mut self, track_id: TrackId, clip_id: MidiClipId, start_time: f64) {
+        let _ = self.command_tx.push(Command::StartMidiRecording(track_id, clip_id, start_time));
+    }
+
+    /// Stop the current MIDI recording
+    pub fn stop_midi_recording(&mut self) {
+        let _ = self.command_tx.push(Command::StopMidiRecording);
+    }
+
     /// Reset the entire project (clear all tracks, audio pool, and state)
     pub fn reset(&mut self) {
         let _ = self.command_tx.push(Command::Reset);
@@ -1636,6 +1814,28 @@ impl EngineController {
         Err("Query timeout".to_string())
     }

+    /// Query MIDI clip data
+    pub fn query_midi_clip(&mut self, track_id: TrackId, clip_id: MidiClipId) -> Result<crate::command::MidiClipData, String> {
+        // Send query
+        if let Err(_) = self.query_tx.push(Query::GetMidiClip(track_id, clip_id)) {
+            return Err("Failed to send query - queue full".to_string());
+        }
+
+        // Wait for response (with timeout)
+        let start = std::time::Instant::now();
+        let timeout = std::time::Duration::from_millis(500);
+        while start.elapsed() < timeout {
+            if let Ok(QueryResponse::MidiClipData(result)) = self.query_response_rx.pop() {
+                return result;
+            }
+            // Small sleep to avoid busy-waiting
+            std::thread::sleep(std::time::Duration::from_micros(100));
+        }
+
+        Err("Query timeout".to_string())
+    }
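A minimal caller-side sketch of the new polling query, assuming the `EngineController` and plain `u32` ID types used elsewhere in this diff:

    fn print_clip_summary(controller: &mut EngineController, track_id: u32, clip_id: u32) {
        match controller.query_midi_clip(track_id, clip_id) {
            // MidiClipData carries the clip duration plus its raw events.
            Ok(data) => println!("clip {}: {:.2}s, {} events", clip_id, data.duration, data.events.len()),
            Err(e) => eprintln!("query for clip {} failed: {}", clip_id, e),
        }
    }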
     /// Query oscilloscope data from a node
     pub fn query_oscilloscope_data(&mut self, track_id: TrackId, node_id: u32, sample_count: usize) -> Result<crate::command::OscilloscopeData, String> {
         // Send query

View File

@@ -1,8 +1,8 @@
 /// MIDI event representing a single MIDI message
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 pub struct MidiEvent {
-    /// Sample position within the clip
-    pub timestamp: u64,
+    /// Time position within the clip in seconds (sample-rate independent)
+    pub timestamp: f64,
     /// MIDI status byte (includes channel)
     pub status: u8,
     /// First data byte (note number, CC number, etc.)
@@ -13,7 +13,7 @@ pub struct MidiEvent {
 impl MidiEvent {
     /// Create a new MIDI event
-    pub fn new(timestamp: u64, status: u8, data1: u8, data2: u8) -> Self {
+    pub fn new(timestamp: f64, status: u8, data1: u8, data2: u8) -> Self {
         Self {
             timestamp,
             status,
@@ -23,7 +23,7 @@ impl MidiEvent {
     }

     /// Create a note on event
-    pub fn note_on(timestamp: u64, channel: u8, note: u8, velocity: u8) -> Self {
+    pub fn note_on(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self {
         Self {
             timestamp,
             status: 0x90 | (channel & 0x0F),
@@ -33,7 +33,7 @@ impl MidiEvent {
     }

     /// Create a note off event
-    pub fn note_off(timestamp: u64, channel: u8, note: u8, velocity: u8) -> Self {
+    pub fn note_off(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self {
         Self {
             timestamp,
             status: 0x80 | (channel & 0x0F),
@@ -91,8 +91,8 @@ impl MidiClip {
     /// Add a MIDI event to the clip
     pub fn add_event(&mut self, event: MidiEvent) {
         self.events.push(event);
-        // Keep events sorted by timestamp
-        self.events.sort_by_key(|e| e.timestamp);
+        // Keep events sorted by timestamp (using partial_cmp for f64)
+        self.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
     }

     /// Get the end time of the clip
@@ -107,8 +107,8 @@ impl MidiClip {
         &self,
         range_start_seconds: f64,
         range_end_seconds: f64,
-        sample_rate: u32,
-    ) -> Vec<(u64, MidiEvent)> {
+        _sample_rate: u32,
+    ) -> Vec<MidiEvent> {
         let mut result = Vec::new();

         // Check if clip overlaps with the range
@@ -120,21 +120,16 @@ impl MidiClip {
         let play_start = range_start_seconds.max(self.start_time);
         let play_end = range_end_seconds.min(self.end_time());

-        // Convert to samples
-        let range_start_samples = (range_start_seconds * sample_rate as f64) as u64;
-
         // Position within the clip
         let clip_position_seconds = play_start - self.start_time;
-        let clip_position_samples = (clip_position_seconds * sample_rate as f64) as u64;
-        let clip_end_samples = ((play_end - self.start_time) * sample_rate as f64) as u64;
+        let clip_end_seconds = play_end - self.start_time;

         // Find events in this range
-        // Note: Using <= for the end boundary to include events exactly at the clip end
+        // Note: event.timestamp is now in seconds relative to clip start
+        // Use half-open interval [start, end) to avoid triggering events twice
         for event in &self.events {
-            if event.timestamp >= clip_position_samples && event.timestamp <= clip_end_samples {
-                // Calculate absolute timestamp in the output buffer
-                let absolute_timestamp = range_start_samples + (event.timestamp - clip_position_samples);
-                result.push((absolute_timestamp, *event));
+            if event.timestamp >= clip_position_seconds && event.timestamp < clip_end_seconds {
+                result.push(*event);
             }
         }
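The switch to a half-open window is the behavioral fix here: with back-to-back render ranges, an event falling exactly on a boundary is handed to exactly one buffer. A self-contained sketch:

    // Half-open window test, matching the comparison in the hunk above.
    fn in_window(t: f64, start: f64, end: f64) -> bool {
        t >= start && t < end
    }

    fn main() {
        let event_time = 1.0;
        // Consecutive windows [0,1) and [1,2): the boundary event fires once.
        assert!(!in_window(event_time, 0.0, 1.0));
        assert!(in_window(event_time, 1.0, 2.0));
    }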

View File

@@ -791,17 +791,20 @@ impl InstrumentGraph {
             "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise")),
             "Splitter" => Box::new(SplitterNode::new("Splitter")),
             "Pan" => Box::new(PanNode::new("Pan")),
+            "Quantizer" => Box::new(QuantizerNode::new("Quantizer")),
             "Delay" => Box::new(DelayNode::new("Delay")),
             "Distortion" => Box::new(DistortionNode::new("Distortion")),
             "Reverb" => Box::new(ReverbNode::new("Reverb")),
             "Chorus" => Box::new(ChorusNode::new("Chorus")),
             "Compressor" => Box::new(CompressorNode::new("Compressor")),
             "Limiter" => Box::new(LimiterNode::new("Limiter")),
+            "Math" => Box::new(MathNode::new("Math")),
             "EQ" => Box::new(EQNode::new("EQ")),
             "Flanger" => Box::new(FlangerNode::new("Flanger")),
             "FMSynth" => Box::new(FMSynthNode::new("FM Synth")),
             "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable")),
             "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler")),
+            "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter")),
             "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler")),
             "MidiInput" => Box::new(MidiInputNode::new("MIDI Input")),
             "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")),

View File

@ -0,0 +1,178 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_OPERATION: u32 = 0;
const PARAM_OPERAND: u32 = 1;
/// Mathematical and logical operations on CV signals
/// Operations:
/// 0 = Add, 1 = Subtract, 2 = Multiply, 3 = Divide
/// 4 = Min, 5 = Max, 6 = Average
/// 7 = Invert (1.0 - x), 8 = Absolute Value
/// 9 = Clamp (0.0 to 1.0), 10 = Wrap (-1.0 to 1.0)
/// 11 = Greater Than, 12 = Less Than, 13 = Equal (with tolerance)
pub struct MathNode {
name: String,
operation: u32,
operand: f32,
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl MathNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
let inputs = vec![
NodePort::new("CV In A", SignalType::CV, 0),
NodePort::new("CV In B", SignalType::CV, 1),
];
let outputs = vec![
NodePort::new("CV Out", SignalType::CV, 0),
];
let parameters = vec![
Parameter::new(PARAM_OPERATION, "Operation", 0.0, 13.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_OPERAND, "Operand", -10.0, 10.0, 1.0, ParameterUnit::Generic),
];
Self {
name,
operation: 0,
operand: 1.0,
inputs,
outputs,
parameters,
}
}
fn apply_operation(&self, a: f32, b: f32) -> f32 {
match self.operation {
0 => a + b, // Add
1 => a - b, // Subtract
2 => a * b, // Multiply
3 => if b.abs() > 0.0001 { a / b } else { 0.0 }, // Divide (with protection)
4 => a.min(b), // Min
5 => a.max(b), // Max
6 => (a + b) * 0.5, // Average
7 => 1.0 - a, // Invert (ignores b)
8 => a.abs(), // Absolute Value (ignores b)
9 => a.clamp(0.0, 1.0), // Clamp to 0-1 (ignores b)
10 => { // Wrap -1 to 1
let mut result = a;
while result > 1.0 {
result -= 2.0;
}
while result < -1.0 {
result += 2.0;
}
result
},
11 => if a > b { 1.0 } else { 0.0 }, // Greater Than
12 => if a < b { 1.0 } else { 0.0 }, // Less Than
13 => if (a - b).abs() < 0.01 { 1.0 } else { 0.0 }, // Equal (with tolerance)
_ => a, // Unknown operation - pass through
}
}
}
impl AudioNode for MathNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_OPERATION => self.operation = (value as u32).clamp(0, 13),
PARAM_OPERAND => self.operand = value.clamp(-10.0, 10.0),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_OPERATION => self.operation as f32,
PARAM_OPERAND => self.operand,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
if outputs.is_empty() {
return;
}
let output = &mut outputs[0];
let length = output.len();
// Get input A (or use 0.0)
let input_a = if !inputs.is_empty() && !inputs[0].is_empty() {
inputs[0]
} else {
&[]
};
// Get input B (or use operand parameter)
let input_b = if inputs.len() > 1 && !inputs[1].is_empty() {
inputs[1]
} else {
&[]
};
// Process each sample
for i in 0..length {
let a = if i < input_a.len() { input_a[i] } else { 0.0 };
let b = if i < input_b.len() {
input_b[i]
} else {
self.operand
};
output[i] = self.apply_operation(a, b);
}
}
fn reset(&mut self) {
// No state to reset
}
fn node_type(&self) -> &str {
"Math"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
operation: self.operation,
operand: self.operand,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}
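A small reference sketch of the operation table, reduced to a free function with the same numbering (only a few of the fourteen ops shown):

    fn math_op(op: u32, a: f32, b: f32) -> f32 {
        match op {
            0 => a + b,                          // Add
            2 => a * b,                          // Multiply
            11 => if a > b { 1.0 } else { 0.0 }, // Greater Than (gate-style output)
            _ => a,                              // pass through
        }
    }

    fn main() {
        assert_eq!(math_op(0, 0.25, 0.5), 0.75); // Add
        assert_eq!(math_op(11, 0.9, 0.5), 1.0);  // comparison yields a gate
    }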

View File

@@ -11,6 +11,7 @@ mod limiter;
 mod fm_synth;
 mod gain;
 mod lfo;
+mod math;
 mod midi_input;
 mod midi_to_cv;
 mod mixer;
@@ -20,8 +21,10 @@ mod oscillator;
 mod oscilloscope;
 mod output;
 mod pan;
+mod quantizer;
 mod reverb;
 mod simple_sampler;
+mod slew_limiter;
 mod splitter;
 mod template_io;
 mod voice_allocator;
@@ -40,6 +43,7 @@ pub use limiter::LimiterNode;
 pub use fm_synth::FMSynthNode;
 pub use gain::GainNode;
 pub use lfo::LFONode;
+pub use math::MathNode;
 pub use midi_input::MidiInputNode;
 pub use midi_to_cv::MidiToCVNode;
 pub use mixer::MixerNode;
@@ -49,8 +53,10 @@ pub use oscillator::OscillatorNode;
 pub use oscilloscope::OscilloscopeNode;
 pub use output::AudioOutputNode;
 pub use pan::PanNode;
+pub use quantizer::QuantizerNode;
 pub use reverb::ReverbNode;
 pub use simple_sampler::SimpleSamplerNode;
+pub use slew_limiter::SlewLimiterNode;
 pub use splitter::SplitterNode;
 pub use template_io::{TemplateInputNode, TemplateOutputNode};
 pub use voice_allocator::VoiceAllocatorNode;

View File

@ -0,0 +1,220 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_SCALE: u32 = 0;
const PARAM_ROOT_NOTE: u32 = 1;
/// Quantizer - snaps CV values to musical scales
/// Converts continuous CV into discrete pitch values based on a scale
/// Scale parameter:
/// 0 = Chromatic (all 12 notes)
/// 1 = Major scale
/// 2 = Minor scale (natural)
/// 3 = Pentatonic major
/// 4 = Pentatonic minor
/// 5 = Dorian
/// 6 = Phrygian
/// 7 = Lydian
/// 8 = Mixolydian
/// 9 = Whole tone
/// 10 = Octaves only
pub struct QuantizerNode {
name: String,
scale: u32,
root_note: u32, // 0-11 (C-B)
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl QuantizerNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
let inputs = vec![
NodePort::new("CV In", SignalType::CV, 0),
];
let outputs = vec![
NodePort::new("CV Out", SignalType::CV, 0),
NodePort::new("Gate Out", SignalType::CV, 1), // Trigger when note changes
];
let parameters = vec![
Parameter::new(PARAM_SCALE, "Scale", 0.0, 10.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_ROOT_NOTE, "Root", 0.0, 11.0, 0.0, ParameterUnit::Generic),
];
Self {
name,
scale: 0,
root_note: 0,
inputs,
outputs,
parameters,
}
}
/// Get the scale intervals (semitones from root)
fn get_scale_intervals(&self) -> Vec<u32> {
match self.scale {
0 => vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], // Chromatic
1 => vec![0, 2, 4, 5, 7, 9, 11], // Major
2 => vec![0, 2, 3, 5, 7, 8, 10], // Minor (natural)
3 => vec![0, 2, 4, 7, 9], // Pentatonic major
4 => vec![0, 3, 5, 7, 10], // Pentatonic minor
5 => vec![0, 2, 3, 5, 7, 9, 10], // Dorian
6 => vec![0, 1, 3, 5, 7, 8, 10], // Phrygian
7 => vec![0, 2, 4, 6, 7, 9, 11], // Lydian
8 => vec![0, 2, 4, 5, 7, 9, 10], // Mixolydian
9 => vec![0, 2, 4, 6, 8, 10], // Whole tone
10 => vec![0], // Octaves only
_ => vec![0, 2, 4, 5, 7, 9, 11], // Default to major
}
}
/// Quantize a CV value to the nearest note in the scale
fn quantize(&self, cv: f32) -> f32 {
// Convert V/Oct to MIDI note (standard: 0V = A4 = MIDI 69)
// cv = (midi_note - 69) / 12.0
// midi_note = cv * 12.0 + 69
let input_midi_note = cv * 12.0 + 69.0;
// Clamp to reasonable range
let input_midi_note = input_midi_note.clamp(0.0, 127.0);
// Get scale intervals
let intervals = self.get_scale_intervals();
// Find which octave we're in (relative to C)
let octave = (input_midi_note / 12.0).floor() as i32;
let note_in_octave = (input_midi_note % 12.0) as u32;
// Find the nearest note in the scale
let mut closest_interval = intervals[0];
let mut min_distance = (note_in_octave as i32 - closest_interval as i32).abs();
for &interval in &intervals {
let distance = (note_in_octave as i32 - interval as i32).abs();
if distance < min_distance {
min_distance = distance;
closest_interval = interval;
}
}
// Calculate final MIDI note (adjusted for root note)
// Start from the octave * 12, add root note, add scale interval
let quantized_midi_note = (octave * 12) as f32 + self.root_note as f32 + closest_interval as f32;
// Clamp result
let quantized_midi_note = quantized_midi_note.clamp(0.0, 127.0);
// Convert back to V/Oct: voct = (midi_note - 69) / 12.0
(quantized_midi_note - 69.0) / 12.0
}
}
impl AudioNode for QuantizerNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_SCALE => self.scale = (value as u32).clamp(0, 10),
PARAM_ROOT_NOTE => self.root_note = (value as u32).clamp(0, 11),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_SCALE => self.scale as f32,
PARAM_ROOT_NOTE => self.root_note as f32,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
}
let input = inputs[0];
let length = input.len().min(outputs[0].len());
// Split outputs to avoid borrow conflicts
if outputs.len() > 1 {
let (cv_out, gate_out) = outputs.split_at_mut(1);
let cv_output = &mut cv_out[0];
let gate_output = &mut gate_out[0];
let gate_length = length.min(gate_output.len());
let mut last_note: Option<f32> = None;
for i in 0..length {
let quantized = self.quantize(input[i]);
cv_output[i] = quantized;
// Generate gate trigger when note changes
if i < gate_length {
if let Some(prev) = last_note {
gate_output[i] = if (quantized - prev).abs() > 0.001 { 1.0 } else { 0.0 };
} else {
gate_output[i] = 1.0; // First note triggers gate
}
}
last_note = Some(quantized);
}
} else {
// No gate output, just quantize CV
let cv_output = &mut outputs[0];
for i in 0..length {
cv_output[i] = self.quantize(input[i]);
}
}
}
fn reset(&mut self) {
// No state to reset
}
fn node_type(&self) -> &str {
"Quantizer"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
scale: self.scale,
root_note: self.root_note,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}
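For reference, the V/Oct convention the quantizer assumes (0 V = A4 = MIDI 69), as a pair of round-tripping helpers:

    fn voct_to_midi(cv: f32) -> f32 {
        cv * 12.0 + 69.0
    }

    fn midi_to_voct(midi: f32) -> f32 {
        (midi - 69.0) / 12.0
    }

    fn main() {
        // 0.25 V is three semitones above A4: MIDI 72 (C5).
        assert_eq!(voct_to_midi(0.25), 72.0);
        assert_eq!(midi_to_voct(72.0), 0.25);
    }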

View File

@ -0,0 +1,156 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_RISE_TIME: u32 = 0;
const PARAM_FALL_TIME: u32 = 1;
/// Slew limiter - limits the rate of change of a CV signal
/// Useful for creating portamento/glide effects and smoothing control signals
pub struct SlewLimiterNode {
name: String,
rise_time: f32, // Time in seconds to rise from 0 to 1
fall_time: f32, // Time in seconds to fall from 1 to 0
last_value: f32,
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
}
impl SlewLimiterNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
let inputs = vec![
NodePort::new("CV In", SignalType::CV, 0),
];
let outputs = vec![
NodePort::new("CV Out", SignalType::CV, 0),
];
let parameters = vec![
Parameter::new(PARAM_RISE_TIME, "Rise Time", 0.0, 5.0, 0.01, ParameterUnit::Time),
Parameter::new(PARAM_FALL_TIME, "Fall Time", 0.0, 5.0, 0.01, ParameterUnit::Time),
];
Self {
name,
rise_time: 0.01,
fall_time: 0.01,
last_value: 0.0,
inputs,
outputs,
parameters,
}
}
}
impl AudioNode for SlewLimiterNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_RISE_TIME => self.rise_time = value.clamp(0.0, 5.0),
PARAM_FALL_TIME => self.fall_time = value.clamp(0.0, 5.0),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_RISE_TIME => self.rise_time,
PARAM_FALL_TIME => self.fall_time,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
}
let input = inputs[0];
let output = &mut outputs[0];
let length = input.len().min(output.len());
// Calculate maximum change per sample
let sample_duration = 1.0 / sample_rate as f32;
// Rise/fall rates (units per second)
let rise_rate = if self.rise_time > 0.0001 {
1.0 / self.rise_time
} else {
f32::MAX // No limiting
};
let fall_rate = if self.fall_time > 0.0001 {
1.0 / self.fall_time
} else {
f32::MAX // No limiting
};
for i in 0..length {
let target = input[i];
let difference = target - self.last_value;
let max_change = if difference > 0.0 {
// Rising
rise_rate * sample_duration
} else {
// Falling
fall_rate * sample_duration
};
// Limit the change
let limited_difference = difference.clamp(-max_change, max_change);
self.last_value += limited_difference;
output[i] = self.last_value;
}
}
fn reset(&mut self) {
self.last_value = 0.0;
}
fn node_type(&self) -> &str {
"SlewLimiter"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
rise_time: self.rise_time,
fall_time: self.fall_time,
last_value: self.last_value,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}
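Worked example of the rate math above: with rise_time = 0.1 s at 48 kHz, the output may move at most (1 / 0.1) / 48000 ≈ 0.000208 per sample toward the target, so a 0-to-1 step settles in roughly 4800 samples. The per-sample update, isolated:

    fn slew_step(last: f32, target: f32, rate_per_second: f32, sample_rate: f32) -> f32 {
        // Largest move allowed this sample, in signal units.
        let max_change = rate_per_second / sample_rate;
        last + (target - last).clamp(-max_change, max_change)
    }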

View File

@@ -405,7 +405,7 @@ impl Project {
     pub fn send_midi_note_on(&mut self, track_id: TrackId, note: u8, velocity: u8) {
         // Queue the MIDI note-on event to the track's live MIDI queue
         if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
-            let event = MidiEvent::note_on(0, 0, note, velocity);
+            let event = MidiEvent::note_on(0.0, 0, note, velocity);
             track.queue_live_midi(event);
         }
     }
@@ -414,7 +414,7 @@ impl Project {
     pub fn send_midi_note_off(&mut self, track_id: TrackId, note: u8) {
         // Queue the MIDI note-off event to the track's live MIDI queue
         if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
-            let event = MidiEvent::note_off(0, 0, note, 0);
+            let event = MidiEvent::note_off(0.0, 0, note, 0);
             track.queue_live_midi(event);
         }
     }

View File

@@ -1,6 +1,7 @@
 /// Audio recording system for capturing microphone input
-use crate::audio::{ClipId, TrackId};
+use crate::audio::{ClipId, MidiClipId, TrackId};
 use crate::io::{WavWriter, WaveformPeak};
+use std::collections::HashMap;
 use std::path::PathBuf;

 /// State of an active recording session
@@ -204,3 +205,106 @@ impl RecordingState {
         self.paused = false;
     }
 }
+
+/// Active MIDI note waiting for its noteOff event
+#[derive(Debug, Clone)]
+struct ActiveMidiNote {
+    /// MIDI note number (0-127)
+    note: u8,
+    /// Velocity (0-127)
+    velocity: u8,
+    /// Absolute time when note started (seconds)
+    start_time: f64,
+}
+
+/// State of an active MIDI recording session
+pub struct MidiRecordingState {
+    /// Track being recorded to
+    pub track_id: TrackId,
+    /// MIDI clip ID
+    pub clip_id: MidiClipId,
+    /// Timeline start position in seconds
+    pub start_time: f64,
+    /// Currently active notes (noteOn without matching noteOff)
+    /// Maps note number to ActiveMidiNote
+    active_notes: HashMap<u8, ActiveMidiNote>,
+    /// Completed notes ready to be added to clip
+    /// Format: (time_offset, note, velocity, duration)
+    pub completed_notes: Vec<(f64, u8, u8, f64)>,
+}
+
+impl MidiRecordingState {
+    /// Create a new MIDI recording state
+    pub fn new(track_id: TrackId, clip_id: MidiClipId, start_time: f64) -> Self {
+        Self {
+            track_id,
+            clip_id,
+            start_time,
+            active_notes: HashMap::new(),
+            completed_notes: Vec::new(),
+        }
+    }
+
+    /// Handle a MIDI note on event
+    pub fn note_on(&mut self, note: u8, velocity: u8, absolute_time: f64) {
+        // Store this note as active
+        self.active_notes.insert(note, ActiveMidiNote {
+            note,
+            velocity,
+            start_time: absolute_time,
+        });
+    }
+
+    /// Handle a MIDI note off event
+    pub fn note_off(&mut self, note: u8, absolute_time: f64) {
+        // Find the matching noteOn
+        if let Some(active_note) = self.active_notes.remove(&note) {
+            // Calculate relative time offset and duration
+            let time_offset = active_note.start_time - self.start_time;
+            let duration = absolute_time - active_note.start_time;
+            eprintln!("[MIDI_RECORDING_STATE] Completing note {}: note_start={:.3}s, note_end={:.3}s, recording_start={:.3}s, time_offset={:.3}s, duration={:.3}s",
+                note, active_note.start_time, absolute_time, self.start_time, time_offset, duration);
+
+            // Add to completed notes
+            self.completed_notes.push((
+                time_offset,
+                active_note.note,
+                active_note.velocity,
+                duration,
+            ));
+        }
+        // If no matching noteOn found, ignore the noteOff
+    }
+
+    /// Get all completed notes
+    pub fn get_notes(&self) -> &[(f64, u8, u8, f64)] {
+        &self.completed_notes
+    }
+
+    /// Get the number of completed notes
+    pub fn note_count(&self) -> usize {
+        self.completed_notes.len()
+    }
+
+    /// Close out all active notes at the given time
+    /// This should be called when stopping recording to end any held notes
+    pub fn close_active_notes(&mut self, end_time: f64) {
+        // Collect all active notes and close them
+        let active_notes: Vec<_> = self.active_notes.drain().collect();
+        for (_note_num, active_note) in active_notes {
+            // Calculate relative time offset and duration
+            let time_offset = active_note.start_time - self.start_time;
+            let duration = end_time - active_note.start_time;
+
+            // Add to completed notes
+            self.completed_notes.push((
+                time_offset,
+                active_note.note,
+                active_note.velocity,
+                duration,
+            ));
+        }
+    }
+}
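Sketch of the pairing behavior, assuming numeric TrackId/MidiClipId as used in the Tauri layer: times go in as absolute engine seconds and come out as offsets relative to start_time.

    fn main() {
        let mut rec = MidiRecordingState::new(0, 0, 10.0); // recording starts at t = 10 s
        rec.note_on(60, 100, 10.5);  // middle C pressed half a second in
        rec.note_off(60, 11.25);     // released 0.75 s later
        // (time_offset, note, velocity, duration)
        assert_eq!(rec.get_notes(), &[(0.5, 60, 100, 0.75)]);
    }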

View File

@@ -374,8 +374,16 @@ impl MidiTrack {
     /// Stop all currently playing notes on this track's instrument
     /// Note: With node-based instruments, stopping is handled by ceasing MIDI input
     pub fn stop_all_notes(&mut self) {
-        // No-op: Node-based instruments stop when they receive no MIDI input
-        // Individual synthesizer nodes handle note-off events appropriately
+        // Send note-off for all 128 possible MIDI notes to silence the instrument
+        let mut note_offs = Vec::new();
+        for note in 0..128 {
+            note_offs.push(MidiEvent::note_off(0.0, 0, note, 0));
+        }
+
+        // Create a silent buffer to process the note-offs
+        let buffer_size = 512 * 2; // stereo
+        let mut silent_buffer = vec![0.0f32; buffer_size];
+        self.instrument_graph.process(&mut silent_buffer, &note_offs);
     }

     /// Queue a live MIDI event (from virtual keyboard or MIDI controller)
@@ -428,11 +436,14 @@ impl MidiTrack {
                 sample_rate,
             );

-            for (_timestamp, event) in events {
-                midi_events.push(event);
-            }
+            // Events now have timestamps in seconds relative to clip start
+            midi_events.extend(events);
         }

+        // Add live MIDI events (from virtual keyboard or MIDI controllers)
+        // This allows real-time input to be heard during playback/recording
+        midi_events.extend(self.live_midi_queue.drain(..));
+
         // Generate audio using instrument graph
         self.instrument_graph.process(output, &midi_events);

View File

@@ -1,3 +1,3 @@
 pub mod types;

-pub use types::{AudioEvent, Command, OscilloscopeData, Query, QueryResponse};
+pub use types::{AudioEvent, Command, MidiClipData, OscilloscopeData, Query, QueryResponse};

View File

@@ -98,6 +98,12 @@ pub enum Command {
     /// Resume the current recording
     ResumeRecording,

+    // MIDI Recording commands
+    /// Start MIDI recording on a track (track_id, clip_id, start_time)
+    StartMidiRecording(TrackId, MidiClipId, f64),
+    /// Stop the current MIDI recording
+    StopMidiRecording,
+
     // Project commands
     /// Reset the entire project (remove all tracks, clear audio pool, reset state)
     Reset,
@@ -172,6 +178,11 @@ pub enum AudioEvent {
     RecordingStopped(ClipId, usize, Vec<WaveformPeak>),
     /// Recording error (error_message)
     RecordingError(String),
+    /// MIDI recording stopped (track_id, clip_id, note_count)
+    MidiRecordingStopped(TrackId, MidiClipId, usize),
+    /// MIDI recording progress (track_id, clip_id, duration, notes)
+    /// Notes format: (start_time, note, velocity, duration)
+    MidiRecordingProgress(TrackId, MidiClipId, f64, Vec<(f64, u8, u8, f64)>),
     /// Project has been reset
     ProjectReset,
     /// MIDI note started playing (note, velocity)
@@ -199,6 +210,8 @@ pub enum Query {
     GetTemplateState(TrackId, u32),
     /// Get oscilloscope data from a node (track_id, node_id, sample_count)
     GetOscilloscopeData(TrackId, u32, usize),
+    /// Get MIDI clip data (track_id, clip_id)
+    GetMidiClip(TrackId, MidiClipId),
 }

 /// Oscilloscope data from a node
@@ -210,6 +223,13 @@ pub struct OscilloscopeData {
     pub cv: Vec<f32>,
 }

+/// MIDI clip data for serialization
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct MidiClipData {
+    pub duration: f64,
+    pub events: Vec<crate::audio::midi::MidiEvent>,
+}
+
 /// Responses to synchronous queries
 #[derive(Debug)]
 pub enum QueryResponse {
@@ -217,4 +237,6 @@ pub enum QueryResponse {
     GraphState(Result<String, String>),
     /// Oscilloscope data samples
     OscilloscopeData(Result<OscilloscopeData, String>),
+    /// MIDI clip data
+    MidiClipData(Result<MidiClipData, String>),
 }
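A sketch of how a consumer might branch on the two new events (the formatting is illustrative; variant shapes follow the declarations above):

    fn describe(event: &AudioEvent) -> Option<String> {
        match event {
            AudioEvent::MidiRecordingProgress(track, clip, duration, notes) => Some(format!(
                "track {}, clip {}: {:.1}s recorded, {} notes so far",
                track, clip, duration, notes.len()
            )),
            AudioEvent::MidiRecordingStopped(track, clip, count) => Some(format!(
                "track {}, clip {}: stopped with {} notes",
                track, clip, count
            )),
            _ => None,
        }
    }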

View File

@@ -6,7 +6,7 @@ use std::path::Path;
 pub fn load_midi_file<P: AsRef<Path>>(
     path: P,
     clip_id: MidiClipId,
-    sample_rate: u32,
+    _sample_rate: u32,
 ) -> Result<MidiClip, String> {
     // Read the MIDI file
     let data = fs::read(path.as_ref()).map_err(|e| format!("Failed to read MIDI file: {}", e))?;
@@ -109,7 +109,8 @@ pub fn load_midi_file<P: AsRef<Path>>(
                 accumulated_time += delta_time;
                 last_tick = tick;

-                let timestamp = (accumulated_time * sample_rate as f64) as u64;
+                // Store timestamp in seconds (sample-rate independent)
+                let timestamp = accumulated_time;

                 match message {
                     midly::MidiMessage::NoteOn { key, vel } => {
View File

@@ -93,6 +93,12 @@ impl EventEmitter for TauriEventEmitter {
             AudioEvent::GraphPresetLoaded(track_id) => {
                 SerializedAudioEvent::GraphPresetLoaded { track_id }
             }
+            AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count) => {
+                SerializedAudioEvent::MidiRecordingStopped { track_id, clip_id, note_count }
+            }
+            AudioEvent::MidiRecordingProgress(track_id, clip_id, duration, notes) => {
+                SerializedAudioEvent::MidiRecordingProgress { track_id, clip_id, duration, notes }
+            }
             _ => return, // Ignore other event types for now
         };
@ -381,6 +387,39 @@ pub async fn audio_resume_recording(
} }
} }
#[tauri::command]
pub async fn audio_start_midi_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
clip_id: u32,
start_time: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.start_midi_recording(track_id, clip_id, start_time);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
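
The command itself only forwards to the controller; the capture lives in MidiRecordingState, whose definition is not part of the hunks shown here. An illustrative sketch of the state such a recorder needs, with every field name assumed:

// Assumption: the real MidiRecordingState (crate::audio::recording) is not
// visible in this diff. These fields are a plausible minimum, not the
// shipped layout.
pub struct MidiRecordingState {
    pub track_id: TrackId,            // track whose instrument receives live input
    pub clip_id: MidiClipId,          // clip created up front by the frontend
    pub start_time: f64,              // timeline position (seconds) where recording began
    pub events: Vec<MidiEvent>,       // captured events, timestamps in seconds
    pub active_notes: [Option<f64>; 128], // note-on start times awaiting a note-off
}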
#[tauri::command]
pub async fn audio_stop_midi_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
eprintln!("[TAURI] audio_stop_midi_recording called");
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
eprintln!("[TAURI] Calling controller.stop_midi_recording()");
controller.stop_midi_recording();
eprintln!("[TAURI] controller.stop_midi_recording() returned");
Ok(())
} else {
eprintln!("[TAURI] Audio not initialized!");
Err("Audio not initialized".to_string())
}
}
#[tauri::command] #[tauri::command]
pub async fn audio_create_midi_clip( pub async fn audio_create_midi_clip(
state: tauri::State<'_, Arc<Mutex<AudioState>>>, state: tauri::State<'_, Arc<Mutex<AudioState>>>,
@ -390,9 +429,8 @@ pub async fn audio_create_midi_clip(
) -> Result<u32, String> { ) -> Result<u32, String> {
let mut audio_state = state.lock().unwrap(); let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller { if let Some(controller) = &mut audio_state.controller {
controller.create_midi_clip(track_id, start_time, duration); let clip_id = controller.create_midi_clip(track_id, start_time, duration);
// Return a clip ID (for now, just use 0 as clips are managed internally) Ok(clip_id)
Ok(0)
} else { } else {
Err("Audio not initialized".to_string()) Err("Audio not initialized".to_string())
} }
@ -505,6 +543,51 @@ pub async fn audio_load_midi_file(
} }
} }
#[tauri::command]
pub async fn audio_get_midi_clip_data(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
clip_id: u32,
) -> Result<MidiFileMetadata, String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
// Query the MIDI clip data from the backend
let clip_data = controller.query_midi_clip(track_id, clip_id)?;
// Convert MIDI events to MidiNote format
let mut notes = Vec::new();
let mut active_notes: std::collections::HashMap<u8, (f64, u8)> = std::collections::HashMap::new();
for event in &clip_data.events {
// event.timestamp is already in seconds (sample-rate independent)
let time_seconds = event.timestamp;
if event.is_note_on() {
// Store note on event (time and velocity)
active_notes.insert(event.data1, (time_seconds, event.data2));
} else if event.is_note_off() {
// Find matching note on and create a MidiNote
if let Some((start, velocity)) = active_notes.remove(&event.data1) {
notes.push(MidiNote {
note: event.data1,
start_time: start,
duration: time_seconds - start,
velocity,
});
}
}
}
Ok(MidiFileMetadata {
duration: clip_data.duration,
notes,
})
} else {
Err("Audio not initialized".to_string())
}
}
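
One edge case in the pairing pass above: a note-on with no matching note-off (for example, a key still held when recording stops) is silently dropped, and a retriggered key overwrites the earlier note-on for that pitch. A hedged variation that closes dangling notes at the clip boundary instead could run after the event loop (sketch only, not the shipped behavior):

// Close any note still active at the end of the clip at clip_data.duration
// instead of dropping it.
for (note, (start, velocity)) in active_notes.drain() {
    notes.push(MidiNote {
        note,
        start_time: start,
        duration: (clip_data.duration - start).max(0.0),
        velocity,
    });
}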
#[tauri::command] #[tauri::command]
pub async fn audio_update_midi_clip_notes( pub async fn audio_update_midi_clip_notes(
state: tauri::State<'_, Arc<Mutex<AudioState>>>, state: tauri::State<'_, Arc<Mutex<AudioState>>>,
@ -1133,6 +1216,8 @@ pub enum SerializedAudioEvent {
RecordingProgress { clip_id: u32, duration: f64 }, RecordingProgress { clip_id: u32, duration: f64 },
RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> }, RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> },
RecordingError { message: String }, RecordingError { message: String },
MidiRecordingStopped { track_id: u32, clip_id: u32, note_count: usize },
MidiRecordingProgress { track_id: u32, clip_id: u32, duration: f64, notes: Vec<(f64, u8, u8, f64)> },
NoteOn { note: u8, velocity: u8 }, NoteOn { note: u8, velocity: u8 },
NoteOff { note: u8 }, NoteOff { note: u8 },
GraphNodeAdded { track_id: u32, node_id: u32, node_type: String }, GraphNodeAdded { track_id: u32, node_id: u32, node_type: String },

View File

@ -207,9 +207,12 @@ pub fn run() {
audio::audio_stop_recording, audio::audio_stop_recording,
audio::audio_pause_recording, audio::audio_pause_recording,
audio::audio_resume_recording, audio::audio_resume_recording,
audio::audio_start_midi_recording,
audio::audio_stop_midi_recording,
audio::audio_create_midi_clip, audio::audio_create_midi_clip,
audio::audio_add_midi_note, audio::audio_add_midi_note,
audio::audio_load_midi_file, audio::audio_load_midi_file,
audio::audio_get_midi_clip_data,
audio::audio_update_midi_clip_notes, audio::audio_update_midi_clip_notes,
audio::audio_send_midi_note_on, audio::audio_send_midi_note_on,
audio::audio_send_midi_note_off, audio::audio_send_midi_note_off,

View File

@ -450,11 +450,8 @@ export const actions = {
let newMIDITrack = new AudioTrack(action.trackuuid, action.midiname, 'midi'); let newMIDITrack = new AudioTrack(action.trackuuid, action.midiname, 'midi');
let object = pointerList[action.object]; let object = pointerList[action.object];
// Get available instruments and use the first one (SimpleSynth) // Note: MIDI tracks now use node-based instruments via instrument_graph
const { invoke } = window.__TAURI__.core; const { invoke } = window.__TAURI__.core;
const instruments = await invoke('audio_get_available_instruments');
const instrument = instruments.length > 0 ? instruments[0] : 'SimpleSynth';
newMIDITrack.instrument = instrument;
// Add placeholder clip immediately so user sees feedback // Add placeholder clip immediately so user sees feedback
newMIDITrack.clips.push({ newMIDITrack.clips.push({

View File

@ -710,25 +710,12 @@ Object.defineProperty(globalThis, 'root', {
return __root; return __root;
}, },
set(newRoot) { set(newRoot) {
console.error('[ROOT REPLACED] root is being replaced!');
console.error('[ROOT REPLACED] Old root idx:', __root?.idx, 'New root idx:', newRoot?.idx);
console.trace('[ROOT REPLACED] Stack trace:');
__root = newRoot; __root = newRoot;
}, },
configurable: true, configurable: true,
enumerable: true enumerable: true
}); });
// Set up a watchdog to monitor root.frameRate
setInterval(() => {
if (root && root.frameRate === undefined) {
console.error('[WATCHDOG] root.frameRate is undefined!');
console.error('[WATCHDOG] root object idx:', root.idx);
console.error('[WATCHDOG] Has frameRate property?', 'frameRate' in root);
console.trace('[WATCHDOG] Stack trace:');
}
}, 1000);
async function greet() { async function greet() {
// Learn more about Tauri commands at https://tauri.app/develop/calling-rust/ // Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
greetMsgEl.textContent = await invoke("greet", { name: greetInputEl.value }); greetMsgEl.textContent = await invoke("greet", { name: greetInputEl.value });
@ -821,6 +808,7 @@ window.addEventListener("keydown", (e) => {
case config.shortcuts.playAnimation: case config.shortcuts.playAnimation:
console.log("Spacebar pressed"); console.log("Spacebar pressed");
playPause(); playPause();
e.preventDefault(); // Prevent spacebar from clicking focused buttons
break; break;
case config.shortcuts.selectAll: case config.shortcuts.selectAll:
e.preventDefault(); e.preventDefault();
@ -991,11 +979,6 @@ function playbackLoop() {
if (playing) { if (playing) {
const duration = context.activeObject.duration; const duration = context.activeObject.duration;
// Debug logging for recording
if (context.isRecording) {
console.log('playbackLoop - recording active, currentTime:', context.activeObject.currentTime, 'duration:', duration, 'isRecording:', context.isRecording);
}
// Check if we've reached the end (but allow infinite playback when recording) // Check if we've reached the end (but allow infinite playback when recording)
if (context.isRecording || (duration > 0 && context.activeObject.currentTime < duration)) { if (context.isRecording || (duration > 0 && context.activeObject.currentTime < duration)) {
// Continue playing // Continue playing
@ -1203,6 +1186,98 @@ async function handleAudioEvent(event) {
context.recordingClipId = null; context.recordingClipId = null;
break; break;
case 'MidiRecordingProgress':
// Update MIDI clip during recording with current duration and notes
const progressMidiTrack = context.activeObject.audioTracks.find(t => t.audioTrackId === event.track_id);
if (progressMidiTrack) {
const progressClip = progressMidiTrack.clips.find(c => c.clipId === event.clip_id);
if (progressClip) {
console.log('[MIDI_PROGRESS] Updating clip', event.clip_id, '- duration:', event.duration, 'notes:', event.notes.length, 'loading:', progressClip.loading);
progressClip.duration = event.duration;
progressClip.loading = false; // Make sure clip is not in loading state
// Convert backend note format to frontend format
progressClip.notes = event.notes.map(([start_time, note, velocity, duration]) => ({
note: note,
start_time: start_time,
duration: duration,
velocity: velocity
}));
console.log('[MIDI_PROGRESS] Clip now has', progressClip.notes.length, 'notes');
// Request redraw to show updated clip
updateLayers();
if (context.timelineWidget) {
context.timelineWidget.requestRedraw();
}
} else {
console.log('[MIDI_PROGRESS] Could not find clip', event.clip_id);
}
}
break;
case 'MidiRecordingStopped':
console.log('[FRONTEND] ========== MidiRecordingStopped EVENT ==========');
console.log('[FRONTEND] Event details - track:', event.track_id, 'clip:', event.clip_id, 'notes:', event.note_count);
// Find the track and update the clip
const midiTrack = context.activeObject.audioTracks.find(t => t.audioTrackId === event.track_id);
console.log('[FRONTEND] Found MIDI track:', midiTrack ? midiTrack.name : 'NOT FOUND');
if (midiTrack) {
console.log('[FRONTEND] Track has', midiTrack.clips.length, 'clips:', midiTrack.clips.map(c => `{id:${c.clipId}, name:"${c.name}", loading:${c.loading}}`));
// Find the clip we created when recording started
let existingClip = midiTrack.clips.find(c => c.clipId === event.clip_id);
console.log('[FRONTEND] Found existing clip:', existingClip ? `id:${existingClip.clipId}, name:"${existingClip.name}", loading:${existingClip.loading}` : 'NOT FOUND');
if (existingClip) {
// Fetch the clip data from the backend
try {
console.log('[FRONTEND] Fetching MIDI clip data from backend...');
const clipData = await invoke('audio_get_midi_clip_data', {
trackId: event.track_id,
clipId: event.clip_id
});
console.log('[FRONTEND] Received clip data:', clipData);
// Update the clip with the recorded notes
console.log('[FRONTEND] Updating clip - before:', { loading: existingClip.loading, name: existingClip.name, duration: existingClip.duration, noteCount: existingClip.notes?.length });
existingClip.loading = false;
existingClip.name = `MIDI Clip (${event.note_count} notes)`;
existingClip.duration = clipData.duration;
existingClip.notes = clipData.notes;
console.log('[FRONTEND] Updating clip - after:', { loading: existingClip.loading, name: existingClip.name, duration: existingClip.duration, noteCount: existingClip.notes?.length });
} catch (error) {
console.error('[FRONTEND] Failed to fetch MIDI clip data:', error);
existingClip.loading = false;
existingClip.name = `MIDI Clip (failed)`;
}
} else {
console.error('[FRONTEND] Could not find clip', event.clip_id, 'on track', event.track_id);
}
// Request redraw to show the clip with recorded notes
updateLayers();
if (context.timelineWidget) {
context.timelineWidget.requestRedraw();
}
}
// Clear recording state
console.log('[FRONTEND] Clearing MIDI recording state');
context.isRecording = false;
context.recordingTrackId = null;
context.recordingClipId = null;
// Update record button appearance
if (context.recordButton) {
context.recordButton.className = "playback-btn playback-btn-record";
context.recordButton.title = "Record";
}
console.log('[FRONTEND] MIDI recording complete - recorded', event.note_count, 'notes');
break;
case 'GraphPresetLoaded': case 'GraphPresetLoaded':
// Preset loaded - layers are already populated during graph reload // Preset loaded - layers are already populated during graph reload
console.log('GraphPresetLoaded event received for track:', event.track_id); console.log('GraphPresetLoaded event received for track:', event.track_id);
@ -1330,31 +1405,99 @@ async function toggleRecording() {
// Stop recording // Stop recording
console.log('[FRONTEND] toggleRecording - stopping recording for clip:', context.recordingClipId); console.log('[FRONTEND] toggleRecording - stopping recording for clip:', context.recordingClipId);
try { try {
// Check if we're recording MIDI or audio
const track = context.activeObject.audioTracks.find(t => t.audioTrackId === context.recordingTrackId);
const isMidiRecording = track && track.type === 'midi';
console.log('[FRONTEND] Stopping recording - isMIDI:', isMidiRecording, 'track type:', track?.type, 'track ID:', context.recordingTrackId);
if (isMidiRecording) {
console.log('[FRONTEND] Calling audio_stop_midi_recording...');
await invoke('audio_stop_midi_recording');
console.log('[FRONTEND] audio_stop_midi_recording returned successfully');
} else {
console.log('[FRONTEND] Calling audio_stop_recording...');
await invoke('audio_stop_recording'); await invoke('audio_stop_recording');
console.log('[FRONTEND] audio_stop_recording returned successfully');
}
console.log('[FRONTEND] Clearing recording state in toggleRecording');
context.isRecording = false; context.isRecording = false;
context.recordingTrackId = null; context.recordingTrackId = null;
context.recordingClipId = null; context.recordingClipId = null;
console.log('[FRONTEND] Recording stopped via toggle button');
} catch (error) { } catch (error) {
console.error('[FRONTEND] Failed to stop recording:', error); console.error('[FRONTEND] Failed to stop recording:', error);
} }
} else { } else {
// Start recording - check if activeLayer is an audio track // Start recording - check if activeLayer is a track
const audioTrack = context.activeObject.activeLayer; const audioTrack = context.activeObject.activeLayer;
if (!audioTrack || !(audioTrack instanceof AudioTrack)) { if (!audioTrack || !(audioTrack instanceof AudioTrack)) {
alert('Please select an audio track to record to'); alert('Please select a track to record to');
return; return;
} }
if (audioTrack.audioTrackId === null) { if (audioTrack.audioTrackId === null) {
alert('Audio track not properly initialized'); alert('Track not properly initialized');
return; return;
} }
// Start recording at current playhead position // Start recording at current playhead position
const startTime = context.activeObject.currentTime || 0; const startTime = context.activeObject.currentTime || 0;
console.log('[FRONTEND] Starting recording on track', audioTrack.audioTrackId, 'at time', startTime); // Check if this is a MIDI track or audio track
if (audioTrack.type === 'midi') {
// MIDI recording
console.log('[FRONTEND] Starting MIDI recording on track', audioTrack.audioTrackId, 'at time', startTime);
try {
// First, create a MIDI clip at the current playhead position
const clipDuration = 4.0; // Default clip duration of 4 seconds (can be extended by recording)
const clipId = await invoke('audio_create_midi_clip', {
trackId: audioTrack.audioTrackId,
startTime: startTime,
duration: clipDuration
});
console.log('[FRONTEND] Created MIDI clip with ID:', clipId);
// Add clip to track immediately (similar to MIDI import)
audioTrack.clips.push({
clipId: clipId,
name: 'Recording...',
startTime: startTime,
duration: clipDuration,
notes: [],
loading: true
});
// Update UI to show the recording clip
updateLayers();
if (context.timelineWidget) {
context.timelineWidget.requestRedraw();
}
// Now start MIDI recording
await invoke('audio_start_midi_recording', {
trackId: audioTrack.audioTrackId,
clipId: clipId,
startTime: startTime
});
context.isRecording = true;
context.recordingTrackId = audioTrack.audioTrackId;
context.recordingClipId = clipId;
console.log('[FRONTEND] MIDI recording started successfully');
// Start playback so the timeline moves (if not already playing)
if (!playing) {
await playPause();
}
} catch (error) {
console.error('[FRONTEND] Failed to start MIDI recording:', error);
alert('Failed to start MIDI recording: ' + error);
}
} else {
// Audio recording
console.log('[FRONTEND] Starting audio recording on track', audioTrack.audioTrackId, 'at time', startTime);
try { try {
await invoke('audio_start_recording', { await invoke('audio_start_recording', {
trackId: audioTrack.audioTrackId, trackId: audioTrack.audioTrackId,
@ -1362,15 +1505,16 @@ async function toggleRecording() {
}); });
context.isRecording = true; context.isRecording = true;
context.recordingTrackId = audioTrack.audioTrackId; context.recordingTrackId = audioTrack.audioTrackId;
console.log('[FRONTEND] Recording started successfully, waiting for RecordingStarted event'); console.log('[FRONTEND] Audio recording started successfully, waiting for RecordingStarted event');
// Start playback so the timeline moves (if not already playing) // Start playback so the timeline moves (if not already playing)
if (!playing) { if (!playing) {
await playPause(); await playPause();
} }
} catch (error) { } catch (error) {
console.error('[FRONTEND] Failed to start recording:', error); console.error('[FRONTEND] Failed to start audio recording:', error);
alert('Failed to start recording: ' + error); alert('Failed to start audio recording: ' + error);
}
} }
} }
} }
@ -7248,6 +7392,81 @@ function nodeEditor() {
}); });
}); });
// Handle select dropdowns
const selects = nodeElement.querySelectorAll('select[data-param]');
selects.forEach(select => {
// Track parameter change action for undo/redo
let paramAction = null;
// Prevent node dragging when interacting with select
select.addEventListener("mousedown", (e) => {
e.stopPropagation();
// Initialize undo action
const paramId = parseInt(e.target.getAttribute("data-param"));
const currentValue = parseFloat(e.target.value);
const nodeData = editor.getNodeFromId(nodeId);
if (nodeData && nodeData.data.backendId !== null) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
paramAction = actions.graphSetParameter.initialize(
currentTrackId,
nodeData.data.backendId,
paramId,
nodeId,
currentValue
);
}
}
});
select.addEventListener("pointerdown", (e) => {
e.stopPropagation();
});
select.addEventListener("change", (e) => {
const paramId = parseInt(e.target.getAttribute("data-param"));
const value = parseFloat(e.target.value);
console.log(`[setupNodeParameters] Select change - nodeId: ${nodeId}, paramId: ${paramId}, value: ${value}`);
// Update display span if it exists
const nodeData = editor.getNodeFromId(nodeId);
if (nodeData) {
const nodeDef = nodeTypes[nodeData.name];
if (nodeDef && nodeDef.parameters[paramId]) {
const param = nodeDef.parameters[paramId];
const displaySpan = nodeElement.querySelector(`#${param.name}-${nodeId}`);
if (displaySpan) {
// Update the span with the selected option text
displaySpan.textContent = e.target.options[e.target.selectedIndex].text;
}
}
// Send to backend
if (nodeData.data.backendId !== null) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_set_parameter", {
trackId: currentTrackId,
nodeId: nodeData.data.backendId,
paramId: paramId,
value: value
}).catch(err => {
console.error("Failed to set parameter:", err);
});
}
}
}
// Finalize undo action
if (paramAction) {
actions.graphSetParameter.finalize(paramAction, value);
paramAction = null;
}
});
});
// Handle Load Sample button for SimpleSampler // Handle Load Sample button for SimpleSampler
const loadSampleBtn = nodeElement.querySelector(".load-sample-btn"); const loadSampleBtn = nodeElement.querySelector(".load-sample-btn");
if (loadSampleBtn) { if (loadSampleBtn) {
@ -9107,20 +9326,15 @@ async function addEmptyMIDITrack() {
const trackUuid = uuidv4(); const trackUuid = uuidv4();
try { try {
// Get available instruments // Note: MIDI tracks now use node-based instruments via instrument_graph
const instruments = await getAvailableInstruments();
// Default to SimpleSynth for now (we can add UI selection later)
const instrument = instruments.length > 0 ? instruments[0] : 'SimpleSynth';
// Create new AudioTrack with type='midi' // Create new AudioTrack with type='midi'
const newMIDITrack = new AudioTrack(trackUuid, trackName, 'midi'); const newMIDITrack = new AudioTrack(trackUuid, trackName, 'midi');
newMIDITrack.instrument = instrument;
// Initialize track in backend (creates MIDI track with instrument) // Initialize track in backend (creates MIDI track with node graph)
await newMIDITrack.initializeTrack(); await newMIDITrack.initializeTrack();
console.log('[addEmptyMIDITrack] After initializeTrack - instrument:', instrument); console.log('[addEmptyMIDITrack] After initializeTrack - track created with node graph');
// Add track to active object // Add track to active object
context.activeObject.audioTracks.push(newMIDITrack); context.activeObject.audioTracks.push(newMIDITrack);
@ -9144,16 +9358,7 @@ async function addEmptyMIDITrack() {
} }
// MIDI Command Wrappers // MIDI Command Wrappers
async function getAvailableInstruments() { // Note: getAvailableInstruments() removed - now using node-based instruments
try {
const instruments = await invoke('audio_get_available_instruments');
console.log('Available instruments:', instruments);
return instruments;
} catch (error) {
console.error('Failed to get available instruments:', error);
throw error;
}
}
async function createMIDITrack(name, instrument) { async function createMIDITrack(name, instrument) {
try { try {

View File

@ -1015,6 +1015,135 @@ export const nodeTypes = {
` `
}, },
Math: {
name: 'Math',
category: NodeCategory.UTILITY,
description: 'Mathematical and logical operations on CV signals',
inputs: [
{ name: 'CV In A', type: SignalType.CV, index: 0 },
{ name: 'CV In B', type: SignalType.CV, index: 1 }
],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 }
],
parameters: [
{ id: 0, name: 'operation', label: 'Operation', min: 0, max: 13, default: 0, unit: '' },
{ id: 1, name: 'operand', label: 'Operand', min: -10, max: 10, default: 1, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Math</div>
<div class="node-param">
<label>Op: <span id="mathop-${nodeId}">Add</span></label>
<select class="node-select" data-node="${nodeId}" data-param="0" style="width: 100%; padding: 2px;">
<option value="0">Add</option>
<option value="1">Subtract</option>
<option value="2">Multiply</option>
<option value="3">Divide</option>
<option value="4">Min</option>
<option value="5">Max</option>
<option value="6">Average</option>
<option value="7">Invert</option>
<option value="8">Abs</option>
<option value="9">Clamp</option>
<option value="10">Wrap</option>
<option value="11">Greater</option>
<option value="12">Less</option>
<option value="13">Equal</option>
</select>
</div>
<div class="node-param">
<label>B: <span id="mathoperand-${nodeId}">1.0</span></label>
<input type="range" data-node="${nodeId}" data-param="1" min="-10" max="10" value="1" step="0.1">
</div>
</div>
`
},
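
The operation parameter is an index into the fourteen options above. The backend MathNode is not part of this diff, so the following per-sample dispatch is a sketch of what those indices imply; the divide-by-zero guard and the exact Clamp/Wrap/Equal semantics are assumptions:

fn math_op(op: u32, a: f32, b: f32) -> f32 {
    match op {
        0 => a + b,                                    // Add
        1 => a - b,                                    // Subtract
        2 => a * b,                                    // Multiply
        3 => if b != 0.0 { a / b } else { 0.0 },       // Divide, guarded against /0
        4 => a.min(b),                                 // Min
        5 => a.max(b),                                 // Max
        6 => 0.5 * (a + b),                            // Average
        7 => -a,                                       // Invert
        8 => a.abs(),                                  // Abs
        9 => a.clamp(-b.abs(), b.abs()),               // Clamp to +/- |B| (assumed)
        10 => a.rem_euclid(b.abs().max(f32::EPSILON)), // Wrap into [0, |B|) (assumed)
        11 => if a > b { 1.0 } else { 0.0 },           // Greater -> gate
        12 => if a < b { 1.0 } else { 0.0 },           // Less -> gate
        13 => if (a - b).abs() < 1e-6 { 1.0 } else { 0.0 }, // Equal, with tolerance (assumed)
        _ => a,
    }
}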
Quantizer: {
name: 'Quantizer',
category: NodeCategory.UTILITY,
description: 'Quantize CV to musical scales',
inputs: [
{ name: 'CV In', type: SignalType.CV, index: 0 }
],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 },
{ name: 'Gate Out', type: SignalType.CV, index: 1 }
],
parameters: [
{ id: 0, name: 'scale', label: 'Scale', min: 0, max: 10, default: 0, unit: '' },
{ id: 1, name: 'root', label: 'Root', min: 0, max: 11, default: 0, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Quantizer</div>
<div class="node-param">
<label>Scale: <span id="quantscale-${nodeId}">Chromatic</span></label>
<select class="node-select" data-node="${nodeId}" data-param="0" style="width: 100%; padding: 2px;">
<option value="0">Chromatic</option>
<option value="1">Major</option>
<option value="2">Minor</option>
<option value="3">Pent. Major</option>
<option value="4">Pent. Minor</option>
<option value="5">Dorian</option>
<option value="6">Phrygian</option>
<option value="7">Lydian</option>
<option value="8">Mixolydian</option>
<option value="9">Whole Tone</option>
<option value="10">Octaves</option>
</select>
</div>
<div class="node-param">
<label>Root: <span id="quantroot-${nodeId}">C</span></label>
<select class="node-select" data-node="${nodeId}" data-param="1" style="width: 100%; padding: 2px;">
<option value="0">C</option>
<option value="1">C#</option>
<option value="2">D</option>
<option value="3">D#</option>
<option value="4">E</option>
<option value="5">F</option>
<option value="6">F#</option>
<option value="7">G</option>
<option value="8">G#</option>
<option value="9">A</option>
<option value="10">A#</option>
<option value="11">B</option>
</select>
</div>
</div>
`
},
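
Quantization snaps the incoming pitch CV to the nearest degree of the selected scale, transposed by the root. A minimal sketch, assuming pitch CV is measured in semitones and showing only two of the eleven scale masks; the backend node is not in this diff:

const MAJOR: [i32; 7] = [0, 2, 4, 5, 7, 9, 11];
const MINOR: [i32; 7] = [0, 2, 3, 5, 7, 8, 10];

fn quantize(cv_semitones: f32, scale: &[i32], root: i32) -> f32 {
    let rel = cv_semitones - root as f32;
    let octave = (rel / 12.0).floor();
    let within = rel - octave * 12.0; // position inside the octave, 0..12
    let mut best = 0.0_f32;
    let mut best_dist = f32::MAX;
    for &deg in scale {
        // Consider the degree in this octave and its neighbours, so values
        // near the octave boundary can snap across it.
        for cand in [deg as f32 - 12.0, deg as f32, deg as f32 + 12.0] {
            let dist = (within - cand).abs();
            if dist < best_dist {
                best_dist = dist;
                best = cand;
            }
        }
    }
    root as f32 + octave * 12.0 + best
}

For example, quantize(61.3, &MAJOR, 0) lands on 62.0: within the octave, 1.3 semitones sits closer to the major second than to the tonic.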
SlewLimiter: {
name: 'SlewLimiter',
category: NodeCategory.UTILITY,
description: 'Limit rate of change for portamento/glide effects',
inputs: [
{ name: 'CV In', type: SignalType.CV, index: 0 }
],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 }
],
parameters: [
{ id: 0, name: 'rise_time', label: 'Rise Time', min: 0, max: 5, default: 0.01, unit: 's' },
{ id: 1, name: 'fall_time', label: 'Fall Time', min: 0, max: 5, default: 0.01, unit: 's' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Slew Limiter</div>
<div class="node-param">
<label>Rise: <span id="slewrise-${nodeId}">0.01</span>s</label>
<input type="range" data-node="${nodeId}" data-param="0" min="0" max="5" value="0.01" step="0.001">
</div>
<div class="node-param">
<label>Fall: <span id="slewfall-${nodeId}">0.01</span>s</label>
<input type="range" data-node="${nodeId}" data-param="1" min="0" max="5" value="0.01" step="0.001">
</div>
</div>
`
},
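
A slew limiter bounds how fast the output may move toward its input, which is what gives portamento its glide. A hedged sketch of the per-sample form the parameters imply, assuming rise_time and fall_time are seconds per unit of CV change (the backend node is not shown in this diff):

struct Slew {
    current: f32,
}

impl Slew {
    fn process(&mut self, target: f32, rise_time: f32, fall_time: f32, sample_rate: f32) -> f32 {
        // Maximum step per sample in each direction; zero time means no limit.
        let max_rise = if rise_time > 0.0 { 1.0 / (rise_time * sample_rate) } else { f32::MAX };
        let max_fall = if fall_time > 0.0 { 1.0 / (fall_time * sample_rate) } else { f32::MAX };
        let delta = target - self.current;
        self.current += delta.clamp(-max_fall, max_rise);
        self.current
    }
}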
EQ: { EQ: {
name: 'EQ', name: 'EQ',
category: NodeCategory.EFFECT, category: NodeCategory.EFFECT,

View File

@ -4285,9 +4285,20 @@ class VirtualPiano extends Widget {
console.log(`Note ON: ${this.getMidiNoteInfo(midiNote).name} (${midiNote}) velocity: ${velocity}`); console.log(`Note ON: ${this.getMidiNoteInfo(midiNote).name} (${midiNote}) velocity: ${velocity}`);
// Send to backend - use track ID 0 (first MIDI track) // Send to backend - use selected track or recording track
// TODO: Make this configurable to select which track to send to let trackId = 0; // Default to first track
invoke('audio_send_midi_note_on', { trackId: 0, note: midiNote, velocity }).catch(error => { if (typeof context !== 'undefined') {
// If recording, use the recording track
if (context.isRecording && context.recordingTrackId !== null) {
trackId = context.recordingTrackId;
}
// Otherwise use the selected track
else if (context.activeObject && context.activeObject.activeLayer && context.activeObject.activeLayer.audioTrackId !== null) {
trackId = context.activeObject.activeLayer.audioTrackId;
}
}
invoke('audio_send_midi_note_on', { trackId: trackId, note: midiNote, velocity }).catch(error => {
console.error('Failed to send MIDI note on:', error); console.error('Failed to send MIDI note on:', error);
}); });
@ -4305,8 +4316,20 @@ class VirtualPiano extends Widget {
console.log(`Note OFF: ${this.getMidiNoteInfo(midiNote).name} (${midiNote})`); console.log(`Note OFF: ${this.getMidiNoteInfo(midiNote).name} (${midiNote})`);
// Send to backend - use track ID 0 (first MIDI track) // Send to backend - use selected track or recording track
invoke('audio_send_midi_note_off', { trackId: 0, note: midiNote }).catch(error => { let trackId = 0; // Default to first track
if (typeof context !== 'undefined') {
// If recording, use the recording track
if (context.isRecording && context.recordingTrackId !== null) {
trackId = context.recordingTrackId;
}
// Otherwise use the selected track
else if (context.activeObject && context.activeObject.activeLayer && context.activeObject.activeLayer.audioTrackId !== null) {
trackId = context.activeObject.activeLayer.audioTrackId;
}
}
invoke('audio_send_midi_note_off', { trackId: trackId, note: midiNote }).catch(error => {
console.error('Failed to send MIDI note off:', error); console.error('Failed to send MIDI note off:', error);
}); });
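
On the engine side, routing live input into the active recording presumably means stamping each incoming event relative to the recording's start, so the clip can be placed anywhere on the timeline later. A sketch under that assumption, building on the hypothetical MidiRecordingState fields above (method and field names are illustrative):

impl MidiRecordingState {
    // Capture a live event at the current playhead position in seconds;
    // timestamps are stored relative to the clip start.
    fn capture(&mut self, mut event: MidiEvent, playhead_seconds: f64) {
        event.timestamp = (playhead_seconds - self.start_time).max(0.0);
        self.events.push(event);
    }
}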