diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs index 3be4e77..b7e3279 100644 --- a/daw-backend/src/audio/engine.rs +++ b/daw-backend/src/audio/engine.rs @@ -4,11 +4,11 @@ use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent}; use crate::audio::node_graph::{nodes::*, InstrumentGraph}; use crate::audio::pool::AudioPool; use crate::audio::project::Project; -use crate::audio::recording::RecordingState; +use crate::audio::recording::{MidiRecordingState, RecordingState}; use crate::audio::track::{Track, TrackId, TrackNode}; use crate::command::{AudioEvent, Command, Query, QueryResponse}; use petgraph::stable_graph::NodeIndex; -use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::atomic::{AtomicU32, AtomicU64, Ordering}; use std::sync::Arc; /// Audio engine for Phase 6: hierarchical tracks with groups @@ -30,6 +30,9 @@ pub struct Engine { // Shared playhead for UI reads playhead_atomic: Arc<AtomicU64>, + // Shared MIDI clip ID counter for synchronous access + next_midi_clip_id_atomic: Arc<AtomicU32>, + // Event counter for periodic position updates frames_since_last_event: usize, event_interval_frames: usize, @@ -38,13 +41,15 @@ mix_buffer: Vec<f32>, // ID counters - next_midi_clip_id: MidiClipId, next_clip_id: ClipId, // Recording state recording_state: Option<RecordingState>, input_rx: Option<rtrb::Consumer<f32>>, recording_progress_counter: usize, + + // MIDI recording state + midi_recording_state: Option<MidiRecordingState>, } impl Engine { @@ -75,14 +80,15 @@ impl Engine { query_rx, query_response_tx, playhead_atomic: Arc::new(AtomicU64::new(0)), + next_midi_clip_id_atomic: Arc::new(AtomicU32::new(0)), frames_since_last_event: 0, event_interval_frames, mix_buffer: Vec::new(), - next_midi_clip_id: 0, next_clip_id: 0, recording_state: None, input_rx: None, recording_progress_counter: 0, + midi_recording_state: None, } } @@ -157,6 +163,7 @@ impl Engine { query_tx, query_response_rx, playhead: Arc::clone(&self.playhead_atomic), + next_midi_clip_id: Arc::clone(&self.next_midi_clip_id_atomic), sample_rate: self.sample_rate, channels: self.channels, } } @@ -192,8 +199,8 @@ impl Engine { self.buffer_pool = BufferPool::new(8, output.len()); } - // Convert playhead from samples to seconds for timeline-based rendering - let playhead_seconds = self.playhead as f64 / (self.sample_rate as f64 * self.channels as f64); + // Convert playhead from frames to seconds for timeline-based rendering + let playhead_seconds = self.playhead as f64 / self.sample_rate as f64; // Render the entire project hierarchy into the mix buffer self.project.render( @@ -208,8 +215,8 @@ impl Engine { // Copy mix to output output.copy_from_slice(&self.mix_buffer); - // Update playhead - self.playhead += output.len() as u64; + // Update playhead (convert total samples to frames) + self.playhead += (output.len() / self.channels as usize) as u64; // Update atomic playhead for UI reads self.playhead_atomic .store(self.playhead, Ordering::Relaxed); @@ -219,12 +226,24 @@ self.frames_since_last_event += output.len() / self.channels as usize; if self.frames_since_last_event >= self.event_interval_frames / self.channels as usize { - let position_seconds = self.playhead as f64 / (self.sample_rate as f64 * self.channels as f64); + let position_seconds = self.playhead as f64 / self.sample_rate as f64; let _ = self .event_tx .push(AudioEvent::PlaybackPosition(position_seconds)); self.frames_since_last_event = 0; + + // Send MIDI recording progress if active + if let Some(recording) = &self.midi_recording_state { + let current_time = self.playhead as f64 / self.sample_rate as f64; + let
duration = current_time - recording.start_time; + let notes = recording.get_notes().to_vec(); + let _ = self.event_tx.push(AudioEvent::MidiRecordingProgress( + recording.track_id, + recording.clip_id, + duration, + notes, + )); + } } } else { // Not playing, but process live MIDI input @@ -296,10 +315,12 @@ impl Engine { self.project.stop_all_notes(); } Command::Seek(seconds) => { - let samples = (seconds * self.sample_rate as f64 * self.channels as f64) as u64; - self.playhead = samples; + let frames = (seconds * self.sample_rate as f64) as u64; + self.playhead = frames; self.playhead_atomic .store(self.playhead, Ordering::Relaxed); + // Stop all MIDI notes when seeking to prevent stuck notes + self.project.stop_all_notes(); } Command::SetTrackVolume(track_id, volume) => { if let Some(track) = self.project.get_track_mut(track_id) { @@ -393,28 +414,28 @@ impl Engine { let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name)); } Command::CreateMidiClip(track_id, start_time, duration) => { - // Create a new MIDI clip with unique ID - let clip_id = self.next_midi_clip_id; - self.next_midi_clip_id += 1; + // Get the next MIDI clip ID from the atomic counter + let clip_id = self.next_midi_clip_id_atomic.fetch_add(1, Ordering::Relaxed); let clip = MidiClip::new(clip_id, start_time, duration); let _ = self.project.add_midi_clip(track_id, clip); + // Notify UI about the new clip with its ID + let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id)); } Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration) => { // Add a MIDI note event to the specified clip if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - // Convert time to sample timestamp - let timestamp = (time_offset * self.sample_rate as f64) as u64; - let note_on = MidiEvent::note_on(timestamp, 0, note, velocity); + // Timestamp is now in seconds (sample-rate independent) + let note_on = MidiEvent::note_on(time_offset, 0, note, velocity); clip.events.push(note_on); // Add note off event - let note_off_timestamp = ((time_offset + duration) * self.sample_rate as f64) as u64; - let note_off = MidiEvent::note_off(note_off_timestamp, 0, note, 64); + let note_off_time = time_offset + duration; + let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); clip.events.push(note_off); - // Sort events by timestamp - clip.events.sort_by_key(|e| e.timestamp); + // Sort events by timestamp (using partial_cmp for f64) + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); } } } @@ -430,20 +451,19 @@ impl Engine { clip.events.clear(); // Add new events from the notes array + // Timestamps are now stored in seconds (sample-rate independent) for (start_time, note, velocity, duration) in notes { - // Convert time to sample timestamp - let timestamp = (start_time * self.sample_rate as f64) as u64; - let note_on = MidiEvent::note_on(timestamp, 0, note, velocity); + let note_on = MidiEvent::note_on(start_time, 0, note, velocity); clip.events.push(note_on); // Add note off event - let note_off_timestamp = ((start_time + duration) * self.sample_rate as f64) as u64; - let note_off = MidiEvent::note_off(note_off_timestamp, 0, note, 64); + let note_off_time = start_time + duration; + let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); clip.events.push(note_off); } - // Sort events by timestamp - clip.events.sort_by_key(|e| e.timestamp); + // Sort events by 
timestamp (using partial_cmp for f64) + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); } } } @@ -596,6 +616,16 @@ impl Engine { recording.resume(); } } + Command::StartMidiRecording(track_id, clip_id, start_time) => { + // Start MIDI recording on the specified track + self.handle_start_midi_recording(track_id, clip_id, start_time); + } + Command::StopMidiRecording => { + eprintln!("[ENGINE] Received StopMidiRecording command"); + // Stop the current MIDI recording + self.handle_stop_midi_recording(); + eprintln!("[ENGINE] handle_stop_midi_recording() completed"); + } Command::Reset => { // Reset the entire project to initial state // Stop playback @@ -617,7 +647,7 @@ impl Engine { self.buffer_pool = BufferPool::new(8, buffer_size); // Reset ID counters - self.next_midi_clip_id = 0; + self.next_midi_clip_id_atomic.store(0, Ordering::Relaxed); self.next_clip_id = 0; // Clear mix buffer @@ -630,11 +660,31 @@ impl Engine { Command::SendMidiNoteOn(track_id, note, velocity) => { // Send a live MIDI note on event to the specified track's instrument self.project.send_midi_note_on(track_id, note, velocity); + + // If MIDI recording is active on this track, capture the event + if let Some(recording) = &mut self.midi_recording_state { + if recording.track_id == track_id { + let absolute_time = self.playhead as f64 / self.sample_rate as f64; + eprintln!("[MIDI_RECORDING] NoteOn captured: note={}, velocity={}, absolute_time={:.3}s, playhead={}, sample_rate={}", + note, velocity, absolute_time, self.playhead, self.sample_rate); + recording.note_on(note, velocity, absolute_time); + } + } } Command::SendMidiNoteOff(track_id, note) => { // Send a live MIDI note off event to the specified track's instrument self.project.send_midi_note_off(track_id, note); + + // If MIDI recording is active on this track, capture the event + if let Some(recording) = &mut self.midi_recording_state { + if recording.track_id == track_id { + let absolute_time = self.playhead as f64 / self.sample_rate as f64; + eprintln!("[MIDI_RECORDING] NoteOff captured: note={}, absolute_time={:.3}s, playhead={}, sample_rate={}", + note, absolute_time, self.playhead, self.sample_rate); + recording.note_off(note, absolute_time); + } + } } // Node graph commands @@ -654,17 +704,20 @@ impl Engine { "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())), "Splitter" => Box::new(SplitterNode::new("Splitter".to_string())), "Pan" => Box::new(PanNode::new("Pan".to_string())), + "Quantizer" => Box::new(QuantizerNode::new("Quantizer".to_string())), "Delay" => Box::new(DelayNode::new("Delay".to_string())), "Distortion" => Box::new(DistortionNode::new("Distortion".to_string())), "Reverb" => Box::new(ReverbNode::new("Reverb".to_string())), "Chorus" => Box::new(ChorusNode::new("Chorus".to_string())), "Compressor" => Box::new(CompressorNode::new("Compressor".to_string())), "Limiter" => Box::new(LimiterNode::new("Limiter".to_string())), + "Math" => Box::new(MathNode::new("Math".to_string())), "EQ" => Box::new(EQNode::new("EQ".to_string())), "Flanger" => Box::new(FlangerNode::new("Flanger".to_string())), "FMSynth" => Box::new(FMSynthNode::new("FM Synth".to_string())), "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable".to_string())), "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler".to_string())), + "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter".to_string())), "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler".to_string())), "MidiInput" => 
Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), @@ -718,17 +771,20 @@ impl Engine { "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise".to_string())), "Splitter" => Box::new(SplitterNode::new("Splitter".to_string())), "Pan" => Box::new(PanNode::new("Pan".to_string())), + "Quantizer" => Box::new(QuantizerNode::new("Quantizer".to_string())), "Delay" => Box::new(DelayNode::new("Delay".to_string())), "Distortion" => Box::new(DistortionNode::new("Distortion".to_string())), "Reverb" => Box::new(ReverbNode::new("Reverb".to_string())), "Chorus" => Box::new(ChorusNode::new("Chorus".to_string())), "Compressor" => Box::new(CompressorNode::new("Compressor".to_string())), "Limiter" => Box::new(LimiterNode::new("Limiter".to_string())), + "Math" => Box::new(MathNode::new("Math".to_string())), "EQ" => Box::new(EQNode::new("EQ".to_string())), "Flanger" => Box::new(FlangerNode::new("Flanger".to_string())), "FMSynth" => Box::new(FMSynthNode::new("FM Synth".to_string())), "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable".to_string())), "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler".to_string())), + "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter".to_string())), "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler".to_string())), "MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())), @@ -1100,6 +1156,21 @@ impl Engine { ))), } } + Query::GetMidiClip(track_id, clip_id) => { + if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) { + if let Some(clip) = track.clips.iter().find(|c| c.id == clip_id) { + use crate::command::MidiClipData; + QueryResponse::MidiClipData(Ok(MidiClipData { + duration: clip.duration, + events: clip.events.clone(), + })) + } else { + QueryResponse::MidiClipData(Err(format!("Clip {} not found in track {}", clip_id, track_id))) + } + } else { + QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id))) + } + } }; // Send response back @@ -1193,6 +1264,15 @@ impl Engine { /// Handle stopping a recording fn handle_stop_recording(&mut self) { eprintln!("[STOP_RECORDING] handle_stop_recording called"); + + // Check if we have an active MIDI recording first + if self.midi_recording_state.is_some() { + eprintln!("[STOP_RECORDING] Detected active MIDI recording, delegating to handle_stop_midi_recording"); + self.handle_stop_midi_recording(); + return; + } + + // Handle audio recording if let Some(recording) = self.recording_state.take() { let clip_id = recording.clip_id; let track_id = recording.track_id; @@ -1248,6 +1328,90 @@ impl Engine { } } + /// Handle starting MIDI recording + fn handle_start_midi_recording(&mut self, track_id: TrackId, clip_id: MidiClipId, start_time: f64) { + // Check if track exists and is a MIDI track + if let Some(crate::audio::track::TrackNode::Midi(_)) = self.project.get_track_mut(track_id) { + // Create MIDI recording state + let recording_state = MidiRecordingState::new(track_id, clip_id, start_time); + self.midi_recording_state = Some(recording_state); + + eprintln!("[MIDI_RECORDING] Started MIDI recording on track {} for clip {}", track_id, clip_id); + } else { + // Send error event if track not found or not a MIDI track + let _ = self.event_tx.push(AudioEvent::RecordingError( + format!("Track {} not found or is not a MIDI track", track_id) + )); + } + } + + /// Handle stopping MIDI 
recording + fn handle_stop_midi_recording(&mut self) { + eprintln!("[MIDI_RECORDING] handle_stop_midi_recording called"); + if let Some(mut recording) = self.midi_recording_state.take() { + // Close out any active notes at the current playhead position + let end_time = self.playhead as f64 / self.sample_rate as f64; + eprintln!("[MIDI_RECORDING] Closing active notes at time {}", end_time); + recording.close_active_notes(end_time); + + let clip_id = recording.clip_id; + let track_id = recording.track_id; + let notes = recording.get_notes().to_vec(); + let note_count = notes.len(); + let recording_duration = end_time - recording.start_time; + + eprintln!("[MIDI_RECORDING] Stopping MIDI recording for clip_id={}, track_id={}, captured {} notes, duration={:.3}s", + clip_id, track_id, note_count, recording_duration); + + // Update the MIDI clip using the existing UpdateMidiClipNotes logic + eprintln!("[MIDI_RECORDING] Looking for track {} to update clip", track_id); + if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + eprintln!("[MIDI_RECORDING] Found MIDI track, looking for clip {}", clip_id); + if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { + eprintln!("[MIDI_RECORDING] Found clip, clearing and adding {} notes", note_count); + // Clear existing events + clip.events.clear(); + + // Update clip duration to match the actual recording time + clip.duration = recording_duration; + + // Add new events from the recorded notes + // Timestamps are now stored in seconds (sample-rate independent) + for (start_time, note, velocity, duration) in notes.iter() { + let note_on = MidiEvent::note_on(*start_time, 0, *note, *velocity); + + eprintln!("[MIDI_RECORDING] Note {}: start_time={:.3}s, duration={:.3}s", + note, start_time, duration); + + clip.events.push(note_on); + + // Add note off event + let note_off_time = *start_time + *duration; + let note_off = MidiEvent::note_off(note_off_time, 0, *note, 64); + clip.events.push(note_off); + } + + // Sort events by timestamp (using partial_cmp for f64) + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); + eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, note_count, clip.events.len()); + } else { + eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found on track!", clip_id); + } + } else { + eprintln!("[MIDI_RECORDING] ERROR: Track {} not found or not a MIDI track!", track_id); + } + + // Send event to UI + eprintln!("[MIDI_RECORDING] Pushing MidiRecordingStopped event to event_tx..."); + match self.event_tx.push(AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count)) { + Ok(_) => eprintln!("[MIDI_RECORDING] MidiRecordingStopped event pushed successfully"), + Err(e) => eprintln!("[MIDI_RECORDING] ERROR: Failed to push event: {:?}", e), + } + } else { + eprintln!("[MIDI_RECORDING] No active MIDI recording to stop"); + } + } + /// Get current sample rate pub fn sample_rate(&self) -> u32 { self.sample_rate } @@ -1270,6 +1434,7 @@ pub struct EngineController { query_tx: rtrb::Producer<Query>, query_response_rx: rtrb::Consumer<QueryResponse>, playhead: Arc<AtomicU64>, + next_midi_clip_id: Arc<AtomicU32>, sample_rate: u32, channels: u32, } @@ -1331,8 +1496,8 @@ impl EngineController { /// Get current playhead position in seconds pub fn get_playhead_seconds(&self) -> f64 { - let samples = self.playhead.load(Ordering::Relaxed); - samples as f64 / (self.sample_rate as f64 * self.channels as f64) + let frames = self.playhead.load(Ordering::Relaxed); + frames as f64 / self.sample_rate
as f64 } /// Create a new metatrack @@ -1388,8 +1553,11 @@ impl EngineController { } /// Create a new MIDI clip on a track - pub fn create_midi_clip(&mut self, track_id: TrackId, start_time: f64, duration: f64) { + pub fn create_midi_clip(&mut self, track_id: TrackId, start_time: f64, duration: f64) -> MidiClipId { + // Peek at the next clip ID that will be used + let clip_id = self.next_midi_clip_id.load(Ordering::Relaxed); let _ = self.command_tx.push(Command::CreateMidiClip(track_id, start_time, duration)); + clip_id } /// Add a MIDI note to a clip @@ -1496,6 +1664,16 @@ impl EngineController { let _ = self.command_tx.push(Command::ResumeRecording); } + /// Start MIDI recording on a track + pub fn start_midi_recording(&mut self, track_id: TrackId, clip_id: MidiClipId, start_time: f64) { + let _ = self.command_tx.push(Command::StartMidiRecording(track_id, clip_id, start_time)); + } + + /// Stop the current MIDI recording + pub fn stop_midi_recording(&mut self) { + let _ = self.command_tx.push(Command::StopMidiRecording); + } + /// Reset the entire project (clear all tracks, audio pool, and state) pub fn reset(&mut self) { let _ = self.command_tx.push(Command::Reset); } @@ -1636,6 +1814,28 @@ impl EngineController { Err("Query timeout".to_string()) } + /// Query MIDI clip data + pub fn query_midi_clip(&mut self, track_id: TrackId, clip_id: MidiClipId) -> Result<MidiClipData, String> { + // Send query + if let Err(_) = self.query_tx.push(Query::GetMidiClip(track_id, clip_id)) { + return Err("Failed to send query - queue full".to_string()); + } + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = std::time::Duration::from_millis(500); + + while start.elapsed() < timeout { + if let Ok(QueryResponse::MidiClipData(result)) = self.query_response_rx.pop() { + return result; + } + // Small sleep to avoid busy-waiting + std::thread::sleep(std::time::Duration::from_micros(100)); + } + + Err("Query timeout".to_string()) + } + /// Query oscilloscope data from a node pub fn query_oscilloscope_data(&mut self, track_id: TrackId, node_id: u32, sample_count: usize) -> Result<OscilloscopeData, String> { // Send query diff --git a/daw-backend/src/audio/midi.rs b/daw-backend/src/audio/midi.rs index 0147440..a22c740 100644 --- a/daw-backend/src/audio/midi.rs +++ b/daw-backend/src/audio/midi.rs @@ -1,8 +1,8 @@ /// MIDI event representing a single MIDI message -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)] pub struct MidiEvent { - /// Sample position within the clip - pub timestamp: u64, + /// Time position within the clip in seconds (sample-rate independent) + pub timestamp: f64, /// MIDI status byte (includes channel) pub status: u8, /// First data byte (note number, CC number, etc.)
@@ -13,7 +13,7 @@ pub struct MidiEvent { impl MidiEvent { /// Create a new MIDI event - pub fn new(timestamp: u64, status: u8, data1: u8, data2: u8) -> Self { + pub fn new(timestamp: f64, status: u8, data1: u8, data2: u8) -> Self { Self { timestamp, status, @@ -23,7 +23,7 @@ impl MidiEvent { } /// Create a note on event - pub fn note_on(timestamp: u64, channel: u8, note: u8, velocity: u8) -> Self { + pub fn note_on(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self { Self { timestamp, status: 0x90 | (channel & 0x0F), @@ -33,7 +33,7 @@ impl MidiEvent { } /// Create a note off event - pub fn note_off(timestamp: u64, channel: u8, note: u8, velocity: u8) -> Self { + pub fn note_off(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self { Self { timestamp, status: 0x80 | (channel & 0x0F), @@ -91,8 +91,8 @@ impl MidiClip { /// Add a MIDI event to the clip pub fn add_event(&mut self, event: MidiEvent) { self.events.push(event); - // Keep events sorted by timestamp - self.events.sort_by_key(|e| e.timestamp); + // Keep events sorted by timestamp (using partial_cmp for f64) + self.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); } /// Get the end time of the clip @@ -107,8 +107,8 @@ &self, range_start_seconds: f64, range_end_seconds: f64, - sample_rate: u32, - ) -> Vec<(u64, MidiEvent)> { + _sample_rate: u32, + ) -> Vec<MidiEvent> { let mut result = Vec::new(); // Check if clip overlaps with the range @@ -120,21 +120,16 @@ let play_start = range_start_seconds.max(self.start_time); let play_end = range_end_seconds.min(self.end_time()); - // Convert to samples - let range_start_samples = (range_start_seconds * sample_rate as f64) as u64; - // Position within the clip let clip_position_seconds = play_start - self.start_time; - let clip_position_samples = (clip_position_seconds * sample_rate as f64) as u64; - let clip_end_samples = ((play_end - self.start_time) * sample_rate as f64) as u64; + let clip_end_seconds = play_end - self.start_time; // Find events in this range - // Note: Using <= for the end boundary to include events exactly at the clip end + // Note: event.timestamp is now in seconds relative to clip start + // Use half-open interval [start, end) to avoid triggering events twice for event in &self.events { - if event.timestamp >= clip_position_samples && event.timestamp <= clip_end_samples { - // Calculate absolute timestamp in the output buffer - let absolute_timestamp = range_start_samples + (event.timestamp - clip_position_samples); - result.push((absolute_timestamp, *event)); + if event.timestamp >= clip_position_seconds && event.timestamp < clip_end_seconds { + result.push(*event); } } diff --git a/daw-backend/src/audio/node_graph/graph.rs b/daw-backend/src/audio/node_graph/graph.rs index 3101c9d..95eb355 100644 --- a/daw-backend/src/audio/node_graph/graph.rs +++ b/daw-backend/src/audio/node_graph/graph.rs @@ -791,17 +791,20 @@ impl InstrumentGraph { "NoiseGenerator" => Box::new(NoiseGeneratorNode::new("Noise")), "Splitter" => Box::new(SplitterNode::new("Splitter")), "Pan" => Box::new(PanNode::new("Pan")), + "Quantizer" => Box::new(QuantizerNode::new("Quantizer")), "Delay" => Box::new(DelayNode::new("Delay")), "Distortion" => Box::new(DistortionNode::new("Distortion")), "Reverb" => Box::new(ReverbNode::new("Reverb")), "Chorus" => Box::new(ChorusNode::new("Chorus")), "Compressor" => Box::new(CompressorNode::new("Compressor")), "Limiter" => Box::new(LimiterNode::new("Limiter")), + "Math" => Box::new(MathNode::new("Math")), "EQ"
=> Box::new(EQNode::new("EQ")), "Flanger" => Box::new(FlangerNode::new("Flanger")), "FMSynth" => Box::new(FMSynthNode::new("FM Synth")), "WavetableOscillator" => Box::new(WavetableOscillatorNode::new("Wavetable")), "SimpleSampler" => Box::new(SimpleSamplerNode::new("Sampler")), + "SlewLimiter" => Box::new(SlewLimiterNode::new("Slew Limiter")), "MultiSampler" => Box::new(MultiSamplerNode::new("Multi Sampler")), "MidiInput" => Box::new(MidiInputNode::new("MIDI Input")), "MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")), diff --git a/daw-backend/src/audio/node_graph/nodes/math.rs b/daw-backend/src/audio/node_graph/nodes/math.rs new file mode 100644 index 0000000..c34e6fb --- /dev/null +++ b/daw-backend/src/audio/node_graph/nodes/math.rs @@ -0,0 +1,178 @@ +use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType}; +use crate::audio::midi::MidiEvent; + +const PARAM_OPERATION: u32 = 0; +const PARAM_OPERAND: u32 = 1; + +/// Mathematical and logical operations on CV signals +/// Operations: +/// 0 = Add, 1 = Subtract, 2 = Multiply, 3 = Divide +/// 4 = Min, 5 = Max, 6 = Average +/// 7 = Invert (1.0 - x), 8 = Absolute Value +/// 9 = Clamp (0.0 to 1.0), 10 = Wrap (-1.0 to 1.0) +/// 11 = Greater Than, 12 = Less Than, 13 = Equal (with tolerance) +pub struct MathNode { + name: String, + operation: u32, + operand: f32, + inputs: Vec<NodePort>, + outputs: Vec<NodePort>, + parameters: Vec<Parameter>, +} + +impl MathNode { + pub fn new(name: impl Into<String>) -> Self { + let name = name.into(); + + let inputs = vec![ + NodePort::new("CV In A", SignalType::CV, 0), + NodePort::new("CV In B", SignalType::CV, 1), + ]; + + let outputs = vec![ + NodePort::new("CV Out", SignalType::CV, 0), + ]; + + let parameters = vec![ + Parameter::new(PARAM_OPERATION, "Operation", 0.0, 13.0, 0.0, ParameterUnit::Generic), + Parameter::new(PARAM_OPERAND, "Operand", -10.0, 10.0, 1.0, ParameterUnit::Generic), + ]; + + Self { + name, + operation: 0, + operand: 1.0, + inputs, + outputs, + parameters, + } + } + + fn apply_operation(&self, a: f32, b: f32) -> f32 { + match self.operation { + 0 => a + b, // Add + 1 => a - b, // Subtract + 2 => a * b, // Multiply + 3 => if b.abs() > 0.0001 { a / b } else { 0.0 }, // Divide (with protection) + 4 => a.min(b), // Min + 5 => a.max(b), // Max + 6 => (a + b) * 0.5, // Average + 7 => 1.0 - a, // Invert (ignores b) + 8 => a.abs(), // Absolute Value (ignores b) + 9 => a.clamp(0.0, 1.0), // Clamp to 0-1 (ignores b) + 10 => { // Wrap -1 to 1 + let mut result = a; + while result > 1.0 { + result -= 2.0; + } + while result < -1.0 { + result += 2.0; + } + result + }, + 11 => if a > b { 1.0 } else { 0.0 }, // Greater Than + 12 => if a < b { 1.0 } else { 0.0 }, // Less Than + 13 => if (a - b).abs() < 0.01 { 1.0 } else { 0.0 }, // Equal (with tolerance) + _ => a, // Unknown operation - pass through + } + } +} + +impl AudioNode for MathNode { + fn category(&self) -> NodeCategory { + NodeCategory::Utility + } + + fn inputs(&self) -> &[NodePort] { + &self.inputs + } + + fn outputs(&self) -> &[NodePort] { + &self.outputs + } + + fn parameters(&self) -> &[Parameter] { + &self.parameters + } + + fn set_parameter(&mut self, id: u32, value: f32) { + match id { + PARAM_OPERATION => self.operation = (value as u32).clamp(0, 13), + PARAM_OPERAND => self.operand = value.clamp(-10.0, 10.0), + _ => {} + } + } + + fn get_parameter(&self, id: u32) -> f32 { + match id { + PARAM_OPERATION => self.operation as f32, + PARAM_OPERAND => self.operand, + _ => 0.0, + } + } + + fn process( + &mut self, + inputs:
&[&[f32]], + outputs: &mut [&mut [f32]], + _midi_inputs: &[&[MidiEvent]], + _midi_outputs: &mut [&mut Vec<MidiEvent>], + _sample_rate: u32, + ) { + if outputs.is_empty() { + return; + } + + let output = &mut outputs[0]; + let length = output.len(); + + // Get input A (or use 0.0) + let input_a = if !inputs.is_empty() && !inputs[0].is_empty() { + inputs[0] + } else { + &[] + }; + + // Get input B (or use operand parameter) + let input_b = if inputs.len() > 1 && !inputs[1].is_empty() { + inputs[1] + } else { + &[] + }; + + // Process each sample + for i in 0..length { + let a = if i < input_a.len() { input_a[i] } else { 0.0 }; + let b = if i < input_b.len() { + input_b[i] + } else { + self.operand + }; + + output[i] = self.apply_operation(a, b); + } + } + + fn reset(&mut self) { + // No state to reset + } + + fn node_type(&self) -> &str { + "Math" + } + + fn name(&self) -> &str { + &self.name + } + + fn clone_node(&self) -> Box<dyn AudioNode> { + Box::new(Self { + name: self.name.clone(), + operation: self.operation, + operand: self.operand, + inputs: self.inputs.clone(), + outputs: self.outputs.clone(), + parameters: self.parameters.clone(), + }) + } +} diff --git a/daw-backend/src/audio/node_graph/nodes/mod.rs b/daw-backend/src/audio/node_graph/nodes/mod.rs index dbc84fe..4f190da 100644 --- a/daw-backend/src/audio/node_graph/nodes/mod.rs +++ b/daw-backend/src/audio/node_graph/nodes/mod.rs @@ -11,6 +11,7 @@ mod limiter; mod fm_synth; mod gain; mod lfo; +mod math; mod midi_input; mod midi_to_cv; mod mixer; @@ -20,8 +21,10 @@ mod oscillator; mod oscilloscope; mod output; mod pan; +mod quantizer; mod reverb; mod simple_sampler; +mod slew_limiter; mod splitter; mod template_io; mod voice_allocator; @@ -40,6 +43,7 @@ pub use limiter::LimiterNode; pub use fm_synth::FMSynthNode; pub use gain::GainNode; pub use lfo::LFONode; +pub use math::MathNode; pub use midi_input::MidiInputNode; pub use midi_to_cv::MidiToCVNode; pub use mixer::MixerNode; @@ -49,8 +53,10 @@ pub use oscillator::OscillatorNode; pub use oscilloscope::OscilloscopeNode; pub use output::AudioOutputNode; pub use pan::PanNode; +pub use quantizer::QuantizerNode; pub use reverb::ReverbNode; pub use simple_sampler::SimpleSamplerNode; +pub use slew_limiter::SlewLimiterNode; pub use splitter::SplitterNode; pub use template_io::{TemplateInputNode, TemplateOutputNode}; pub use voice_allocator::VoiceAllocatorNode; diff --git a/daw-backend/src/audio/node_graph/nodes/quantizer.rs b/daw-backend/src/audio/node_graph/nodes/quantizer.rs new file mode 100644 index 0000000..c71e211 --- /dev/null +++ b/daw-backend/src/audio/node_graph/nodes/quantizer.rs @@ -0,0 +1,220 @@ +use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType}; +use crate::audio::midi::MidiEvent; + +const PARAM_SCALE: u32 = 0; +const PARAM_ROOT_NOTE: u32 = 1; + +/// Quantizer - snaps CV values to musical scales +/// Converts continuous CV into discrete pitch values based on a scale +/// Scale parameter: +/// 0 = Chromatic (all 12 notes) +/// 1 = Major scale +/// 2 = Minor scale (natural) +/// 3 = Pentatonic major +/// 4 = Pentatonic minor +/// 5 = Dorian +/// 6 = Phrygian +/// 7 = Lydian +/// 8 = Mixolydian +/// 9 = Whole tone +/// 10 = Octaves only +pub struct QuantizerNode { + name: String, + scale: u32, + root_note: u32, // 0-11 (C-B) + inputs: Vec<NodePort>, + outputs: Vec<NodePort>, + parameters: Vec<Parameter>, +} + +impl QuantizerNode { + pub fn new(name: impl Into<String>) -> Self { + let name = name.into(); + + let inputs = vec![ + NodePort::new("CV In", SignalType::CV, 0), + ]; + + let
outputs = vec![ + NodePort::new("CV Out", SignalType::CV, 0), + NodePort::new("Gate Out", SignalType::CV, 1), // Trigger when note changes + ]; + + let parameters = vec![ + Parameter::new(PARAM_SCALE, "Scale", 0.0, 10.0, 0.0, ParameterUnit::Generic), + Parameter::new(PARAM_ROOT_NOTE, "Root", 0.0, 11.0, 0.0, ParameterUnit::Generic), + ]; + + Self { + name, + scale: 0, + root_note: 0, + inputs, + outputs, + parameters, + } + } + + /// Get the scale intervals (semitones from root) + fn get_scale_intervals(&self) -> Vec<u32> { + match self.scale { + 0 => vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], // Chromatic + 1 => vec![0, 2, 4, 5, 7, 9, 11], // Major + 2 => vec![0, 2, 3, 5, 7, 8, 10], // Minor (natural) + 3 => vec![0, 2, 4, 7, 9], // Pentatonic major + 4 => vec![0, 3, 5, 7, 10], // Pentatonic minor + 5 => vec![0, 2, 3, 5, 7, 9, 10], // Dorian + 6 => vec![0, 1, 3, 5, 7, 8, 10], // Phrygian + 7 => vec![0, 2, 4, 6, 7, 9, 11], // Lydian + 8 => vec![0, 2, 4, 5, 7, 9, 10], // Mixolydian + 9 => vec![0, 2, 4, 6, 8, 10], // Whole tone + 10 => vec![0], // Octaves only + _ => vec![0, 2, 4, 5, 7, 9, 11], // Default to major + } + } + + /// Quantize a CV value to the nearest note in the scale + fn quantize(&self, cv: f32) -> f32 { + // Convert V/Oct to MIDI note (standard: 0V = A4 = MIDI 69) + // cv = (midi_note - 69) / 12.0 + // midi_note = cv * 12.0 + 69 + let input_midi_note = cv * 12.0 + 69.0; + + // Clamp to reasonable range + let input_midi_note = input_midi_note.clamp(0.0, 127.0); + + // Get scale intervals + let intervals = self.get_scale_intervals(); + + // Find which octave we're in (relative to C) + let octave = (input_midi_note / 12.0).floor() as i32; + let note_in_octave = (input_midi_note % 12.0) as u32; + + // Find the nearest note in the scale + let mut closest_interval = intervals[0]; + let mut min_distance = (note_in_octave as i32 - closest_interval as i32).abs(); + + for &interval in &intervals { + let distance = (note_in_octave as i32 - interval as i32).abs(); + if distance < min_distance { + min_distance = distance; + closest_interval = interval; + } + } + + // Calculate final MIDI note (adjusted for root note) + // Start from the octave * 12, add root note, add scale interval + let quantized_midi_note = (octave * 12) as f32 + self.root_note as f32 + closest_interval as f32; + + // Clamp result + let quantized_midi_note = quantized_midi_note.clamp(0.0, 127.0); + + // Convert back to V/Oct: voct = (midi_note - 69) / 12.0 + (quantized_midi_note - 69.0) / 12.0 + } +} + +impl AudioNode for QuantizerNode { + fn category(&self) -> NodeCategory { + NodeCategory::Utility + } + + fn inputs(&self) -> &[NodePort] { + &self.inputs + } + + fn outputs(&self) -> &[NodePort] { + &self.outputs + } + + fn parameters(&self) -> &[Parameter] { + &self.parameters + } + + fn set_parameter(&mut self, id: u32, value: f32) { + match id { + PARAM_SCALE => self.scale = (value as u32).clamp(0, 10), + PARAM_ROOT_NOTE => self.root_note = (value as u32).clamp(0, 11), + _ => {} + } + } + + fn get_parameter(&self, id: u32) -> f32 { + match id { + PARAM_SCALE => self.scale as f32, + PARAM_ROOT_NOTE => self.root_note as f32, + _ => 0.0, + } + } + + fn process( + &mut self, + inputs: &[&[f32]], + outputs: &mut [&mut [f32]], + _midi_inputs: &[&[MidiEvent]], + _midi_outputs: &mut [&mut Vec<MidiEvent>], + _sample_rate: u32, + ) { + if inputs.is_empty() || outputs.is_empty() { + return; + } + + let input = inputs[0]; + let length = input.len().min(outputs[0].len()); + + // Split outputs to avoid borrow conflicts + if outputs.len() > 1 {
let (cv_out, gate_out) = outputs.split_at_mut(1); + let cv_output = &mut cv_out[0]; + let gate_output = &mut gate_out[0]; + let gate_length = length.min(gate_output.len()); + + let mut last_note: Option<f32> = None; + + for i in 0..length { + let quantized = self.quantize(input[i]); + cv_output[i] = quantized; + + // Generate gate trigger when note changes + if i < gate_length { + if let Some(prev) = last_note { + gate_output[i] = if (quantized - prev).abs() > 0.001 { 1.0 } else { 0.0 }; + } else { + gate_output[i] = 1.0; // First note triggers gate + } + } + + last_note = Some(quantized); + } + } else { + // No gate output, just quantize CV + let cv_output = &mut outputs[0]; + for i in 0..length { + cv_output[i] = self.quantize(input[i]); + } + } + } + + fn reset(&mut self) { + // No state to reset + } + + fn node_type(&self) -> &str { + "Quantizer" + } + + fn name(&self) -> &str { + &self.name + } + + fn clone_node(&self) -> Box<dyn AudioNode> { + Box::new(Self { + name: self.name.clone(), + scale: self.scale, + root_note: self.root_note, + inputs: self.inputs.clone(), + outputs: self.outputs.clone(), + parameters: self.parameters.clone(), + }) + } +} diff --git a/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs b/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs new file mode 100644 index 0000000..7fed1dc --- /dev/null +++ b/daw-backend/src/audio/node_graph/nodes/slew_limiter.rs @@ -0,0 +1,156 @@ +use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType}; +use crate::audio::midi::MidiEvent; + +const PARAM_RISE_TIME: u32 = 0; +const PARAM_FALL_TIME: u32 = 1; + +/// Slew limiter - limits the rate of change of a CV signal +/// Useful for creating portamento/glide effects and smoothing control signals +pub struct SlewLimiterNode { + name: String, + rise_time: f32, // Time in seconds to rise from 0 to 1 + fall_time: f32, // Time in seconds to fall from 1 to 0 + last_value: f32, + inputs: Vec<NodePort>, + outputs: Vec<NodePort>, + parameters: Vec<Parameter>, +} + +impl SlewLimiterNode { + pub fn new(name: impl Into<String>) -> Self { + let name = name.into(); + + let inputs = vec![ + NodePort::new("CV In", SignalType::CV, 0), + ]; + + let outputs = vec![ + NodePort::new("CV Out", SignalType::CV, 0), + ]; + + let parameters = vec![ + Parameter::new(PARAM_RISE_TIME, "Rise Time", 0.0, 5.0, 0.01, ParameterUnit::Time), + Parameter::new(PARAM_FALL_TIME, "Fall Time", 0.0, 5.0, 0.01, ParameterUnit::Time), + ]; + + Self { + name, + rise_time: 0.01, + fall_time: 0.01, + last_value: 0.0, + inputs, + outputs, + parameters, + } + } +} + +impl AudioNode for SlewLimiterNode { + fn category(&self) -> NodeCategory { + NodeCategory::Utility + } + + fn inputs(&self) -> &[NodePort] { + &self.inputs + } + + fn outputs(&self) -> &[NodePort] { + &self.outputs + } + + fn parameters(&self) -> &[Parameter] { + &self.parameters + } + + fn set_parameter(&mut self, id: u32, value: f32) { + match id { + PARAM_RISE_TIME => self.rise_time = value.clamp(0.0, 5.0), + PARAM_FALL_TIME => self.fall_time = value.clamp(0.0, 5.0), + _ => {} + } + } + + fn get_parameter(&self, id: u32) -> f32 { + match id { + PARAM_RISE_TIME => self.rise_time, + PARAM_FALL_TIME => self.fall_time, + _ => 0.0, + } + } + + fn process( + &mut self, + inputs: &[&[f32]], + outputs: &mut [&mut [f32]], + _midi_inputs: &[&[MidiEvent]], + _midi_outputs: &mut [&mut Vec<MidiEvent>], + sample_rate: u32, + ) { + if inputs.is_empty() || outputs.is_empty() { + return; + } + + let input = inputs[0]; + let output = &mut outputs[0]; + let length = input.len().min(output.len());
+ // Calculate maximum change per sample + let sample_duration = 1.0 / sample_rate as f32; + + // Rise/fall rates (units per second) + let rise_rate = if self.rise_time > 0.0001 { + 1.0 / self.rise_time + } else { + f32::MAX // No limiting + }; + + let fall_rate = if self.fall_time > 0.0001 { + 1.0 / self.fall_time + } else { + f32::MAX // No limiting + }; + + for i in 0..length { + let target = input[i]; + let difference = target - self.last_value; + + let max_change = if difference > 0.0 { + // Rising + rise_rate * sample_duration + } else { + // Falling + fall_rate * sample_duration + }; + + // Limit the change + let limited_difference = difference.clamp(-max_change, max_change); + self.last_value += limited_difference; + + output[i] = self.last_value; + } + } + + fn reset(&mut self) { + self.last_value = 0.0; + } + + fn node_type(&self) -> &str { + "SlewLimiter" + } + + fn name(&self) -> &str { + &self.name + } + + fn clone_node(&self) -> Box<dyn AudioNode> { + Box::new(Self { + name: self.name.clone(), + rise_time: self.rise_time, + fall_time: self.fall_time, + last_value: self.last_value, + inputs: self.inputs.clone(), + outputs: self.outputs.clone(), + parameters: self.parameters.clone(), + }) + } +} diff --git a/daw-backend/src/audio/project.rs b/daw-backend/src/audio/project.rs index 57403fb..dd697bf 100644 --- a/daw-backend/src/audio/project.rs +++ b/daw-backend/src/audio/project.rs @@ -405,7 +405,7 @@ impl Project { pub fn send_midi_note_on(&mut self, track_id: TrackId, note: u8, velocity: u8) { // Queue the MIDI note-on event to the track's live MIDI queue if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) { - let event = MidiEvent::note_on(0, 0, note, velocity); + let event = MidiEvent::note_on(0.0, 0, note, velocity); track.queue_live_midi(event); } } @@ -414,7 +414,7 @@ pub fn send_midi_note_off(&mut self, track_id: TrackId, note: u8) { // Queue the MIDI note-off event to the track's live MIDI queue if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) { - let event = MidiEvent::note_off(0, 0, note, 0); + let event = MidiEvent::note_off(0.0, 0, note, 0); track.queue_live_midi(event); } } diff --git a/daw-backend/src/audio/recording.rs b/daw-backend/src/audio/recording.rs index ad7880e..2386638 100644 --- a/daw-backend/src/audio/recording.rs +++ b/daw-backend/src/audio/recording.rs @@ -1,6 +1,7 @@ /// Audio recording system for capturing microphone input -use crate::audio::{ClipId, TrackId}; +use crate::audio::{ClipId, MidiClipId, TrackId}; use crate::io::{WavWriter, WaveformPeak}; +use std::collections::HashMap; use std::path::PathBuf; /// State of an active recording session @@ -204,3 +205,106 @@ impl RecordingState { self.paused = false; } } + +/// Active MIDI note waiting for its noteOff event +#[derive(Debug, Clone)] +struct ActiveMidiNote { + /// MIDI note number (0-127) + note: u8, + /// Velocity (0-127) + velocity: u8, + /// Absolute time when note started (seconds) + start_time: f64, +} + +/// State of an active MIDI recording session +pub struct MidiRecordingState { + /// Track being recorded to + pub track_id: TrackId, + /// MIDI clip ID + pub clip_id: MidiClipId, + /// Timeline start position in seconds + pub start_time: f64, + /// Currently active notes (noteOn without matching noteOff) + /// Maps note number to ActiveMidiNote + active_notes: HashMap<u8, ActiveMidiNote>, + /// Completed notes ready to be added to clip + /// Format: (time_offset, note, velocity, duration) + pub completed_notes: Vec<(f64, u8, u8, f64)>, +} + +impl MidiRecordingState {
+ /// Create a new MIDI recording state + pub fn new(track_id: TrackId, clip_id: MidiClipId, start_time: f64) -> Self { + Self { + track_id, + clip_id, + start_time, + active_notes: HashMap::new(), + completed_notes: Vec::new(), + } + } + + /// Handle a MIDI note on event + pub fn note_on(&mut self, note: u8, velocity: u8, absolute_time: f64) { + // Store this note as active + self.active_notes.insert(note, ActiveMidiNote { + note, + velocity, + start_time: absolute_time, + }); + } + + /// Handle a MIDI note off event + pub fn note_off(&mut self, note: u8, absolute_time: f64) { + // Find the matching noteOn + if let Some(active_note) = self.active_notes.remove(¬e) { + // Calculate relative time offset and duration + let time_offset = active_note.start_time - self.start_time; + let duration = absolute_time - active_note.start_time; + + eprintln!("[MIDI_RECORDING_STATE] Completing note {}: note_start={:.3}s, note_end={:.3}s, recording_start={:.3}s, time_offset={:.3}s, duration={:.3}s", + note, active_note.start_time, absolute_time, self.start_time, time_offset, duration); + + // Add to completed notes + self.completed_notes.push(( + time_offset, + active_note.note, + active_note.velocity, + duration, + )); + } + // If no matching noteOn found, ignore the noteOff + } + + /// Get all completed notes + pub fn get_notes(&self) -> &[(f64, u8, u8, f64)] { + &self.completed_notes + } + + /// Get the number of completed notes + pub fn note_count(&self) -> usize { + self.completed_notes.len() + } + + /// Close out all active notes at the given time + /// This should be called when stopping recording to end any held notes + pub fn close_active_notes(&mut self, end_time: f64) { + // Collect all active notes and close them + let active_notes: Vec<_> = self.active_notes.drain().collect(); + + for (_note_num, active_note) in active_notes { + // Calculate relative time offset and duration + let time_offset = active_note.start_time - self.start_time; + let duration = end_time - active_note.start_time; + + // Add to completed notes + self.completed_notes.push(( + time_offset, + active_note.note, + active_note.velocity, + duration, + )); + } + } +} diff --git a/daw-backend/src/audio/track.rs b/daw-backend/src/audio/track.rs index 6dbf31c..76ebedb 100644 --- a/daw-backend/src/audio/track.rs +++ b/daw-backend/src/audio/track.rs @@ -374,8 +374,16 @@ impl MidiTrack { /// Stop all currently playing notes on this track's instrument /// Note: With node-based instruments, stopping is handled by ceasing MIDI input pub fn stop_all_notes(&mut self) { - // No-op: Node-based instruments stop when they receive no MIDI input - // Individual synthesizer nodes handle note-off events appropriately + // Send note-off for all 128 possible MIDI notes to silence the instrument + let mut note_offs = Vec::new(); + for note in 0..128 { + note_offs.push(MidiEvent::note_off(0.0, 0, note, 0)); + } + + // Create a silent buffer to process the note-offs + let buffer_size = 512 * 2; // stereo + let mut silent_buffer = vec![0.0f32; buffer_size]; + self.instrument_graph.process(&mut silent_buffer, ¬e_offs); } /// Queue a live MIDI event (from virtual keyboard or MIDI controller) @@ -428,11 +436,14 @@ impl MidiTrack { sample_rate, ); - for (_timestamp, event) in events { - midi_events.push(event); - } + // Events now have timestamps in seconds relative to clip start + midi_events.extend(events); } + // Add live MIDI events (from virtual keyboard or MIDI controllers) + // This allows real-time input to be heard during playback/recording + 
midi_events.extend(self.live_midi_queue.drain(..)); + // Generate audio using instrument graph self.instrument_graph.process(output, &midi_events); diff --git a/daw-backend/src/command/mod.rs b/daw-backend/src/command/mod.rs index 4dd1b68..5baaac2 100644 --- a/daw-backend/src/command/mod.rs +++ b/daw-backend/src/command/mod.rs @@ -1,3 +1,3 @@ pub mod types; -pub use types::{AudioEvent, Command, OscilloscopeData, Query, QueryResponse}; +pub use types::{AudioEvent, Command, MidiClipData, OscilloscopeData, Query, QueryResponse}; diff --git a/daw-backend/src/command/types.rs b/daw-backend/src/command/types.rs index e7774ed..4145d10 100644 --- a/daw-backend/src/command/types.rs +++ b/daw-backend/src/command/types.rs @@ -98,6 +98,12 @@ pub enum Command { /// Resume the current recording ResumeRecording, + // MIDI Recording commands + /// Start MIDI recording on a track (track_id, clip_id, start_time) + StartMidiRecording(TrackId, MidiClipId, f64), + /// Stop the current MIDI recording + StopMidiRecording, + // Project commands /// Reset the entire project (remove all tracks, clear audio pool, reset state) Reset, @@ -172,6 +178,11 @@ pub enum AudioEvent { RecordingStopped(ClipId, usize, Vec<WaveformPeak>), /// Recording error (error_message) RecordingError(String), + /// MIDI recording stopped (track_id, clip_id, note_count) + MidiRecordingStopped(TrackId, MidiClipId, usize), + /// MIDI recording progress (track_id, clip_id, duration, notes) + /// Notes format: (start_time, note, velocity, duration) + MidiRecordingProgress(TrackId, MidiClipId, f64, Vec<(f64, u8, u8, f64)>), /// Project has been reset ProjectReset, /// MIDI note started playing (note, velocity) @@ -199,6 +210,8 @@ pub enum Query { GetTemplateState(TrackId, u32), /// Get oscilloscope data from a node (track_id, node_id, sample_count) GetOscilloscopeData(TrackId, u32, usize), + /// Get MIDI clip data (track_id, clip_id) + GetMidiClip(TrackId, MidiClipId), } /// Oscilloscope data from a node @@ -210,6 +223,13 @@ pub struct OscilloscopeData { pub cv: Vec<f32>, } +/// MIDI clip data for serialization +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct MidiClipData { + pub duration: f64, + pub events: Vec<MidiEvent>, +} + /// Responses to synchronous queries #[derive(Debug)] pub enum QueryResponse { @@ -217,4 +237,6 @@ GraphState(Result<String, String>), /// Oscilloscope data samples OscilloscopeData(Result<OscilloscopeData, String>), + /// MIDI clip data + MidiClipData(Result<MidiClipData, String>), } diff --git a/daw-backend/src/io/midi_file.rs b/daw-backend/src/io/midi_file.rs index 2b2512b..51f5ea3 100644 --- a/daw-backend/src/io/midi_file.rs +++ b/daw-backend/src/io/midi_file.rs @@ -6,7 +6,7 @@ use std::path::Path; pub fn load_midi_file<P: AsRef<Path>>( path: P, clip_id: MidiClipId, - sample_rate: u32, + _sample_rate: u32, ) -> Result<MidiClip, String> { // Read the MIDI file let data = fs::read(path.as_ref()).map_err(|e| format!("Failed to read MIDI file: {}", e))?; @@ -109,7 +109,8 @@ accumulated_time += delta_time; last_tick = tick; - let timestamp = (accumulated_time * sample_rate as f64) as u64; + // Store timestamp in seconds (sample-rate independent) + let timestamp = accumulated_time; match message { midly::MidiMessage::NoteOn { key, vel } => { diff --git a/src-tauri/src/audio.rs b/src-tauri/src/audio.rs index 86ba8a3..fb2781c 100644 --- a/src-tauri/src/audio.rs +++ b/src-tauri/src/audio.rs @@ -93,6 +93,12 @@ impl EventEmitter for TauriEventEmitter { AudioEvent::GraphPresetLoaded(track_id) => { SerializedAudioEvent::GraphPresetLoaded { track_id } } +
AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count) => { + SerializedAudioEvent::MidiRecordingStopped { track_id, clip_id, note_count } + } + AudioEvent::MidiRecordingProgress(track_id, clip_id, duration, notes) => { + SerializedAudioEvent::MidiRecordingProgress { track_id, clip_id, duration, notes } + } _ => return, // Ignore other event types for now }; @@ -381,6 +387,39 @@ pub async fn audio_resume_recording( } } +#[tauri::command] +pub async fn audio_start_midi_recording( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + clip_id: u32, + start_time: f64, +) -> Result<(), String> { + let mut audio_state = state.lock().unwrap(); + if let Some(controller) = &mut audio_state.controller { + controller.start_midi_recording(track_id, clip_id, start_time); + Ok(()) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn audio_stop_midi_recording( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, +) -> Result<(), String> { + eprintln!("[TAURI] audio_stop_midi_recording called"); + let mut audio_state = state.lock().unwrap(); + if let Some(controller) = &mut audio_state.controller { + eprintln!("[TAURI] Calling controller.stop_midi_recording()"); + controller.stop_midi_recording(); + eprintln!("[TAURI] controller.stop_midi_recording() returned"); + Ok(()) + } else { + eprintln!("[TAURI] Audio not initialized!"); + Err("Audio not initialized".to_string()) + } +} + #[tauri::command] pub async fn audio_create_midi_clip( state: tauri::State<'_, Arc<Mutex<AudioState>>>, @@ -390,9 +429,8 @@ ) -> Result<u32, String> { let mut audio_state = state.lock().unwrap(); if let Some(controller) = &mut audio_state.controller { - controller.create_midi_clip(track_id, start_time, duration); - // Return a clip ID (for now, just use 0 as clips are managed internally) - Ok(0) + let clip_id = controller.create_midi_clip(track_id, start_time, duration); + Ok(clip_id) } else { Err("Audio not initialized".to_string()) } } @@ -505,6 +543,51 @@ pub async fn audio_load_midi_file( } } +#[tauri::command] +pub async fn audio_get_midi_clip_data( + state: tauri::State<'_, Arc<Mutex<AudioState>>>, + track_id: u32, + clip_id: u32, +) -> Result<MidiFileMetadata, String> { + let mut audio_state = state.lock().unwrap(); + + if let Some(controller) = &mut audio_state.controller { + // Query the MIDI clip data from the backend + let clip_data = controller.query_midi_clip(track_id, clip_id)?; + + // Convert MIDI events to MidiNote format + let mut notes = Vec::new(); + let mut active_notes: std::collections::HashMap<u8, (f64, u8)> = std::collections::HashMap::new(); + + for event in &clip_data.events { + // event.timestamp is already in seconds (sample-rate independent) + let time_seconds = event.timestamp; + + if event.is_note_on() { + // Store note on event (time and velocity) + active_notes.insert(event.data1, (time_seconds, event.data2)); + } else if event.is_note_off() { + // Find matching note on and create a MidiNote + if let Some((start, velocity)) = active_notes.remove(&event.data1) { + notes.push(MidiNote { + note: event.data1, + start_time: start, + duration: time_seconds - start, + velocity, + }); + } + } + } + + Ok(MidiFileMetadata { + duration: clip_data.duration, + notes, + }) + } else { + Err("Audio not initialized".to_string()) + } +} + #[tauri::command] pub async fn audio_update_midi_clip_notes( state: tauri::State<'_, Arc<Mutex<AudioState>>>, @@ -1133,6 +1216,8 @@ pub enum SerializedAudioEvent { RecordingProgress { clip_id: u32, duration: f64 }, RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> }, RecordingError { message: String }, +
MidiRecordingStopped { track_id: u32, clip_id: u32, note_count: usize }, + MidiRecordingProgress { track_id: u32, clip_id: u32, duration: f64, notes: Vec<(f64, u8, u8, f64)> }, NoteOn { note: u8, velocity: u8 }, NoteOff { note: u8 }, GraphNodeAdded { track_id: u32, node_id: u32, node_type: String }, diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index d082f78..1c5eee3 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -207,9 +207,12 @@ pub fn run() { audio::audio_stop_recording, audio::audio_pause_recording, audio::audio_resume_recording, + audio::audio_start_midi_recording, + audio::audio_stop_midi_recording, audio::audio_create_midi_clip, audio::audio_add_midi_note, audio::audio_load_midi_file, + audio::audio_get_midi_clip_data, audio::audio_update_midi_clip_notes, audio::audio_send_midi_note_on, audio::audio_send_midi_note_off, diff --git a/src/actions/index.js b/src/actions/index.js index d1513e6..3a22f18 100644 --- a/src/actions/index.js +++ b/src/actions/index.js @@ -450,11 +450,8 @@ export const actions = { let newMIDITrack = new AudioTrack(action.trackuuid, action.midiname, 'midi'); let object = pointerList[action.object]; - // Get available instruments and use the first one (SimpleSynth) + // Note: MIDI tracks now use node-based instruments via instrument_graph const { invoke } = window.__TAURI__.core; - const instruments = await invoke('audio_get_available_instruments'); - const instrument = instruments.length > 0 ? instruments[0] : 'SimpleSynth'; - newMIDITrack.instrument = instrument; // Add placeholder clip immediately so user sees feedback newMIDITrack.clips.push({ diff --git a/src/main.js b/src/main.js index a601d85..64fc96d 100644 --- a/src/main.js +++ b/src/main.js @@ -710,25 +710,12 @@ Object.defineProperty(globalThis, 'root', { return __root; }, set(newRoot) { - console.error('[ROOT REPLACED] root is being replaced!'); - console.error('[ROOT REPLACED] Old root idx:', __root?.idx, 'New root idx:', newRoot?.idx); - console.trace('[ROOT REPLACED] Stack trace:'); __root = newRoot; }, configurable: true, enumerable: true }); -// Set up a watchdog to monitor root.frameRate -setInterval(() => { - if (root && root.frameRate === undefined) { - console.error('[WATCHDOG] root.frameRate is undefined!'); - console.error('[WATCHDOG] root object idx:', root.idx); - console.error('[WATCHDOG] Has frameRate property?', 'frameRate' in root); - console.trace('[WATCHDOG] Stack trace:'); - } -}, 1000); - async function greet() { // Learn more about Tauri commands at https://tauri.app/develop/calling-rust/ greetMsgEl.textContent = await invoke("greet", { name: greetInputEl.value }); @@ -821,6 +808,7 @@ window.addEventListener("keydown", (e) => { case config.shortcuts.playAnimation: console.log("Spacebar pressed"); playPause(); + e.preventDefault(); // Prevent spacebar from clicking focused buttons break; case config.shortcuts.selectAll: e.preventDefault(); @@ -991,11 +979,6 @@ function playbackLoop() { if (playing) { const duration = context.activeObject.duration; - // Debug logging for recording - if (context.isRecording) { - console.log('playbackLoop - recording active, currentTime:', context.activeObject.currentTime, 'duration:', duration, 'isRecording:', context.isRecording); - } - // Check if we've reached the end (but allow infinite playback when recording) if (context.isRecording || (duration > 0 && context.activeObject.currentTime < duration)) { // Continue playing @@ -1203,6 +1186,98 @@ async function handleAudioEvent(event) { context.recordingClipId = null; 
break; + case 'MidiRecordingProgress': + // Update MIDI clip during recording with current duration and notes + const progressMidiTrack = context.activeObject.audioTracks.find(t => t.audioTrackId === event.track_id); + if (progressMidiTrack) { + const progressClip = progressMidiTrack.clips.find(c => c.clipId === event.clip_id); + if (progressClip) { + console.log('[MIDI_PROGRESS] Updating clip', event.clip_id, '- duration:', event.duration, 'notes:', event.notes.length, 'loading:', progressClip.loading); + progressClip.duration = event.duration; + progressClip.loading = false; // Make sure clip is not in loading state + // Convert backend note format to frontend format + progressClip.notes = event.notes.map(([start_time, note, velocity, duration]) => ({ + note: note, + start_time: start_time, + duration: duration, + velocity: velocity + })); + console.log('[MIDI_PROGRESS] Clip now has', progressClip.notes.length, 'notes'); + + // Request redraw to show updated clip + updateLayers(); + if (context.timelineWidget) { + context.timelineWidget.requestRedraw(); + } + } else { + console.log('[MIDI_PROGRESS] Could not find clip', event.clip_id); + } + } + break; + + case 'MidiRecordingStopped': + console.log('[FRONTEND] ========== MidiRecordingStopped EVENT =========='); + console.log('[FRONTEND] Event details - track:', event.track_id, 'clip:', event.clip_id, 'notes:', event.note_count); + + // Find the track and update the clip + const midiTrack = context.activeObject.audioTracks.find(t => t.audioTrackId === event.track_id); + console.log('[FRONTEND] Found MIDI track:', midiTrack ? midiTrack.name : 'NOT FOUND'); + + if (midiTrack) { + console.log('[FRONTEND] Track has', midiTrack.clips.length, 'clips:', midiTrack.clips.map(c => `{id:${c.clipId}, name:"${c.name}", loading:${c.loading}}`)); + + // Find the clip we created when recording started + let existingClip = midiTrack.clips.find(c => c.clipId === event.clip_id); + console.log('[FRONTEND] Found existing clip:', existingClip ? 
`id:${existingClip.clipId}, name:"${existingClip.name}", loading:${existingClip.loading}` : 'NOT FOUND'); + + if (existingClip) { + // Fetch the clip data from the backend + try { + console.log('[FRONTEND] Fetching MIDI clip data from backend...'); + const clipData = await invoke('audio_get_midi_clip_data', { + trackId: event.track_id, + clipId: event.clip_id + }); + console.log('[FRONTEND] Received clip data:', clipData); + + // Update the clip with the recorded notes + console.log('[FRONTEND] Updating clip - before:', { loading: existingClip.loading, name: existingClip.name, duration: existingClip.duration, noteCount: existingClip.notes?.length }); + existingClip.loading = false; + existingClip.name = `MIDI Clip (${event.note_count} notes)`; + existingClip.duration = clipData.duration; + existingClip.notes = clipData.notes; + console.log('[FRONTEND] Updating clip - after:', { loading: existingClip.loading, name: existingClip.name, duration: existingClip.duration, noteCount: existingClip.notes?.length }); + } catch (error) { + console.error('[FRONTEND] Failed to fetch MIDI clip data:', error); + existingClip.loading = false; + existingClip.name = `MIDI Clip (failed)`; + } + } else { + console.error('[FRONTEND] Could not find clip', event.clip_id, 'on track', event.track_id); + } + + // Request redraw to show the clip with recorded notes + updateLayers(); + if (context.timelineWidget) { + context.timelineWidget.requestRedraw(); + } + } + + // Clear recording state + console.log('[FRONTEND] Clearing MIDI recording state'); + context.isRecording = false; + context.recordingTrackId = null; + context.recordingClipId = null; + + // Update record button appearance + if (context.recordButton) { + context.recordButton.className = "playback-btn playback-btn-record"; + context.recordButton.title = "Record"; + } + + console.log('[FRONTEND] MIDI recording complete - recorded', event.note_count, 'notes'); + break; + case 'GraphPresetLoaded': // Preset loaded - layers are already populated during graph reload console.log('GraphPresetLoaded event received for track:', event.track_id); @@ -1330,47 +1405,116 @@ async function toggleRecording() { // Stop recording console.log('[FRONTEND] toggleRecording - stopping recording for clip:', context.recordingClipId); try { - await invoke('audio_stop_recording'); + // Check if we're recording MIDI or audio + const track = context.activeObject.audioTracks.find(t => t.audioTrackId === context.recordingTrackId); + const isMidiRecording = track && track.type === 'midi'; + + console.log('[FRONTEND] Stopping recording - isMIDI:', isMidiRecording, 'track type:', track?.type, 'track ID:', context.recordingTrackId); + + if (isMidiRecording) { + console.log('[FRONTEND] Calling audio_stop_midi_recording...'); + await invoke('audio_stop_midi_recording'); + console.log('[FRONTEND] audio_stop_midi_recording returned successfully'); + } else { + console.log('[FRONTEND] Calling audio_stop_recording...'); + await invoke('audio_stop_recording'); + console.log('[FRONTEND] audio_stop_recording returned successfully'); + } + + console.log('[FRONTEND] Clearing recording state in toggleRecording'); context.isRecording = false; context.recordingTrackId = null; context.recordingClipId = null; - console.log('[FRONTEND] Recording stopped via toggle button'); } catch (error) { console.error('[FRONTEND] Failed to stop recording:', error); } } else { - // Start recording - check if activeLayer is an audio track + // Start recording - check if activeLayer is a track const audioTrack = 
context.activeObject.activeLayer; if (!audioTrack || !(audioTrack instanceof AudioTrack)) { - alert('Please select an audio track to record to'); + alert('Please select a track to record to'); return; } if (audioTrack.audioTrackId === null) { - alert('Audio track not properly initialized'); + alert('Track not properly initialized'); return; } // Start recording at current playhead position const startTime = context.activeObject.currentTime || 0; - console.log('[FRONTEND] Starting recording on track', audioTrack.audioTrackId, 'at time', startTime); - try { - await invoke('audio_start_recording', { - trackId: audioTrack.audioTrackId, - startTime: startTime - }); - context.isRecording = true; - context.recordingTrackId = audioTrack.audioTrackId; - console.log('[FRONTEND] Recording started successfully, waiting for RecordingStarted event'); + // Check if this is a MIDI track or audio track + if (audioTrack.type === 'midi') { + // MIDI recording + console.log('[FRONTEND] Starting MIDI recording on track', audioTrack.audioTrackId, 'at time', startTime); + try { + // First, create a MIDI clip at the current playhead position + const clipDuration = 4.0; // Default clip duration of 4 seconds (can be extended by recording) + const clipId = await invoke('audio_create_midi_clip', { + trackId: audioTrack.audioTrackId, + startTime: startTime, + duration: clipDuration + }); - // Start playback so the timeline moves (if not already playing) - if (!playing) { - await playPause(); + console.log('[FRONTEND] Created MIDI clip with ID:', clipId); + + // Add clip to track immediately (similar to MIDI import) + audioTrack.clips.push({ + clipId: clipId, + name: 'Recording...', + startTime: startTime, + duration: clipDuration, + notes: [], + loading: true + }); + + // Update UI to show the recording clip + updateLayers(); + if (context.timelineWidget) { + context.timelineWidget.requestRedraw(); + } + + // Now start MIDI recording + await invoke('audio_start_midi_recording', { + trackId: audioTrack.audioTrackId, + clipId: clipId, + startTime: startTime + }); + + context.isRecording = true; + context.recordingTrackId = audioTrack.audioTrackId; + context.recordingClipId = clipId; + console.log('[FRONTEND] MIDI recording started successfully'); + + // Start playback so the timeline moves (if not already playing) + if (!playing) { + await playPause(); + } + } catch (error) { + console.error('[FRONTEND] Failed to start MIDI recording:', error); + alert('Failed to start MIDI recording: ' + error); + } + } else { + // Audio recording + console.log('[FRONTEND] Starting audio recording on track', audioTrack.audioTrackId, 'at time', startTime); + try { + await invoke('audio_start_recording', { + trackId: audioTrack.audioTrackId, + startTime: startTime + }); + context.isRecording = true; + context.recordingTrackId = audioTrack.audioTrackId; + console.log('[FRONTEND] Audio recording started successfully, waiting for RecordingStarted event'); + + // Start playback so the timeline moves (if not already playing) + if (!playing) { + await playPause(); + } + } catch (error) { + console.error('[FRONTEND] Failed to start audio recording:', error); + alert('Failed to start audio recording: ' + error); } - } catch (error) { - console.error('[FRONTEND] Failed to start recording:', error); - alert('Failed to start recording: ' + error); } } } @@ -7248,6 +7392,81 @@ function nodeEditor() { }); }); + // Handle select dropdowns + const selects = nodeElement.querySelectorAll('select[data-param]'); + selects.forEach(select => { + // Track 
parameter change action for undo/redo + let paramAction = null; + + // Prevent node dragging when interacting with select + select.addEventListener("mousedown", (e) => { + e.stopPropagation(); + + // Initialize undo action + const paramId = parseInt(e.target.getAttribute("data-param")); + const currentValue = parseFloat(e.target.value); + const nodeData = editor.getNodeFromId(nodeId); + + if (nodeData && nodeData.data.backendId !== null) { + const currentTrackId = getCurrentMidiTrack(); + if (currentTrackId !== null) { + paramAction = actions.graphSetParameter.initialize( + currentTrackId, + nodeData.data.backendId, + paramId, + nodeId, + currentValue + ); + } + } + }); + select.addEventListener("pointerdown", (e) => { + e.stopPropagation(); + }); + + select.addEventListener("change", (e) => { + const paramId = parseInt(e.target.getAttribute("data-param")); + const value = parseFloat(e.target.value); + + console.log(`[setupNodeParameters] Select change - nodeId: ${nodeId}, paramId: ${paramId}, value: ${value}`); + + // Update display span if it exists + const nodeData = editor.getNodeFromId(nodeId); + if (nodeData) { + const nodeDef = nodeTypes[nodeData.name]; + if (nodeDef && nodeDef.parameters[paramId]) { + const param = nodeDef.parameters[paramId]; + const displaySpan = nodeElement.querySelector(`#${param.name}-${nodeId}`); + if (displaySpan) { + // Update the span with the selected option text + displaySpan.textContent = e.target.options[e.target.selectedIndex].text; + } + } + + // Send to backend + if (nodeData.data.backendId !== null) { + const currentTrackId = getCurrentMidiTrack(); + if (currentTrackId !== null) { + invoke("graph_set_parameter", { + trackId: currentTrackId, + nodeId: nodeData.data.backendId, + paramId: paramId, + value: value + }).catch(err => { + console.error("Failed to set parameter:", err); + }); + } + } + } + + // Finalize undo action + if (paramAction) { + actions.graphSetParameter.finalize(paramAction, value); + paramAction = null; + } + }); + }); + // Handle Load Sample button for SimpleSampler const loadSampleBtn = nodeElement.querySelector(".load-sample-btn"); if (loadSampleBtn) { @@ -9107,20 +9326,15 @@ async function addEmptyMIDITrack() { const trackUuid = uuidv4(); try { - // Get available instruments - const instruments = await getAvailableInstruments(); - - // Default to SimpleSynth for now (we can add UI selection later) - const instrument = instruments.length > 0 ? 
instruments[0] : 'SimpleSynth'; + // Note: MIDI tracks now use node-based instruments via instrument_graph // Create new AudioTrack with type='midi' const newMIDITrack = new AudioTrack(trackUuid, trackName, 'midi'); - newMIDITrack.instrument = instrument; - // Initialize track in backend (creates MIDI track with instrument) + // Initialize track in backend (creates MIDI track with node graph) await newMIDITrack.initializeTrack(); - console.log('[addEmptyMIDITrack] After initializeTrack - instrument:', instrument); + console.log('[addEmptyMIDITrack] After initializeTrack - track created with node graph'); // Add track to active object context.activeObject.audioTracks.push(newMIDITrack); @@ -9144,16 +9358,7 @@ async function addEmptyMIDITrack() { } // MIDI Command Wrappers -async function getAvailableInstruments() { - try { - const instruments = await invoke('audio_get_available_instruments'); - console.log('Available instruments:', instruments); - return instruments; - } catch (error) { - console.error('Failed to get available instruments:', error); - throw error; - } -} +// Note: getAvailableInstruments() removed - now using node-based instruments async function createMIDITrack(name, instrument) { try { diff --git a/src/nodeTypes.js b/src/nodeTypes.js index 6f5ef83..449008c 100644 --- a/src/nodeTypes.js +++ b/src/nodeTypes.js @@ -1015,6 +1015,135 @@ export const nodeTypes = { ` }, + Math: { + name: 'Math', + category: NodeCategory.UTILITY, + description: 'Mathematical and logical operations on CV signals', + inputs: [ + { name: 'CV In A', type: SignalType.CV, index: 0 }, + { name: 'CV In B', type: SignalType.CV, index: 1 } + ], + outputs: [ + { name: 'CV Out', type: SignalType.CV, index: 0 } + ], + parameters: [ + { id: 0, name: 'operation', label: 'Operation', min: 0, max: 13, default: 0, unit: '' }, + { id: 1, name: 'operand', label: 'Operand', min: -10, max: 10, default: 1, unit: '' } + ], + getHTML: (nodeId) => ` +
+        <div class="node-content">
+          <div class="node-title">Math</div>
+          <div class="node-param">
+            <label>Operation</label>
+            <select data-param="0">
+              <!-- ... option elements for operations 0-13 ... -->
+            </select>
+          </div>
+          <div class="node-param">
+            <label>Operand</label>
+            <input type="range" data-param="1" min="-10" max="10" step="0.01" value="1">
+            <span id="operand-${nodeId}">1</span>
+          </div>
+        </div>
+ ` + }, + + Quantizer: { + name: 'Quantizer', + category: NodeCategory.UTILITY, + description: 'Quantize CV to musical scales', + inputs: [ + { name: 'CV In', type: SignalType.CV, index: 0 } + ], + outputs: [ + { name: 'CV Out', type: SignalType.CV, index: 0 }, + { name: 'Gate Out', type: SignalType.CV, index: 1 } + ], + parameters: [ + { id: 0, name: 'scale', label: 'Scale', min: 0, max: 10, default: 0, unit: '' }, + { id: 1, name: 'root', label: 'Root', min: 0, max: 11, default: 0, unit: '' } + ], + getHTML: (nodeId) => ` +
+        <div class="node-content">
+          <div class="node-title">Quantizer</div>
+          <div class="node-param">
+            <label>Scale</label>
+            <select data-param="0">
+              <!-- ... option elements for scales 0-10 ... -->
+            </select>
+          </div>
+          <div class="node-param">
+            <label>Root</label>
+            <select data-param="1">
+              <!-- ... option elements for roots 0-11 (chromatic C through B) ... -->
+            </select>
+          </div>
+        </div>
+ ` + }, + + SlewLimiter: { + name: 'SlewLimiter', + category: NodeCategory.UTILITY, + description: 'Limit rate of change for portamento/glide effects', + inputs: [ + { name: 'CV In', type: SignalType.CV, index: 0 } + ], + outputs: [ + { name: 'CV Out', type: SignalType.CV, index: 0 } + ], + parameters: [ + { id: 0, name: 'rise_time', label: 'Rise Time', min: 0, max: 5, default: 0.01, unit: 's' }, + { id: 1, name: 'fall_time', label: 'Fall Time', min: 0, max: 5, default: 0.01, unit: 's' } + ], + getHTML: (nodeId) => ` +
+        <div class="node-content">
+          <div class="node-title">Slew Limiter</div>
+          <div class="node-param">
+            <label>Rise Time</label>
+            <input type="range" data-param="0" min="0" max="5" step="0.01" value="0.01">
+            <span id="rise_time-${nodeId}">0.01 s</span>
+          </div>
+          <div class="node-param">
+            <label>Fall Time</label>
+            <input type="range" data-param="1" min="0" max="5" step="0.01" value="0.01">
+            <span id="fall_time-${nodeId}">0.01 s</span>
+          </div>
+        </div>
+ ` + }, + EQ: { name: 'EQ', category: NodeCategory.EFFECT, diff --git a/src/widgets.js b/src/widgets.js index 1b2b256..dc82ca2 100644 --- a/src/widgets.js +++ b/src/widgets.js @@ -4285,9 +4285,20 @@ class VirtualPiano extends Widget { console.log(`Note ON: ${this.getMidiNoteInfo(midiNote).name} (${midiNote}) velocity: ${velocity}`); - // Send to backend - use track ID 0 (first MIDI track) - // TODO: Make this configurable to select which track to send to - invoke('audio_send_midi_note_on', { trackId: 0, note: midiNote, velocity }).catch(error => { + // Send to backend - use selected track or recording track + let trackId = 0; // Default to first track + if (typeof context !== 'undefined') { + // If recording, use the recording track + if (context.isRecording && context.recordingTrackId !== null) { + trackId = context.recordingTrackId; + } + // Otherwise use the selected track + else if (context.activeObject && context.activeObject.activeLayer && context.activeObject.activeLayer.audioTrackId !== null) { + trackId = context.activeObject.activeLayer.audioTrackId; + } + } + + invoke('audio_send_midi_note_on', { trackId: trackId, note: midiNote, velocity }).catch(error => { console.error('Failed to send MIDI note on:', error); }); @@ -4305,8 +4316,20 @@ class VirtualPiano extends Widget { console.log(`Note OFF: ${this.getMidiNoteInfo(midiNote).name} (${midiNote})`); - // Send to backend - use track ID 0 (first MIDI track) - invoke('audio_send_midi_note_off', { trackId: 0, note: midiNote }).catch(error => { + // Send to backend - use selected track or recording track + let trackId = 0; // Default to first track + if (typeof context !== 'undefined') { + // If recording, use the recording track + if (context.isRecording && context.recordingTrackId !== null) { + trackId = context.recordingTrackId; + } + // Otherwise use the selected track + else if (context.activeObject && context.activeObject.activeLayer && context.activeObject.activeLayer.audioTrackId !== null) { + trackId = context.activeObject.activeLayer.audioTrackId; + } + } + + invoke('audio_send_midi_note_off', { trackId: trackId, note: midiNote }).catch(error => { console.error('Failed to send MIDI note off:', error); });