diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs index 45f83ad..cde4b96 100644 --- a/daw-backend/src/audio/engine.rs +++ b/daw-backend/src/audio/engine.rs @@ -2431,6 +2431,12 @@ impl Engine { fn handle_stop_midi_recording(&mut self) { eprintln!("[MIDI_RECORDING] handle_stop_midi_recording called"); if let Some(mut recording) = self.midi_recording_state.take() { + // Send note-off to the synth for any notes still held, so they don't get stuck + let track_id_for_noteoff = recording.track_id; + for note_num in recording.active_note_numbers() { + self.project.send_midi_note_off(track_id_for_noteoff, note_num); + } + // Close out any active notes at the current playhead position let end_time = self.playhead as f64 / self.sample_rate as f64; eprintln!("[MIDI_RECORDING] Closing active notes at time {}", end_time); diff --git a/daw-backend/src/audio/midi.rs b/daw-backend/src/audio/midi.rs index 8a496da..f3127a5 100644 --- a/daw-backend/src/audio/midi.rs +++ b/daw-backend/src/audio/midi.rs @@ -256,7 +256,8 @@ impl MidiClipInstance { // Get events from the clip that fall within the internal range for event in &clip.events { // Skip events outside the trimmed region - if event.timestamp < self.internal_start || event.timestamp >= self.internal_end { + // Use > (not >=) for internal_end so note-offs at the clip boundary are included + if event.timestamp < self.internal_start || event.timestamp > self.internal_end { continue; } @@ -265,9 +266,10 @@ impl MidiClipInstance { let timeline_time = self.external_start + loop_offset + relative_content_time; // Check if within current buffer range and instance bounds + // Use <= for external_end so note-offs at the clip boundary are included if timeline_time >= range_start_seconds && timeline_time < range_end_seconds - && timeline_time < external_end + && timeline_time <= external_end { let mut adjusted_event = *event; adjusted_event.timestamp = timeline_time; diff --git a/daw-backend/src/audio/recording.rs 
b/daw-backend/src/audio/recording.rs index ee8e811..0bdf9af 100644 --- a/daw-backend/src/audio/recording.rs +++ b/daw-backend/src/audio/recording.rs @@ -253,6 +253,11 @@ impl MidiRecordingState { self.completed_notes.len() } + /// Get the note numbers of all currently held (active) notes + pub fn active_note_numbers(&self) -> Vec<u8> { + self.active_notes.keys().copied().collect() + } + /// Close out all active notes at the given time /// This should be called when stopping recording to end any held notes pub fn close_active_notes(&mut self, end_time: f64) { diff --git a/daw-backend/src/audio/track.rs b/daw-backend/src/audio/track.rs index 94afa85..1a46368 100644 --- a/daw-backend/src/audio/track.rs +++ b/daw-backend/src/audio/track.rs @@ -7,7 +7,7 @@ use super::node_graph::nodes::{AudioInputNode, AudioOutputNode}; use super::node_graph::preset::GraphPreset; use super::pool::AudioClipPool; use serde::{Serialize, Deserialize}; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; /// Track ID type pub type TrackId = u32; @@ -334,6 +334,10 @@ pub struct MidiTrack { /// Queue for live MIDI input (virtual keyboard, MIDI controllers) #[serde(skip)] live_midi_queue: Vec<MidiEvent>, + /// Clip instances that were active (overlapping playhead) in the previous render buffer. + /// Used to detect when the playhead exits a clip, so we can send all-notes-off. 
+ #[serde(skip)] + prev_active_instances: HashSet<u32>, } impl Clone for MidiTrack { @@ -350,6 +354,7 @@ impl Clone for MidiTrack { automation_lanes: self.automation_lanes.clone(), next_automation_id: self.next_automation_id, live_midi_queue: Vec::new(), // Don't clone live MIDI queue + prev_active_instances: HashSet::new(), } } } @@ -372,6 +377,7 @@ impl MidiTrack { automation_lanes: HashMap::new(), next_automation_id: 0, live_midi_queue: Vec::new(), + prev_active_instances: HashSet::new(), } } @@ -505,7 +511,11 @@ impl MidiTrack { // Collect MIDI events from all clip instances that overlap with current time range let mut midi_events = Vec::new(); + let mut currently_active = HashSet::new(); for instance in &self.clip_instances { + if instance.overlaps_range(playhead_seconds, buffer_end_seconds) { + currently_active.insert(instance.id); + } // Get the clip content from the pool if let Some(clip) = midi_pool.get_clip(instance.clip_id) { let events = instance.get_events_in_range( @@ -517,6 +527,18 @@ } } + // Send all-notes-off for clip instances that just became inactive + // (playhead exited the clip). This prevents stuck notes from malformed clips. 
+ for prev_id in &self.prev_active_instances { + if !currently_active.contains(prev_id) { + for note in 0..128u8 { + midi_events.push(MidiEvent::note_off(playhead_seconds, 0, note, 0)); + } + break; // One round of all-notes-off is enough + } + } + self.prev_active_instances = currently_active; + // Add live MIDI events (from virtual keyboard or MIDI controllers) // This allows real-time input to be heard during playback/recording midi_events.extend(self.live_midi_queue.drain(..)); diff --git a/lightningbeam-ui/Cargo.toml b/lightningbeam-ui/Cargo.toml index 59d1c7f..75d2aa7 100644 --- a/lightningbeam-ui/Cargo.toml +++ b/lightningbeam-ui/Cargo.toml @@ -42,3 +42,34 @@ pollster = "0.3" # Desktop notifications notify-rust = "4.11" + +# Optimize the audio backend even in debug builds — the audio callback +# runs on a real-time thread with ~1.5ms deadlines at small buffer sizes, +# so it cannot tolerate unoptimized code. +[profile.dev.package.daw-backend] +opt-level = 2 + +# Also optimize symphonia (audio decoder) and cpal (audio I/O) — these +# run in the audio callback path and are heavily numeric. 
+[profile.dev.package.symphonia] +opt-level = 2 +[profile.dev.package.symphonia-core] +opt-level = 2 +[profile.dev.package.symphonia-bundle-mp3] +opt-level = 2 +[profile.dev.package.symphonia-bundle-flac] +opt-level = 2 +[profile.dev.package.symphonia-format-wav] +opt-level = 2 +[profile.dev.package.symphonia-format-ogg] +opt-level = 2 +[profile.dev.package.symphonia-codec-vorbis] +opt-level = 2 +[profile.dev.package.symphonia-codec-aac] +opt-level = 2 +[profile.dev.package.symphonia-format-isomp4] +opt-level = 2 +[profile.dev.package.cpal] +opt-level = 2 +[profile.dev.package.rubato] +opt-level = 2 diff --git a/lightningbeam-ui/lightningbeam-editor/src/main.rs b/lightningbeam-ui/lightningbeam-editor/src/main.rs index b867243..88414e4 100644 --- a/lightningbeam-ui/lightningbeam-editor/src/main.rs +++ b/lightningbeam-ui/lightningbeam-editor/src/main.rs @@ -3482,13 +3482,103 @@ impl eframe::App for EditorApp { self.recording_layer_id = None; ctx.request_repaint(); } - AudioEvent::MidiRecordingProgress(_track_id, _clip_id, duration, _notes) => { - println!("🎹 MIDI recording progress: {:.2}s", duration); + AudioEvent::MidiRecordingProgress(_track_id, clip_id, duration, notes) => { + // Update clip duration in document (so timeline bar grows) + if let Some(layer_id) = self.recording_layer_id { + let doc_clip_id = { + let document = self.action_executor.document(); + document.root.children.iter() + .find(|l| l.id() == layer_id) + .and_then(|layer| { + if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { + audio_layer.clip_instances.last().map(|i| i.clip_id) + } else { + None + } + }) + }; + + if let Some(doc_clip_id) = doc_clip_id { + if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) { + clip.duration = duration; + } + } + } + + // Update midi_event_cache with notes captured so far + // (inlined instead of calling rebuild_midi_cache_entry to avoid + // conflicting &mut self borrow with event_rx loop) + { + let 
mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2); + for &(start_time, note, velocity, dur) in &notes { + events.push((start_time, note, velocity, true)); + events.push((start_time + dur, note, velocity, false)); + } + events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap()); + self.midi_event_cache.insert(clip_id, events); + } ctx.request_repaint(); } AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count) => { println!("🎹 MIDI recording stopped: track={:?}, clip_id={}, {} notes", track_id, clip_id, note_count); + + // Query backend for the definitive final note data + if let Some(ref controller_arc) = self.audio_controller { + let mut controller = controller_arc.lock().unwrap(); + match controller.query_midi_clip(track_id, clip_id) { + Ok(midi_clip_data) => { + // Convert backend MidiEvent format to cache format + let cache_events: Vec<(f64, u8, u8, bool)> = midi_clip_data.events.iter() + .filter_map(|event| { + let status_type = event.status & 0xF0; + if status_type == 0x90 || status_type == 0x80 { + let is_note_on = status_type == 0x90 && event.data2 > 0; + Some((event.timestamp, event.data1, event.data2, is_note_on)) + } else { + None + } + }) + .collect(); + drop(controller); + self.midi_event_cache.insert(clip_id, cache_events); + + // Update document clip with final duration and name + if let Some(layer_id) = self.recording_layer_id { + let doc_clip_id = { + let document = self.action_executor.document(); + document.root.children.iter() + .find(|l| l.id() == layer_id) + .and_then(|layer| { + if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { + audio_layer.clip_instances.last().map(|i| i.clip_id) + } else { + None + } + }) + }; + if let Some(doc_clip_id) = doc_clip_id { + if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) { + clip.duration = midi_clip_data.duration; + clip.name = format!("MIDI Recording {}", clip_id); + } + } + } + + println!("✅ Finalized MIDI recording: 
{} notes, {:.2}s", + note_count, midi_clip_data.duration); + } + Err(e) => { + eprintln!("Failed to query MIDI clip data after recording: {}", e); + // Cache was already populated by last MidiRecordingProgress event + } + } + } + + // TODO: Store clip_instance_to_backend_map entry for this MIDI clip. + // The backend created the instance in create_midi_clip(), but doesn't + // report the instance_id back. Needed for move/trim operations later. + // Clear recording state self.is_recording = false; self.recording_clips.clear(); diff --git a/lightningbeam-ui/lightningbeam-editor/src/panes/timeline.rs b/lightningbeam-ui/lightningbeam-editor/src/panes/timeline.rs index 2e58ac0..338cf64 100644 --- a/lightningbeam-ui/lightningbeam-editor/src/panes/timeline.rs +++ b/lightningbeam-ui/lightningbeam-editor/src/panes/timeline.rs @@ -150,21 +150,25 @@ impl TimelinePane { /// Start recording on the active audio layer fn start_recording(&mut self, shared: &mut SharedPaneState) { + use lightningbeam_core::clip::{AudioClip, ClipInstance}; + let Some(active_layer_id) = *shared.active_layer_id else { println!("⚠️ No active layer selected for recording"); return; }; - // Get the active layer and check if it's an audio layer - let document = shared.action_executor.document(); - let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else { - println!("⚠️ Active layer not found in document"); - return; - }; - - let AnyLayer::Audio(audio_layer) = layer else { - println!("⚠️ Active layer is not an audio layer - cannot record"); - return; + // Get layer type (copy it so we can drop the document borrow before mutating) + let layer_type = { + let document = shared.action_executor.document(); + let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else { + println!("⚠️ Active layer not found in document"); + return; + }; + let AnyLayer::Audio(audio_layer) = layer else { + println!("⚠️ Active layer is not an audio layer - cannot 
record"); + return; + }; + audio_layer.audio_layer_type }; // Get the backend track ID for this layer @@ -179,31 +183,53 @@ impl TimelinePane { if let Some(controller_arc) = shared.audio_controller { let mut controller = controller_arc.lock().unwrap(); - match audio_layer.audio_layer_type { + match layer_type { AudioLayerType::Midi => { - // For MIDI recording, we need to create a clip first - // The backend will emit MidiRecordingStarted with the clip_id + // Create backend MIDI clip and start recording let clip_id = controller.create_midi_clip(track_id, start_time, 4.0); controller.start_midi_recording(track_id, clip_id, start_time); shared.recording_clips.insert(active_layer_id, clip_id); println!("🎹 Started MIDI recording on track {:?} at {:.2}s, clip_id={}", track_id, start_time, clip_id); + + // Drop controller lock before document mutation + drop(controller); + + // Create document clip + clip instance immediately (clip_id is known synchronously) + let doc_clip = AudioClip::new_midi("Recording...", clip_id, 4.0); + let doc_clip_id = shared.action_executor.document_mut().add_audio_clip(doc_clip); + + let clip_instance = ClipInstance::new(doc_clip_id) + .with_timeline_start(start_time); + + if let Some(layer) = shared.action_executor.document_mut().root.children.iter_mut() + .find(|l| l.id() == active_layer_id) + { + if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { + audio_layer.clip_instances.push(clip_instance); + } + } + + // Initialize empty cache entry for this clip + shared.midi_event_cache.insert(clip_id, Vec::new()); } AudioLayerType::Sampled => { // For audio recording, backend creates the clip controller.start_recording(track_id, start_time); println!("🎤 Started audio recording on track {:?} at {:.2}s", track_id, start_time); + drop(controller); } } - // Auto-start playback if not already playing + // Re-acquire lock for playback start if !*shared.is_playing { + let mut controller = controller_arc.lock().unwrap(); 
controller.play(); *shared.is_playing = true; println!("▶ Auto-started playback for recording"); } - // Store recording state for clip creation when RecordingStarted event arrives + // Store recording state *shared.is_recording = true; *shared.recording_start_time = start_time; *shared.recording_layer_id = Some(active_layer_id);