diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs index 6d3a0a9..beb7543 100644 --- a/daw-backend/src/audio/engine.rs +++ b/daw-backend/src/audio/engine.rs @@ -416,13 +416,23 @@ impl Engine { if let Some(recording) = &self.midi_recording_state { let current_time = self.playhead as f64 / self.sample_rate as f64; let duration = current_time - recording.start_time; - let notes = recording.get_notes().to_vec(); + let notes = recording.get_notes_with_active(current_time); let _ = self.event_tx.push(AudioEvent::MidiRecordingProgress( recording.track_id, recording.clip_id, duration, notes, )); + // Keep the snapshot up to date so the UI can display a growing clip bar. + let track_id = recording.track_id; + let clip_id = recording.clip_id; + if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + if let Some(instance) = track.clip_instances.iter_mut().find(|i| i.clip_id == clip_id) { + instance.internal_end = duration; + instance.external_duration = duration; + } + } + self.refresh_clip_snapshot(); } } } else { diff --git a/daw-backend/src/audio/recording.rs b/daw-backend/src/audio/recording.rs index 0bdf9af..8dd1f3f 100644 --- a/daw-backend/src/audio/recording.rs +++ b/daw-backend/src/audio/recording.rs @@ -253,6 +253,18 @@ impl MidiRecordingState { self.completed_notes.len() } + /// Get all completed notes plus currently-held notes with a provisional duration. + /// Used for live preview during recording so held notes appear immediately. 
+ pub fn get_notes_with_active(&self, current_time: f64) -> Vec<(f64, u8, u8, f64)> { + let mut notes = self.completed_notes.clone(); + for active in self.active_notes.values() { + let time_offset = active.start_time - self.start_time; + let provisional_dur = (current_time - active.start_time).max(0.0); + notes.push((time_offset, active.note, active.velocity, provisional_dur)); + } + notes + } + /// Get the note numbers of all currently held (active) notes pub fn active_note_numbers(&self) -> Vec<u8> { self.active_notes.keys().copied().collect() diff --git a/lightningbeam-ui/lightningbeam-editor/src/main.rs b/lightningbeam-ui/lightningbeam-editor/src/main.rs index 326ac89..203d8dc 100644 --- a/lightningbeam-ui/lightningbeam-editor/src/main.rs +++ b/lightningbeam-ui/lightningbeam-editor/src/main.rs @@ -5164,6 +5164,29 @@ impl eframe::App for EditorApp { .filter(|lid| self.recording_layer_ids.contains(lid)) .copied(); if let Some(layer_id) = midi_layer_id { + // Lazily create the doc clip + instance on the first progress event + // (there is no MidiRecordingStarted event from the backend). 
+ let already_exists = self.clip_instance_to_backend_map.values().any(|v| { + matches!(v, lightningbeam_core::action::BackendClipInstanceId::Midi(id) if *id == clip_id) + }); + if !already_exists { + use lightningbeam_core::clip::{AudioClip, ClipInstance}; + let clip = AudioClip::new_recording("Recording..."); + let doc_clip_id = self.action_executor.document_mut().add_audio_clip(clip); + let clip_instance = ClipInstance::new(doc_clip_id) + .with_timeline_start(self.recording_start_time); + let clip_instance_id = clip_instance.id; + if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) { + if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { + audio_layer.clip_instances.push(clip_instance); + } + } + self.clip_instance_to_backend_map.insert( + clip_instance_id, + lightningbeam_core::action::BackendClipInstanceId::Midi(clip_id), + ); + } + let doc_clip_id = { let document = self.action_executor.document(); document.get_layer(&layer_id)