Midi recording

This commit is contained in:
Skyler Lehmkuhl 2026-02-13 18:00:59 -05:00
parent b86af7bbf5
commit 82b58ae0dc
7 changed files with 202 additions and 20 deletions

View File

@ -2431,6 +2431,12 @@ impl Engine {
fn handle_stop_midi_recording(&mut self) { fn handle_stop_midi_recording(&mut self) {
eprintln!("[MIDI_RECORDING] handle_stop_midi_recording called"); eprintln!("[MIDI_RECORDING] handle_stop_midi_recording called");
if let Some(mut recording) = self.midi_recording_state.take() { if let Some(mut recording) = self.midi_recording_state.take() {
// Send note-off to the synth for any notes still held, so they don't get stuck
let track_id_for_noteoff = recording.track_id;
for note_num in recording.active_note_numbers() {
self.project.send_midi_note_off(track_id_for_noteoff, note_num);
}
// Close out any active notes at the current playhead position // Close out any active notes at the current playhead position
let end_time = self.playhead as f64 / self.sample_rate as f64; let end_time = self.playhead as f64 / self.sample_rate as f64;
eprintln!("[MIDI_RECORDING] Closing active notes at time {}", end_time); eprintln!("[MIDI_RECORDING] Closing active notes at time {}", end_time);

View File

@ -256,7 +256,8 @@ impl MidiClipInstance {
// Get events from the clip that fall within the internal range // Get events from the clip that fall within the internal range
for event in &clip.events { for event in &clip.events {
// Skip events outside the trimmed region // Skip events outside the trimmed region
if event.timestamp < self.internal_start || event.timestamp >= self.internal_end { // Use > (not >=) for internal_end so note-offs at the clip boundary are included
if event.timestamp < self.internal_start || event.timestamp > self.internal_end {
continue; continue;
} }
@ -265,9 +266,10 @@ impl MidiClipInstance {
let timeline_time = self.external_start + loop_offset + relative_content_time; let timeline_time = self.external_start + loop_offset + relative_content_time;
// Check if within current buffer range and instance bounds // Check if within current buffer range and instance bounds
// Use <= for external_end so note-offs at the clip boundary are included
if timeline_time >= range_start_seconds if timeline_time >= range_start_seconds
&& timeline_time < range_end_seconds && timeline_time < range_end_seconds
&& timeline_time < external_end && timeline_time <= external_end
{ {
let mut adjusted_event = *event; let mut adjusted_event = *event;
adjusted_event.timestamp = timeline_time; adjusted_event.timestamp = timeline_time;

View File

@ -253,6 +253,11 @@ impl MidiRecordingState {
self.completed_notes.len() self.completed_notes.len()
} }
/// Return the MIDI note numbers of every note that is currently held
/// down (i.e. has received a note-on but not yet a note-off).
pub fn active_note_numbers(&self) -> Vec<u8> {
    let mut notes = Vec::with_capacity(self.active_notes.len());
    for &note in self.active_notes.keys() {
        notes.push(note);
    }
    notes
}
/// Close out all active notes at the given time /// Close out all active notes at the given time
/// This should be called when stopping recording to end any held notes /// This should be called when stopping recording to end any held notes
pub fn close_active_notes(&mut self, end_time: f64) { pub fn close_active_notes(&mut self, end_time: f64) {

View File

@ -7,7 +7,7 @@ use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
use super::node_graph::preset::GraphPreset; use super::node_graph::preset::GraphPreset;
use super::pool::AudioClipPool; use super::pool::AudioClipPool;
use serde::{Serialize, Deserialize}; use serde::{Serialize, Deserialize};
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
/// Track ID type /// Track ID type
pub type TrackId = u32; pub type TrackId = u32;
@ -334,6 +334,10 @@ pub struct MidiTrack {
/// Queue for live MIDI input (virtual keyboard, MIDI controllers) /// Queue for live MIDI input (virtual keyboard, MIDI controllers)
#[serde(skip)] #[serde(skip)]
live_midi_queue: Vec<MidiEvent>, live_midi_queue: Vec<MidiEvent>,
/// Clip instances that were active (overlapping playhead) in the previous render buffer.
/// Used to detect when the playhead exits a clip, so we can send all-notes-off.
#[serde(skip)]
prev_active_instances: HashSet<MidiClipInstanceId>,
} }
impl Clone for MidiTrack { impl Clone for MidiTrack {
@ -350,6 +354,7 @@ impl Clone for MidiTrack {
automation_lanes: self.automation_lanes.clone(), automation_lanes: self.automation_lanes.clone(),
next_automation_id: self.next_automation_id, next_automation_id: self.next_automation_id,
live_midi_queue: Vec::new(), // Don't clone live MIDI queue live_midi_queue: Vec::new(), // Don't clone live MIDI queue
prev_active_instances: HashSet::new(),
} }
} }
} }
@ -372,6 +377,7 @@ impl MidiTrack {
automation_lanes: HashMap::new(), automation_lanes: HashMap::new(),
next_automation_id: 0, next_automation_id: 0,
live_midi_queue: Vec::new(), live_midi_queue: Vec::new(),
prev_active_instances: HashSet::new(),
} }
} }
@ -505,7 +511,11 @@ impl MidiTrack {
// Collect MIDI events from all clip instances that overlap with current time range // Collect MIDI events from all clip instances that overlap with current time range
let mut midi_events = Vec::new(); let mut midi_events = Vec::new();
let mut currently_active = HashSet::new();
for instance in &self.clip_instances { for instance in &self.clip_instances {
if instance.overlaps_range(playhead_seconds, buffer_end_seconds) {
currently_active.insert(instance.id);
}
// Get the clip content from the pool // Get the clip content from the pool
if let Some(clip) = midi_pool.get_clip(instance.clip_id) { if let Some(clip) = midi_pool.get_clip(instance.clip_id) {
let events = instance.get_events_in_range( let events = instance.get_events_in_range(
@ -517,6 +527,18 @@ impl MidiTrack {
} }
} }
// Send all-notes-off for clip instances that just became inactive
// (playhead exited the clip). This prevents stuck notes from malformed clips.
for prev_id in &self.prev_active_instances {
if !currently_active.contains(prev_id) {
for note in 0..128u8 {
midi_events.push(MidiEvent::note_off(playhead_seconds, 0, note, 0));
}
break; // One round of all-notes-off is enough
}
}
self.prev_active_instances = currently_active;
// Add live MIDI events (from virtual keyboard or MIDI controllers) // Add live MIDI events (from virtual keyboard or MIDI controllers)
// This allows real-time input to be heard during playback/recording // This allows real-time input to be heard during playback/recording
midi_events.extend(self.live_midi_queue.drain(..)); midi_events.extend(self.live_midi_queue.drain(..));

View File

@ -42,3 +42,34 @@ pollster = "0.3"
# Desktop notifications # Desktop notifications
notify-rust = "4.11" notify-rust = "4.11"
# Optimize the audio backend even in debug builds — the audio callback
# runs on a real-time thread with ~1.5ms deadlines at small buffer sizes,
# so it cannot tolerate unoptimized code.
[profile.dev.package.daw-backend]
opt-level = 2
# Also optimize symphonia (audio decoder) and cpal (audio I/O) — these
# run in the audio callback path and are heavily numeric.
[profile.dev.package.symphonia]
opt-level = 2
[profile.dev.package.symphonia-core]
opt-level = 2
[profile.dev.package.symphonia-bundle-mp3]
opt-level = 2
[profile.dev.package.symphonia-bundle-flac]
opt-level = 2
[profile.dev.package.symphonia-format-wav]
opt-level = 2
[profile.dev.package.symphonia-format-ogg]
opt-level = 2
[profile.dev.package.symphonia-codec-vorbis]
opt-level = 2
[profile.dev.package.symphonia-codec-aac]
opt-level = 2
[profile.dev.package.symphonia-format-isomp4]
opt-level = 2
[profile.dev.package.cpal]
opt-level = 2
[profile.dev.package.rubato]
opt-level = 2

View File

@ -3482,13 +3482,103 @@ impl eframe::App for EditorApp {
self.recording_layer_id = None; self.recording_layer_id = None;
ctx.request_repaint(); ctx.request_repaint();
} }
AudioEvent::MidiRecordingProgress(_track_id, _clip_id, duration, _notes) => { AudioEvent::MidiRecordingProgress(_track_id, clip_id, duration, notes) => {
println!("🎹 MIDI recording progress: {:.2}s", duration); // Update clip duration in document (so timeline bar grows)
if let Some(layer_id) = self.recording_layer_id {
let doc_clip_id = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
} else {
None
}
})
};
if let Some(doc_clip_id) = doc_clip_id {
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
clip.duration = duration;
}
}
}
// Update midi_event_cache with notes captured so far
// (inlined instead of calling rebuild_midi_cache_entry to avoid
// conflicting &mut self borrow with event_rx loop)
{
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2);
for &(start_time, note, velocity, dur) in &notes {
events.push((start_time, note, velocity, true));
events.push((start_time + dur, note, velocity, false));
}
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
self.midi_event_cache.insert(clip_id, events);
}
ctx.request_repaint(); ctx.request_repaint();
} }
AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count) => { AudioEvent::MidiRecordingStopped(track_id, clip_id, note_count) => {
println!("🎹 MIDI recording stopped: track={:?}, clip_id={}, {} notes", println!("🎹 MIDI recording stopped: track={:?}, clip_id={}, {} notes",
track_id, clip_id, note_count); track_id, clip_id, note_count);
// Query backend for the definitive final note data
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.query_midi_clip(track_id, clip_id) {
Ok(midi_clip_data) => {
// Convert backend MidiEvent format to cache format
let cache_events: Vec<(f64, u8, u8, bool)> = midi_clip_data.events.iter()
.filter_map(|event| {
let status_type = event.status & 0xF0;
if status_type == 0x90 || status_type == 0x80 {
let is_note_on = status_type == 0x90 && event.data2 > 0;
Some((event.timestamp, event.data1, event.data2, is_note_on))
} else {
None
}
})
.collect();
drop(controller);
self.midi_event_cache.insert(clip_id, cache_events);
// Update document clip with final duration and name
if let Some(layer_id) = self.recording_layer_id {
let doc_clip_id = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
} else {
None
}
})
};
if let Some(doc_clip_id) = doc_clip_id {
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
clip.duration = midi_clip_data.duration;
clip.name = format!("MIDI Recording {}", clip_id);
}
}
}
println!("✅ Finalized MIDI recording: {} notes, {:.2}s",
note_count, midi_clip_data.duration);
}
Err(e) => {
eprintln!("Failed to query MIDI clip data after recording: {}", e);
// Cache was already populated by last MidiRecordingProgress event
}
}
}
// TODO: Store clip_instance_to_backend_map entry for this MIDI clip.
// The backend created the instance in create_midi_clip(), but doesn't
// report the instance_id back. Needed for move/trim operations later.
// Clear recording state // Clear recording state
self.is_recording = false; self.is_recording = false;
self.recording_clips.clear(); self.recording_clips.clear();

View File

@ -150,21 +150,25 @@ impl TimelinePane {
/// Start recording on the active audio layer /// Start recording on the active audio layer
fn start_recording(&mut self, shared: &mut SharedPaneState) { fn start_recording(&mut self, shared: &mut SharedPaneState) {
use lightningbeam_core::clip::{AudioClip, ClipInstance};
let Some(active_layer_id) = *shared.active_layer_id else { let Some(active_layer_id) = *shared.active_layer_id else {
println!("⚠️ No active layer selected for recording"); println!("⚠️ No active layer selected for recording");
return; return;
}; };
// Get the active layer and check if it's an audio layer // Get layer type (copy it so we can drop the document borrow before mutating)
let document = shared.action_executor.document(); let layer_type = {
let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else { let document = shared.action_executor.document();
println!("⚠️ Active layer not found in document"); let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else {
return; println!("⚠️ Active layer not found in document");
}; return;
};
let AnyLayer::Audio(audio_layer) = layer else { let AnyLayer::Audio(audio_layer) = layer else {
println!("⚠️ Active layer is not an audio layer - cannot record"); println!("⚠️ Active layer is not an audio layer - cannot record");
return; return;
};
audio_layer.audio_layer_type
}; };
// Get the backend track ID for this layer // Get the backend track ID for this layer
@ -179,31 +183,53 @@ impl TimelinePane {
if let Some(controller_arc) = shared.audio_controller { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap(); let mut controller = controller_arc.lock().unwrap();
match audio_layer.audio_layer_type { match layer_type {
AudioLayerType::Midi => { AudioLayerType::Midi => {
// For MIDI recording, we need to create a clip first // Create backend MIDI clip and start recording
// The backend will emit MidiRecordingStarted with the clip_id
let clip_id = controller.create_midi_clip(track_id, start_time, 4.0); let clip_id = controller.create_midi_clip(track_id, start_time, 4.0);
controller.start_midi_recording(track_id, clip_id, start_time); controller.start_midi_recording(track_id, clip_id, start_time);
shared.recording_clips.insert(active_layer_id, clip_id); shared.recording_clips.insert(active_layer_id, clip_id);
println!("🎹 Started MIDI recording on track {:?} at {:.2}s, clip_id={}", println!("🎹 Started MIDI recording on track {:?} at {:.2}s, clip_id={}",
track_id, start_time, clip_id); track_id, start_time, clip_id);
// Drop controller lock before document mutation
drop(controller);
// Create document clip + clip instance immediately (clip_id is known synchronously)
let doc_clip = AudioClip::new_midi("Recording...", clip_id, 4.0);
let doc_clip_id = shared.action_executor.document_mut().add_audio_clip(doc_clip);
let clip_instance = ClipInstance::new(doc_clip_id)
.with_timeline_start(start_time);
if let Some(layer) = shared.action_executor.document_mut().root.children.iter_mut()
.find(|l| l.id() == active_layer_id)
{
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.push(clip_instance);
}
}
// Initialize empty cache entry for this clip
shared.midi_event_cache.insert(clip_id, Vec::new());
} }
AudioLayerType::Sampled => { AudioLayerType::Sampled => {
// For audio recording, backend creates the clip // For audio recording, backend creates the clip
controller.start_recording(track_id, start_time); controller.start_recording(track_id, start_time);
println!("🎤 Started audio recording on track {:?} at {:.2}s", track_id, start_time); println!("🎤 Started audio recording on track {:?} at {:.2}s", track_id, start_time);
drop(controller);
} }
} }
// Auto-start playback if not already playing // Re-acquire lock for playback start
if !*shared.is_playing { if !*shared.is_playing {
let mut controller = controller_arc.lock().unwrap();
controller.play(); controller.play();
*shared.is_playing = true; *shared.is_playing = true;
println!("▶ Auto-started playback for recording"); println!("▶ Auto-started playback for recording");
} }
// Store recording state for clip creation when RecordingStarted event arrives // Store recording state
*shared.is_recording = true; *shared.is_recording = true;
*shared.recording_start_time = start_time; *shared.recording_start_time = start_time;
*shared.recording_layer_id = Some(active_layer_id); *shared.recording_layer_id = Some(active_layer_id);