Snap to beats in measures mode

This commit is contained in:
Skyler Lehmkuhl 2026-03-19 00:47:15 -04:00
parent 164ed2ba73
commit 84a1a98452
2 changed files with 178 additions and 74 deletions

View File

@ -5155,40 +5155,57 @@ impl eframe::App for EditorApp {
if let Some(layer_id) = midi_layer_id { if let Some(layer_id) = midi_layer_id {
// Lazily create the doc clip + instance on the first progress event // Lazily create the doc clip + instance on the first progress event
// (there is no MidiRecordingStarted event from the backend). // (there is no MidiRecordingStarted event from the backend).
let already_exists = self.clip_instance_to_backend_map.values().any(|v| { //
matches!(v, lightningbeam_core::action::BackendClipInstanceId::Midi(id) if *id == clip_id) // MidiClipId (clip_id) is the content ID; MidiClipInstanceId is
}); // the placement ID used in the snapshot and backend operations.
if !already_exists { // We need to store the instance ID, not the content ID, so that
use lightningbeam_core::clip::{AudioClip, ClipInstance}; // build_audio_clip_cache can correlate mc.id → doc UUID.
let clip = AudioClip::new_recording("Recording..."); // Command::CreateMidiClip has already been processed and the
let doc_clip_id = self.action_executor.document_mut().add_audio_clip(clip); // snapshot refreshed by the time this event arrives.
let clip_instance = ClipInstance::new(doc_clip_id) let backend_instance_id: u32 = if let Some(ref controller_arc) = self.audio_controller {
.with_timeline_start(self.recording_start_time); let controller = controller_arc.lock().unwrap();
let clip_instance_id = clip_instance.id; let snap = controller.clip_snapshot();
if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) { let snap = snap.read().unwrap();
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { snap.midi.get(&_track_id)
audio_layer.clip_instances.push(clip_instance); .and_then(|instances| instances.iter().find(|mc| mc.clip_id == clip_id))
} .map(|mc| mc.id)
} .unwrap_or(clip_id)
self.clip_instance_to_backend_map.insert(
clip_instance_id,
lightningbeam_core::action::BackendClipInstanceId::Midi(clip_id),
);
}
let doc_clip_id = {
let document = self.action_executor.document();
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
} else { } else {
None clip_id
}
})
}; };
// Find the Midi-typed clip instance the timeline already created.
// Register it in the map (using the correct instance ID, not the
// content ID) so trim/move actions can find it via the snapshot.
let already_mapped = self.clip_instance_to_backend_map.values().any(|v| {
matches!(v, lightningbeam_core::action::BackendClipInstanceId::Midi(id) if *id == backend_instance_id)
});
let doc_clip_id = {
let doc = self.action_executor.document();
doc.audio_clip_by_midi_clip_id(clip_id).map(|(id, _)| id)
};
if let Some(doc_clip_id) = doc_clip_id { if let Some(doc_clip_id) = doc_clip_id {
if !already_mapped {
// Find the clip instance for this clip on the layer
let instance_id = {
let doc = self.action_executor.document();
doc.get_layer(&layer_id)
.and_then(|l| {
if let lightningbeam_core::layer::AnyLayer::Audio(al) = l {
al.clip_instances.iter()
.find(|ci| ci.clip_id == doc_clip_id)
.map(|ci| ci.id)
} else { None }
})
};
if let Some(instance_id) = instance_id {
self.clip_instance_to_backend_map.insert(
instance_id,
lightningbeam_core::action::BackendClipInstanceId::Midi(backend_instance_id),
);
}
}
// Update the clip's duration so the timeline bar grows
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) { if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
clip.duration = duration; clip.duration = duration;
} }
@ -5221,28 +5238,15 @@ impl eframe::App for EditorApp {
self.midi_event_cache.insert(clip_id, midi_clip_data.events.clone()); self.midi_event_cache.insert(clip_id, midi_clip_data.events.clone());
// Update document clip with final duration and name // Update document clip with final duration and name
let midi_layer_id = self.track_to_layer_map.get(&track_id) let doc_clip_id = self.action_executor.document()
.filter(|lid| self.recording_layer_ids.contains(lid)) .audio_clip_by_midi_clip_id(clip_id)
.copied(); .map(|(id, _)| id);
if let Some(layer_id) = midi_layer_id {
let doc_clip_id = {
let document = self.action_executor.document();
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
} else {
None
}
})
};
if let Some(doc_clip_id) = doc_clip_id { if let Some(doc_clip_id) = doc_clip_id {
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) { if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
clip.duration = midi_clip_data.duration; clip.duration = midi_clip_data.duration;
clip.name = format!("MIDI Recording {}", clip_id); clip.name = format!("MIDI Recording {}", clip_id);
} }
} }
}
println!("✅ Finalized MIDI recording: {} notes, {:.2}s", println!("✅ Finalized MIDI recording: {} notes, {:.2}s",
note_count, midi_clip_data.duration); note_count, midi_clip_data.duration);

View File

@ -181,6 +181,7 @@ pub struct TimelinePane {
/// Clip drag state (None if not dragging) /// Clip drag state (None if not dragging)
clip_drag_state: Option<ClipDragType>, clip_drag_state: Option<ClipDragType>,
drag_offset: f64, // Time offset being applied during drag (for preview) drag_offset: f64, // Time offset being applied during drag (for preview)
drag_anchor_start: f64, // Original timeline_start of earliest selected clip; used for snapped move offset
/// Cached mouse position from mousedown (used for edge detection when drag starts) /// Cached mouse position from mousedown (used for edge detection when drag starts)
mousedown_pos: Option<egui::Pos2>, mousedown_pos: Option<egui::Pos2>,
@ -656,6 +657,7 @@ impl TimelinePane {
last_pan_pos: None, last_pan_pos: None,
clip_drag_state: None, clip_drag_state: None,
drag_offset: 0.0, drag_offset: 0.0,
drag_anchor_start: 0.0,
mousedown_pos: None, mousedown_pos: None,
layer_control_clicked: false, layer_control_clicked: false,
context_menu_clip: None, context_menu_clip: None,
@ -1265,6 +1267,73 @@ impl TimelinePane {
self.viewport_start_time + (x / self.pixels_per_second) as f64 self.viewport_start_time + (x / self.pixels_per_second) as f64
} }
/// Returns the quantization grid size in seconds, or None to disable snapping.
///
/// - Measures mode: zoom-adaptive — finer subdivisions when zoomed in,
///   multi-measure grids when zoomed out, and `None` when very zoomed in
///   (16th-note spacing exceeds 40px) so fine edits are unimpeded.
/// - Frames mode: always 1/framerate regardless of zoom.
/// - Seconds mode: no snapping.
fn quantize_grid_size(
    &self,
    bpm: f64,
    time_sig: &lightningbeam_core::document::TimeSignature,
    framerate: f64,
) -> Option<f64> {
    match self.time_display_format {
        TimeDisplayFormat::Frames => Some(1.0 / framerate),
        TimeDisplayFormat::Measures => {
            use lightningbeam_core::beat_time::{beat_duration, measure_duration};
            let beat = beat_duration(bpm);
            let measure = measure_duration(bpm, time_sig);
            let pps = self.pixels_per_second as f64;
            // Very zoomed in: a 16th note spans > 40px → disable snapping.
            if pps * beat / 4.0 > 40.0 {
                return None;
            }
            // Pick the finest subdivision that still renders >= 15px apart
            // (scanned finest → coarsest).
            const MIN_PX: f64 = 15.0;
            for &sub in &[beat / 4.0, beat / 2.0, beat, beat * 2.0, measure] {
                if pps * sub >= MIN_PX {
                    return Some(sub);
                }
            }
            // Very zoomed out: try 2x, 4x, ... multiples of a measure.
            let mut m = measure * 2.0;
            for _ in 0..10 {
                if pps * m >= MIN_PX {
                    return Some(m);
                }
                m *= 2.0;
            }
            // Pathologically zoomed out: every multiple up to 1024 measures
            // is still under MIN_PX. Fall back to the coarsest multiple tried
            // (m was doubled once past the last check) rather than the base
            // measure, which already failed the spacing check above.
            Some(m / 2.0)
        }
        TimeDisplayFormat::Seconds => None,
    }
}
/// Snap a time value to the nearest quantization grid point.
///
/// Returns `t` unchanged when snapping is disabled for the current
/// time-display mode / zoom level (see `quantize_grid_size`).
fn snap_to_grid(
    &self,
    t: f64,
    bpm: f64,
    time_sig: &lightningbeam_core::document::TimeSignature,
    framerate: f64,
) -> f64 {
    // Round to the nearest grid multiple; pass through if no grid applies.
    self.quantize_grid_size(bpm, time_sig, framerate)
        .map_or(t, |grid| (t / grid).round() * grid)
}
/// Effective drag offset for Move operations.
///
/// Snaps the anchor clip's resulting position to the grid; every selected
/// clip then shares this single offset, preserving relative spacing.
fn snapped_move_offset(
    &self,
    bpm: f64,
    time_sig: &lightningbeam_core::document::TimeSignature,
    framerate: f64,
) -> f64 {
    // No grid (seconds mode / extreme zoom): raw offset passes through.
    let Some(grid) = self.quantize_grid_size(bpm, time_sig, framerate) else {
        return self.drag_offset;
    };
    // Snap where the anchor clip would land, then express that landing
    // point as an offset from the anchor's original start.
    let target = self.drag_anchor_start + self.drag_offset;
    (target / grid).round() * grid - self.drag_anchor_start
}
/// Calculate appropriate interval for time ruler based on zoom level /// Calculate appropriate interval for time ruler based on zoom level
fn calculate_ruler_interval(&self) -> f64 { fn calculate_ruler_interval(&self) -> f64 {
// Target: 50-100px between major ticks // Target: 50-100px between major ticks
@ -2530,7 +2599,7 @@ impl TimelinePane {
let dur = ci.total_duration(clip_dur); let dur = ci.total_duration(clip_dur);
// Apply drag offset for selected clips during move // Apply drag offset for selected clips during move
if is_move_drag && selection.contains_clip_instance(&ci.id) { if is_move_drag && selection.contains_clip_instance(&ci.id) {
start = (start + self.drag_offset).max(0.0); start = (start + self.snapped_move_offset(document.bpm, &document.time_signature, document.framerate)).max(0.0);
} }
ranges.push((start, start + dur)); ranges.push((start, start + dur));
} }
@ -2600,7 +2669,7 @@ impl TimelinePane {
.unwrap_or_else(|| ci.trim_end.unwrap_or(1.0) - ci.trim_start); .unwrap_or_else(|| ci.trim_end.unwrap_or(1.0) - ci.trim_start);
let mut ci_start = ci.effective_start(); let mut ci_start = ci.effective_start();
if is_move_drag && selection.contains_clip_instance(&ci.id) { if is_move_drag && selection.contains_clip_instance(&ci.id) {
ci_start = (ci_start + self.drag_offset).max(0.0); ci_start = (ci_start + self.snapped_move_offset(document.bpm, &document.time_signature, document.framerate)).max(0.0);
} }
let ci_duration = ci.total_duration(clip_dur); let ci_duration = ci.total_duration(clip_dur);
let ci_end = ci_start + ci_duration; let ci_end = ci_start + ci_duration;
@ -2719,7 +2788,7 @@ impl TimelinePane {
let clip_dur = audio_clip.duration; let clip_dur = audio_clip.duration;
let mut ci_start = ci.effective_start(); let mut ci_start = ci.effective_start();
if is_move_drag && selection.contains_clip_instance(&ci.id) { if is_move_drag && selection.contains_clip_instance(&ci.id) {
ci_start = (ci_start + self.drag_offset).max(0.0); ci_start = (ci_start + self.snapped_move_offset(document.bpm, &document.time_signature, document.framerate)).max(0.0);
} }
let ci_duration = ci.total_duration(clip_dur); let ci_duration = ci.total_duration(clip_dur);
@ -2821,7 +2890,7 @@ impl TimelinePane {
}) })
.collect(); .collect();
if !group.is_empty() { if !group.is_empty() {
Some(document.clamp_group_move_offset(&layer.id(), &group, self.drag_offset)) Some(document.clamp_group_move_offset(&layer.id(), &group, self.snapped_move_offset(document.bpm, &document.time_signature, document.framerate)))
} else { } else {
None None
} }
@ -2854,7 +2923,7 @@ impl TimelinePane {
} }
} }
ClipDragType::TrimLeft => { ClipDragType::TrimLeft => {
let new_trim = (ci.trim_start + self.drag_offset).max(0.0).min(clip_dur); let new_trim = self.snap_to_grid(ci.trim_start + self.drag_offset, document.bpm, &document.time_signature, document.framerate).max(0.0).min(clip_dur);
let offset = new_trim - ci.trim_start; let offset = new_trim - ci.trim_start;
start = (ci.timeline_start + offset).max(0.0); start = (ci.timeline_start + offset).max(0.0);
duration = (clip_dur - new_trim).max(0.0); duration = (clip_dur - new_trim).max(0.0);
@ -2864,14 +2933,16 @@ impl TimelinePane {
} }
ClipDragType::TrimRight => { ClipDragType::TrimRight => {
let old_trim_end = ci.trim_end.unwrap_or(clip_dur); let old_trim_end = ci.trim_end.unwrap_or(clip_dur);
let new_trim_end = (old_trim_end + self.drag_offset).max(ci.trim_start).min(clip_dur); let new_trim_end = self.snap_to_grid(old_trim_end + self.drag_offset, document.bpm, &document.time_signature, document.framerate).max(ci.trim_start).min(clip_dur);
duration = (new_trim_end - ci.trim_start).max(0.0); duration = (new_trim_end - ci.trim_start).max(0.0);
} }
ClipDragType::LoopExtendRight => { ClipDragType::LoopExtendRight => {
let trim_end = ci.trim_end.unwrap_or(clip_dur); let trim_end = ci.trim_end.unwrap_or(clip_dur);
let content_window = (trim_end - ci.trim_start).max(0.0); let content_window = (trim_end - ci.trim_start).max(0.0);
let current_right = ci.timeline_duration.unwrap_or(content_window); let current_right = ci.timeline_duration.unwrap_or(content_window);
let new_right = (current_right + self.drag_offset).max(content_window); let right_edge = ci.timeline_start + current_right + self.drag_offset;
let snapped_edge = self.snap_to_grid(right_edge, document.bpm, &document.time_signature, document.framerate);
let new_right = (snapped_edge - ci.timeline_start).max(content_window);
let loop_before = ci.loop_before.unwrap_or(0.0); let loop_before = ci.loop_before.unwrap_or(0.0);
duration = loop_before + new_right; duration = loop_before + new_right;
} }
@ -2945,7 +3016,7 @@ impl TimelinePane {
} }
ClipDragType::TrimLeft => { ClipDragType::TrimLeft => {
// Trim left: calculate new trim_start with snap to adjacent clips // Trim left: calculate new trim_start with snap to adjacent clips
let desired_trim_start = (clip_instance.trim_start + self.drag_offset) let desired_trim_start = self.snap_to_grid(clip_instance.trim_start + self.drag_offset, document.bpm, &document.time_signature, document.framerate)
.max(0.0) .max(0.0)
.min(clip_duration); .min(clip_duration);
@ -2985,8 +3056,7 @@ impl TimelinePane {
ClipDragType::TrimRight => { ClipDragType::TrimRight => {
// Trim right: extend or reduce duration with snap to adjacent clips // Trim right: extend or reduce duration with snap to adjacent clips
let old_trim_end = clip_instance.trim_end.unwrap_or(clip_duration); let old_trim_end = clip_instance.trim_end.unwrap_or(clip_duration);
let desired_change = self.drag_offset; let desired_trim_end = self.snap_to_grid(old_trim_end + self.drag_offset, document.bpm, &document.time_signature, document.framerate)
let desired_trim_end = (old_trim_end + desired_change)
.max(clip_instance.trim_start) .max(clip_instance.trim_start)
.min(clip_duration); .min(clip_duration);
@ -3019,7 +3089,9 @@ impl TimelinePane {
let trim_end = clip_instance.trim_end.unwrap_or(clip_duration); let trim_end = clip_instance.trim_end.unwrap_or(clip_duration);
let content_window = (trim_end - clip_instance.trim_start).max(0.0); let content_window = (trim_end - clip_instance.trim_start).max(0.0);
let current_right = clip_instance.timeline_duration.unwrap_or(content_window); let current_right = clip_instance.timeline_duration.unwrap_or(content_window);
let desired_right = (current_right + self.drag_offset).max(content_window); let right_edge = clip_instance.timeline_start + current_right + self.drag_offset;
let snapped_edge = self.snap_to_grid(right_edge, document.bpm, &document.time_signature, document.framerate);
let desired_right = (snapped_edge - clip_instance.timeline_start).max(content_window);
let new_right = if desired_right > current_right { let new_right = if desired_right > current_right {
let max_extend = document.find_max_trim_extend_right( let max_extend = document.find_max_trim_extend_right(
@ -4008,6 +4080,18 @@ impl TimelinePane {
// Start dragging with the detected drag type // Start dragging with the detected drag type
self.clip_drag_state = Some(drag_type); self.clip_drag_state = Some(drag_type);
self.drag_offset = 0.0; self.drag_offset = 0.0;
if drag_type == ClipDragType::Move {
// Find earliest selected clip as snap anchor for quantized moves
let mut earliest = f64::MAX;
for (_, clip_instances) in all_layer_clip_instances(context_layers, &audio_cache) {
for ci in clip_instances {
if selection.contains_clip_instance(&ci.id) && ci.timeline_start < earliest {
earliest = ci.timeline_start;
}
}
}
self.drag_anchor_start = if earliest == f64::MAX { 0.0 } else { earliest };
}
} else if let Some(child_ids) = self.detect_collapsed_group_at_pointer( } else if let Some(child_ids) = self.detect_collapsed_group_at_pointer(
mousedown_pos, mousedown_pos,
document, document,
@ -4026,6 +4110,16 @@ impl TimelinePane {
*focus = lightningbeam_core::selection::FocusSelection::ClipInstances(selection.clip_instances().to_vec()); *focus = lightningbeam_core::selection::FocusSelection::ClipInstances(selection.clip_instances().to_vec());
self.clip_drag_state = Some(ClipDragType::Move); self.clip_drag_state = Some(ClipDragType::Move);
self.drag_offset = 0.0; self.drag_offset = 0.0;
// Find earliest selected clip as snap anchor
let mut earliest = f64::MAX;
for (_, clip_instances) in all_layer_clip_instances(context_layers, &audio_cache) {
for ci in clip_instances {
if selection.contains_clip_instance(&ci.id) && ci.timeline_start < earliest {
earliest = ci.timeline_start;
}
}
}
self.drag_anchor_start = if earliest == f64::MAX { 0.0 } else { earliest };
} }
} }
} }
@ -4046,6 +4140,9 @@ impl TimelinePane {
let mut layer_moves: HashMap<uuid::Uuid, Vec<(uuid::Uuid, f64, f64)>> = let mut layer_moves: HashMap<uuid::Uuid, Vec<(uuid::Uuid, f64, f64)>> =
HashMap::new(); HashMap::new();
// Compute snapped offset once for all selected clips (preserves relative spacing)
let move_offset = self.snapped_move_offset(document.bpm, &document.time_signature, document.framerate);
// Iterate through all layers (including group children) to find selected clip instances // Iterate through all layers (including group children) to find selected clip instances
for (layer, clip_instances) in all_layer_clip_instances(context_layers, &audio_cache) { for (layer, clip_instances) in all_layer_clip_instances(context_layers, &audio_cache) {
let layer_id = layer.id(); let layer_id = layer.id();
@ -4053,7 +4150,7 @@ impl TimelinePane {
for clip_instance in clip_instances { for clip_instance in clip_instances {
if selection.contains_clip_instance(&clip_instance.id) { if selection.contains_clip_instance(&clip_instance.id) {
let old_timeline_start = clip_instance.timeline_start; let old_timeline_start = clip_instance.timeline_start;
let new_timeline_start = old_timeline_start + self.drag_offset; let new_timeline_start = old_timeline_start + move_offset;
// Add to layer_moves // Add to layer_moves
layer_moves layer_moves
@ -4104,11 +4201,11 @@ impl TimelinePane {
let old_timeline_start = let old_timeline_start =
clip_instance.timeline_start; clip_instance.timeline_start;
// New trim_start is clamped to valid range // New trim_start is snapped then clamped to valid range
let desired_trim_start = (old_trim_start let desired_trim_start = self.snap_to_grid(
+ self.drag_offset) old_trim_start + self.drag_offset,
.max(0.0) document.bpm, &document.time_signature, document.framerate,
.min(clip_duration); ).max(0.0).min(clip_duration);
// Apply overlap prevention when extending left // Apply overlap prevention when extending left
let new_trim_start = if desired_trim_start < old_trim_start { let new_trim_start = if desired_trim_start < old_trim_start {
@ -4152,9 +4249,10 @@ impl TimelinePane {
let current_duration = let current_duration =
clip_instance.effective_duration(clip_duration); clip_instance.effective_duration(clip_duration);
let old_trim_end_val = clip_instance.trim_end.unwrap_or(clip_duration); let old_trim_end_val = clip_instance.trim_end.unwrap_or(clip_duration);
let desired_trim_end = (old_trim_end_val + self.drag_offset) let desired_trim_end = self.snap_to_grid(
.max(clip_instance.trim_start) old_trim_end_val + self.drag_offset,
.min(clip_duration); document.bpm, &document.time_signature, document.framerate,
).max(clip_instance.trim_start).min(clip_duration);
// Apply overlap prevention when extending right // Apply overlap prevention when extending right
let new_trim_end_val = if desired_trim_end > old_trim_end_val { let new_trim_end_val = if desired_trim_end > old_trim_end_val {
@ -4230,7 +4328,9 @@ impl TimelinePane {
let trim_end = clip_instance.trim_end.unwrap_or(clip_duration); let trim_end = clip_instance.trim_end.unwrap_or(clip_duration);
let content_window = (trim_end - clip_instance.trim_start).max(0.0); let content_window = (trim_end - clip_instance.trim_start).max(0.0);
let current_right = clip_instance.timeline_duration.unwrap_or(content_window); let current_right = clip_instance.timeline_duration.unwrap_or(content_window);
let desired_right = current_right + self.drag_offset; let right_edge = clip_instance.timeline_start + current_right + self.drag_offset;
let snapped_edge = self.snap_to_grid(right_edge, document.bpm, &document.time_signature, document.framerate);
let desired_right = snapped_edge - clip_instance.timeline_start;
let new_right = if desired_right > current_right { let new_right = if desired_right > current_right {
let max_extend = document.find_max_trim_extend_right( let max_extend = document.find_max_trim_extend_right(
@ -4407,7 +4507,7 @@ impl TimelinePane {
if cursor_over_ruler && !alt_held && (response.clicked() || (response.dragged() && !self.is_panning)) { if cursor_over_ruler && !alt_held && (response.clicked() || (response.dragged() && !self.is_panning)) {
if let Some(pos) = response.interact_pointer_pos() { if let Some(pos) = response.interact_pointer_pos() {
let x = (pos.x - content_rect.min.x).max(0.0); let x = (pos.x - content_rect.min.x).max(0.0);
let new_time = self.x_to_time(x).max(0.0); let new_time = self.snap_to_grid(self.x_to_time(x).max(0.0), document.bpm, &document.time_signature, document.framerate);
*playback_time = new_time; *playback_time = new_time;
self.is_scrubbing = true; self.is_scrubbing = true;
// Seek immediately so it works while playing // Seek immediately so it works while playing
@ -4421,7 +4521,7 @@ impl TimelinePane {
else if self.is_scrubbing && response.dragged() && !self.is_panning { else if self.is_scrubbing && response.dragged() && !self.is_panning {
if let Some(pos) = response.interact_pointer_pos() { if let Some(pos) = response.interact_pointer_pos() {
let x = (pos.x - content_rect.min.x).max(0.0); let x = (pos.x - content_rect.min.x).max(0.0);
let new_time = self.x_to_time(x).max(0.0); let new_time = self.snap_to_grid(self.x_to_time(x).max(0.0), document.bpm, &document.time_signature, document.framerate);
*playback_time = new_time; *playback_time = new_time;
if let Some(controller_arc) = audio_controller { if let Some(controller_arc) = audio_controller {
let mut controller = controller_arc.lock().unwrap(); let mut controller = controller_arc.lock().unwrap();