All events now have three time references for seconds, measures/beats, frames

This commit is contained in:
Skyler Lehmkuhl 2026-03-30 10:15:55 -04:00
parent 65a550d8f4
commit cfb8e4462b
19 changed files with 1317 additions and 59 deletions

View File

@ -15,6 +15,7 @@ pub type ClipId = AudioClipInstanceId;
/// ## Timing Model
/// - `internal_start` / `internal_end`: Define the region of the source audio to play (trimming)
/// - `external_start` / `external_duration`: Define where the clip appears on the timeline and how long
/// - `*_beats` / `*_frames`: Derived representations for Measures/Frames mode display
///
/// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`,
@ -26,13 +27,21 @@ pub struct AudioClipInstance {
/// Start position within the audio content (seconds)
pub internal_start: f64,
#[serde(default)] pub internal_start_beats: f64,
#[serde(default)] pub internal_start_frames: f64,
/// End position within the audio content (seconds)
pub internal_end: f64,
#[serde(default)] pub internal_end_beats: f64,
#[serde(default)] pub internal_end_frames: f64,
/// Start position on the timeline (seconds)
pub external_start: f64,
#[serde(default)] pub external_start_beats: f64,
#[serde(default)] pub external_start_frames: f64,
/// Duration on the timeline (seconds) - can be longer than internal duration for looping
pub external_duration: f64,
#[serde(default)] pub external_duration_beats: f64,
#[serde(default)] pub external_duration_frames: f64,
/// Clip-level gain
pub gain: f32,
@ -62,9 +71,17 @@ impl AudioClipInstance {
id,
audio_pool_index,
internal_start,
internal_start_beats: 0.0,
internal_start_frames: 0.0,
internal_end,
internal_end_beats: 0.0,
internal_end_frames: 0.0,
external_start,
external_start_beats: 0.0,
external_start_frames: 0.0,
external_duration,
external_duration_beats: 0.0,
external_duration_frames: 0.0,
gain: 1.0,
read_ahead: None,
}
@ -83,9 +100,17 @@ impl AudioClipInstance {
id,
audio_pool_index,
internal_start: offset,
internal_start_beats: 0.0,
internal_start_frames: 0.0,
internal_end: offset + duration,
internal_end_beats: 0.0,
internal_end_frames: 0.0,
external_start: start_time,
external_start_beats: 0.0,
external_start_frames: 0.0,
external_duration: duration,
external_duration_beats: 0.0,
external_duration_frames: 0.0,
gain: 1.0,
read_ahead: None,
}
@ -147,4 +172,40 @@ impl AudioClipInstance {
/// Set the clip-level gain, clamping negative inputs to 0.0 (silence).
/// `f32::max` also maps a NaN input to 0.0.
pub fn set_gain(&mut self, gain: f32) {
    self.gain = gain.max(0.0);
}
/// Populate the derived beats/frames representations from the canonical
/// seconds values (call after creating, moving, or trimming a clip).
pub fn sync_from_seconds(&mut self, bpm: f64, fps: f64) {
    // One beat lasts 60/bpm seconds; one frame lasts 1/fps seconds.
    let to_beats = |secs: f64| secs * bpm / 60.0;
    let to_frames = |secs: f64| secs * fps;
    self.internal_start_beats = to_beats(self.internal_start);
    self.internal_start_frames = to_frames(self.internal_start);
    self.internal_end_beats = to_beats(self.internal_end);
    self.internal_end_frames = to_frames(self.internal_end);
    self.external_start_beats = to_beats(self.external_start);
    self.external_start_frames = to_frames(self.external_start);
    self.external_duration_beats = to_beats(self.external_duration);
    self.external_duration_frames = to_frames(self.external_duration);
}
/// BPM changed; beats are canonical here — recompute each seconds value
/// from its stored beats, then derive frames from the fresh seconds.
pub fn apply_beats(&mut self, bpm: f64, fps: f64) {
    let to_secs = |beats: f64| beats * 60.0 / bpm;
    self.internal_start = to_secs(self.internal_start_beats);
    self.internal_start_frames = self.internal_start * fps;
    self.internal_end = to_secs(self.internal_end_beats);
    self.internal_end_frames = self.internal_end * fps;
    self.external_start = to_secs(self.external_start_beats);
    self.external_start_frames = self.external_start * fps;
    self.external_duration = to_secs(self.external_duration_beats);
    self.external_duration_frames = self.external_duration * fps;
}
/// FPS changed; frames are canonical here — recompute each seconds value
/// from its stored frames, then derive beats from the fresh seconds.
pub fn apply_frames(&mut self, fps: f64, bpm: f64) {
    let to_secs = |frames: f64| frames / fps;
    self.internal_start = to_secs(self.internal_start_frames);
    self.internal_start_beats = self.internal_start * bpm / 60.0;
    self.internal_end = to_secs(self.internal_end_frames);
    self.internal_end_beats = self.internal_end * bpm / 60.0;
    self.external_start = to_secs(self.external_start_frames);
    self.external_start_beats = self.external_start * bpm / 60.0;
    self.external_duration = to_secs(self.external_duration_frames);
    self.external_duration_beats = self.external_duration * bpm / 60.0;
}
}

View File

@ -108,6 +108,11 @@ pub struct Engine {
timing_worst_render_us: u64,
timing_sum_total_us: u64,
timing_overrun_count: u64,
// Current tempo/framerate — kept in sync with SetTempo/ApplyBpmChange so that
// newly-created clip instances can be immediately synced via sync_from_seconds.
current_bpm: f64,
current_fps: f64,
}
impl Engine {
@ -184,6 +189,8 @@ impl Engine {
timing_worst_render_us: 0,
timing_sum_total_us: 0,
timing_overrun_count: 0,
current_bpm: 120.0,
current_fps: 30.0,
}
}
@ -728,16 +735,19 @@ impl Engine {
}
Command::MoveClip(track_id, clip_id, new_start_time) => {
// Moving just changes external_start, external_duration stays the same
let bpm = self.current_bpm;
let fps = self.current_fps;
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.external_start = new_start_time;
clip.sync_from_seconds(bpm, fps);
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
// Note: clip_id here is the pool clip ID, not instance ID
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.id == clip_id) {
instance.external_start = new_start_time;
instance.sync_from_seconds(bpm, fps);
}
}
_ => {}
@ -747,13 +757,15 @@ impl Engine {
Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end) => {
// Trim changes which portion of the source content is used
// Also updates external_duration to match internal duration (no looping after trim)
let bpm = self.current_bpm;
let fps = self.current_fps;
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.internal_start = new_internal_start;
clip.internal_end = new_internal_end;
// By default, trimming sets external_duration to match internal duration
clip.external_duration = new_internal_end - new_internal_start;
clip.sync_from_seconds(bpm, fps);
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
@ -761,8 +773,8 @@ impl Engine {
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
instance.internal_start = new_internal_start;
instance.internal_end = new_internal_end;
// By default, trimming sets external_duration to match internal duration
instance.external_duration = new_internal_end - new_internal_start;
instance.sync_from_seconds(bpm, fps);
}
}
_ => {}
@ -771,16 +783,20 @@ impl Engine {
}
Command::ExtendClip(track_id, clip_id, new_external_duration) => {
// Extend changes the external duration (enables looping if > internal duration)
let bpm = self.current_bpm;
let fps = self.current_fps;
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.external_duration = new_external_duration;
clip.sync_from_seconds(bpm, fps);
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
// Note: clip_id here is the pool clip ID, not instance ID
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
instance.external_duration = new_external_duration;
instance.sync_from_seconds(bpm, fps);
}
}
_ => {}
@ -899,13 +915,14 @@ impl Engine {
}
Command::AddAudioClip(track_id, clip_id, pool_index, start_time, duration, offset) => {
// Create a new clip instance with the pre-assigned clip_id
let clip = AudioClipInstance::from_legacy(
let mut clip = AudioClipInstance::from_legacy(
clip_id,
pool_index,
start_time,
duration,
offset,
);
clip.sync_from_seconds(self.current_bpm, self.current_fps);
// Add clip to track
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
@ -933,7 +950,8 @@ impl Engine {
// Create an instance for this clip on the track
let instance_id = self.project.next_midi_clip_instance_id();
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
let mut instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
instance.sync_from_seconds(self.current_bpm, self.current_fps);
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
track.clip_instances.push(instance);
@ -973,7 +991,15 @@ impl Engine {
}
Command::AddLoadedMidiClip(track_id, clip, start_time) => {
// Add a pre-loaded MIDI clip to the track with the given start time
let _ = self.project.add_midi_clip_at(track_id, clip, start_time);
let bpm = self.current_bpm;
let fps = self.current_fps;
if let Ok(instance_id) = self.project.add_midi_clip_at(track_id, clip, start_time) {
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(inst) = track.clip_instances.iter_mut().find(|i| i.id == instance_id) {
inst.sync_from_seconds(bpm, fps);
}
}
}
self.refresh_clip_snapshot();
}
Command::UpdateMidiClipNotes(_track_id, clip_id, notes) => {
@ -1277,6 +1303,14 @@ impl Engine {
Command::SetTempo(bpm, time_sig) => {
self.metronome.update_timing(bpm, time_sig);
self.project.set_tempo(bpm, time_sig.0);
self.current_bpm = bpm as f64;
}
Command::ApplyBpmChange(bpm, fps, midi_durations) => {
self.current_bpm = bpm;
self.current_fps = fps;
self.project.apply_bpm_change(bpm, fps, &midi_durations);
self.refresh_clip_snapshot();
}
// Node graph commands
@ -2716,8 +2750,18 @@ impl Engine {
}
Query::AddMidiClipSync(track_id, clip, start_time) => {
// Add MIDI clip to track and return the instance ID
let bpm = self.current_bpm;
let fps = self.current_fps;
let result = match self.project.add_midi_clip_at(track_id, clip, start_time) {
Ok(instance_id) => QueryResponse::MidiClipInstanceAdded(Ok(instance_id)),
Ok(instance_id) => {
// Sync beats/frames on the newly created instance
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(inst) = track.clip_instances.iter_mut().find(|i| i.id == instance_id) {
inst.sync_from_seconds(bpm, fps);
}
}
QueryResponse::MidiClipInstanceAdded(Ok(instance_id))
}
Err(e) => QueryResponse::MidiClipInstanceAdded(Err(e.to_string())),
};
self.refresh_clip_snapshot();
@ -2728,6 +2772,7 @@ impl Engine {
// Assign instance ID
let instance_id = self.project.next_midi_clip_instance_id();
instance.id = instance_id;
instance.sync_from_seconds(self.current_bpm, self.current_fps);
let result = match self.project.add_midi_clip_instance(track_id, instance) {
Ok(_) => QueryResponse::MidiClipInstanceAdded(Ok(instance_id)),
@ -3642,6 +3687,12 @@ impl EngineController {
let _ = self.command_tx.push(Command::SetTempo(bpm, time_signature));
}
/// After a BPM change: update MIDI clip durations and sync all clip beats/frames.
/// Call this after move_clip() has been called for all affected clips.
///
/// Fire-and-forget: the push result is discarded, so the command is silently
/// dropped if the command queue is full — NOTE(review): confirm the queue is
/// sized so this cannot happen during a BPM change burst.
pub fn apply_bpm_change(&mut self, bpm: f64, fps: f64, midi_durations: Vec<(crate::audio::MidiClipId, f64)>) {
    let _ = self.command_tx.push(Command::ApplyBpmChange(bpm, fps, midi_durations));
}
// Node graph operations
/// Add a node to a track's instrument graph

View File

@ -3,6 +3,12 @@
pub struct MidiEvent {
/// Time position within the clip in seconds (sample-rate independent)
pub timestamp: f64,
/// Time position in beats (quarter-note beats from clip start); derived from timestamp
#[serde(default)]
pub timestamp_beats: f64,
/// Time position in frames; derived from timestamp
#[serde(default)]
pub timestamp_frames: f64,
/// MIDI status byte (includes channel)
pub status: u8,
/// First data byte (note number, CC number, etc.)
@ -16,6 +22,8 @@ impl MidiEvent {
pub fn new(timestamp: f64, status: u8, data1: u8, data2: u8) -> Self {
Self {
timestamp,
timestamp_beats: 0.0,
timestamp_frames: 0.0,
status,
data1,
data2,
@ -26,6 +34,8 @@ impl MidiEvent {
pub fn note_on(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self {
Self {
timestamp,
timestamp_beats: 0.0,
timestamp_frames: 0.0,
status: 0x90 | (channel & 0x0F),
data1: note,
data2: velocity,
@ -36,12 +46,32 @@ impl MidiEvent {
pub fn note_off(timestamp: f64, channel: u8, note: u8, velocity: u8) -> Self {
Self {
timestamp,
timestamp_beats: 0.0,
timestamp_frames: 0.0,
status: 0x80 | (channel & 0x0F),
data1: note,
data2: velocity,
}
}
/// Sync beats and frames from seconds (call after constructing or when seconds is canonical)
pub fn sync_from_seconds(&mut self, bpm: f64, fps: f64) {
    let secs = self.timestamp;
    self.timestamp_beats = secs * bpm / 60.0;
    self.timestamp_frames = secs * fps;
}
/// Recompute seconds and frames from beats (call when BPM changes in Measures mode)
pub fn apply_beats(&mut self, bpm: f64, fps: f64) {
    let secs = self.timestamp_beats * 60.0 / bpm;
    self.timestamp = secs;
    self.timestamp_frames = secs * fps;
}
/// Recompute seconds and beats from frames (call when FPS changes in Frames mode)
pub fn apply_frames(&mut self, fps: f64, bpm: f64) {
    let secs = self.timestamp_frames / fps;
    self.timestamp = secs;
    self.timestamp_beats = secs * bpm / 60.0;
}
/// Check if this is a note on event (with non-zero velocity)
pub fn is_note_on(&self) -> bool {
(self.status & 0xF0) == 0x90 && self.data2 > 0
@ -128,6 +158,7 @@ impl MidiClip {
/// ## Timing Model
/// - `internal_start` / `internal_end`: Define the region of the source clip to play (trimming)
/// - `external_start` / `external_duration`: Define where the instance appears on the timeline and how long
/// - `*_beats` / `*_frames`: Derived representations for Measures/Frames mode display
///
/// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`,
@ -139,13 +170,21 @@ pub struct MidiClipInstance {
/// Start position within the clip content (seconds)
pub internal_start: f64,
#[serde(default)] pub internal_start_beats: f64,
#[serde(default)] pub internal_start_frames: f64,
/// End position within the clip content (seconds)
pub internal_end: f64,
#[serde(default)] pub internal_end_beats: f64,
#[serde(default)] pub internal_end_frames: f64,
/// Start position on the timeline (seconds)
pub external_start: f64,
#[serde(default)] pub external_start_beats: f64,
#[serde(default)] pub external_start_frames: f64,
/// Duration on the timeline (seconds) - can be longer than internal duration for looping
pub external_duration: f64,
#[serde(default)] pub external_duration_beats: f64,
#[serde(default)] pub external_duration_frames: f64,
}
impl MidiClipInstance {
@ -162,9 +201,17 @@ impl MidiClipInstance {
id,
clip_id,
internal_start,
internal_start_beats: 0.0,
internal_start_frames: 0.0,
internal_end,
internal_end_beats: 0.0,
internal_end_frames: 0.0,
external_start,
external_start_beats: 0.0,
external_start_frames: 0.0,
external_duration,
external_duration_beats: 0.0,
external_duration_frames: 0.0,
}
}
@ -179,9 +226,17 @@ impl MidiClipInstance {
id,
clip_id,
internal_start: 0.0,
internal_start_beats: 0.0,
internal_start_frames: 0.0,
internal_end: clip_duration,
internal_end_beats: 0.0,
internal_end_frames: 0.0,
external_start,
external_start_beats: 0.0,
external_start_frames: 0.0,
external_duration: clip_duration,
external_duration_beats: 0.0,
external_duration_frames: 0.0,
}
}
@ -215,6 +270,42 @@ impl MidiClipInstance {
self.external_start < range_end && self.external_end() > range_start
}
/// Populate the derived beats/frames representations from the canonical
/// seconds values (call after creating, moving, or trimming an instance).
pub fn sync_from_seconds(&mut self, bpm: f64, fps: f64) {
    // One beat lasts 60/bpm seconds; one frame lasts 1/fps seconds.
    let to_beats = |secs: f64| secs * bpm / 60.0;
    let to_frames = |secs: f64| secs * fps;
    self.internal_start_beats = to_beats(self.internal_start);
    self.internal_start_frames = to_frames(self.internal_start);
    self.internal_end_beats = to_beats(self.internal_end);
    self.internal_end_frames = to_frames(self.internal_end);
    self.external_start_beats = to_beats(self.external_start);
    self.external_start_frames = to_frames(self.external_start);
    self.external_duration_beats = to_beats(self.external_duration);
    self.external_duration_frames = to_frames(self.external_duration);
}
/// BPM changed; beats are canonical here — recompute each seconds value
/// from its stored beats, then derive frames from the fresh seconds.
pub fn apply_beats(&mut self, bpm: f64, fps: f64) {
    let to_secs = |beats: f64| beats * 60.0 / bpm;
    self.internal_start = to_secs(self.internal_start_beats);
    self.internal_start_frames = self.internal_start * fps;
    self.internal_end = to_secs(self.internal_end_beats);
    self.internal_end_frames = self.internal_end * fps;
    self.external_start = to_secs(self.external_start_beats);
    self.external_start_frames = self.external_start * fps;
    self.external_duration = to_secs(self.external_duration_beats);
    self.external_duration_frames = self.external_duration * fps;
}
/// FPS changed; frames are canonical here — recompute each seconds value
/// from its stored frames, then derive beats from the fresh seconds.
pub fn apply_frames(&mut self, fps: f64, bpm: f64) {
    let to_secs = |frames: f64| frames / fps;
    self.internal_start = to_secs(self.internal_start_frames);
    self.internal_start_beats = self.internal_start * bpm / 60.0;
    self.internal_end = to_secs(self.internal_end_frames);
    self.internal_end_beats = self.internal_end * bpm / 60.0;
    self.external_start = to_secs(self.external_start_frames);
    self.external_start_beats = self.external_start * bpm / 60.0;
    self.external_duration = to_secs(self.external_duration_frames);
    self.external_duration_beats = self.external_duration * bpm / 60.0;
}
/// Get events that should be triggered in a given timeline range
///
/// This handles:

View File

@ -216,6 +216,46 @@ impl Project {
self.tracks.iter().map(|(&id, node)| (id, node))
}
/// After a BPM change, update MIDI clip durations then sync all clip beats/frames from seconds.
///
/// `midi_durations` maps each MidiClipId to its new content duration in seconds.
/// Call this after the seconds positions have already been updated (e.g. via MoveClip).
pub fn apply_bpm_change(&mut self, bpm: f64, fps: f64, midi_durations: &[(crate::audio::midi::MidiClipId, f64)]) {
    // Update pool clip durations exactly once, up front.
    // (Previously this ran inside the per-track match arm, so the pool was
    // never updated when the project had no MIDI tracks, and was redundantly
    // rewritten once per MIDI track otherwise.)
    for &(clip_id, new_dur) in midi_durations {
        if let Some(clip) = self.midi_clip_pool.get_clip_mut(clip_id) {
            clip.duration = new_dur;
        }
    }
    for (_, track) in self.tracks.iter_mut() {
        match track {
            crate::audio::track::TrackNode::Audio(t) => {
                for clip in &mut t.clips {
                    clip.sync_from_seconds(bpm, fps);
                }
            }
            crate::audio::track::TrackNode::Midi(t) => {
                // Update content durations first so internal_end is correct before sync.
                for instance in &mut t.clip_instances {
                    if let Some(&new_dur) = midi_durations
                        .iter()
                        .find(|(id, _)| *id == instance.clip_id)
                        .map(|(_, d)| d)
                    {
                        let old_internal_dur = instance.internal_duration();
                        instance.internal_end = instance.internal_start + new_dur;
                        // Scale external_duration by the same ratio (works for both
                        // looping and non-looping); guard against a degenerate
                        // zero-length internal region.
                        if old_internal_dur > 1e-12 {
                            instance.external_duration =
                                instance.external_duration * new_dur / old_internal_dur;
                        }
                    }
                    instance.sync_from_seconds(bpm, fps);
                }
            }
            _ => {}
        }
    }
}
/// Get oscilloscope data from a node in a track's graph
pub fn get_oscilloscope_data(&self, track_id: TrackId, node_id: u32, sample_count: usize) -> Option<(Vec<f32>, Vec<f32>)> {
if let Some(TrackNode::Midi(track)) = self.tracks.get(&track_id) {

View File

@ -144,6 +144,9 @@ pub enum Command {
SetMetronomeEnabled(bool),
/// Set project tempo and time signature (bpm, (numerator, denominator))
SetTempo(f32, (u32, u32)),
/// After a BPM change: update MIDI clip durations and sync all clip beats/frames from seconds.
/// (bpm, fps, midi_durations: Vec<(clip_id, new_duration_seconds)>)
ApplyBpmChange(f64, f64, Vec<(MidiClipId, f64)>),
// Node graph commands
/// Add a node to a track's instrument graph (track_id, node_type, position_x, position_y)

View File

@ -120,6 +120,17 @@ pub trait Action: Send {
/// Default: `None` — actions that carry no single-clip MIDI payload need not override.
fn midi_events_after_rollback(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
    None
}
/// Return MIDI event data for multiple clips after execute/redo (e.g. BPM change).
/// Each element is (midi_clip_id, events). Default: empty — only actions that
/// touch many clips at once (like ChangeBpmAction) override this.
fn all_midi_events_after_execute(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
    Vec::new()
}
/// Return MIDI event data for multiple clips after rollback/undo.
/// Each element is (midi_clip_id, events). Default: empty.
fn all_midi_events_after_rollback(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
    Vec::new()
}
}
/// Action executor that wraps the document and manages undo/redo
@ -301,6 +312,16 @@ impl ActionExecutor {
self.redo_stack.last().and_then(|a| a.midi_events_after_rollback())
}
/// Get multi-clip MIDI event data from the most recent undo-stack action
/// (i.e. after an execute/redo); empty when the stack is empty.
pub fn last_undo_all_midi_events(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
    match self.undo_stack.last() {
        Some(action) => action.all_midi_events_after_execute(),
        None => Vec::new(),
    }
}
/// Get multi-clip MIDI event data from the most recent redo-stack action
/// (i.e. after an undo); empty when the stack is empty.
pub fn last_redo_all_midi_events(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
    match self.redo_stack.last() {
        Some(action) => action.all_midi_events_after_rollback(),
        None => Vec::new(),
    }
}
/// Get the description of the next action to redo
pub fn redo_description(&self) -> Option<String> {
self.redo_stack.last().map(|a| a.description())

View File

@ -79,6 +79,8 @@ impl Action for AddClipInstanceAction {
if let Some(valid_start) = adjusted_start {
// Update instance to use the valid position
self.clip_instance.timeline_start = valid_start;
let (bpm, fps) = (document.bpm, document.framerate);
self.clip_instance.sync_from_seconds(bpm, fps);
} else {
// No valid position found - reject the operation
return Err("Cannot add clip: no valid position found on layer (layer is full)".to_string());

View File

@ -0,0 +1,361 @@
//! Change BPM action
//!
//! Atomically changes the document BPM and rescales all clip instance positions and
//! MIDI event timestamps so that beat positions are preserved (Measures mode behaviour).
use crate::action::{Action, BackendContext};
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
use std::collections::HashMap;
use uuid::Uuid;
/// Snapshot of all timing fields on a `ClipInstance`
/// (each quantity is stored as a seconds / beats / frames triple).
#[derive(Clone)]
struct TimingFields {
    // Timeline position of the instance
    timeline_start: f64,
    timeline_start_beats: f64,
    timeline_start_frames: f64,
    // Trim in-point within the source content
    trim_start: f64,
    trim_start_beats: f64,
    trim_start_frames: f64,
    // Trim out-point; None = untrimmed end
    trim_end: Option<f64>,
    trim_end_beats: Option<f64>,
    trim_end_frames: Option<f64>,
    // Explicit timeline duration; None = derived from content
    timeline_duration: Option<f64>,
    timeline_duration_beats: Option<f64>,
    timeline_duration_frames: Option<f64>,
}
impl TimingFields {
    /// Capture every timing field from a clip instance (mechanical field copy).
    fn from_instance(ci: &ClipInstance) -> Self {
        Self {
            timeline_start: ci.timeline_start,
            timeline_start_beats: ci.timeline_start_beats,
            timeline_start_frames: ci.timeline_start_frames,
            trim_start: ci.trim_start,
            trim_start_beats: ci.trim_start_beats,
            trim_start_frames: ci.trim_start_frames,
            trim_end: ci.trim_end,
            trim_end_beats: ci.trim_end_beats,
            trim_end_frames: ci.trim_end_frames,
            timeline_duration: ci.timeline_duration,
            timeline_duration_beats: ci.timeline_duration_beats,
            timeline_duration_frames: ci.timeline_duration_frames,
        }
    }
    /// Write every captured timing field back onto a clip instance
    /// (exact inverse of `from_instance`; all other fields untouched).
    fn apply_to(&self, ci: &mut ClipInstance) {
        ci.timeline_start = self.timeline_start;
        ci.timeline_start_beats = self.timeline_start_beats;
        ci.timeline_start_frames = self.timeline_start_frames;
        ci.trim_start = self.trim_start;
        ci.trim_start_beats = self.trim_start_beats;
        ci.trim_start_frames = self.trim_start_frames;
        ci.trim_end = self.trim_end;
        ci.trim_end_beats = self.trim_end_beats;
        ci.trim_end_frames = self.trim_end_frames;
        ci.timeline_duration = self.timeline_duration;
        ci.timeline_duration_beats = self.timeline_duration_beats;
        ci.timeline_duration_frames = self.timeline_duration_frames;
    }
}
/// Before/after timing for one clip instance, addressed by layer + instance id.
#[derive(Clone)]
struct ClipTimingSnapshot {
    layer_id: Uuid,
    instance_id: Uuid,
    old_fields: TimingFields,  // timing at the old BPM (rollback/undo)
    new_fields: TimingFields,  // timing at the new BPM (execute/redo)
}
/// Before/after state for one MIDI clip: content duration plus full event lists.
#[derive(Clone)]
struct MidiClipSnapshot {
    layer_id: Uuid,
    midi_clip_id: u32,  // backend MIDI clip id
    clip_id: Uuid,      // frontend audio-clip id owning this MIDI content
    old_clip_duration: f64,
    new_clip_duration: f64,
    old_events: Vec<daw_backend::audio::midi::MidiEvent>,
    new_events: Vec<daw_backend::audio::midi::MidiEvent>,
}
/// Action that atomically changes BPM and rescales all clip/note positions to preserve beats.
/// All new positions are precomputed in `new()`; execute/rollback just swap the
/// stored snapshots in and out.
pub struct ChangeBpmAction {
    old_bpm: f64,
    new_bpm: f64,
    time_sig: (u32, u32),  // (numerator, denominator) at action-build time
    clip_snapshots: Vec<ClipTimingSnapshot>,
    midi_snapshots: Vec<MidiClipSnapshot>,
}
impl ChangeBpmAction {
/// Build the action, computing new positions for all clip instances and MIDI events.
///
/// `midi_event_cache` maps backend MIDI clip ID → current event list.
pub fn new(
    old_bpm: f64,
    new_bpm: f64,
    document: &Document,
    midi_event_cache: &HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
) -> Self {
    let fps = document.framerate;
    let time_sig = (
        document.time_signature.numerator,
        document.time_signature.denominator,
    );
    let mut clip_snapshots: Vec<ClipTimingSnapshot> = Vec::new();
    let mut midi_snapshots: Vec<MidiClipSnapshot> = Vec::new();
    // Collect MIDI clip IDs we've already snapshotted (avoid duplicates)
    let mut seen_midi_clips: std::collections::HashSet<u32> = std::collections::HashSet::new();
    // A seconds/beats pair is "unsynced" when beats were never populated
    // (0.0 / None) while the seconds side is meaningfully non-zero — such data
    // predates the triple-representation fields, so beats must be derived from
    // seconds before they can be treated as canonical.
    //
    // FIX: the previous guard only inspected `timeline_start`, so a clip at
    // t≈0 whose trim/duration had never been synced would get those fields
    // zeroed by `apply_beats`. Check every seconds/beats pair instead.
    let unsynced = |secs: f64, beats: f64| beats == 0.0 && secs.abs() > 1e-9;
    let unsynced_opt = |secs: Option<f64>, beats: Option<f64>| match (secs, beats) {
        (Some(s), None) => s.abs() > 1e-9,
        (Some(s), Some(b)) => unsynced(s, b),
        _ => false,
    };
    for layer in document.all_layers() {
        let layer_id = layer.id();
        let clip_instances: &[ClipInstance] = match layer {
            AnyLayer::Vector(vl) => &vl.clip_instances,
            AnyLayer::Audio(al) => &al.clip_instances,
            AnyLayer::Video(vl) => &vl.clip_instances,
            AnyLayer::Effect(el) => &el.clip_instances,
            AnyLayer::Group(_) | AnyLayer::Raster(_) => continue,
        };
        for ci in clip_instances {
            let old_fields = TimingFields::from_instance(ci);
            // Compute new fields: beats are canonical, recompute seconds + frames.
            let mut new_ci = ci.clone();
            if unsynced(new_ci.timeline_start, new_ci.timeline_start_beats)
                || unsynced(new_ci.trim_start, new_ci.trim_start_beats)
                || unsynced_opt(new_ci.trim_end, new_ci.trim_end_beats)
                || unsynced_opt(new_ci.timeline_duration, new_ci.timeline_duration_beats)
            {
                new_ci.sync_from_seconds(old_bpm, fps);
            }
            new_ci.apply_beats(new_bpm, fps);
            let new_fields = TimingFields::from_instance(&new_ci);
            clip_snapshots.push(ClipTimingSnapshot {
                layer_id,
                instance_id: ci.id,
                old_fields,
                new_fields,
            });
            // If this is a MIDI clip on an audio layer, collect MIDI events + rescale duration.
            // Always snapshot the clip (even if empty) so clip.duration is rescaled.
            if let AnyLayer::Audio(_) = layer {
                if let Some(audio_clip) = document.get_audio_clip(&ci.clip_id) {
                    use crate::clip::AudioClipType;
                    if let AudioClipType::Midi { midi_clip_id } = &audio_clip.clip_type {
                        let midi_id = *midi_clip_id;
                        if !seen_midi_clips.contains(&midi_id) {
                            seen_midi_clips.insert(midi_id);
                            let old_clip_duration = audio_clip.duration;
                            // Preserving beat length: duration scales inversely with tempo.
                            let new_clip_duration = old_clip_duration * old_bpm / new_bpm;
                            // Use cached events if present; empty vec for clips with no events yet.
                            let old_events = midi_event_cache.get(&midi_id).cloned().unwrap_or_default();
                            let new_events: Vec<_> = old_events.iter().map(|ev| {
                                let mut e = ev.clone();
                                // Events created before triple-rep (e.g. from recording)
                                // have timestamp_beats == 0.0 — sync from seconds first.
                                if unsynced(e.timestamp, e.timestamp_beats) {
                                    e.sync_from_seconds(old_bpm, fps);
                                }
                                e.apply_beats(new_bpm, fps);
                                e
                            }).collect();
                            midi_snapshots.push(MidiClipSnapshot {
                                layer_id,
                                midi_clip_id: midi_id,
                                clip_id: ci.clip_id,
                                old_clip_duration,
                                new_clip_duration,
                                old_events,
                                new_events,
                            });
                        }
                    }
                }
            }
        }
    }
    Self {
        old_bpm,
        new_bpm,
        time_sig,
        clip_snapshots,
        midi_snapshots,
    }
}
/// Return the new MIDI event lists for each affected clip (for immediate cache update).
/// Yields `(backend_midi_clip_id, &events)` pairs borrowed from this action.
pub fn new_midi_events(&self) -> impl Iterator<Item = (u32, &Vec<daw_backend::audio::midi::MidiEvent>)> {
    self.midi_snapshots.iter().map(|s| (s.midi_clip_id, &s.new_events))
}
/// Write either the "new" (execute) or "old" (rollback) timing fields onto
/// every snapshotted clip instance. Snapshots whose layer or instance no
/// longer exists are silently skipped.
fn apply_clips(document: &mut Document, snapshots: &[ClipTimingSnapshot], use_new: bool) {
    for snap in snapshots {
        let instances = match document.get_layer_mut(&snap.layer_id) {
            Some(AnyLayer::Vector(l)) => &mut l.clip_instances,
            Some(AnyLayer::Audio(l)) => &mut l.clip_instances,
            Some(AnyLayer::Video(l)) => &mut l.clip_instances,
            Some(AnyLayer::Effect(l)) => &mut l.clip_instances,
            // Missing layer, Group, or Raster: nothing to update.
            _ => continue,
        };
        if let Some(ci) = instances.iter_mut().find(|c| c.id == snap.instance_id) {
            let fields = if use_new { &snap.new_fields } else { &snap.old_fields };
            fields.apply_to(ci);
        }
    }
}
/// Write either the "new" (execute) or "old" (rollback) content duration
/// back onto each snapshotted audio clip; missing clips are skipped.
fn apply_midi_durations(document: &mut Document, snapshots: &[MidiClipSnapshot], use_new: bool) {
    for snap in snapshots {
        let duration = if use_new {
            snap.new_clip_duration
        } else {
            snap.old_clip_duration
        };
        if let Some(clip) = document.get_audio_clip_mut(&snap.clip_id) {
            clip.duration = duration;
        }
    }
}
}
impl Action for ChangeBpmAction {
/// Apply the new BPM to the document, then write the precomputed "new"
/// timing fields and MIDI clip durations onto every snapshotted clip.
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
    document.bpm = self.new_bpm;
    // `true` selects the new_fields / new_clip_duration side of each snapshot.
    Self::apply_clips(document, &self.clip_snapshots, true);
    Self::apply_midi_durations(document, &self.midi_snapshots, true);
    Ok(())
}
/// Restore the old BPM and write the "old" timing fields and MIDI clip
/// durations back onto every snapshotted clip (exact inverse of `execute`).
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
    document.bpm = self.old_bpm;
    // `false` selects the old_fields / old_clip_duration side of each snapshot.
    Self::apply_clips(document, &self.clip_snapshots, false);
    Self::apply_midi_durations(document, &self.midi_snapshots, false);
    Ok(())
}
/// Human-readable label for the undo/redo UI.
fn description(&self) -> String {
    String::from("Change BPM")
}
/// Mirror the BPM change into the audio backend: tempo first, then MIDI
/// event lists, then clip positions, then a final ApplyBpmChange command
/// that rescales MIDI durations and resyncs beats/frames engine-side.
/// No-op when no audio controller is attached.
fn execute_backend(
    &mut self,
    backend: &mut BackendContext,
    document: &Document,
) -> Result<(), String> {
    let controller = match backend.audio_controller.as_mut() {
        Some(c) => c,
        None => return Ok(()),
    };
    // Update tempo
    controller.set_tempo(self.new_bpm as f32, self.time_sig);
    // Update MIDI clip events and positions
    for snap in &self.midi_snapshots {
        let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
            Some(&id) => id,
            None => continue,
        };
        controller.update_midi_clip_events(track_id, snap.midi_clip_id, snap.new_events.clone());
    }
    // Move clip instances in the backend
    // NOTE(review): only timeline_start is pushed via move_clip; trim/duration
    // changes reach the backend solely through apply_bpm_change's duration
    // rescale below — confirm audio-clip external_duration stays consistent.
    for snap in &self.clip_snapshots {
        let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
            Some(&id) => id,
            None => continue,
        };
        let backend_id = backend.clip_instance_to_backend_map.get(&snap.instance_id);
        match backend_id {
            Some(crate::action::BackendClipInstanceId::Audio(audio_id)) => {
                controller.move_clip(track_id, *audio_id, snap.new_fields.timeline_start);
            }
            Some(crate::action::BackendClipInstanceId::Midi(midi_id)) => {
                controller.move_clip(track_id, *midi_id, snap.new_fields.timeline_start);
            }
            None => {} // Vector/video clips — no backend move needed
        }
    }
    // Sync beat/frame representations and rescale MIDI clip durations in the backend
    let fps = document.framerate;
    let midi_durations: Vec<(u32, f64)> = self.midi_snapshots.iter()
        .map(|s| (s.midi_clip_id, s.new_clip_duration))
        .collect();
    controller.apply_bpm_change(self.new_bpm, fps, midi_durations);
    Ok(())
}
/// Undo the backend side of the BPM change: restore the old tempo, old MIDI
/// event lists, old clip positions, then ApplyBpmChange with the old values.
/// Mirrors `execute_backend` with the "old" side of every snapshot.
fn rollback_backend(
    &mut self,
    backend: &mut BackendContext,
    document: &Document,
) -> Result<(), String> {
    let controller = match backend.audio_controller.as_mut() {
        Some(c) => c,
        None => return Ok(()),
    };
    controller.set_tempo(self.old_bpm as f32, self.time_sig);
    for snap in &self.midi_snapshots {
        let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
            Some(&id) => id,
            None => continue,
        };
        controller.update_midi_clip_events(track_id, snap.midi_clip_id, snap.old_events.clone());
    }
    for snap in &self.clip_snapshots {
        let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
            Some(&id) => id,
            None => continue,
        };
        let backend_id = backend.clip_instance_to_backend_map.get(&snap.instance_id);
        match backend_id {
            Some(crate::action::BackendClipInstanceId::Audio(audio_id)) => {
                controller.move_clip(track_id, *audio_id, snap.old_fields.timeline_start);
            }
            Some(crate::action::BackendClipInstanceId::Midi(midi_id)) => {
                controller.move_clip(track_id, *midi_id, snap.old_fields.timeline_start);
            }
            None => {}
        }
    }
    // Sync beat/frame representations and restore MIDI clip durations in the backend
    let fps = document.framerate;
    let midi_durations: Vec<(u32, f64)> = self.midi_snapshots.iter()
        .map(|s| (s.midi_clip_id, s.old_clip_duration))
        .collect();
    controller.apply_bpm_change(self.old_bpm, fps, midi_durations);
    Ok(())
}
fn all_midi_events_after_execute(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
self.midi_snapshots.iter()
.map(|s| (s.midi_clip_id, s.new_events.clone()))
.collect()
}
fn all_midi_events_after_rollback(&self) -> Vec<(u32, Vec<daw_backend::audio::midi::MidiEvent>)> {
self.midi_snapshots.iter()
.map(|s| (s.midi_clip_id, s.old_events.clone()))
.collect()
}
}

View File

@ -0,0 +1,227 @@
//! Change FPS action
//!
//! Atomically changes the document framerate and rescales all clip instance positions
//! so that frame positions are preserved (Frames mode behaviour).
use crate::action::{Action, BackendContext};
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Snapshot of all timing fields on a `ClipInstance`
///
/// Captures the seconds-canonical values together with their derived beats/frames
/// representations, so an action can restore a clip's timing exactly on rollback.
#[derive(Clone)]
struct TimingFields {
    // Position on the timeline, in seconds / beats / frames.
    timeline_start: f64,
    timeline_start_beats: f64,
    timeline_start_frames: f64,
    // Offset into the clip's internal content where playback starts.
    trim_start: f64,
    trim_start_beats: f64,
    trim_start_frames: f64,
    // Offset into the clip's internal content where playback ends (None = full clip).
    trim_end: Option<f64>,
    trim_end_beats: Option<f64>,
    trim_end_frames: Option<f64>,
    // On-timeline duration (None = use the trimmed clip duration, no looping).
    timeline_duration: Option<f64>,
    timeline_duration_beats: Option<f64>,
    timeline_duration_frames: Option<f64>,
}
impl TimingFields {
    /// Capture every timing field of `ci` into a snapshot.
    fn from_instance(ci: &ClipInstance) -> Self {
        Self {
            timeline_start: ci.timeline_start,
            timeline_start_beats: ci.timeline_start_beats,
            timeline_start_frames: ci.timeline_start_frames,
            trim_start: ci.trim_start,
            trim_start_beats: ci.trim_start_beats,
            trim_start_frames: ci.trim_start_frames,
            trim_end: ci.trim_end,
            trim_end_beats: ci.trim_end_beats,
            trim_end_frames: ci.trim_end_frames,
            timeline_duration: ci.timeline_duration,
            timeline_duration_beats: ci.timeline_duration_beats,
            timeline_duration_frames: ci.timeline_duration_frames,
        }
    }
    /// Write every captured timing field back onto `ci`, overwriting its current values.
    fn apply_to(&self, ci: &mut ClipInstance) {
        ci.timeline_start = self.timeline_start;
        ci.timeline_start_beats = self.timeline_start_beats;
        ci.timeline_start_frames = self.timeline_start_frames;
        ci.trim_start = self.trim_start;
        ci.trim_start_beats = self.trim_start_beats;
        ci.trim_start_frames = self.trim_start_frames;
        ci.trim_end = self.trim_end;
        ci.trim_end_beats = self.trim_end_beats;
        ci.trim_end_frames = self.trim_end_frames;
        ci.timeline_duration = self.timeline_duration;
        ci.timeline_duration_beats = self.timeline_duration_beats;
        ci.timeline_duration_frames = self.timeline_duration_frames;
    }
}
/// Per-clip before/after timing snapshot used by execute/rollback.
#[derive(Clone)]
struct ClipTimingSnapshot {
    // Layer that owns the clip instance.
    layer_id: Uuid,
    // The clip instance whose timing is being rescaled.
    instance_id: Uuid,
    // Timing before the FPS change.
    old_fields: TimingFields,
    // Timing after the FPS change (frames preserved; seconds/beats recomputed).
    new_fields: TimingFields,
}
/// Action that atomically changes framerate and rescales all clip positions to preserve frames
pub struct ChangeFpsAction {
    // Framerate before the change (restored on rollback).
    old_fps: f64,
    // Framerate after the change (applied on execute).
    new_fps: f64,
    // Precomputed before/after timing for every clip instance in the document.
    clip_snapshots: Vec<ClipTimingSnapshot>,
}
impl ChangeFpsAction {
    /// Build the action, computing new positions for all clip instances.
    ///
    /// Frames are treated as canonical for an FPS change: each instance's seconds
    /// and beats are recomputed from its stored frame positions at `new_fps`,
    /// and both the before and after field sets are snapshotted.
    pub fn new(old_fps: f64, new_fps: f64, document: &Document) -> Self {
        let bpm = document.bpm;
        let mut snapshots = Vec::new();
        for layer in document.all_layers() {
            let owner_id = layer.id();
            // Only layer kinds that carry clip instances participate.
            let instances: &[ClipInstance] = match layer {
                AnyLayer::Vector(l) => &l.clip_instances,
                AnyLayer::Audio(l) => &l.clip_instances,
                AnyLayer::Video(l) => &l.clip_instances,
                AnyLayer::Effect(l) => &l.clip_instances,
                AnyLayer::Group(_) | AnyLayer::Raster(_) => continue,
            };
            snapshots.extend(instances.iter().map(|ci| {
                // Rescale a working copy; the document itself is untouched here.
                let mut rescaled = ci.clone();
                rescaled.apply_frames(new_fps, bpm);
                ClipTimingSnapshot {
                    layer_id: owner_id,
                    instance_id: ci.id,
                    old_fields: TimingFields::from_instance(ci),
                    new_fields: TimingFields::from_instance(&rescaled),
                }
            }));
        }
        Self {
            old_fps,
            new_fps,
            clip_snapshots: snapshots,
        }
    }
    /// Write either the new (`use_new == true`) or old timing fields back onto every
    /// snapshotted clip instance. Layers or instances that no longer exist are skipped.
    fn apply_clips(document: &mut Document, snapshots: &[ClipTimingSnapshot], use_new: bool) {
        for snap in snapshots {
            let layer = match document.get_layer_mut(&snap.layer_id) {
                Some(l) => l,
                None => continue,
            };
            let instances = match layer {
                AnyLayer::Vector(l) => &mut l.clip_instances,
                AnyLayer::Audio(l) => &mut l.clip_instances,
                AnyLayer::Video(l) => &mut l.clip_instances,
                AnyLayer::Effect(l) => &mut l.clip_instances,
                AnyLayer::Group(_) | AnyLayer::Raster(_) => continue,
            };
            let fields = if use_new { &snap.new_fields } else { &snap.old_fields };
            if let Some(ci) = instances.iter_mut().find(|ci| ci.id == snap.instance_id) {
                fields.apply_to(ci);
            }
        }
    }
}
impl Action for ChangeFpsAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
document.framerate = self.new_fps;
Self::apply_clips(document, &self.clip_snapshots, true);
Ok(())
}
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
document.framerate = self.old_fps;
Self::apply_clips(document, &self.clip_snapshots, false);
Ok(())
}
fn description(&self) -> String {
"Change FPS".to_string()
}
fn execute_backend(
&mut self,
backend: &mut BackendContext,
_document: &Document,
) -> Result<(), String> {
// FPS change does not affect audio timing — only move clips that changed position
let controller = match backend.audio_controller.as_mut() {
Some(c) => c,
None => return Ok(()),
};
for snap in &self.clip_snapshots {
if (snap.new_fields.timeline_start - snap.old_fields.timeline_start).abs() < 1e-9 {
continue; // No movement, skip
}
let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
Some(&id) => id,
None => continue,
};
let backend_id = backend.clip_instance_to_backend_map.get(&snap.instance_id);
match backend_id {
Some(crate::action::BackendClipInstanceId::Audio(audio_id)) => {
controller.move_clip(track_id, *audio_id, snap.new_fields.timeline_start);
}
Some(crate::action::BackendClipInstanceId::Midi(midi_id)) => {
controller.move_clip(track_id, *midi_id, snap.new_fields.timeline_start);
}
None => {}
}
}
Ok(())
}
fn rollback_backend(
&mut self,
backend: &mut BackendContext,
_document: &Document,
) -> Result<(), String> {
let controller = match backend.audio_controller.as_mut() {
Some(c) => c,
None => return Ok(()),
};
for snap in &self.clip_snapshots {
if (snap.new_fields.timeline_start - snap.old_fields.timeline_start).abs() < 1e-9 {
continue;
}
let track_id = match backend.layer_to_track_map.get(&snap.layer_id) {
Some(&id) => id,
None => continue,
};
let backend_id = backend.clip_instance_to_backend_map.get(&snap.instance_id);
match backend_id {
Some(crate::action::BackendClipInstanceId::Audio(audio_id)) => {
controller.move_clip(track_id, *audio_id, snap.old_fields.timeline_start);
}
Some(crate::action::BackendClipInstanceId::Midi(midi_id)) => {
controller.move_clip(track_id, *midi_id, snap.old_fields.timeline_start);
}
None => {}
}
}
Ok(())
}
}

View File

@ -4,6 +4,8 @@
//! through the action system.
pub mod add_clip_instance;
pub mod change_bpm;
pub mod change_fps;
pub mod add_effect;
pub mod add_layer;
pub mod add_shape;
@ -70,3 +72,5 @@ pub use raster_stroke::RasterStrokeAction;
pub use raster_fill::RasterFillAction;
pub use move_layer::MoveLayerAction;
pub use set_fill_paint::SetFillPaintAction;
pub use change_bpm::ChangeBpmAction;
pub use change_fps::ChangeFpsAction;

View File

@ -119,6 +119,9 @@ impl Action for MoveClipInstancesAction {
// Store adjusted moves for rollback
self.layer_moves = adjusted_moves.clone();
let bpm = document.bpm;
let fps = document.framerate;
// Apply all adjusted moves
for (layer_id, moves) in &adjusted_moves {
let layer = document.get_layer_mut(layer_id)
@ -139,6 +142,7 @@ impl Action for MoveClipInstancesAction {
if let Some(clip_instance) = clip_instances.iter_mut().find(|ci| ci.id == *clip_id)
{
clip_instance.timeline_start = *new;
clip_instance.sync_from_seconds(bpm, fps);
}
}
}
@ -147,6 +151,8 @@ impl Action for MoveClipInstancesAction {
}
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
let bpm = document.bpm;
let fps = document.framerate;
for (layer_id, moves) in &self.layer_moves {
let layer = document.get_layer_mut(layer_id)
.ok_or_else(|| format!("Layer {} not found", layer_id))?;
@ -166,6 +172,7 @@ impl Action for MoveClipInstancesAction {
if let Some(clip_instance) = clip_instances.iter_mut().find(|ci| ci.id == *clip_id)
{
clip_instance.timeline_start = *old;
clip_instance.sync_from_seconds(bpm, fps);
}
}
}

View File

@ -179,6 +179,7 @@ impl Action for SplitClipInstanceAction {
}
self.new_instance_id = Some(right_instance.id);
right_instance.sync_from_seconds(document.bpm, document.framerate);
// Now modify the original (left) instance and add the new (right) instance
let layer_mut = document
@ -238,6 +239,21 @@ impl Action for SplitClipInstanceAction {
}
}
// Sync derived fields on the left (original) instance
let (bpm, fps) = (document.bpm, document.framerate);
if let Some(layer) = document.get_layer_mut(&self.layer_id) {
let cis: &mut Vec<crate::clip::ClipInstance> = match layer {
AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
_ => return { self.executed = true; Ok(()) },
};
if let Some(inst) = cis.iter_mut().find(|ci| ci.id == self.instance_id) {
inst.sync_from_seconds(bpm, fps);
}
}
self.executed = true;
Ok(())
}

View File

@ -260,6 +260,9 @@ impl Action for TrimClipInstancesAction {
// Store clamped trims for rollback
self.layer_trims = clamped_trims.clone();
let bpm = document.bpm;
let fps = document.framerate;
// Apply all clamped trims
for (layer_id, trims) in &clamped_trims {
let layer = match document.get_layer_mut(layer_id) {
@ -294,6 +297,7 @@ impl Action for TrimClipInstancesAction {
clip_instance.trim_end = new.trim_value;
}
}
clip_instance.sync_from_seconds(bpm, fps);
}
}
}
@ -301,6 +305,8 @@ impl Action for TrimClipInstancesAction {
}
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
let bpm = document.bpm;
let fps = document.framerate;
for (layer_id, trims) in &self.layer_trims {
let layer = match document.get_layer_mut(layer_id) {
Some(l) => l,
@ -334,6 +340,7 @@ impl Action for TrimClipInstancesAction {
clip_instance.trim_end = old.trim_value;
}
}
clip_instance.sync_from_seconds(bpm, fps);
}
}
}

View File

@ -610,11 +610,23 @@ pub struct ClipInstance {
/// This is the external positioning - where the instance appears on the timeline
/// Default: 0.0 (start at beginning of layer)
pub timeline_start: f64,
/// timeline_start in beats (quarter-note beats); derived from timeline_start
#[serde(default)]
pub timeline_start_beats: f64,
/// timeline_start in frames; derived from timeline_start
#[serde(default)]
pub timeline_start_frames: f64,
/// How long this instance appears on the timeline (in seconds)
/// If timeline_duration > (trim_end - trim_start), the trimmed content will loop
/// Default: None (use trimmed clip duration, no looping)
pub timeline_duration: Option<f64>,
/// timeline_duration in beats; derived from timeline_duration
#[serde(default)]
pub timeline_duration_beats: Option<f64>,
/// timeline_duration in frames; derived from timeline_duration
#[serde(default)]
pub timeline_duration_frames: Option<f64>,
/// Trim start: offset into the clip's internal content (in seconds)
/// Allows trimming the beginning of the clip
@ -623,11 +635,23 @@ pub struct ClipInstance {
/// - For vector: offset into the animation timeline
/// Default: 0.0 (start at beginning of clip)
pub trim_start: f64,
/// trim_start in beats; derived from trim_start
#[serde(default)]
pub trim_start_beats: f64,
/// trim_start in frames; derived from trim_start
#[serde(default)]
pub trim_start_frames: f64,
/// Trim end: offset into the clip's internal content (in seconds)
/// Allows trimming the end of the clip
/// Default: None (use full clip duration)
pub trim_end: Option<f64>,
/// trim_end in beats; derived from trim_end
#[serde(default)]
pub trim_end_beats: Option<f64>,
/// trim_end in frames; derived from trim_end
#[serde(default)]
pub trim_end_frames: Option<f64>,
/// Playback speed multiplier
/// 1.0 = normal speed, 0.5 = half speed, 2.0 = double speed
@ -696,9 +720,17 @@ impl ClipInstance {
opacity: 1.0,
name: None,
timeline_start: 0.0,
timeline_start_beats: 0.0,
timeline_start_frames: 0.0,
timeline_duration: None,
timeline_duration_beats: None,
timeline_duration_frames: None,
trim_start: 0.0,
trim_start_beats: 0.0,
trim_start_frames: 0.0,
trim_end: None,
trim_end_beats: None,
trim_end_frames: None,
playback_speed: 1.0,
gain: 1.0,
loop_before: None,
@ -714,15 +746,71 @@ impl ClipInstance {
opacity: 1.0,
name: None,
timeline_start: 0.0,
timeline_start_beats: 0.0,
timeline_start_frames: 0.0,
timeline_duration: None,
timeline_duration_beats: None,
timeline_duration_frames: None,
trim_start: 0.0,
trim_start_beats: 0.0,
trim_start_frames: 0.0,
trim_end: None,
trim_end_beats: None,
trim_end_frames: None,
playback_speed: 1.0,
gain: 1.0,
loop_before: None,
}
}
/// Sync beats and frames from the seconds fields (call after any seconds-based write).
pub fn sync_from_seconds(&mut self, bpm: f64, fps: f64) {
self.timeline_start_beats = self.timeline_start * bpm / 60.0;
self.timeline_start_frames = self.timeline_start * fps;
self.trim_start_beats = self.trim_start * bpm / 60.0;
self.trim_start_frames = self.trim_start * fps;
self.trim_end_beats = self.trim_end.map(|v| v * bpm / 60.0);
self.trim_end_frames = self.trim_end.map(|v| v * fps);
self.timeline_duration_beats = self.timeline_duration.map(|v| v * bpm / 60.0);
self.timeline_duration_frames = self.timeline_duration.map(|v| v * fps);
}
/// Recompute seconds and frames from beats (call when BPM changes in Measures mode).
pub fn apply_beats(&mut self, bpm: f64, fps: f64) {
self.timeline_start = self.timeline_start_beats * 60.0 / bpm;
self.timeline_start_frames = self.timeline_start * fps;
self.trim_start = self.trim_start_beats * 60.0 / bpm;
self.trim_start_frames = self.trim_start * fps;
if let Some(b) = self.trim_end_beats {
let s = b * 60.0 / bpm;
self.trim_end = Some(s);
self.trim_end_frames = Some(s * fps);
}
if let Some(b) = self.timeline_duration_beats {
let s = b * 60.0 / bpm;
self.timeline_duration = Some(s);
self.timeline_duration_frames = Some(s * fps);
}
}
/// Recompute seconds and beats from frames (call when FPS changes in Frames mode).
pub fn apply_frames(&mut self, fps: f64, bpm: f64) {
self.timeline_start = self.timeline_start_frames / fps;
self.timeline_start_beats = self.timeline_start * bpm / 60.0;
self.trim_start = self.trim_start_frames / fps;
self.trim_start_beats = self.trim_start * bpm / 60.0;
if let Some(f) = self.trim_end_frames {
let s = f / fps;
self.trim_end = Some(s);
self.trim_end_beats = Some(s * bpm / 60.0);
}
if let Some(f) = self.timeline_duration_frames {
let s = f / fps;
self.timeline_duration = Some(s);
self.timeline_duration_beats = Some(s * bpm / 60.0);
}
}
/// Set the transform
pub fn with_transform(mut self, transform: Transform) -> Self {
self.transform = transform;

View File

@ -619,6 +619,56 @@ impl Document {
layers
}
/// Migrate old documents: compute beats/frames from seconds for any ClipInstance whose
/// derived fields are still zero (i.e., documents saved before triple-representation).
/// Call once after loading a document.
pub fn sync_all_clip_positions(&mut self) {
let bpm = self.bpm;
let fps = self.framerate;
fn sync_list(list: &mut [crate::layer::AnyLayer], bpm: f64, fps: f64) {
for layer in list.iter_mut() {
match layer {
crate::layer::AnyLayer::Vector(vl) => {
for ci in &mut vl.clip_instances {
if ci.timeline_start_beats == 0.0 { ci.sync_from_seconds(bpm, fps); }
}
}
crate::layer::AnyLayer::Audio(al) => {
for ci in &mut al.clip_instances {
if ci.timeline_start_beats == 0.0 { ci.sync_from_seconds(bpm, fps); }
}
}
crate::layer::AnyLayer::Video(vl) => {
for ci in &mut vl.clip_instances {
if ci.timeline_start_beats == 0.0 { ci.sync_from_seconds(bpm, fps); }
}
}
crate::layer::AnyLayer::Effect(el) => {
for ci in &mut el.clip_instances {
if ci.timeline_start_beats == 0.0 { ci.sync_from_seconds(bpm, fps); }
}
}
crate::layer::AnyLayer::Group(g) => {
sync_list(&mut g.children, bpm, fps);
}
crate::layer::AnyLayer::Raster(_) => {}
}
}
}
sync_list(&mut self.root.children, bpm, fps);
for clip in self.vector_clips.values_mut() {
for node in &mut clip.layers.roots {
if let crate::layer::AnyLayer::Vector(vl) = &mut node.data {
for ci in &mut vl.clip_instances {
if ci.timeline_start_beats == 0.0 { ci.sync_from_seconds(bpm, fps); }
}
}
}
}
}
// === CLIP LIBRARY METHODS ===
/// Add a vector clip to the library

View File

@ -3114,7 +3114,12 @@ impl EditorApp {
};
// Rebuild MIDI cache after undo (backend_context dropped, borrows released)
if undo_succeeded {
if let Some((clip_id, events)) = self.action_executor.last_redo_midi_events()
let multi = self.action_executor.last_redo_all_midi_events();
if !multi.is_empty() {
for (clip_id, events) in multi {
self.midi_event_cache.insert(clip_id, events);
}
} else if let Some((clip_id, events)) = self.action_executor.last_redo_midi_events()
.map(|(id, ev)| (id, ev.to_vec()))
{
self.midi_event_cache.insert(clip_id, events);
@ -3158,7 +3163,12 @@ impl EditorApp {
};
// Rebuild MIDI cache after redo (backend_context dropped, borrows released)
if redo_succeeded {
if let Some((clip_id, events)) = self.action_executor.last_undo_midi_events()
let multi = self.action_executor.last_undo_all_midi_events();
if !multi.is_empty() {
for (clip_id, events) in multi {
self.midi_event_cache.insert(clip_id, events);
}
} else if let Some((clip_id, events)) = self.action_executor.last_undo_midi_events()
.map(|(id, ev)| (id, ev.to_vec()))
{
self.midi_event_cache.insert(clip_id, events);
@ -3824,6 +3834,9 @@ impl EditorApp {
// Rebuild MIDI event cache for all MIDI clips (needed for timeline/piano roll rendering)
let step8_start = std::time::Instant::now();
// Migrate old documents: compute beats/frames derived fields
self.action_executor.document_mut().sync_all_clip_positions();
self.midi_event_cache.clear();
let midi_clip_ids: Vec<u32> = self.action_executor.document()
.audio_clips.values()
@ -3846,6 +3859,19 @@ impl EditorApp {
}
eprintln!("📊 [APPLY] Step 8: Rebuilt MIDI event cache for {} clips in {:.2}ms", midi_fetched, step8_start.elapsed().as_secs_f64() * 1000.0);
// Sync beats/frames derived fields on MIDI events (migration for old documents)
{
let bpm = self.action_executor.document().bpm;
let fps = self.action_executor.document().framerate;
for events in self.midi_event_cache.values_mut() {
for ev in events.iter_mut() {
if ev.timestamp_beats == 0.0 && ev.timestamp.abs() > 1e-9 {
ev.sync_from_seconds(bpm, fps);
}
}
}
}
// Reset playback state
self.playback_time = 0.0;
self.is_playing = false;
@ -3976,10 +4002,16 @@ impl EditorApp {
/// Rebuild a MIDI event cache entry from backend note format.
/// Called after undo/redo to keep the cache consistent with the backend.
fn rebuild_midi_cache_entry(&mut self, clip_id: u32, notes: &[(f64, u8, u8, f64)]) {
let bpm = self.action_executor.document().bpm;
let fps = self.action_executor.document().framerate;
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
for &(start_time, note, velocity, duration) in notes {
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + duration, 0, note, 0));
let mut on = daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity);
on.sync_from_seconds(bpm, fps);
events.push(on);
let mut off = daw_backend::audio::midi::MidiEvent::note_off(start_time + duration, 0, note, 0);
off.sync_from_seconds(bpm, fps);
events.push(off);
}
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
self.midi_event_cache.insert(clip_id, events);
@ -5178,10 +5210,16 @@ impl eframe::App for EditorApp {
// Update midi_event_cache with notes captured so far
// (inlined to avoid conflicting &mut self borrow)
{
let bpm = self.action_executor.document().bpm;
let fps = self.action_executor.document().framerate;
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
for &(start_time, note, velocity, dur) in &notes {
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + dur, 0, note, 0));
let mut on = daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity);
on.sync_from_seconds(bpm, fps);
events.push(on);
let mut off = daw_backend::audio::midi::MidiEvent::note_off(start_time + dur, 0, note, 0);
off.sync_from_seconds(bpm, fps);
events.push(off);
}
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
self.midi_event_cache.insert(clip_id, events);
@ -5198,7 +5236,13 @@ impl eframe::App for EditorApp {
match controller.query_midi_clip(track_id, clip_id) {
Ok(midi_clip_data) => {
drop(controller);
self.midi_event_cache.insert(clip_id, midi_clip_data.events.clone());
let bpm = self.action_executor.document().bpm;
let fps = self.action_executor.document().framerate;
let mut final_events = midi_clip_data.events.clone();
for ev in &mut final_events {
ev.sync_from_seconds(bpm, fps);
}
self.midi_event_cache.insert(clip_id, final_events);
// Update document clip with final duration and name
let doc_clip_id = self.action_executor.document()

View File

@ -42,6 +42,8 @@ pub struct InfopanelPane {
selected_shape_gradient_stop: Option<usize>,
/// Selected stop index for gradient editor in tool section (gradient tool).
selected_tool_gradient_stop: Option<usize>,
/// FPS value captured when a drag/focus-in starts (for single-undo-action on commit)
fps_drag_start: Option<f64>,
}
impl InfopanelPane {
@ -58,6 +60,7 @@ impl InfopanelPane {
brush_preview_textures: Vec::new(),
selected_shape_gradient_stop: None,
selected_tool_gradient_stop: None,
fps_drag_start: None,
}
}
}
@ -906,21 +909,20 @@ impl InfopanelPane {
}
/// Render document settings section (shown when nothing is focused)
fn render_document_section(&self, ui: &mut Ui, path: &NodePath, shared: &mut SharedPaneState) {
fn render_document_section(&mut self, ui: &mut Ui, path: &NodePath, shared: &mut SharedPaneState) {
egui::CollapsingHeader::new("Document")
.id_salt(("document", path))
.default_open(true)
.show(ui, |ui| {
ui.add_space(4.0);
let document = shared.action_executor.document();
// Get current values for editing
let mut width = document.width;
let mut height = document.height;
let mut duration = document.duration;
let mut framerate = document.framerate;
let layer_count = document.root.children.len();
// Extract all needed values up front, then drop the borrow before closures
// that need mutable access to shared or self.
let (mut width, mut height, mut duration, mut framerate, layer_count, background_color) = {
let document = shared.action_executor.document();
(document.width, document.height, document.duration, document.framerate,
document.root.children.len(), document.background_color)
};
// Canvas width
ui.horizontal(|ui| {
@ -966,24 +968,54 @@ impl InfopanelPane {
// Framerate
ui.horizontal(|ui| {
ui.label("Framerate:");
if ui
.add(
DragValue::new(&mut framerate)
.speed(1.0)
.range(1.0..=120.0)
.suffix(" fps"),
)
.changed()
{
let action = SetDocumentPropertiesAction::set_framerate(framerate);
shared.pending_actions.push(Box::new(action));
let fps_response = ui.add(
DragValue::new(&mut framerate)
.speed(1.0)
.range(1.0..=120.0)
.suffix(" fps"),
);
if fps_response.gained_focus() || fps_response.drag_started() {
if self.fps_drag_start.is_none() {
self.fps_drag_start = Some(framerate);
}
}
if fps_response.changed() {
// Live preview: update document directly
shared.action_executor.document_mut().framerate = framerate;
}
if fps_response.drag_stopped() || fps_response.lost_focus() {
if let Some(start_fps) = self.fps_drag_start.take() {
let new_fps = shared.action_executor.document().framerate;
let timeline_mode = shared.action_executor.document().timeline_mode;
if (start_fps - new_fps).abs() > 1e-9
&& timeline_mode == lightningbeam_core::document::TimelineMode::Frames
{
use lightningbeam_core::actions::ChangeFpsAction;
// Revert live-preview so the action owns it
shared.action_executor.document_mut().framerate = start_fps;
let action = ChangeFpsAction::new(
start_fps,
new_fps,
shared.action_executor.document(),
);
shared.pending_actions.push(Box::new(action));
} else if (start_fps - new_fps).abs() > 1e-9 {
// Not in Frames mode — use simple property action (no stretching)
shared.action_executor.document_mut().framerate = start_fps;
let action = SetDocumentPropertiesAction::set_framerate(new_fps);
shared.pending_actions.push(Box::new(action));
}
}
}
});
// Background color (with alpha)
ui.horizontal(|ui| {
ui.label("Background:");
let bg = document.background_color;
let bg = background_color;
let mut color = [bg.r, bg.g, bg.b, bg.a];
if ui.color_edit_button_srgba_unmultiplied(&mut color).changed() {
let action = SetDocumentPropertiesAction::set_background_color(

View File

@ -770,7 +770,7 @@ impl PianoRollPane {
};
let timestamp = note_start + t * note_duration;
let (lsb, msb) = encode_bend(normalized);
events.push(MidiEvent { timestamp, status: 0xE0 | channel, data1: lsb, data2: msb });
events.push(MidiEvent::new(timestamp, 0xE0 | channel, lsb, msb));
}
events
}
@ -1568,15 +1568,20 @@ impl PianoRollPane {
let combined = (existing_norm[i] + zone_norm).clamp(-1.0, 1.0);
let (lsb, msb) = encode_bend(combined);
let ts = note_start + i as f64 / num_steps as f64 * note_duration;
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: ts, status: 0xE0 | target_channel, data1: lsb, data2: msb });
new_events.push(daw_backend::audio::midi::MidiEvent::new(ts, 0xE0 | target_channel, lsb, msb));
}
// For End zone: reset just after note ends so it doesn't bleed into next note
if zone == PitchBendZone::End {
let (lsb, msb) = encode_bend(0.0);
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: note_start + note_duration + 0.005, status: 0xE0 | target_channel, data1: lsb, data2: msb });
new_events.push(daw_backend::audio::midi::MidiEvent::new(note_start + note_duration + 0.005, 0xE0 | target_channel, lsb, msb));
}
new_events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap_or(std::cmp::Ordering::Equal));
{
let doc = shared.action_executor.document();
let bpm = doc.bpm; let fps = doc.framerate;
for ev in &mut new_events { ev.sync_from_seconds(bpm, fps); }
}
self.push_events_action("Set pitch bend", clip_id, old_events, new_events.clone(), shared);
shared.midi_event_cache.insert(clip_id, new_events);
}
@ -2359,15 +2364,17 @@ impl PaneRenderer for PianoRollPane {
!(is_cc1 && at_start)
});
if new_cc1 > 0 {
new_events.push(daw_backend::audio::midi::MidiEvent {
timestamp: sn.start_time,
status: 0xB0 | sn.channel,
data1: 1,
data2: new_cc1,
});
new_events.push(daw_backend::audio::midi::MidiEvent::new(
sn.start_time, 0xB0 | sn.channel, 1, new_cc1,
));
}
}
new_events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap_or(std::cmp::Ordering::Equal));
{
let doc = shared.action_executor.document();
let bpm = doc.bpm; let fps = doc.framerate;
for ev in &mut new_events { ev.sync_from_seconds(bpm, fps); }
}
self.push_events_action("Set modulation", clip_id, old_events, new_events.clone(), shared);
shared.midi_event_cache.insert(clip_id, new_events);
}

View File

@ -222,6 +222,9 @@ pub struct TimelinePane {
/// Layer currently being renamed via inline text edit (layer_id, buffer)
renaming_layer: Option<(uuid::Uuid, String)>,
/// BPM value captured when a drag/focus-in starts (for single-undo-action on commit)
bpm_drag_start: Option<f64>,
}
/// Deferred recording start created during count-in pre-roll
@ -505,11 +508,19 @@ fn build_audio_clip_cache(
let mut ci = ClipInstance::new(clip_id);
ci.id = instance_id;
ci.timeline_start = ac.external_start;
ci.timeline_start_beats = ac.external_start_beats;
ci.timeline_start_frames = ac.external_start_frames;
ci.trim_start = ac.internal_start;
ci.trim_start_beats = ac.internal_start_beats;
ci.trim_start_frames = ac.internal_start_frames;
ci.trim_end = Some(ac.internal_end);
ci.trim_end_beats = Some(ac.internal_end_beats);
ci.trim_end_frames = Some(ac.internal_end_frames);
let internal_dur = ac.internal_end - ac.internal_start;
if (ac.external_duration - internal_dur).abs() > 1e-9 {
ci.timeline_duration = Some(ac.external_duration);
ci.timeline_duration_beats = Some(ac.external_duration_beats);
ci.timeline_duration_frames = Some(ac.external_duration_frames);
}
ci.gain = ac.gain;
instances.push(ci);
@ -527,11 +538,19 @@ fn build_audio_clip_cache(
let mut ci = ClipInstance::new(clip_id);
ci.id = instance_id;
ci.timeline_start = mc.external_start;
ci.timeline_start_beats = mc.external_start_beats;
ci.timeline_start_frames = mc.external_start_frames;
ci.trim_start = mc.internal_start;
ci.trim_start_beats = mc.internal_start_beats;
ci.trim_start_frames = mc.internal_start_frames;
ci.trim_end = Some(mc.internal_end);
ci.trim_end_beats = Some(mc.internal_end_beats);
ci.trim_end_frames = Some(mc.internal_end_frames);
let internal_dur = mc.internal_end - mc.internal_start;
if (mc.external_duration - internal_dur).abs() > 1e-9 {
ci.timeline_duration = Some(mc.external_duration);
ci.timeline_duration_beats = Some(mc.external_duration_beats);
ci.timeline_duration_frames = Some(mc.external_duration_frames);
}
instances.push(ci);
}
@ -684,6 +703,7 @@ impl TimelinePane {
metronome_icon: None,
pending_recording_start: None,
renaming_layer: None,
bpm_drag_start: None,
}
}
@ -1004,8 +1024,11 @@ impl TimelinePane {
*shared.recording_clips.get(&layer_id).unwrap_or(&0), 0.0);
let doc_clip_id = shared.action_executor.document_mut().add_audio_clip(doc_clip);
let clip_instance = ClipInstance::new(doc_clip_id)
let bpm = shared.action_executor.document().bpm;
let fps = shared.action_executor.document().framerate;
let mut clip_instance = ClipInstance::new(doc_clip_id)
.with_timeline_start(start_time);
clip_instance.sync_from_seconds(bpm, fps);
if let Some(layer) = shared.action_executor.document_mut().get_layer_mut(&layer_id) {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
@ -1340,6 +1363,61 @@ impl TimelinePane {
((time - self.viewport_start_time) * self.pixels_per_second as f64) as f32
}
/// Effective display start for a clip instance.
///
/// In Measures mode, uses `timeline_start_beats` as the canonical position so clips stay
/// anchored to their beat position during live BPM drag preview. Falls back to seconds
/// in other modes or when beat data is unavailable.
fn instance_display_start(&self, ci: &lightningbeam_core::clip::ClipInstance, bpm: f64) -> f64 {
if self.time_display_format == lightningbeam_core::document::TimelineMode::Measures
&& (ci.timeline_start_beats.abs() > 1e-12 || ci.timeline_start == 0.0)
{
ci.timeline_start_beats * 60.0 / bpm - ci.loop_before.unwrap_or(0.0)
} else {
ci.effective_start()
}
}
/// In Measures mode, uses beats fields for the clip's on-timeline duration so the width
/// stays correct during live BPM drag preview. Falls back to seconds in other modes.
fn instance_display_duration(&self, ci: &lightningbeam_core::clip::ClipInstance, clip_dur_secs: f64, bpm: f64) -> f64 {
use lightningbeam_core::document::TimelineMode;
if self.time_display_format == TimelineMode::Measures {
// Looping/extended clip: explicit timeline_duration_beats
if let Some(dur_beats) = ci.timeline_duration_beats {
if dur_beats.abs() > 1e-12 {
return dur_beats * 60.0 / bpm;
}
}
// Non-looping: derive from trim range in beats
let ts_beats = ci.trim_start_beats;
if let Some(te_beats) = ci.trim_end_beats {
if te_beats.abs() > 1e-12 || ts_beats.abs() > 1e-12 {
return (te_beats - ts_beats).max(0.0) * 60.0 / bpm;
}
}
}
ci.total_duration(clip_dur_secs)
}
/// Clip content start (`trim_start`) and duration as display seconds.
///
/// In Measures mode both values are derived from the beat-domain trim fields so note
/// overlays stay beat-anchored during a live BPM drag preview; otherwise (or when no
/// usable beat data exists) the plain seconds trim range is returned.
fn content_display_range(&self, ci: &lightningbeam_core::clip::ClipInstance, clip_dur_secs: f64, bpm: f64) -> (f64, f64) {
    use lightningbeam_core::document::TimelineMode;
    if self.time_display_format == TimelineMode::Measures {
        let start_beats = ci.trim_start_beats;
        if let Some(end_beats) = ci.trim_end_beats {
            // Beat data is usable when either endpoint is meaningfully non-zero.
            if end_beats.abs() > 1e-12 || start_beats.abs() > 1e-12 {
                let secs_per_beat = 60.0 / bpm;
                let start = start_beats * secs_per_beat;
                let dur = (end_beats - start_beats).max(0.0) * secs_per_beat;
                return (start, dur);
            }
        }
    }
    // Seconds fallback: an unset trim_end means "play to the end of the clip".
    let end = ci.trim_end.unwrap_or(clip_dur_secs);
    (ci.trim_start, (end - ci.trim_start).max(0.0))
}
/// Convert pixel x-coordinate to time (seconds)
fn x_to_time(&self, x: f32) -> f64 {
self.viewport_start_time + (x / self.pixels_per_second) as f64
@ -1616,6 +1694,9 @@ impl TimelinePane {
/// Render mini piano roll visualization for MIDI clips on timeline
/// Shows notes modulo 12 (one octave) matching the JavaScript reference implementation
///
/// `display_bpm`: when `Some(bpm)`, note timestamps are derived from `timestamp_beats`
/// so positions stay beat-anchored during live BPM drag preview.
#[allow(clippy::too_many_arguments)]
fn render_midi_piano_roll(
painter: &egui::Painter,
@ -1630,6 +1711,7 @@ impl TimelinePane {
theme: &crate::theme::Theme,
ctx: &egui::Context,
faded: bool,
display_bpm: Option<f64>,
) {
let clip_height = clip_rect.height();
let note_height = clip_height / 12.0; // 12 semitones per octave
@ -1638,6 +1720,17 @@ impl TimelinePane {
let note_style = theme.style(".timeline-midi-note", ctx);
let note_color = note_style.background_color().unwrap_or(egui::Color32::BLACK);
// In Measures mode during BPM drag, derive display time from beats so notes
// stay anchored to their beat positions.
let event_display_time = |ev: &daw_backend::audio::midi::MidiEvent| -> f64 {
if let Some(bpm) = display_bpm {
if ev.timestamp_beats.abs() > 1e-12 || ev.timestamp == 0.0 {
return ev.timestamp_beats * 60.0 / bpm;
}
}
ev.timestamp
};
// Build a map of active notes (note_number -> note_on_timestamp)
// to calculate durations when we encounter note-offs
let mut active_notes: std::collections::HashMap<u8, f64> = std::collections::HashMap::new();
@ -1646,10 +1739,10 @@ impl TimelinePane {
// First pass: pair note-ons with note-offs to calculate durations
for event in events {
if event.is_note_on() {
let (note_number, timestamp) = (event.data1, event.timestamp);
let (note_number, timestamp) = (event.data1, event_display_time(event));
active_notes.insert(note_number, timestamp);
} else if event.is_note_off() {
let (note_number, timestamp) = (event.data1, event.timestamp);
let (note_number, timestamp) = (event.data1, event_display_time(event));
if let Some(&note_on_time) = active_notes.get(&note_number) {
let duration = timestamp - note_on_time;
@ -3120,14 +3213,15 @@ impl TimelinePane {
let clip_duration = effective_clip_duration(document, layer, clip_instance);
if let Some(clip_duration) = clip_duration {
// Calculate effective duration accounting for trimming
let mut instance_duration = clip_instance.total_duration(clip_duration);
// Calculate effective duration accounting for trimming.
// In Measures mode, uses beats fields so width tracks BPM during live drag.
let mut instance_duration = self.instance_display_duration(clip_instance, clip_duration, document.bpm);
// Instance positioned on the layer's timeline using timeline_start
// The layer itself has start_time, so the absolute timeline position is:
// layer.start_time + instance.timeline_start
// Instance positioned on the layer's timeline using timeline_start.
// In Measures mode, uses timeline_start_beats so clips stay at their beat
// position during live BPM drag preview.
let _layer_data = layer.layer();
let mut instance_start = clip_instance.effective_start();
let mut instance_start = self.instance_display_start(clip_instance, document.bpm);
// Apply drag offset preview for selected clips with snapping
let is_selected = selection.contains_clip_instance(&clip_instance.id);
@ -3148,12 +3242,13 @@ impl TimelinePane {
// Content origin: where the first "real" content iteration starts
// Loop iterations tile outward from this point
let mut content_origin = clip_instance.timeline_start;
let mut content_origin = instance_start + clip_instance.loop_before.unwrap_or(0.0);
// Track preview trim values for waveform rendering
let mut preview_trim_start = clip_instance.trim_start;
let preview_trim_end_default = clip_instance.trim_end.unwrap_or(clip_duration);
let mut preview_clip_duration = (preview_trim_end_default - preview_trim_start).max(0.0);
// Track preview trim values for note/waveform rendering.
// In Measures mode, derive from beats so they track BPM during live drag.
let (base_trim_start, base_clip_duration) = self.content_display_range(clip_instance, clip_duration, document.bpm);
let mut preview_trim_start = base_trim_start;
let mut preview_clip_duration = base_clip_duration;
if let Some(drag_type) = self.clip_drag_state {
if is_selected || is_linked_to_dragged {
@ -3408,6 +3503,11 @@ impl TimelinePane {
let iter_duration = iter_end - iter_start;
if iter_duration <= 0.0 { continue; }
let note_bpm = if self.time_display_format == lightningbeam_core::document::TimelineMode::Measures {
Some(document.bpm)
} else {
None
};
Self::render_midi_piano_roll(
&painter,
clip_rect,
@ -3421,9 +3521,15 @@ impl TimelinePane {
theme,
ui.ctx(),
si != 0, // fade non-content iterations
note_bpm,
);
}
} else {
let note_bpm = if self.time_display_format == lightningbeam_core::document::TimelineMode::Measures {
Some(document.bpm)
} else {
None
};
Self::render_midi_piano_roll(
&painter,
clip_rect,
@ -3437,6 +3543,7 @@ impl TimelinePane {
theme,
ui.ctx(),
false,
note_bpm,
);
}
}
@ -5061,7 +5168,21 @@ impl PaneRenderer for TimelinePane {
.range(20.0..=300.0)
.speed(0.5)
.fixed_decimals(1));
// Capture start BPM on drag/focus start
if bpm_response.gained_focus() || bpm_response.drag_started() {
if self.bpm_drag_start.is_none() {
self.bpm_drag_start = Some(bpm);
}
}
if bpm_response.changed() {
// Fallback capture: if gained_focus/drag_started didn't fire (e.g. rapid input),
// capture start BPM on the first change before updating the document.
if self.bpm_drag_start.is_none() {
self.bpm_drag_start = Some(bpm);
}
// Live preview: update document directly so grid reflows immediately
shared.action_executor.document_mut().bpm = bpm_val;
if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
@ -5069,6 +5190,31 @@ impl PaneRenderer for TimelinePane {
}
}
// On commit, dispatch a single ChangeBpmAction (single undo entry)
if bpm_response.drag_stopped() || bpm_response.lost_focus() {
if let Some(start_bpm) = self.bpm_drag_start.take() {
let new_bpm = shared.action_executor.document().bpm;
if (start_bpm - new_bpm).abs() > 1e-6
&& self.time_display_format == lightningbeam_core::document::TimelineMode::Measures
{
use lightningbeam_core::actions::ChangeBpmAction;
// Revert the live-preview mutation so the action owns it
shared.action_executor.document_mut().bpm = start_bpm;
let action = ChangeBpmAction::new(
start_bpm,
new_bpm,
shared.action_executor.document(),
shared.midi_event_cache,
);
// Immediately update midi_event_cache for rendering
for (clip_id, events) in action.new_midi_events() {
shared.midi_event_cache.insert(clip_id, events.clone());
}
shared.pending_actions.push(Box::new(action));
}
}
}
ui.separator();
// Time signature selector