diff --git a/daw-backend/src/audio/clip.rs b/daw-backend/src/audio/clip.rs index b5749a8..6ec1c8b 100644 --- a/daw-backend/src/audio/clip.rs +++ b/daw-backend/src/audio/clip.rs @@ -1,21 +1,68 @@ -/// Clip ID type -pub type ClipId = u32; +/// Audio clip instance ID type +pub type AudioClipInstanceId = u32; -/// Audio clip that references data in the AudioPool +/// Type alias for backwards compatibility +pub type ClipId = AudioClipInstanceId; + +/// Audio clip instance that references content in the AudioClipPool +/// +/// This represents a placed instance of audio content on the timeline. +/// The actual audio data is stored in the AudioClipPool and referenced by `audio_pool_index`. +/// +/// ## Timing Model +/// - `internal_start` / `internal_end`: Define the region of the source audio to play (trimming) +/// - `external_start` / `external_duration`: Define where the clip appears on the timeline and how long +/// +/// ## Looping +/// If `external_duration` is greater than `internal_end - internal_start`, +/// the clip will seamlessly loop back to `internal_start` when it reaches `internal_end`. 
#[derive(Debug, Clone)] -pub struct Clip { - pub id: ClipId, +pub struct AudioClipInstance { + pub id: AudioClipInstanceId, pub audio_pool_index: usize, - pub start_time: f64, // Position on timeline in seconds - pub duration: f64, // Clip duration in seconds - pub offset: f64, // Offset into audio file in seconds - pub gain: f32, // Clip-level gain + + /// Start position within the audio content (seconds) + pub internal_start: f64, + /// End position within the audio content (seconds) + pub internal_end: f64, + + /// Start position on the timeline (seconds) + pub external_start: f64, + /// Duration on the timeline (seconds) - can be longer than internal duration for looping + pub external_duration: f64, + + /// Clip-level gain + pub gain: f32, } -impl Clip { - /// Create a new clip +/// Type alias for backwards compatibility +pub type Clip = AudioClipInstance; + +impl AudioClipInstance { + /// Create a new audio clip instance pub fn new( - id: ClipId, + id: AudioClipInstanceId, + audio_pool_index: usize, + internal_start: f64, + internal_end: f64, + external_start: f64, + external_duration: f64, + ) -> Self { + Self { + id, + audio_pool_index, + internal_start, + internal_end, + external_start, + external_duration, + gain: 1.0, + } + } + + /// Create a clip instance from legacy parameters (for backwards compatibility) + /// Maps old start_time/duration/offset to new timing model + pub fn from_legacy( + id: AudioClipInstanceId, audio_pool_index: usize, start_time: f64, duration: f64, @@ -24,22 +71,64 @@ impl Clip { Self { id, audio_pool_index, - start_time, - duration, - offset, + internal_start: offset, + internal_end: offset + duration, + external_start: start_time, + external_duration: duration, gain: 1.0, } } - /// Check if this clip is active at a given timeline position + /// Check if this clip instance is active at a given timeline position pub fn is_active_at(&self, time_seconds: f64) -> bool { - let clip_end = self.start_time + self.duration; - 
time_seconds >= self.start_time && time_seconds < clip_end + time_seconds >= self.external_start && time_seconds < self.external_end() } - /// Get the end time of this clip on the timeline + /// Get the end time of this clip instance on the timeline + pub fn external_end(&self) -> f64 { + self.external_start + self.external_duration + } + + /// Get the end time of this clip instance on the timeline + /// (Alias for external_end(), for backwards compatibility) pub fn end_time(&self) -> f64 { - self.start_time + self.duration + self.external_end() + } + + /// Get the start time on the timeline + /// (Alias for external_start, for backwards compatibility) + pub fn start_time(&self) -> f64 { + self.external_start + } + + /// Get the internal (content) duration + pub fn internal_duration(&self) -> f64 { + self.internal_end - self.internal_start + } + + /// Check if this clip instance loops + pub fn is_looping(&self) -> bool { + self.external_duration > self.internal_duration() + } + + /// Get the position within the audio content for a given timeline position + /// Returns None if the timeline position is outside this clip instance + /// Handles looping automatically + pub fn get_content_position(&self, timeline_pos: f64) -> Option<f64> { + if timeline_pos < self.external_start || timeline_pos >= self.external_end() { + return None; + } + + let relative_pos = timeline_pos - self.external_start; + let internal_duration = self.internal_duration(); + + if internal_duration <= 0.0 { + return None; + } + + // Wrap around for looping + let content_offset = relative_pos % internal_duration; + Some(self.internal_start + content_offset) } /// Set clip gain diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs index 5432f19..fa5639d 100644 --- a/daw-backend/src/audio/engine.rs +++ b/daw-backend/src/audio/engine.rs @@ -1,9 +1,9 @@ use crate::audio::buffer_pool::BufferPool; -use crate::audio::clip::ClipId; +use crate::audio::clip::{AudioClipInstance, ClipId};
use crate::audio::metronome::Metronome; -use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent}; +use crate::audio::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiEvent}; use crate::audio::node_graph::{nodes::*, AudioGraph}; -use crate::audio::pool::AudioPool; +use crate::audio::pool::AudioClipPool; use crate::audio::project::Project; use crate::audio::recording::{MidiRecordingState, RecordingState}; use crate::audio::track::{Track, TrackId, TrackNode}; @@ -16,7 +16,7 @@ use std::sync::Arc; /// Audio engine for Phase 6: hierarchical tracks with groups pub struct Engine { project: Project, - audio_pool: AudioPool, + audio_pool: AudioClipPool, buffer_pool: BufferPool, playhead: u64, // Playhead position in samples sample_rate: u32, @@ -78,7 +78,7 @@ impl Engine { Self { project: Project::new(sample_rate), - audio_pool: AudioPool::new(), + audio_pool: AudioClipPool::new(), buffer_pool: BufferPool::new(8, buffer_size), // 8 buffers should handle deep nesting playhead: 0, sample_rate, @@ -164,12 +164,12 @@ impl Engine { } /// Get mutable reference to audio pool - pub fn audio_pool_mut(&mut self) -> &mut AudioPool { + pub fn audio_pool_mut(&mut self) -> &mut AudioClipPool { &mut self.audio_pool } /// Get reference to audio pool - pub fn audio_pool(&self) -> &AudioPool { + pub fn audio_pool(&self) -> &AudioClipPool { &self.audio_pool } @@ -240,9 +240,15 @@ impl Engine { let playhead_seconds = self.playhead as f64 / self.sample_rate as f64; // Render the entire project hierarchy into the mix buffer + // Note: We need to use a raw pointer to avoid borrow checker issues + // The midi_clip_pool is part of project, so we extract a reference before mutable borrow + let midi_pool_ptr = &self.project.midi_clip_pool as *const _; + // SAFETY: The midi_clip_pool is not mutated during render, only read + let midi_pool_ref = unsafe { &*midi_pool_ptr }; self.project.render( &mut self.mix_buffer, &self.audio_pool, + midi_pool_ref, &mut self.buffer_pool, playhead_seconds, 
self.sample_rate, @@ -314,10 +320,12 @@ impl Engine { let clip_id = recording.clip_id; let track_id = recording.track_id; - // Update clip duration in project + // Update clip duration in project as recording progresses if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) { if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - clip.duration = duration; + // Update both internal_end and external_duration as recording progresses + clip.internal_end = clip.internal_start + duration; + clip.external_duration = duration; } } @@ -384,33 +392,58 @@ impl Engine { } } Command::MoveClip(track_id, clip_id, new_start_time) => { + // Moving just changes external_start, external_duration stays the same match self.project.get_track_mut(track_id) { Some(crate::audio::track::TrackNode::Audio(track)) => { if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - clip.start_time = new_start_time; + clip.external_start = new_start_time; } } Some(crate::audio::track::TrackNode::Midi(track)) => { - if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - clip.start_time = new_start_time; + // Note: clip_id here is the pool clip ID, not instance ID + if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) { + instance.external_start = new_start_time; } } _ => {} } } - Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset) => { + Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end) => { + // Trim changes which portion of the source content is used + // Also updates external_duration to match internal duration (no looping after trim) match self.project.get_track_mut(track_id) { Some(crate::audio::track::TrackNode::Audio(track)) => { if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - clip.start_time = new_start_time; - clip.duration = new_duration; - clip.offset = new_offset; + clip.internal_start = 
new_internal_start; + clip.internal_end = new_internal_end; + // By default, trimming sets external_duration to match internal duration + clip.external_duration = new_internal_end - new_internal_start; } } Some(crate::audio::track::TrackNode::Midi(track)) => { + // Note: clip_id here is the pool clip ID, not instance ID + if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) { + instance.internal_start = new_internal_start; + instance.internal_end = new_internal_end; + // By default, trimming sets external_duration to match internal duration + instance.external_duration = new_internal_end - new_internal_start; + } + } + _ => {} + } + } + Command::ExtendClip(track_id, clip_id, new_external_duration) => { + // Extend changes the external duration (enables looping if > internal duration) + match self.project.get_track_mut(track_id) { + Some(crate::audio::track::TrackNode::Audio(track)) => { if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - clip.start_time = new_start_time; - clip.duration = new_duration; + clip.external_duration = new_external_duration; + } + } + Some(crate::audio::track::TrackNode::Midi(track)) => { + // Note: clip_id here is the pool clip ID, not instance ID + if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) { + instance.external_duration = new_external_duration; } } _ => {} @@ -475,10 +508,10 @@ impl Engine { pool_index, pool_size); } - // Create a new clip with unique ID + // Create a new clip instance with unique ID using legacy parameters let clip_id = self.next_clip_id; self.next_clip_id += 1; - let clip = crate::audio::clip::Clip::new( + let clip = AudioClipInstance::from_legacy( clip_id, pool_index, start_time, @@ -504,55 +537,74 @@ impl Engine { Command::CreateMidiClip(track_id, start_time, duration) => { // Get the next MIDI clip ID from the atomic counter let clip_id = self.next_midi_clip_id_atomic.fetch_add(1, Ordering::Relaxed); - let clip = 
MidiClip::new(clip_id, start_time, duration); - let _ = self.project.add_midi_clip(track_id, clip); - // Notify UI about the new clip with its ID + + // Create clip content in the pool + let clip = MidiClip::empty(clip_id, duration, format!("MIDI Clip {}", clip_id)); + self.project.midi_clip_pool.add_existing_clip(clip); + + // Create an instance for this clip on the track + let instance_id = self.project.next_midi_clip_instance_id(); + let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time); + + if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + track.clip_instances.push(instance); + } + + // Notify UI about the new clip with its ID (using clip_id for now) let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id)); } Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration) => { - // Add a MIDI note event to the specified clip - if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { - if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - // Timestamp is now in seconds (sample-rate independent) - let note_on = MidiEvent::note_on(time_offset, 0, note, velocity); - clip.events.push(note_on); + // Add a MIDI note event to the specified clip in the pool + // Note: clip_id here refers to the clip in the pool, not the instance + if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) { + // Timestamp is now in seconds (sample-rate independent) + let note_on = MidiEvent::note_on(time_offset, 0, note, velocity); + clip.add_event(note_on); - // Add note off event - let note_off_time = time_offset + duration; - let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); - clip.events.push(note_off); - - // Sort events by timestamp (using partial_cmp for f64) - clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); + // Add note off event + let note_off_time = 
time_offset + duration; + let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); + clip.add_event(note_off); + } else { + // Try legacy behavior: look for instance on track and find its clip + if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + if let Some(instance) = track.clip_instances.iter().find(|c| c.clip_id == clip_id) { + let actual_clip_id = instance.clip_id; + if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(actual_clip_id) { + let note_on = MidiEvent::note_on(time_offset, 0, note, velocity); + clip.add_event(note_on); + let note_off_time = time_offset + duration; + let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); + clip.add_event(note_off); + } + } } } } - Command::AddLoadedMidiClip(track_id, clip) => { - // Add a pre-loaded MIDI clip to the track - let _ = self.project.add_midi_clip(track_id, clip); + Command::AddLoadedMidiClip(track_id, clip, start_time) => { + // Add a pre-loaded MIDI clip to the track with the given start time + let _ = self.project.add_midi_clip_at(track_id, clip, start_time); } - Command::UpdateMidiClipNotes(track_id, clip_id, notes) => { - // Update all notes in a MIDI clip - if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { - if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - // Clear existing events - clip.events.clear(); + Command::UpdateMidiClipNotes(_track_id, clip_id, notes) => { + // Update all notes in a MIDI clip (directly in the pool) + if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) { + // Clear existing events + clip.events.clear(); - // Add new events from the notes array - // Timestamps are now stored in seconds (sample-rate independent) - for (start_time, note, velocity, duration) in notes { - let note_on = MidiEvent::note_on(start_time, 0, note, velocity); - clip.events.push(note_on); + // Add new events from the notes array + // Timestamps 
are now stored in seconds (sample-rate independent) + for (start_time, note, velocity, duration) in notes { + let note_on = MidiEvent::note_on(start_time, 0, note, velocity); + clip.events.push(note_on); - // Add note off event - let note_off_time = start_time + duration; - let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); - clip.events.push(note_off); - } - - // Sort events by timestamp (using partial_cmp for f64) - clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); + // Add note off event + let note_off_time = start_time + duration; + let note_off = MidiEvent::note_off(note_off_time, 0, note, 64); + clip.events.push(note_off); } + + // Sort events by timestamp (using partial_cmp for f64) + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); } } Command::RequestBufferPoolStats => { @@ -728,7 +780,7 @@ impl Engine { self.project = Project::new(self.sample_rate); // Clear audio pool - self.audio_pool = AudioPool::new(); + self.audio_pool = AudioClipPool::new(); // Reset buffer pool (recreate with same settings) let buffer_size = 512 * self.channels as usize; @@ -1439,19 +1491,16 @@ impl Engine { ))), } } - Query::GetMidiClip(track_id, clip_id) => { - if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) { - if let Some(clip) = track.clips.iter().find(|c| c.id == clip_id) { - use crate::command::MidiClipData; - QueryResponse::MidiClipData(Ok(MidiClipData { - duration: clip.duration, - events: clip.events.clone(), - })) - } else { - QueryResponse::MidiClipData(Err(format!("Clip {} not found in track {}", clip_id, track_id))) - } + Query::GetMidiClip(_track_id, clip_id) => { + // Get MIDI clip data from the pool + if let Some(clip) = self.project.midi_clip_pool.get_clip(clip_id) { + use crate::command::MidiClipData; + QueryResponse::MidiClipData(Ok(MidiClipData { + duration: clip.duration, + events: clip.events.clone(), + })) } else { - QueryResponse::MidiClipData(Err(format!("Track {} not 
found or is not a MIDI track", track_id))) + QueryResponse::MidiClipData(Err(format!("Clip {} not found in pool", clip_id))) } } @@ -1622,7 +1671,10 @@ impl Engine { Query::ExportAudio(settings, output_path) => { // Perform export directly - this will block the audio thread but that's okay // since we're exporting and not playing back anyway - match crate::audio::export_audio(&mut self.project, &self.audio_pool, &settings, &output_path) { + // Use raw pointer to get midi_pool reference before mutable borrow of project + let midi_pool_ptr: *const _ = &self.project.midi_clip_pool; + let midi_pool_ref = unsafe { &*midi_pool_ptr }; + match crate::audio::export_audio(&mut self.project, &self.audio_pool, midi_pool_ref, &settings, &output_path) { Ok(()) => QueryResponse::AudioExported(Ok(())), Err(e) => QueryResponse::AudioExported(Err(e)), } @@ -1658,9 +1710,10 @@ impl Engine { let clip = crate::audio::clip::Clip::new( clip_id, 0, // Temporary pool index, will be updated on finalization - start_time, - 0.0, // Duration starts at 0, will be updated during recording - 0.0, + 0.0, // internal_start + 0.0, // internal_end - Duration starts at 0, will be updated during recording + start_time, // external_start (timeline position) + 0.0, // external_duration - starts at 0.0, will be updated during recording ); // Add clip to track @@ -1819,42 +1872,47 @@ impl Engine { eprintln!("[MIDI_RECORDING] Stopping MIDI recording for clip_id={}, track_id={}, captured {} notes, duration={:.3}s", clip_id, track_id, note_count, recording_duration); - // Update the MIDI clip using the existing UpdateMidiClipNotes logic - eprintln!("[MIDI_RECORDING] Looking for track {} to update clip", track_id); - if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { - eprintln!("[MIDI_RECORDING] Found MIDI track, looking for clip {}", clip_id); - if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) { - eprintln!("[MIDI_RECORDING] Found clip, clearing and
adding {} notes", note_count); - // Clear existing events - clip.events.clear(); + // Update the MIDI clip in the pool (new model: clips are stored centrally in the pool) + eprintln!("[MIDI_RECORDING] Looking for clip {} in midi_clip_pool", clip_id); + if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) { + eprintln!("[MIDI_RECORDING] Found clip in pool, clearing and adding {} notes", note_count); + // Clear existing events + clip.events.clear(); - // Update clip duration to match the actual recording time - clip.duration = recording_duration; + // Update clip duration to match the actual recording time + clip.duration = recording_duration; - // Add new events from the recorded notes - // Timestamps are now stored in seconds (sample-rate independent) - for (start_time, note, velocity, duration) in notes.iter() { - let note_on = MidiEvent::note_on(*start_time, 0, *note, *velocity); + // Add new events from the recorded notes + // Timestamps are now stored in seconds (sample-rate independent) + for (start_time, note, velocity, duration) in notes.iter() { + let note_on = MidiEvent::note_on(*start_time, 0, *note, *velocity); - eprintln!("[MIDI_RECORDING] Note {}: start_time={:.3}s, duration={:.3}s", - note, start_time, duration); + eprintln!("[MIDI_RECORDING] Note {}: start_time={:.3}s, duration={:.3}s", + note, start_time, duration); - clip.events.push(note_on); + clip.events.push(note_on); - // Add note off event - let note_off_time = *start_time + *duration; - let note_off = MidiEvent::note_off(note_off_time, 0, *note, 64); - clip.events.push(note_off); + // Add note off event + let note_off_time = *start_time + *duration; + let note_off = MidiEvent::note_off(note_off_time, 0, *note, 64); + clip.events.push(note_off); + } + + // Sort events by timestamp (using partial_cmp for f64) + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); + eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, 
note_count, clip.events.len()); + + // Also update the clip instance's internal_end and external_duration to match the recording duration + if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) { + if let Some(instance) = track.clip_instances.iter_mut().find(|i| i.clip_id == clip_id) { + instance.internal_end = recording_duration; + instance.external_duration = recording_duration; + eprintln!("[MIDI_RECORDING] Updated clip instance timing: internal_end={:.3}s, external_duration={:.3}s", + instance.internal_end, instance.external_duration); } - - // Sort events by timestamp (using partial_cmp for f64) - clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); - eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, note_count, clip.events.len()); - } else { - eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found on track!", clip_id); } } else { - eprintln!("[MIDI_RECORDING] ERROR: Track {} not found or not a MIDI track!", track_id); + eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found in pool!", clip_id); } // Send event to UI @@ -1941,13 +1999,20 @@ impl EngineController { let _ = self.command_tx.push(Command::SetTrackSolo(track_id, solo)); } - /// Move a clip to a new timeline position + /// Move a clip to a new timeline position (changes external_start) pub fn move_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64) { let _ = self.command_tx.push(Command::MoveClip(track_id, clip_id, new_start_time)); } - pub fn trim_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64, new_duration: f64, new_offset: f64) { - let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset)); + /// Trim a clip's internal boundaries (changes which portion of source content is used) + /// This also resets external_duration to match internal duration (disables looping) + pub fn trim_clip(&mut self, track_id: TrackId, 
clip_id: ClipId, new_internal_start: f64, new_internal_end: f64) { + let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end)); + } + + /// Extend or shrink a clip's external duration (enables looping if > internal duration) + pub fn extend_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_external_duration: f64) { + let _ = self.command_tx.push(Command::ExtendClip(track_id, clip_id, new_external_duration)); } /// Send a generic command to the audio thread @@ -2071,9 +2136,9 @@ impl EngineController { let _ = self.command_tx.push(Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration)); } - /// Add a pre-loaded MIDI clip to a track - pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) { - let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip)); + /// Add a pre-loaded MIDI clip to a track at the given timeline position + pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) { + let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip, start_time)); } /// Update all notes in a MIDI clip diff --git a/daw-backend/src/audio/export.rs b/daw-backend/src/audio/export.rs index c3ed147..f9535c6 100644 --- a/daw-backend/src/audio/export.rs +++ b/daw-backend/src/audio/export.rs @@ -1,4 +1,5 @@ use super::buffer_pool::BufferPool; +use super::midi_pool::MidiClipPool; use super::pool::AudioPool; use super::project::Project; use std::path::Path; @@ -61,11 +62,12 @@ impl Default for ExportSettings { pub fn export_audio<P: AsRef<Path>>( project: &mut Project, pool: &AudioPool, + midi_pool: &MidiClipPool, settings: &ExportSettings, output_path: P, ) -> Result<(), String> { // Render the project to memory - let samples = render_to_memory(project, pool, settings)?; + let samples = render_to_memory(project, pool, midi_pool, settings)?; // Write to file based on format match settings.format { @@ -80,6 +82,7 @@ pub fn export_audio<P: AsRef<Path>>( fn 
render_to_memory( project: &mut Project, pool: &AudioPool, + midi_pool: &MidiClipPool, settings: &ExportSettings, ) -> Result<Vec<f32>, String> { // Calculate total number of frames @@ -113,6 +116,7 @@ fn render_to_memory( project.render( &mut render_buffer, pool, + midi_pool, &mut buffer_pool, playhead, settings.sample_rate, diff --git a/daw-backend/src/audio/midi.rs b/daw-backend/src/audio/midi.rs index a22c740..7dbaeb0 100644 --- a/daw-backend/src/audio/midi.rs +++ b/daw-backend/src/audio/midi.rs @@ -63,73 +63,216 @@ impl MidiEvent { } } -/// MIDI clip ID type +/// MIDI clip ID type (for clips stored in the pool) pub type MidiClipId = u32; -/// MIDI clip containing a sequence of MIDI events +/// MIDI clip instance ID type (for instances placed on tracks) +pub type MidiClipInstanceId = u32; + +/// MIDI clip content - stores the actual MIDI events +/// +/// This represents the content data stored in the MidiClipPool. +/// Events have timestamps relative to the start of the clip (0.0 = clip beginning). 
#[derive(Debug, Clone)] pub struct MidiClip { pub id: MidiClipId, pub events: Vec<MidiEvent>, - pub start_time: f64, // Position on timeline in seconds - pub duration: f64, // Clip duration in seconds - pub loop_enabled: bool, + pub duration: f64, // Total content duration in seconds + pub name: String, } impl MidiClip { - /// Create a new MIDI clip - pub fn new(id: MidiClipId, start_time: f64, duration: f64) -> Self { + /// Create a new MIDI clip with content + pub fn new(id: MidiClipId, events: Vec<MidiEvent>, duration: f64, name: String) -> Self { + let mut clip = Self { + id, + events, + duration, + name, + }; + // Sort events by timestamp + clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); + clip + } + + /// Create an empty MIDI clip + pub fn empty(id: MidiClipId, duration: f64, name: String) -> Self { Self { id, events: Vec::new(), - start_time, duration, - loop_enabled: false, + name, } } /// Add a MIDI event to the clip pub fn add_event(&mut self, event: MidiEvent) { self.events.push(event); - // Keep events sorted by timestamp (using partial_cmp for f64) + // Keep events sorted by timestamp self.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap()); } - /// Get the end time of the clip - pub fn end_time(&self) -> f64 { - self.start_time + self.duration + /// Get events within a time range (relative to clip start) + /// This is used by MidiClipInstance to fetch events for a given portion + pub fn get_events_in_range(&self, start: f64, end: f64) -> Vec<MidiEvent> { + self.events + .iter() + .filter(|e| e.timestamp >= start && e.timestamp < end) + .copied() + .collect() + } +} + +/// MIDI clip instance - a reference to MidiClip content with timeline positioning +/// +/// ## Timing Model +/// - `internal_start` / `internal_end`: Define the region of the source clip to play (trimming) +/// - `external_start` / `external_duration`: Define where the instance appears on the timeline and how long +/// +/// ## Looping +/// If `external_duration` is greater
than `internal_end - internal_start`, +/// the instance will seamlessly loop back to `internal_start` when it reaches `internal_end`. +#[derive(Debug, Clone)] +pub struct MidiClipInstance { + pub id: MidiClipInstanceId, + pub clip_id: MidiClipId, // Reference to MidiClip in pool + + /// Start position within the clip content (seconds) + pub internal_start: f64, + /// End position within the clip content (seconds) + pub internal_end: f64, + + /// Start position on the timeline (seconds) + pub external_start: f64, + /// Duration on the timeline (seconds) - can be longer than internal duration for looping + pub external_duration: f64, +} + +impl MidiClipInstance { + /// Create a new MIDI clip instance + pub fn new( + id: MidiClipInstanceId, + clip_id: MidiClipId, + internal_start: f64, + internal_end: f64, + external_start: f64, + external_duration: f64, + ) -> Self { + Self { + id, + clip_id, + internal_start, + internal_end, + external_start, + external_duration, + } } - /// Get events that should be triggered in a given time range + /// Create an instance that uses the full clip content (no trimming, no looping) + pub fn from_full_clip( + id: MidiClipInstanceId, + clip_id: MidiClipId, + clip_duration: f64, + external_start: f64, + ) -> Self { + Self { + id, + clip_id, + internal_start: 0.0, + internal_end: clip_duration, + external_start, + external_duration: clip_duration, + } + } + + /// Get the internal (content) duration + pub fn internal_duration(&self) -> f64 { + self.internal_end - self.internal_start + } + + /// Get the end time on the timeline + pub fn external_end(&self) -> f64 { + self.external_start + self.external_duration + } + + /// Check if this instance loops + pub fn is_looping(&self) -> bool { + self.external_duration > self.internal_duration() + } + + /// Get the end time on the timeline (for backwards compatibility) + pub fn end_time(&self) -> f64 { + self.external_end() + } + + /// Get the start time on the timeline (for backwards 
compatibility) + pub fn start_time(&self) -> f64 { + self.external_start + } + + /// Check if this instance overlaps with a time range + pub fn overlaps_range(&self, range_start: f64, range_end: f64) -> bool { + self.external_start < range_end && self.external_end() > range_start + } + + /// Get events that should be triggered in a given timeline range /// - /// Returns events along with their absolute timestamps in samples + /// This handles: + /// - Trimming (internal_start/internal_end) + /// - Looping (when external duration > internal duration) + /// - Time mapping from timeline to clip content + /// + /// Returns events with timestamps adjusted to timeline time (not clip-relative) pub fn get_events_in_range( &self, + clip: &MidiClip, range_start_seconds: f64, range_end_seconds: f64, - _sample_rate: u32, ) -> Vec<MidiEvent> { let mut result = Vec::new(); - // Check if clip overlaps with the range - if range_start_seconds >= self.end_time() || range_end_seconds <= self.start_time { + // Check if instance overlaps with the range + if !self.overlaps_range(range_start_seconds, range_end_seconds) { return result; } - // Calculate the intersection - let play_start = range_start_seconds.max(self.start_time); - let play_end = range_end_seconds.min(self.end_time()); + let internal_duration = self.internal_duration(); + if internal_duration <= 0.0 { + return result; + } - // Position within the clip - let clip_position_seconds = play_start - self.start_time; - let clip_end_seconds = play_end - self.start_time; + // Calculate how many complete loops fit in the external duration + let num_loops = if self.external_duration > internal_duration { + (self.external_duration / internal_duration).ceil() as usize + } else { + 1 + }; - // Find events in this range - // Note: event.timestamp is now in seconds relative to clip start - // Use half-open interval [start, end) to avoid triggering events twice - for event in &self.events { - if event.timestamp >= clip_position_seconds && 
event.timestamp < clip_end_seconds { - result.push(*event); + let external_end = self.external_end(); + + for loop_idx in 0..num_loops { + let loop_offset = loop_idx as f64 * internal_duration; + + // Get events from the clip that fall within the internal range + for event in &clip.events { + // Skip events outside the trimmed region + if event.timestamp < self.internal_start || event.timestamp >= self.internal_end { + continue; + } + + // Convert to timeline time + let relative_content_time = event.timestamp - self.internal_start; + let timeline_time = self.external_start + loop_offset + relative_content_time; + + // Check if within current buffer range and instance bounds + if timeline_time >= range_start_seconds + && timeline_time < range_end_seconds + && timeline_time < external_end + { + let mut adjusted_event = *event; + adjusted_event.timestamp = timeline_time; + result.push(adjusted_event); + } } } diff --git a/daw-backend/src/audio/midi_pool.rs b/daw-backend/src/audio/midi_pool.rs new file mode 100644 index 0000000..184333a --- /dev/null +++ b/daw-backend/src/audio/midi_pool.rs @@ -0,0 +1,101 @@ +use std::collections::HashMap; +use super::midi::{MidiClip, MidiClipId, MidiEvent}; + +/// Pool for storing MIDI clip content +/// Similar to AudioClipPool but for MIDI data +pub struct MidiClipPool { + clips: HashMap, + next_id: MidiClipId, +} + +impl MidiClipPool { + /// Create a new empty MIDI clip pool + pub fn new() -> Self { + Self { + clips: HashMap::new(), + next_id: 1, // Start at 1 so 0 can indicate "no clip" + } + } + + /// Add a new clip to the pool with the given events and duration + /// Returns the ID of the newly created clip + pub fn add_clip(&mut self, events: Vec, duration: f64, name: String) -> MidiClipId { + let id = self.next_id; + self.next_id += 1; + + let clip = MidiClip::new(id, events, duration, name); + self.clips.insert(id, clip); + id + } + + /// Add an existing clip to the pool (used when loading projects) + /// The clip's ID is 
preserved + pub fn add_existing_clip(&mut self, clip: MidiClip) { + // Update next_id to avoid collisions + if clip.id >= self.next_id { + self.next_id = clip.id + 1; + } + self.clips.insert(clip.id, clip); + } + + /// Get a clip by ID + pub fn get_clip(&self, id: MidiClipId) -> Option<&MidiClip> { + self.clips.get(&id) + } + + /// Get a mutable clip by ID + pub fn get_clip_mut(&mut self, id: MidiClipId) -> Option<&mut MidiClip> { + self.clips.get_mut(&id) + } + + /// Remove a clip from the pool + pub fn remove_clip(&mut self, id: MidiClipId) -> Option { + self.clips.remove(&id) + } + + /// Duplicate a clip, returning the new clip's ID + pub fn duplicate_clip(&mut self, id: MidiClipId) -> Option { + let clip = self.clips.get(&id)?; + let new_id = self.next_id; + self.next_id += 1; + + let mut new_clip = clip.clone(); + new_clip.id = new_id; + new_clip.name = format!("{} (copy)", clip.name); + + self.clips.insert(new_id, new_clip); + Some(new_id) + } + + /// Get all clip IDs in the pool + pub fn clip_ids(&self) -> Vec { + self.clips.keys().copied().collect() + } + + /// Get the number of clips in the pool + pub fn len(&self) -> usize { + self.clips.len() + } + + /// Check if the pool is empty + pub fn is_empty(&self) -> bool { + self.clips.is_empty() + } + + /// Clear all clips from the pool + pub fn clear(&mut self) { + self.clips.clear(); + self.next_id = 1; + } + + /// Get an iterator over all clips + pub fn iter(&self) -> impl Iterator { + self.clips.iter() + } +} + +impl Default for MidiClipPool { + fn default() -> Self { + Self::new() + } +} diff --git a/daw-backend/src/audio/mod.rs b/daw-backend/src/audio/mod.rs index 5ddc44e..64a7c99 100644 --- a/daw-backend/src/audio/mod.rs +++ b/daw-backend/src/audio/mod.rs @@ -6,6 +6,7 @@ pub mod engine; pub mod export; pub mod metronome; pub mod midi; +pub mod midi_pool; pub mod node_graph; pub mod pool; pub mod project; @@ -15,12 +16,13 @@ pub mod track; pub use automation::{AutomationLane, AutomationLaneId, 
AutomationPoint, CurveType, ParameterId}; pub use buffer_pool::BufferPool; -pub use clip::{Clip, ClipId}; +pub use clip::{AudioClipInstance, AudioClipInstanceId, Clip, ClipId}; pub use engine::{Engine, EngineController}; pub use export::{export_audio, ExportFormat, ExportSettings}; pub use metronome::Metronome; -pub use midi::{MidiClip, MidiClipId, MidiEvent}; -pub use pool::{AudioFile as PoolAudioFile, AudioPool}; +pub use midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent}; +pub use midi_pool::MidiClipPool; +pub use pool::{AudioClipPool, AudioFile as PoolAudioFile, AudioPool}; pub use project::Project; pub use recording::RecordingState; pub use sample_loader::{load_audio_file, SampleData}; diff --git a/daw-backend/src/audio/pool.rs b/daw-backend/src/audio/pool.rs index 621aea1..c780f60 100644 --- a/daw-backend/src/audio/pool.rs +++ b/daw-backend/src/audio/pool.rs @@ -119,13 +119,16 @@ impl AudioFile { } } -/// Pool of shared audio files -pub struct AudioPool { +/// Pool of shared audio files (audio clip content) +pub struct AudioClipPool { files: Vec, } -impl AudioPool { - /// Create a new empty audio pool +/// Type alias for backwards compatibility +pub type AudioPool = AudioClipPool; + +impl AudioClipPool { + /// Create a new empty audio clip pool pub fn new() -> Self { Self { files: Vec::new(), @@ -301,7 +304,7 @@ impl AudioPool { } } -impl Default for AudioPool { +impl Default for AudioClipPool { fn default() -> Self { Self::new() } @@ -335,8 +338,8 @@ pub struct AudioPoolEntry { pub embedded_data: Option, } -impl AudioPool { - /// Serialize the audio pool for project saving +impl AudioClipPool { + /// Serialize the audio clip pool for project saving /// /// Files smaller than 10MB are embedded as base64. /// Larger files are stored as relative paths to the project file. 
diff --git a/daw-backend/src/audio/project.rs b/daw-backend/src/audio/project.rs index 76acef4..4937280 100644 --- a/daw-backend/src/audio/project.rs +++ b/daw-backend/src/audio/project.rs @@ -1,19 +1,27 @@ use super::buffer_pool::BufferPool; use super::clip::Clip; -use super::midi::{MidiClip, MidiEvent}; -use super::pool::AudioPool; +use super::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent}; +use super::midi_pool::MidiClipPool; +use super::pool::AudioClipPool; use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode}; use std::collections::HashMap; -/// Project manages the hierarchical track structure +/// Project manages the hierarchical track structure and clip pools /// /// Tracks are stored in a flat HashMap but can be organized into groups, /// forming a tree structure. Groups render their children recursively. +/// +/// Clip content is stored in pools (MidiClipPool), while tracks store +/// clip instances that reference the pool content. 
pub struct Project { tracks: HashMap, next_track_id: TrackId, root_tracks: Vec, // Top-level tracks (not in any group) sample_rate: u32, // System sample rate + /// Pool for MIDI clip content + pub midi_clip_pool: MidiClipPool, + /// Next MIDI clip instance ID (for generating unique IDs) + next_midi_clip_instance_id: MidiClipInstanceId, } impl Project { @@ -24,6 +32,8 @@ impl Project { next_track_id: 0, root_tracks: Vec::new(), sample_rate, + midi_clip_pool: MidiClipPool::new(), + next_midi_clip_instance_id: 1, } } @@ -241,21 +251,81 @@ impl Project { } } - /// Add a MIDI clip to a MIDI track - pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> { + /// Add a MIDI clip instance to a MIDI track + /// The clip content should already exist in the midi_clip_pool + pub fn add_midi_clip_instance(&mut self, track_id: TrackId, instance: MidiClipInstance) -> Result<(), &'static str> { if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) { - track.add_clip(clip); + track.add_clip_instance(instance); Ok(()) } else { Err("Track not found or is not a MIDI track") } } + /// Create a new MIDI clip in the pool and add an instance to a track + /// Returns (clip_id, instance_id) on success + pub fn create_midi_clip_with_instance( + &mut self, + track_id: TrackId, + events: Vec, + duration: f64, + name: String, + external_start: f64, + ) -> Result<(MidiClipId, MidiClipInstanceId), &'static str> { + // Verify track exists and is a MIDI track + if !matches!(self.tracks.get(&track_id), Some(TrackNode::Midi(_))) { + return Err("Track not found or is not a MIDI track"); + } + + // Create clip in pool + let clip_id = self.midi_clip_pool.add_clip(events, duration, name); + + // Create instance + let instance_id = self.next_midi_clip_instance_id; + self.next_midi_clip_instance_id += 1; + + let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, external_start); + + // Add instance to track + if let 
Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) { + track.add_clip_instance(instance); + } + + Ok((clip_id, instance_id)) + } + + /// Generate a new unique MIDI clip instance ID + pub fn next_midi_clip_instance_id(&mut self) -> MidiClipInstanceId { + let id = self.next_midi_clip_instance_id; + self.next_midi_clip_instance_id += 1; + id + } + + /// Legacy method for backwards compatibility - creates clip and instance from old MidiClip format + pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> { + self.add_midi_clip_at(track_id, clip, 0.0) + } + + /// Add a MIDI clip to the pool and create an instance at the given timeline position + pub fn add_midi_clip_at(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) -> Result<(), &'static str> { + // Add the clip to the pool (it already has events and duration) + let duration = clip.duration; + let clip_id = clip.id; + self.midi_clip_pool.add_existing_clip(clip); + + // Create an instance that uses the full clip at the given position + let instance_id = self.next_midi_clip_instance_id(); + let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time); + + self.add_midi_clip_instance(track_id, instance) + } + /// Render all root tracks into the output buffer pub fn render( &mut self, output: &mut [f32], - pool: &AudioPool, + audio_pool: &AudioClipPool, + midi_pool: &MidiClipPool, buffer_pool: &mut BufferPool, playhead_seconds: f64, sample_rate: u32, @@ -278,7 +348,8 @@ impl Project { self.render_track( track_id, output, - pool, + audio_pool, + midi_pool, buffer_pool, ctx, any_solo, @@ -292,7 +363,8 @@ impl Project { &mut self, track_id: TrackId, output: &mut [f32], - pool: &AudioPool, + audio_pool: &AudioClipPool, + midi_pool: &MidiClipPool, buffer_pool: &mut BufferPool, ctx: RenderContext, any_solo: bool, @@ -336,11 +408,11 @@ impl Project { match self.tracks.get_mut(&track_id) { Some(TrackNode::Audio(track)) => { // Render 
audio track directly into output - track.render(output, pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels); + track.render(output, audio_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels); } Some(TrackNode::Midi(track)) => { // Render MIDI track directly into output - track.render(output, ctx.playhead_seconds, ctx.sample_rate, ctx.channels); + track.render(output, midi_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels); } Some(TrackNode::Group(group)) => { // Get children IDs, check if this group is soloed, and transform context @@ -360,7 +432,8 @@ impl Project { self.render_track( child_id, &mut group_buffer, - pool, + audio_pool, + midi_pool, buffer_pool, child_ctx, any_solo, diff --git a/daw-backend/src/audio/track.rs b/daw-backend/src/audio/track.rs index b7581aa..cc8bd4e 100644 --- a/daw-backend/src/audio/track.rs +++ b/daw-backend/src/audio/track.rs @@ -1,9 +1,10 @@ use super::automation::{AutomationLane, AutomationLaneId, ParameterId}; -use super::clip::Clip; -use super::midi::{MidiClip, MidiEvent}; +use super::clip::AudioClipInstance; +use super::midi::{MidiClipInstance, MidiEvent}; +use super::midi_pool::MidiClipPool; use super::node_graph::AudioGraph; use super::node_graph::nodes::{AudioInputNode, AudioOutputNode}; -use super::pool::AudioPool; +use super::pool::AudioClipPool; use std::collections::HashMap; /// Track ID type @@ -285,11 +286,12 @@ impl Metatrack { } } -/// MIDI track with MIDI clips and a node-based instrument +/// MIDI track with MIDI clip instances and a node-based instrument pub struct MidiTrack { pub id: TrackId, pub name: String, - pub clips: Vec, + /// Clip instances placed on this track (reference clips in the MidiClipPool) + pub clip_instances: Vec, pub instrument_graph: AudioGraph, pub volume: f32, pub muted: bool, @@ -310,7 +312,7 @@ impl MidiTrack { Self { id, name, - clips: Vec::new(), + clip_instances: Vec::new(), instrument_graph: AudioGraph::new(sample_rate, default_buffer_size), volume: 1.0, muted: 
false, @@ -346,9 +348,9 @@ impl MidiTrack { self.automation_lanes.remove(&lane_id).is_some() } - /// Add a MIDI clip to this track - pub fn add_clip(&mut self, clip: MidiClip) { - self.clips.push(clip); + /// Add a MIDI clip instance to this track + pub fn add_clip_instance(&mut self, instance: MidiClipInstance) { + self.clip_instances.push(instance); } /// Set track volume @@ -420,6 +422,7 @@ impl MidiTrack { pub fn render( &mut self, output: &mut [f32], + midi_pool: &MidiClipPool, playhead_seconds: f64, sample_rate: u32, channels: u32, @@ -427,17 +430,18 @@ impl MidiTrack { let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64); let buffer_end_seconds = playhead_seconds + buffer_duration_seconds; - // Collect MIDI events from all clips that overlap with current time range + // Collect MIDI events from all clip instances that overlap with current time range let mut midi_events = Vec::new(); - for clip in &self.clips { - let events = clip.get_events_in_range( - playhead_seconds, - buffer_end_seconds, - sample_rate, - ); - - // Events now have timestamps in seconds relative to clip start - midi_events.extend(events); + for instance in &self.clip_instances { + // Get the clip content from the pool + if let Some(clip) = midi_pool.get_clip(instance.clip_id) { + let events = instance.get_events_in_range( + clip, + playhead_seconds, + buffer_end_seconds, + ); + midi_events.extend(events); + } } // Add live MIDI events (from virtual keyboard or MIDI controllers) @@ -480,11 +484,12 @@ impl MidiTrack { } } -/// Audio track with clips +/// Audio track with audio clip instances pub struct AudioTrack { pub id: TrackId, pub name: String, - pub clips: Vec, + /// Audio clip instances (reference content in the AudioClipPool) + pub clips: Vec, pub volume: f32, pub muted: bool, pub solo: bool, @@ -560,8 +565,8 @@ impl AudioTrack { self.automation_lanes.remove(&lane_id).is_some() } - /// Add a clip to this track - pub fn add_clip(&mut self, clip: 
Clip) { + /// Add an audio clip instance to this track + pub fn add_clip(&mut self, clip: AudioClipInstance) { self.clips.push(clip); } @@ -590,7 +595,7 @@ impl AudioTrack { pub fn render( &mut self, output: &mut [f32], - pool: &AudioPool, + pool: &AudioClipPool, playhead_seconds: f64, sample_rate: u32, channels: u32, @@ -602,10 +607,10 @@ impl AudioTrack { let mut clip_buffer = vec![0.0f32; output.len()]; let mut rendered = 0; - // Render all active clips into the temporary buffer + // Render all active clip instances into the temporary buffer for clip in &self.clips { // Check if clip overlaps with current buffer time range - if clip.start_time < buffer_end_seconds && clip.end_time() > playhead_seconds { + if clip.external_start < buffer_end_seconds && clip.external_end() > playhead_seconds { rendered += self.render_clip( clip, &mut clip_buffer, @@ -667,12 +672,13 @@ impl AudioTrack { volume } - /// Render a single clip into the output buffer + /// Render a single audio clip instance into the output buffer + /// Handles looping when external_duration > internal_duration fn render_clip( &self, - clip: &Clip, + clip: &AudioClipInstance, output: &mut [f32], - pool: &AudioPool, + pool: &AudioClipPool, playhead_seconds: f64, sample_rate: u32, channels: u32, @@ -680,46 +686,94 @@ impl AudioTrack { let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64); let buffer_end_seconds = playhead_seconds + buffer_duration_seconds; - // Determine the time range we need to render (intersection of buffer and clip) - let render_start_seconds = playhead_seconds.max(clip.start_time); - let render_end_seconds = buffer_end_seconds.min(clip.end_time()); + // Determine the time range we need to render (intersection of buffer and clip external bounds) + let render_start_seconds = playhead_seconds.max(clip.external_start); + let render_end_seconds = buffer_end_seconds.min(clip.external_end()); // If no overlap, return early if render_start_seconds >= 
render_end_seconds { return 0; } - // Calculate offset into the output buffer (in interleaved samples) - let output_offset_seconds = render_start_seconds - playhead_seconds; - let output_offset_samples = (output_offset_seconds * sample_rate as f64 * channels as f64) as usize; - - // Calculate position within the clip's audio file (in seconds) - let clip_position_seconds = render_start_seconds - clip.start_time + clip.offset; - - // Calculate how many samples to render in the output - let render_duration_seconds = render_end_seconds - render_start_seconds; - let samples_to_render = (render_duration_seconds * sample_rate as f64 * channels as f64) as usize; - let samples_to_render = samples_to_render.min(output.len() - output_offset_samples); - - // Get the slice of output buffer to write to - if output_offset_samples + samples_to_render > output.len() { + let internal_duration = clip.internal_duration(); + if internal_duration <= 0.0 { return 0; } - let output_slice = &mut output[output_offset_samples..output_offset_samples + samples_to_render]; - // Calculate combined gain let combined_gain = clip.gain * self.volume; - // Render from pool with sample rate conversion - // Pass the time position in seconds, let the pool handle sample rate conversion - pool.render_from_file( - clip.audio_pool_index, - output_slice, - clip_position_seconds, - combined_gain, - sample_rate, - channels, - ) + let mut total_rendered = 0; + + // Process the render range sample by sample (or in chunks for efficiency) + // For looping clips, we need to handle wrap-around at the loop boundary + let samples_per_second = sample_rate as f64 * channels as f64; + + // For now, render in a simpler way - iterate through the timeline range + // and use get_content_position for each sample position + let output_start_offset = ((render_start_seconds - playhead_seconds) * samples_per_second) as usize; + let output_end_offset = ((render_end_seconds - playhead_seconds) * samples_per_second) as usize; + + if 
output_end_offset > output.len() || output_start_offset > output.len() { + return 0; + } + + // If not looping, we can render in one chunk (more efficient) + if !clip.is_looping() { + // Simple case: no looping + let content_start = clip.get_content_position(render_start_seconds).unwrap_or(clip.internal_start); + let output_len = output.len(); + let output_slice = &mut output[output_start_offset..output_end_offset.min(output_len)]; + + total_rendered = pool.render_from_file( + clip.audio_pool_index, + output_slice, + content_start, + combined_gain, + sample_rate, + channels, + ); + } else { + // Looping case: need to handle wrap-around at loop boundaries + // Render in segments, one per loop iteration + let mut timeline_pos = render_start_seconds; + let mut output_offset = output_start_offset; + + while timeline_pos < render_end_seconds && output_offset < output.len() { + // Calculate position within the loop + let relative_pos = timeline_pos - clip.external_start; + let loop_offset = relative_pos % internal_duration; + let content_pos = clip.internal_start + loop_offset; + + // Calculate how much we can render before hitting the loop boundary + let time_to_loop_end = internal_duration - loop_offset; + let time_to_render_end = render_end_seconds - timeline_pos; + let chunk_duration = time_to_loop_end.min(time_to_render_end); + + let chunk_samples = (chunk_duration * samples_per_second) as usize; + let chunk_samples = chunk_samples.min(output.len() - output_offset); + + if chunk_samples == 0 { + break; + } + + let output_slice = &mut output[output_offset..output_offset + chunk_samples]; + + let rendered = pool.render_from_file( + clip.audio_pool_index, + output_slice, + content_pos, + combined_gain, + sample_rate, + channels, + ); + + total_rendered += rendered; + output_offset += chunk_samples; + timeline_pos += chunk_duration; + } + } + + total_rendered } } diff --git a/daw-backend/src/command/types.rs b/daw-backend/src/command/types.rs index 7f190f6..b116ee2 
100644 --- a/daw-backend/src/command/types.rs +++ b/daw-backend/src/command/types.rs @@ -28,10 +28,14 @@ pub enum Command { SetTrackSolo(TrackId, bool), // Clip management commands - /// Move a clip to a new timeline position + /// Move a clip to a new timeline position (track_id, clip_id, new_external_start) MoveClip(TrackId, ClipId, f64), - /// Trim a clip (track_id, clip_id, new_start_time, new_duration, new_offset) - TrimClip(TrackId, ClipId, f64, f64, f64), + /// Trim a clip's internal boundaries (track_id, clip_id, new_internal_start, new_internal_end) + /// This changes which portion of the source content is used + TrimClip(TrackId, ClipId, f64, f64), + /// Extend/shrink a clip's external duration (track_id, clip_id, new_external_duration) + /// If duration > internal duration, the clip will loop + ExtendClip(TrackId, ClipId, f64), // Metatrack management commands /// Create a new metatrack with a name @@ -67,8 +71,8 @@ pub enum Command { CreateMidiClip(TrackId, f64, f64), /// Add a MIDI note to a clip (track_id, clip_id, time_offset, note, velocity, duration) AddMidiNote(TrackId, MidiClipId, f64, u8, u8, f64), - /// Add a pre-loaded MIDI clip to a track - AddLoadedMidiClip(TrackId, MidiClip), + /// Add a pre-loaded MIDI clip to a track (track_id, clip, start_time) + AddLoadedMidiClip(TrackId, MidiClip, f64), /// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>) /// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>), diff --git a/daw-backend/src/io/midi_file.rs b/daw-backend/src/io/midi_file.rs index 51f5ea3..1dd2305 100644 --- a/daw-backend/src/io/midi_file.rs +++ b/daw-backend/src/io/midi_file.rs @@ -157,9 +157,8 @@ pub fn load_midi_file>( (final_delta_ticks as f64 / ticks_per_beat) * (microseconds_per_beat / 1_000_000.0); let duration_seconds = accumulated_time + final_delta_time; - // Create the 
MIDI clip - let mut clip = MidiClip::new(clip_id, 0.0, duration_seconds); - clip.events = events; + // Create the MIDI clip (content only, positioning happens when creating instance) + let clip = MidiClip::new(clip_id, events, duration_seconds, "Imported MIDI".to_string()); Ok(clip) } diff --git a/daw-backend/src/tui/mod.rs b/daw-backend/src/tui/mod.rs index fe72a97..20c64a7 100644 --- a/daw-backend/src/tui/mod.rs +++ b/daw-backend/src/tui/mod.rs @@ -847,8 +847,7 @@ fn execute_command( // Load the MIDI file match load_midi_file(file_path, app.next_clip_id, 48000) { - Ok(mut midi_clip) => { - midi_clip.start_time = start_time; + Ok(midi_clip) => { let clip_id = midi_clip.id; let duration = midi_clip.duration; let event_count = midi_clip.events.len(); @@ -882,8 +881,8 @@ fn execute_command( app.add_clip(track_id, clip_id, start_time, duration, file_path.to_string(), notes); app.next_clip_id += 1; - // Send to audio engine - controller.add_loaded_midi_clip(track_id, midi_clip); + // Send to audio engine with the start_time (clip content is separate from timeline position) + controller.add_loaded_midi_clip(track_id, midi_clip, start_time); app.set_status(format!("Loaded {} ({} events, {:.2}s) to track {} at {:.2}s", file_path, event_count, duration, track_id, start_time)); diff --git a/src-tauri/src/audio.rs b/src-tauri/src/audio.rs index b08ac45..9f4e456 100644 --- a/src-tauri/src/audio.rs +++ b/src-tauri/src/audio.rs @@ -1,5 +1,6 @@ use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak}; use daw_backend::audio::pool::AudioPoolEntry; +use ffmpeg_next::ffi::FF_LOSS_COLORQUANT; use std::sync::{Arc, Mutex}; use std::collections::HashMap; use std::path::Path; @@ -406,13 +407,28 @@ pub async fn audio_trim_clip( state: tauri::State<'_, Arc>>, track_id: u32, clip_id: u32, - new_start_time: f64, - new_duration: f64, - new_offset: f64, + internal_start: f64, + internal_end: f64, ) -> Result<(), String> { let mut audio_state = 
state.lock().unwrap(); if let Some(controller) = &mut audio_state.controller { - controller.trim_clip(track_id, clip_id, new_start_time, new_duration, new_offset); + controller.trim_clip(track_id, clip_id, internal_start, internal_end); + Ok(()) + } else { + Err("Audio not initialized".to_string()) + } +} + +#[tauri::command] +pub async fn audio_extend_clip( + state: tauri::State<'_, Arc>>, + track_id: u32, + clip_id: u32, + new_external_duration: f64, +) -> Result<(), String> { + let mut audio_state = state.lock().unwrap(); + if let Some(controller) = &mut audio_state.controller { + controller.extend_clip(track_id, clip_id, new_external_duration); Ok(()) } else { Err("Audio not initialized".to_string()) @@ -601,11 +617,8 @@ pub async fn audio_load_midi_file( let sample_rate = audio_state.sample_rate; if let Some(controller) = &mut audio_state.controller { - // Load and parse the MIDI file - let mut clip = daw_backend::load_midi_file(&path, 0, sample_rate)?; - - // Set the start time - clip.start_time = start_time; + // Load and parse the MIDI file (clip content only, no positioning) + let clip = daw_backend::load_midi_file(&path, 0, sample_rate)?; let duration = clip.duration; // Extract note data from MIDI events @@ -631,8 +644,8 @@ pub async fn audio_load_midi_file( } } - // Add the loaded MIDI clip to the track - controller.add_loaded_midi_clip(track_id, clip); + // Add the loaded MIDI clip to the track at the specified start_time + controller.add_loaded_midi_clip(track_id, clip, start_time); Ok(MidiFileMetadata { duration, diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index d023263..f08cf88 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -256,6 +256,7 @@ pub fn run() { audio::audio_add_clip, audio::audio_move_clip, audio::audio_trim_clip, + audio::audio_extend_clip, audio::audio_start_recording, audio::audio_stop_recording, audio::audio_pause_recording, diff --git a/src/main.js b/src/main.js index e719c35..181832a 100644 --- 
a/src/main.js +++ b/src/main.js @@ -1622,6 +1622,7 @@ async function toggleRecording() { name: 'Recording...', startTime: startTime, duration: clipDuration, + offset: 0, notes: [], loading: true }); @@ -1818,12 +1819,28 @@ async function _save(path) { // Serialize current layout structure (panes, splits, sizes) const serializedLayout = serializeLayout(rootPane); + // Serialize timeline state + let timelineState = null; + if (context.timelineWidget?.timelineState) { + const ts = context.timelineWidget.timelineState; + timelineState = { + timeFormat: ts.timeFormat, + framerate: ts.framerate, + bpm: ts.bpm, + timeSignature: ts.timeSignature, + pixelsPerSecond: ts.pixelsPerSecond, + viewportStartTime: ts.viewportStartTime, + snapToFrames: ts.snapToFrames, + }; + } + const fileData = { version: "2.0.0", width: config.fileWidth, height: config.fileHeight, fps: config.framerate, layoutState: serializedLayout, // Save current layout structure + timelineState: timelineState, // Save timeline settings actions: undoStack, json: root.toJSON(), // Audio pool at the end for human readability @@ -2275,6 +2292,44 @@ async function _open(path, returnJson = false) { console.log('[JS] Skipping layout restoration'); } + // Restore timeline state if saved + if (file.timelineState && context.timelineWidget?.timelineState) { + const ts = context.timelineWidget.timelineState; + const saved = file.timelineState; + console.log('[JS] Restoring timeline state:', saved); + + if (saved.timeFormat) ts.timeFormat = saved.timeFormat; + if (saved.framerate) ts.framerate = saved.framerate; + if (saved.bpm) ts.bpm = saved.bpm; + if (saved.timeSignature) ts.timeSignature = saved.timeSignature; + if (saved.pixelsPerSecond) ts.pixelsPerSecond = saved.pixelsPerSecond; + if (saved.viewportStartTime !== undefined) ts.viewportStartTime = saved.viewportStartTime; + if (saved.snapToFrames !== undefined) ts.snapToFrames = saved.snapToFrames; + + // Update metronome button visibility based on restored time 
format + if (context.metronomeGroup) { + context.metronomeGroup.style.display = ts.timeFormat === 'measures' ? '' : 'none'; + } + + // Update time display + if (context.updateTimeDisplay) { + context.updateTimeDisplay(); + } + + // Update snap checkbox if it exists + const snapCheckbox = document.getElementById('snap-checkbox'); + if (snapCheckbox) { + snapCheckbox.checked = ts.snapToFrames; + } + + // Trigger timeline redraw + if (context.timelineWidget.requestRedraw) { + context.timelineWidget.requestRedraw(); + } + + console.log('[JS] Timeline state restored successfully'); + } + // Restore audio tracks and clips to the Rust backend // The fromJSON method only creates JavaScript objects, // but doesn't initialize them in the audio engine @@ -5074,6 +5129,35 @@ function timeline() { controls.push(timeDisplay); + // Snap checkbox + const snapGroup = document.createElement("div"); + snapGroup.className = "playback-controls-group"; + snapGroup.style.display = "flex"; + snapGroup.style.alignItems = "center"; + snapGroup.style.gap = "4px"; + + const snapCheckbox = document.createElement("input"); + snapCheckbox.type = "checkbox"; + snapCheckbox.id = "snap-checkbox"; + snapCheckbox.checked = timelineWidget.timelineState.snapToFrames; + snapCheckbox.style.cursor = "pointer"; + snapCheckbox.addEventListener("change", () => { + timelineWidget.timelineState.snapToFrames = snapCheckbox.checked; + console.log('Snapping', snapCheckbox.checked ? 
'enabled' : 'disabled'); + }); + + const snapLabel = document.createElement("label"); + snapLabel.htmlFor = "snap-checkbox"; + snapLabel.textContent = "Snap"; + snapLabel.style.cursor = "pointer"; + snapLabel.style.fontSize = "12px"; + snapLabel.style.color = "var(--text-secondary)"; + + snapGroup.appendChild(snapCheckbox); + snapGroup.appendChild(snapLabel); + + controls.push(snapGroup); + return controls; }; diff --git a/src/models/layer.js b/src/models/layer.js index 33599d0..b516ec3 100644 --- a/src/models/layer.js +++ b/src/models/layer.js @@ -1178,12 +1178,12 @@ class AudioTrack { name: clip.name, startTime: clip.startTime, duration: clip.duration, + offset: clip.offset || 0, // Default to 0 if not present }; // Restore audio-specific fields if (clip.poolIndex !== undefined) { clipData.poolIndex = clip.poolIndex; - clipData.offset = clip.offset; } // Restore MIDI-specific fields diff --git a/src/state.js b/src/state.js index e4239f8..2925f39 100644 --- a/src/state.js +++ b/src/state.js @@ -97,7 +97,7 @@ export let config = { currentLayout: "animation", // Current active layout key defaultLayout: "animation", // Default layout for new files showStartScreen: false, // Show layout picker on startup (disabled for now) - restoreLayoutFromFile: false, // Restore layout when opening files + restoreLayoutFromFile: true, // Restore layout when opening files customLayouts: [] // User-saved custom layouts }; diff --git a/src/timeline.js b/src/timeline.js index 81eae63..e1d2268 100644 --- a/src/timeline.js +++ b/src/timeline.js @@ -24,7 +24,7 @@ class TimelineState { this.rulerHeight = 30 // Height of time ruler in pixels // Snapping (Phase 5) - this.snapToFrames = false // Whether to snap keyframes to frame boundaries + this.snapToFrames = true // Whether to snap keyframes to frame boundaries (default: on) } /** diff --git a/src/widgets.js b/src/widgets.js index be724ea..176f317 100644 --- a/src/widgets.js +++ b/src/widgets.js @@ -582,6 +582,54 @@ class TimelineWindowV2 
extends Widget { this.automationNameCache = new Map() } + /** + * Quantize a time value to the nearest beat/measure division based on zoom level. + * Only applies when in measures mode and snapping is enabled. + * @param {number} time - The time value to quantize (in seconds) + * @returns {number} - The quantized time value + */ + quantizeTime(time) { + // Only quantize in measures mode with snapping enabled + if (this.timelineState.timeFormat !== 'measures' || !this.timelineState.snapToFrames) { + return time + } + + const bpm = this.timelineState.bpm || 120 + const beatsPerSecond = bpm / 60 + const beatDuration = 1 / beatsPerSecond // Duration of one beat in seconds + const beatsPerMeasure = this.timelineState.timeSignature?.numerator || 4 + + // Calculate beat width in pixels + const beatWidth = beatDuration * this.timelineState.pixelsPerSecond + + // Base threshold for zoom level detection (adjustable) + const zoomThreshold = 30 + + // Determine quantization level based on zoom (beat width in pixels) + // When zoomed out (small beat width), quantize to measures + // When zoomed in (large beat width), quantize to smaller divisions + let quantizeDuration + if (beatWidth < zoomThreshold * 0.5) { + // Very zoomed out: quantize to whole measures + quantizeDuration = beatDuration * beatsPerMeasure + } else if (beatWidth < zoomThreshold) { + // Zoomed out: quantize to half measures (2 beats in 4/4) + quantizeDuration = beatDuration * (beatsPerMeasure / 2) + } else if (beatWidth < zoomThreshold * 2) { + // Medium zoom: quantize to beats + quantizeDuration = beatDuration + } else if (beatWidth < zoomThreshold * 4) { + // Zoomed in: quantize to half beats (eighth notes in 4/4) + quantizeDuration = beatDuration / 2 + } else { + // Very zoomed in: quantize to quarter beats (sixteenth notes in 4/4) + quantizeDuration = beatDuration / 4 + } + + // Round time to nearest quantization unit + return Math.round(time / quantizeDuration) * quantizeDuration + } + draw(ctx) { 
ctx.save() @@ -594,9 +642,6 @@ class TimelineWindowV2 extends Widget { ctx.fillStyle = backgroundColor ctx.fillRect(0, 0, this.width, this.height) - // Draw snapping checkbox in ruler header area (Phase 5) - this.drawSnappingCheckbox(ctx) - // Draw time ruler at top, offset by track header width ctx.save() ctx.translate(this.trackHeaderWidth, 0) @@ -659,33 +704,6 @@ class TimelineWindowV2 extends Widget { ctx.restore() } - /** - * Draw snapping checkbox in ruler header area (Phase 5) - */ - drawSnappingCheckbox(ctx) { - const checkboxSize = 14 - const checkboxX = 10 - const checkboxY = (this.ruler.height - checkboxSize) / 2 - - // Draw checkbox border - ctx.strokeStyle = foregroundColor - ctx.lineWidth = 1 - ctx.strokeRect(checkboxX, checkboxY, checkboxSize, checkboxSize) - - // Fill if snapping is enabled - if (this.timelineState.snapToFrames) { - ctx.fillStyle = foregroundColor - ctx.fillRect(checkboxX + 2, checkboxY + 2, checkboxSize - 4, checkboxSize - 4) - } - - // Draw label - ctx.fillStyle = labelColor - ctx.font = '11px sans-serif' - ctx.textAlign = 'left' - ctx.textBaseline = 'middle' - ctx.fillText('Snap', checkboxX + checkboxSize + 6, this.ruler.height / 2) - } - /** * Draw fixed track headers on the left (names, expand/collapse) */ @@ -1430,60 +1448,99 @@ class TimelineWindowV2 extends Widget { const availableHeight = trackHeight - 10 - (verticalPadding * 2) const noteHeight = availableHeight / 12 - // Calculate visible time range within the clip + // Get clip trim boundaries (internal_start = offset, internal_end depends on source) + const clipOffset = clip.offset || 0 + // Use stored internalDuration if available (set when trimming), otherwise calculate from notes + let internalDuration + if (clip.internalDuration !== undefined) { + internalDuration = clip.internalDuration + } else { + // Fallback: calculate from actual notes (for clips that haven't been trimmed) + let contentEndTime = clipOffset + for (const note of clip.notes) { + const noteEnd = 
note.start_time + note.duration + if (noteEnd > contentEndTime) { + contentEndTime = noteEnd + } + } + internalDuration = contentEndTime - clipOffset + } + const contentEndTime = clipOffset + internalDuration + // If clip.duration exceeds internal duration, we're looping + const isLooping = clip.duration > internalDuration && internalDuration > 0 + + // Calculate visible time range within the clip (in clip-local time) const clipEndX = startX + clipWidth const visibleStartTime = this.timelineState.pixelToTime(Math.max(startX, 0)) - clip.startTime const visibleEndTime = this.timelineState.pixelToTime(Math.min(clipEndX, this.width)) - clip.startTime - // Binary search to find first visible note - let firstVisibleIdx = 0 - let left = 0 - let right = clip.notes.length - 1 - while (left <= right) { - const mid = Math.floor((left + right) / 2) - const noteEndTime = clip.notes[mid].start_time + clip.notes[mid].duration + // Helper function to draw notes for a given loop iteration + const drawNotesForIteration = (loopOffset, opacity) => { + ctx.fillStyle = opacity < 1 ? 
`rgba(111, 220, 111, ${opacity})` : '#6fdc6f' - if (noteEndTime < visibleStartTime) { - left = mid + 1 - firstVisibleIdx = left - } else { - right = mid - 1 + for (let i = 0; i < clip.notes.length; i++) { + const note = clip.notes[i] + const noteEndTime = note.start_time + note.duration + + // Skip notes that are outside the trimmed region + if (noteEndTime <= clipOffset || note.start_time >= contentEndTime) { + continue + } + + // Calculate note position in this loop iteration + const noteDisplayStart = note.start_time - clipOffset + loopOffset + const noteDisplayEnd = noteEndTime - clipOffset + loopOffset + + // Skip if this iteration's note is beyond clip duration + if (noteDisplayStart >= clip.duration) { + continue + } + + // Exit early if note starts after visible range + if (noteDisplayStart > visibleEndTime) { + continue + } + + // Skip if note ends before visible range + if (noteDisplayEnd < visibleStartTime) { + continue + } + + // Calculate note position (pitch mod 12 for chromatic representation) + const pitchClass = note.note % 12 + // Invert Y so higher pitches appear at top + const noteY = y + 5 + ((11 - pitchClass) * noteHeight) + + // Calculate note timing on timeline + const noteStartX = this.timelineState.timeToPixel(clip.startTime + noteDisplayStart) + let noteEndX = this.timelineState.timeToPixel(clip.startTime + Math.min(noteDisplayEnd, clip.duration)) + + // Clip to visible bounds + const visibleStartX = Math.max(noteStartX, startX + 2) + const visibleEndX = Math.min(noteEndX, startX + clipWidth - 2) + const visibleWidth = visibleEndX - visibleStartX + + if (visibleWidth > 0) { + // Draw note rectangle + ctx.fillRect( + visibleStartX, + noteY, + visibleWidth, + noteHeight - 1 // Small gap between notes + ) + } } } - // Draw visible notes only - ctx.fillStyle = '#6fdc6f' // Bright green for note bars + // Draw primary notes at full opacity + drawNotesForIteration(0, 1.0) - for (let i = firstVisibleIdx; i < clip.notes.length; i++) { - const 
note = clip.notes[i] - - // Exit early if note starts after visible range - if (note.start_time > visibleEndTime) { - break - } - - // Calculate note position (pitch mod 12 for chromatic representation) - const pitchClass = note.note % 12 - // Invert Y so higher pitches appear at top - const noteY = y + 5 + ((11 - pitchClass) * noteHeight) - - // Calculate note timing on timeline - const noteStartX = this.timelineState.timeToPixel(clip.startTime + note.start_time) - const noteEndX = this.timelineState.timeToPixel(clip.startTime + note.start_time + note.duration) - - // Clip to visible bounds - const visibleStartX = Math.max(noteStartX, startX + 2) - const visibleEndX = Math.min(noteEndX, startX + clipWidth - 2) - const visibleWidth = visibleEndX - visibleStartX - - if (visibleWidth > 0) { - // Draw note rectangle - ctx.fillRect( - visibleStartX, - noteY, - visibleWidth, - noteHeight - 1 // Small gap between notes - ) + // Draw looped iterations at 50% opacity + if (isLooping) { + let loopOffset = internalDuration + while (loopOffset < clip.duration) { + drawNotesForIteration(loopOffset, 0.5) + loopOffset += internalDuration } } } else if (!isMIDI && clip.waveform && clip.waveform.length > 0) { @@ -1986,22 +2043,6 @@ class TimelineWindowV2 extends Widget { } mousedown(x, y) { - // Check if clicking on snapping checkbox (Phase 5) - if (y <= this.ruler.height && x < this.trackHeaderWidth) { - const checkboxSize = 14 - const checkboxX = 10 - const checkboxY = (this.ruler.height - checkboxSize) / 2 - - if (x >= checkboxX && x <= checkboxX + checkboxSize && - y >= checkboxY && y <= checkboxY + checkboxSize) { - // Toggle snapping - this.timelineState.snapToFrames = !this.timelineState.snapToFrames - console.log('Snapping', this.timelineState.snapToFrames ? 
'enabled' : 'disabled') - if (this.requestRedraw) this.requestRedraw() - return true - } - } - // Check if clicking in ruler area (after track headers) if (y <= this.ruler.height && x >= this.trackHeaderWidth) { // Adjust x for ruler (remove track header offset) @@ -2233,6 +2274,36 @@ class TimelineWindowV2 extends Widget { return true } + // Check if clicking on loop corner (top-right) to extend/loop clip + const loopCornerInfo = this.getAudioClipLoopCornerAtPoint(track, adjustedX, adjustedY) + if (loopCornerInfo) { + // Skip if right-clicking (button 2) + if (this.lastClickEvent?.button === 2) { + return false + } + + // Select the track + this.selectTrack(track) + + // Start loop corner dragging + this.draggingLoopCorner = { + track: track, + clip: loopCornerInfo.clip, + clipIndex: loopCornerInfo.clipIndex, + audioTrack: loopCornerInfo.audioTrack, + isMIDI: loopCornerInfo.isMIDI, + initialDuration: loopCornerInfo.clip.duration + } + + // Enable global mouse events for dragging + this._globalEvents.add("mousemove") + this._globalEvents.add("mouseup") + + console.log('Started dragging loop corner') + if (this.requestRedraw) this.requestRedraw() + return true + } + // Check if clicking on audio clip edge to start trimming const audioEdgeInfo = this.getAudioClipEdgeAtPoint(track, adjustedX, adjustedY) if (audioEdgeInfo) { @@ -2934,6 +3005,47 @@ class TimelineWindowV2 extends Widget { return null } + /** + * Check if hovering over the loop corner (top-right) of an audio/MIDI clip + * Returns clip info if in the loop corner zone + */ + getAudioClipLoopCornerAtPoint(track, x, y) { + if (track.type !== 'audio') return null + + const trackIndex = this.trackHierarchy.tracks.indexOf(track) + if (trackIndex === -1) return null + + const trackY = this.trackHierarchy.getTrackY(trackIndex) + const trackHeight = this.trackHierarchy.trackHeight + const clipTop = trackY + 5 + const cornerSize = 12 // Size of the corner hot zone in pixels + + // Check if y is in the top portion of 
the clip + if (y < clipTop || y > clipTop + cornerSize) return null + + const clickTime = this.timelineState.pixelToTime(x) + const audioTrack = track.object + + // Check each clip + for (let i = 0; i < audioTrack.clips.length; i++) { + const clip = audioTrack.clips[i] + const clipEnd = clip.startTime + clip.duration + const clipEndX = this.timelineState.timeToPixel(clipEnd) + + // Check if x is near the right edge (within corner zone) + if (x >= clipEndX - cornerSize && x <= clipEndX) { + return { + clip: clip, + clipIndex: i, + audioTrack: audioTrack, + isMIDI: audioTrack.type === 'midi' + } + } + } + + return null + } + getVideoClipAtPoint(track, x, y) { if (track.type !== 'video') return null @@ -3848,19 +3960,23 @@ class TimelineWindowV2 extends Widget { // Handle audio clip edge dragging (trimming) if (this.draggingAudioClipEdge) { const adjustedX = x - this.trackHeaderWidth - const newTime = this.timelineState.pixelToTime(adjustedX) + const rawTime = this.timelineState.pixelToTime(adjustedX) const minClipDuration = this.context.config.minClipDuration if (this.draggingAudioClipEdge.edge === 'left') { // Dragging left edge - adjust startTime and offset const initialEnd = this.draggingAudioClipEdge.initialClipStart + this.draggingAudioClipEdge.initialClipDuration const maxStartTime = initialEnd - minClipDuration - const newStartTime = Math.max(0, Math.min(newTime, maxStartTime)) + // Quantize the new start time + let newStartTime = Math.max(0, Math.min(rawTime, maxStartTime)) + newStartTime = this.quantizeTime(newStartTime) const startTimeDelta = newStartTime - this.draggingAudioClipEdge.initialClipStart this.draggingAudioClipEdge.clip.startTime = newStartTime this.draggingAudioClipEdge.clip.offset = this.draggingAudioClipEdge.initialClipOffset + startTimeDelta this.draggingAudioClipEdge.clip.duration = this.draggingAudioClipEdge.initialClipDuration - startTimeDelta + // Also update internalDuration when trimming (this is the content length before looping) + 
this.draggingAudioClipEdge.clip.internalDuration = this.draggingAudioClipEdge.initialClipDuration - startTimeDelta // Also trim linked video clip if it exists if (this.draggingAudioClipEdge.clip.linkedVideoClip) { @@ -3872,14 +3988,21 @@ class TimelineWindowV2 extends Widget { } else { // Dragging right edge - adjust duration const minEndTime = this.draggingAudioClipEdge.initialClipStart + minClipDuration - const newEndTime = Math.max(minEndTime, newTime) + // Quantize the new end time + let newEndTime = Math.max(minEndTime, rawTime) + newEndTime = this.quantizeTime(newEndTime) let newDuration = newEndTime - this.draggingAudioClipEdge.clip.startTime - // Constrain duration to not exceed source file duration minus offset - const maxAvailableDuration = this.draggingAudioClipEdge.clip.sourceDuration - this.draggingAudioClipEdge.clip.offset - newDuration = Math.min(newDuration, maxAvailableDuration) + // Constrain duration to not exceed source file duration minus offset (for audio clips only) + // MIDI clips don't have sourceDuration and can be extended freely + if (this.draggingAudioClipEdge.clip.sourceDuration !== undefined) { + const maxAvailableDuration = this.draggingAudioClipEdge.clip.sourceDuration - (this.draggingAudioClipEdge.clip.offset || 0) + newDuration = Math.min(newDuration, maxAvailableDuration) + } this.draggingAudioClipEdge.clip.duration = newDuration + // Also update internalDuration when trimming (this is the content length before looping) + this.draggingAudioClipEdge.clip.internalDuration = newDuration // Also trim linked video clip if it exists if (this.draggingAudioClipEdge.clip.linkedVideoClip) { @@ -3893,6 +4016,25 @@ class TimelineWindowV2 extends Widget { return true } + // Handle loop corner dragging (extending/looping clip) + if (this.draggingLoopCorner) { + const adjustedX = x - this.trackHeaderWidth + const newTime = this.timelineState.pixelToTime(adjustedX) + const minClipDuration = this.context.config.minClipDuration + + // Calculate 
new end time and quantize it + let newEndTime = Math.max(this.draggingLoopCorner.clip.startTime + minClipDuration, newTime) + newEndTime = this.quantizeTime(newEndTime) + const newDuration = newEndTime - this.draggingLoopCorner.clip.startTime + + // Update clip duration (no maximum constraint - allows looping) + this.draggingLoopCorner.clip.duration = newDuration + + // Trigger timeline redraw + if (this.requestRedraw) this.requestRedraw() + return true + } + // Handle audio clip dragging if (this.draggingAudioClip) { // Adjust coordinates to timeline area @@ -4046,7 +4188,8 @@ class TimelineWindowV2 extends Widget { // Update cursor based on hover position (when not dragging) if (!this.draggingAudioClip && !this.draggingVideoClip && !this.draggingAudioClipEdge && !this.draggingVideoClipEdge && - !this.draggingKeyframe && !this.draggingPlayhead && !this.draggingSegment) { + !this.draggingKeyframe && !this.draggingPlayhead && !this.draggingSegment && + !this.draggingLoopCorner) { const trackY = y - this.ruler.height if (trackY >= 0 && x >= this.trackHeaderWidth) { const adjustedY = trackY - this.trackScrollOffset @@ -4054,6 +4197,16 @@ class TimelineWindowV2 extends Widget { const track = this.trackHierarchy.getTrackAtY(adjustedY) if (track) { + // Check for audio/MIDI clip loop corner (top-right) - must check before edge detection + if (track.type === 'audio') { + const loopCornerInfo = this.getAudioClipLoopCornerAtPoint(track, adjustedX, adjustedY) + if (loopCornerInfo) { + // Use the same rotate cursor as the transform tool corner handles + this.cursor = "url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='24' height='24' fill='currentColor' viewBox='0 0 16 16'%3E%3Cpath fill-rule='evenodd' d='M8 3a5 5 0 1 1-4.546 2.914.5.5 0 0 0-.908-.417A6 6 0 1 0 8 2z'/%3E%3Cpath d='M8 4.466V.534a.25.25 0 0 0-.41-.192L5.23 2.308a.25.25 0 0 0 0 .384l2.36 1.966A.25.25 0 0 0 8 4.466'/%3E%3C/svg%3E\") 12 12, auto" + return false + } + } + // Check for audio 
clip edge if (track.type === 'audio') { const audioEdgeInfo = this.getAudioClipEdgeAtPoint(track, adjustedX, adjustedY) @@ -4142,13 +4295,28 @@ class TimelineWindowV2 extends Widget { if (this.draggingAudioClipEdge) { console.log('Finished trimming audio clip edge') - // Update backend with new clip trim + const clip = this.draggingAudioClipEdge.clip + const trackId = this.draggingAudioClipEdge.audioTrack.audioTrackId + const clipId = clip.clipId + + // If dragging left edge, also move the clip's timeline position + if (this.draggingAudioClipEdge.edge === 'left') { + invoke('audio_move_clip', { + trackId: trackId, + clipId: clipId, + newStartTime: clip.startTime + }).catch(error => { + console.error('Failed to move audio clip in backend:', error) + }) + } + + // Update the internal trim boundaries + // internal_start = offset, internal_end = offset + duration (content region) invoke('audio_trim_clip', { - trackId: this.draggingAudioClipEdge.audioTrack.audioTrackId, - clipId: this.draggingAudioClipEdge.clip.clipId, - newStartTime: this.draggingAudioClipEdge.clip.startTime, - newDuration: this.draggingAudioClipEdge.clip.duration, - newOffset: this.draggingAudioClipEdge.clip.offset + trackId: trackId, + clipId: clipId, + internalStart: clip.offset, + internalEnd: clip.offset + clip.duration }).catch(error => { console.error('Failed to trim audio clip in backend:', error) }) @@ -4168,6 +4336,33 @@ class TimelineWindowV2 extends Widget { return true } + // Complete loop corner dragging (extending/looping clip) + if (this.draggingLoopCorner) { + console.log('Finished extending clip via loop corner') + + const clip = this.draggingLoopCorner.clip + const trackId = this.draggingLoopCorner.audioTrack.audioTrackId + const clipId = clip.clipId + + // Call audio_extend_clip to update the external duration in the backend + invoke('audio_extend_clip', { + trackId: trackId, + clipId: clipId, + newExternalDuration: clip.duration + }).catch(error => { + console.error('Failed to 
extend audio clip in backend:', error) + }) + + // Clean up dragging state + this.draggingLoopCorner = null + this._globalEvents.delete("mousemove") + this._globalEvents.delete("mouseup") + + // Final redraw + if (this.requestRedraw) this.requestRedraw() + return true + } + // Complete video clip edge dragging (trimming) if (this.draggingVideoClipEdge) { console.log('Finished trimming video clip edge') @@ -4177,12 +4372,26 @@ class TimelineWindowV2 extends Widget { const linkedAudioClip = this.draggingVideoClipEdge.clip.linkedAudioClip const audioTrack = this.draggingVideoClipEdge.videoLayer.linkedAudioTrack if (audioTrack) { + const trackId = audioTrack.audioTrackId + const clipId = linkedAudioClip.clipId + + // If dragging left edge, also move the clip's timeline position + if (this.draggingVideoClipEdge.edge === 'left') { + invoke('audio_move_clip', { + trackId: trackId, + clipId: clipId, + newStartTime: linkedAudioClip.startTime + }).catch(error => { + console.error('Failed to move linked audio clip in backend:', error) + }) + } + + // Update the internal trim boundaries invoke('audio_trim_clip', { - trackId: audioTrack.audioTrackId, - clipId: linkedAudioClip.clipId, - newStartTime: linkedAudioClip.startTime, - newDuration: linkedAudioClip.duration, - newOffset: linkedAudioClip.offset + trackId: trackId, + clipId: clipId, + internalStart: linkedAudioClip.offset, + internalEnd: linkedAudioClip.offset + linkedAudioClip.duration }).catch(error => { console.error('Failed to trim linked audio clip in backend:', error) })