Add support for audio trimming and looping
commit a1ad0b44b1
parent 422f97382b
@@ -1,21 +1,68 @@
-/// Clip ID type
-pub type ClipId = u32;
+/// Audio clip instance ID type
+pub type AudioClipInstanceId = u32;
 
-/// Audio clip that references data in the AudioPool
+/// Type alias for backwards compatibility
+pub type ClipId = AudioClipInstanceId;
+
+/// Audio clip instance that references content in the AudioClipPool
+///
+/// This represents a placed instance of audio content on the timeline.
+/// The actual audio data is stored in the AudioClipPool and referenced by `audio_pool_index`.
+///
+/// ## Timing Model
+/// - `internal_start` / `internal_end`: Define the region of the source audio to play (trimming)
+/// - `external_start` / `external_duration`: Define where the clip appears on the timeline and for how long
+///
+/// ## Looping
+/// If `external_duration` is greater than `internal_end - internal_start`,
+/// the clip will seamlessly loop back to `internal_start` when it reaches `internal_end`.
 #[derive(Debug, Clone)]
-pub struct Clip {
-    pub id: ClipId,
+pub struct AudioClipInstance {
+    pub id: AudioClipInstanceId,
     pub audio_pool_index: usize,
-    pub start_time: f64, // Position on timeline in seconds
-    pub duration: f64,   // Clip duration in seconds
-    pub offset: f64,     // Offset into audio file in seconds
-    pub gain: f32,       // Clip-level gain
+    /// Start position within the audio content (seconds)
+    pub internal_start: f64,
+    /// End position within the audio content (seconds)
+    pub internal_end: f64,
+
+    /// Start position on the timeline (seconds)
+    pub external_start: f64,
+    /// Duration on the timeline (seconds) - can be longer than the internal duration for looping
+    pub external_duration: f64,
+
+    /// Clip-level gain
+    pub gain: f32,
 }
 
-impl Clip {
-    /// Create a new clip
+/// Type alias for backwards compatibility
+pub type Clip = AudioClipInstance;
+
+impl AudioClipInstance {
+    /// Create a new audio clip instance
     pub fn new(
-        id: ClipId,
+        id: AudioClipInstanceId,
+        audio_pool_index: usize,
+        internal_start: f64,
+        internal_end: f64,
+        external_start: f64,
+        external_duration: f64,
+    ) -> Self {
+        Self {
+            id,
+            audio_pool_index,
+            internal_start,
+            internal_end,
+            external_start,
+            external_duration,
+            gain: 1.0,
+        }
+    }
+
+    /// Create a clip instance from legacy parameters (for backwards compatibility)
+    /// Maps the old start_time/duration/offset fields onto the new timing model
+    pub fn from_legacy(
+        id: AudioClipInstanceId,
         audio_pool_index: usize,
         start_time: f64,
         duration: f64,
@@ -24,22 +71,64 @@ impl Clip {
         Self {
             id,
             audio_pool_index,
-            start_time,
-            duration,
-            offset,
+            internal_start: offset,
+            internal_end: offset + duration,
+            external_start: start_time,
+            external_duration: duration,
             gain: 1.0,
         }
     }
 
-    /// Check if this clip is active at a given timeline position
+    /// Check if this clip instance is active at a given timeline position
     pub fn is_active_at(&self, time_seconds: f64) -> bool {
-        let clip_end = self.start_time + self.duration;
-        time_seconds >= self.start_time && time_seconds < clip_end
+        time_seconds >= self.external_start && time_seconds < self.external_end()
     }
 
-    /// Get the end time of this clip on the timeline
+    /// Get the end time of this clip instance on the timeline
+    pub fn external_end(&self) -> f64 {
+        self.external_start + self.external_duration
+    }
+
+    /// Get the end time of this clip instance on the timeline
+    /// (Alias for external_end(), for backwards compatibility)
     pub fn end_time(&self) -> f64 {
-        self.start_time + self.duration
+        self.external_end()
+    }
+
+    /// Get the start time on the timeline
+    /// (Alias for external_start, for backwards compatibility)
+    pub fn start_time(&self) -> f64 {
+        self.external_start
+    }
+
+    /// Get the internal (content) duration
+    pub fn internal_duration(&self) -> f64 {
+        self.internal_end - self.internal_start
+    }
+
+    /// Check if this clip instance loops
+    pub fn is_looping(&self) -> bool {
+        self.external_duration > self.internal_duration()
+    }
+
+    /// Get the position within the audio content for a given timeline position
+    /// Returns None if the timeline position is outside this clip instance
+    /// Handles looping automatically
+    pub fn get_content_position(&self, timeline_pos: f64) -> Option<f64> {
+        if timeline_pos < self.external_start || timeline_pos >= self.external_end() {
+            return None;
+        }
+
+        let relative_pos = timeline_pos - self.external_start;
+        let internal_duration = self.internal_duration();
+
+        if internal_duration <= 0.0 {
+            return None;
+        }
+
+        // Wrap around for looping
+        let content_offset = relative_pos % internal_duration;
+        Some(self.internal_start + content_offset)
    }
 
    /// Set clip gain
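The timing model above is easiest to see with numbers. A minimal sketch (not part of the commit; IDs and times invented) of how a trimmed, looped instance maps timeline positions back into the source audio via get_content_position:

// Sketch only: exercises AudioClipInstance from this commit with made-up values.
// A 2 s region of the source (1.0..3.0) placed at t = 10 s and stretched to 5 s,
// so it loops two and a half times.
let clip = AudioClipInstance::new(1, 0, 1.0, 3.0, 10.0, 5.0);

assert!(clip.is_looping());                              // 5.0 > 2.0
assert_eq!(clip.get_content_position(10.5), Some(1.5));  // first pass
assert_eq!(clip.get_content_position(12.5), Some(1.5));  // wrapped once (2.5 % 2.0 = 0.5)
assert_eq!(clip.get_content_position(15.0), None);       // past external_end()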
@@ -1,9 +1,9 @@
 use crate::audio::buffer_pool::BufferPool;
-use crate::audio::clip::ClipId;
+use crate::audio::clip::{AudioClipInstance, ClipId};
 use crate::audio::metronome::Metronome;
-use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
+use crate::audio::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiEvent};
 use crate::audio::node_graph::{nodes::*, AudioGraph};
-use crate::audio::pool::AudioPool;
+use crate::audio::pool::AudioClipPool;
 use crate::audio::project::Project;
 use crate::audio::recording::{MidiRecordingState, RecordingState};
 use crate::audio::track::{Track, TrackId, TrackNode};
@@ -16,7 +16,7 @@ use std::sync::Arc;
 /// Audio engine for Phase 6: hierarchical tracks with groups
 pub struct Engine {
     project: Project,
-    audio_pool: AudioPool,
+    audio_pool: AudioClipPool,
     buffer_pool: BufferPool,
     playhead: u64, // Playhead position in samples
     sample_rate: u32,
@@ -78,7 +78,7 @@ impl Engine {
 
         Self {
             project: Project::new(sample_rate),
-            audio_pool: AudioPool::new(),
+            audio_pool: AudioClipPool::new(),
             buffer_pool: BufferPool::new(8, buffer_size), // 8 buffers should handle deep nesting
             playhead: 0,
             sample_rate,
@@ -164,12 +164,12 @@ impl Engine {
     }
 
     /// Get mutable reference to audio pool
-    pub fn audio_pool_mut(&mut self) -> &mut AudioPool {
+    pub fn audio_pool_mut(&mut self) -> &mut AudioClipPool {
         &mut self.audio_pool
     }
 
     /// Get reference to audio pool
-    pub fn audio_pool(&self) -> &AudioPool {
+    pub fn audio_pool(&self) -> &AudioClipPool {
         &self.audio_pool
     }
 
@@ -240,9 +240,15 @@ impl Engine {
         let playhead_seconds = self.playhead as f64 / self.sample_rate as f64;
 
         // Render the entire project hierarchy into the mix buffer
+        // Note: We need to use a raw pointer to avoid borrow checker issues.
+        // The midi_clip_pool is part of project, so we extract a reference before the mutable borrow
+        let midi_pool_ptr = &self.project.midi_clip_pool as *const _;
+        // SAFETY: The midi_clip_pool is not mutated during render, only read
+        let midi_pool_ref = unsafe { &*midi_pool_ptr };
         self.project.render(
             &mut self.mix_buffer,
             &self.audio_pool,
+            midi_pool_ref,
             &mut self.buffer_pool,
             playhead_seconds,
             self.sample_rate,
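The SAFETY comment holds because render only reads the MIDI pool, but the same shape can often be expressed without unsafe by splitting the borrow. A standalone sketch with hypothetical types (not the commit's code):

// Sketch only (hypothetical DemoPool/DemoProject types): the safe alternative to the
// raw pointer is splitting the &mut borrow into disjoint field borrows before the call.
struct DemoPool { clips: Vec<u32> }
struct DemoProject { midi_clip_pool: DemoPool, tracks: Vec<u32> }

// A free function over disjoint fields lets the borrow checker verify that the
// mutable part (tracks) and the read-only part (pool) never alias.
fn render_tracks(tracks: &mut [u32], pool: &DemoPool) {
    for t in tracks.iter_mut() {
        *t += pool.clips.len() as u32; // placeholder "rendering"
    }
}

fn tick(project: &mut DemoProject) {
    // Destructuring splits one &mut DemoProject into two disjoint borrows,
    // so no unsafe is needed.
    let DemoProject { midi_clip_pool, tracks } = project;
    render_tracks(tracks, midi_clip_pool);
}

The commit keeps render as a method on Project, which is why it reaches for the raw pointer instead; the sketch only shows the trade-off.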
@@ -314,10 +320,12 @@ impl Engine {
         let clip_id = recording.clip_id;
         let track_id = recording.track_id;
 
-        // Update clip duration in project
+        // Update clip duration in project as recording progresses
         if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
             if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                clip.duration = duration;
+                // Update both internal_end and external_duration as recording progresses
+                clip.internal_end = clip.internal_start + duration;
+                clip.external_duration = duration;
             }
         }
 
@@ -384,33 +392,58 @@ impl Engine {
                 }
             }
             Command::MoveClip(track_id, clip_id, new_start_time) => {
+                // Moving just changes external_start; external_duration stays the same
                 match self.project.get_track_mut(track_id) {
                     Some(crate::audio::track::TrackNode::Audio(track)) => {
                         if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                            clip.start_time = new_start_time;
+                            clip.external_start = new_start_time;
                         }
                     }
                     Some(crate::audio::track::TrackNode::Midi(track)) => {
-                        if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                            clip.start_time = new_start_time;
+                        // Note: clip_id here is the pool clip ID, not the instance ID
+                        if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
+                            instance.external_start = new_start_time;
                         }
                     }
                     _ => {}
                 }
             }
-            Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset) => {
+            Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end) => {
+                // Trim changes which portion of the source content is used.
+                // Also updates external_duration to match the internal duration (no looping after trim)
                 match self.project.get_track_mut(track_id) {
                     Some(crate::audio::track::TrackNode::Audio(track)) => {
                         if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                            clip.start_time = new_start_time;
-                            clip.duration = new_duration;
-                            clip.offset = new_offset;
+                            clip.internal_start = new_internal_start;
+                            clip.internal_end = new_internal_end;
+                            // By default, trimming sets external_duration to match the internal duration
+                            clip.external_duration = new_internal_end - new_internal_start;
                         }
                     }
                     Some(crate::audio::track::TrackNode::Midi(track)) => {
+                        // Note: clip_id here is the pool clip ID, not the instance ID
+                        if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
+                            instance.internal_start = new_internal_start;
+                            instance.internal_end = new_internal_end;
+                            // By default, trimming sets external_duration to match the internal duration
+                            instance.external_duration = new_internal_end - new_internal_start;
+                        }
+                    }
+                    _ => {}
+                }
+            }
+            Command::ExtendClip(track_id, clip_id, new_external_duration) => {
+                // Extend changes the external duration (enables looping if > internal duration)
+                match self.project.get_track_mut(track_id) {
+                    Some(crate::audio::track::TrackNode::Audio(track)) => {
                         if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                            clip.start_time = new_start_time;
-                            clip.duration = new_duration;
+                            clip.external_duration = new_external_duration;
+                        }
+                    }
+                    Some(crate::audio::track::TrackNode::Midi(track)) => {
+                        // Note: clip_id here is the pool clip ID, not the instance ID
+                        if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
+                            instance.external_duration = new_external_duration;
                         }
                     }
                     _ => {}
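In combination, TrimClip followed by ExtendClip is how a loop is authored: trimming pins external_duration to the trimmed length, and extending past it re-enables looping. A sketch of the resulting clip state (invented IDs and times), applying by hand what the two handlers above do:

// Sketch only: mirrors what the TrimClip and ExtendClip handlers do to an audio clip.
let mut clip = AudioClipInstance::from_legacy(7, 0, 4.0, 8.0, 0.0); // 8 s clip at t = 4 s

// TrimClip(.., 2.0, 3.0): keep source seconds 2.0..3.0, looping disabled
clip.internal_start = 2.0;
clip.internal_end = 3.0;
clip.external_duration = clip.internal_duration(); // 1.0 s
assert!(!clip.is_looping());

// ExtendClip(.., 4.5): stretch to 4.5 s on the timeline -> loops 4.5x
clip.external_duration = 4.5;
assert!(clip.is_looping());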
@@ -475,10 +508,10 @@ impl Engine {
                         pool_index, pool_size);
                 }
 
-                // Create a new clip with unique ID
+                // Create a new clip instance with a unique ID, using legacy parameters
                 let clip_id = self.next_clip_id;
                 self.next_clip_id += 1;
-                let clip = crate::audio::clip::Clip::new(
+                let clip = AudioClipInstance::from_legacy(
                     clip_id,
                     pool_index,
                     start_time,
@@ -504,37 +537,57 @@ impl Engine {
             Command::CreateMidiClip(track_id, start_time, duration) => {
                 // Get the next MIDI clip ID from the atomic counter
                 let clip_id = self.next_midi_clip_id_atomic.fetch_add(1, Ordering::Relaxed);
-                let clip = MidiClip::new(clip_id, start_time, duration);
-                let _ = self.project.add_midi_clip(track_id, clip);
-                // Notify UI about the new clip with its ID
+
+                // Create clip content in the pool
+                let clip = MidiClip::empty(clip_id, duration, format!("MIDI Clip {}", clip_id));
+                self.project.midi_clip_pool.add_existing_clip(clip);
+
+                // Create an instance for this clip on the track
+                let instance_id = self.project.next_midi_clip_instance_id();
+                let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
+
+                if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
+                    track.clip_instances.push(instance);
+                }
+
+                // Notify UI about the new clip with its ID (using clip_id for now)
                 let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id));
             }
             Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration) => {
-                // Add a MIDI note event to the specified clip
-                if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                        // Timestamp is now in seconds (sample-rate independent)
-                        let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
-                        clip.events.push(note_on);
-
-                        // Add note off event
-                        let note_off_time = time_offset + duration;
-                        let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
-                        clip.events.push(note_off);
-
-                        // Sort events by timestamp (using partial_cmp for f64)
-                        clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
-                    }
-                }
-            }
-            Command::AddLoadedMidiClip(track_id, clip) => {
-                // Add a pre-loaded MIDI clip to the track
-                let _ = self.project.add_midi_clip(track_id, clip);
-            }
-            Command::UpdateMidiClipNotes(track_id, clip_id, notes) => {
-                // Update all notes in a MIDI clip
-                if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-                    if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
+                // Add a MIDI note event to the specified clip in the pool
+                // Note: clip_id here refers to the clip in the pool, not the instance
+                if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
+                    // Timestamp is now in seconds (sample-rate independent)
+                    let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
+                    clip.add_event(note_on);
+
+                    // Add note off event
+                    let note_off_time = time_offset + duration;
+                    let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
+                    clip.add_event(note_off);
+                } else {
+                    // Try legacy behavior: look for the instance on the track and find its clip
+                    if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
+                        if let Some(instance) = track.clip_instances.iter().find(|c| c.clip_id == clip_id) {
+                            let actual_clip_id = instance.clip_id;
+                            if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(actual_clip_id) {
+                                let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
+                                clip.add_event(note_on);
+                                let note_off_time = time_offset + duration;
+                                let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
+                                clip.add_event(note_off);
+                            }
+                        }
+                    }
+                }
+            }
+            Command::AddLoadedMidiClip(track_id, clip, start_time) => {
+                // Add a pre-loaded MIDI clip to the track with the given start time
+                let _ = self.project.add_midi_clip_at(track_id, clip, start_time);
+            }
+            Command::UpdateMidiClipNotes(_track_id, clip_id, notes) => {
+                // Update all notes in a MIDI clip (directly in the pool)
+                if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
                     // Clear existing events
                     clip.events.clear();
 
@@ -554,7 +607,6 @@ impl Engine {
                     clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
                 }
             }
-            }
             Command::RequestBufferPoolStats => {
                 // Send buffer pool statistics back to UI
                 let stats = self.buffer_pool.stats();
@@ -728,7 +780,7 @@ impl Engine {
         self.project = Project::new(self.sample_rate);
 
         // Clear audio pool
-        self.audio_pool = AudioPool::new();
+        self.audio_pool = AudioClipPool::new();
 
         // Reset buffer pool (recreate with same settings)
         let buffer_size = 512 * self.channels as usize;
@@ -1439,19 +1491,16 @@ impl Engine {
                 ))),
                 }
             }
-            Query::GetMidiClip(track_id, clip_id) => {
-                if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
-                    if let Some(clip) = track.clips.iter().find(|c| c.id == clip_id) {
+            Query::GetMidiClip(_track_id, clip_id) => {
+                // Get MIDI clip data from the pool
+                if let Some(clip) = self.project.midi_clip_pool.get_clip(clip_id) {
                     use crate::command::MidiClipData;
                     QueryResponse::MidiClipData(Ok(MidiClipData {
                         duration: clip.duration,
                         events: clip.events.clone(),
                     }))
                 } else {
-                        QueryResponse::MidiClipData(Err(format!("Clip {} not found in track {}", clip_id, track_id)))
-                    }
-                } else {
-                    QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id)))
+                    QueryResponse::MidiClipData(Err(format!("Clip {} not found in pool", clip_id)))
                 }
             }
@@ -1622,7 +1671,10 @@ impl Engine {
             Query::ExportAudio(settings, output_path) => {
                 // Perform export directly - this will block the audio thread, but that's okay
                 // since we're exporting and not playing back anyway
-                match crate::audio::export_audio(&mut self.project, &self.audio_pool, &settings, &output_path) {
+                // Use a raw pointer to get the midi_pool reference before the mutable borrow of project
+                let midi_pool_ptr: *const _ = &self.project.midi_clip_pool;
+                let midi_pool_ref = unsafe { &*midi_pool_ptr };
+                match crate::audio::export_audio(&mut self.project, &self.audio_pool, midi_pool_ref, &settings, &output_path) {
                     Ok(()) => QueryResponse::AudioExported(Ok(())),
                     Err(e) => QueryResponse::AudioExported(Err(e)),
                 }
@@ -1658,9 +1710,10 @@ impl Engine {
                 let clip = crate::audio::clip::Clip::new(
                     clip_id,
                     0, // Temporary pool index, will be updated on finalization
-                    start_time,
-                    0.0, // Duration starts at 0, will be updated during recording
-                    0.0,
+                    0.0, // internal_start
+                    0.0, // internal_end - duration starts at 0, will be updated during recording
+                    start_time, // external_start (timeline position)
+                    start_time, // external_duration - placeholder, updated during recording
                 );
 
                 // Add clip to track
@@ -1819,12 +1872,10 @@ impl Engine {
         eprintln!("[MIDI_RECORDING] Stopping MIDI recording for clip_id={}, track_id={}, captured {} notes, duration={:.3}s",
            clip_id, track_id, note_count, recording_duration);
 
-        // Update the MIDI clip using the existing UpdateMidiClipNotes logic
-        eprintln!("[MIDI_RECORDING] Looking for track {} to update clip", track_id);
-        if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
-            eprintln!("[MIDI_RECORDING] Found MIDI track, looking for clip {}", clip_id);
-            if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
-                eprintln!("[MIDI_RECORDING] Found clip, clearing and adding {} notes", note_count);
+        // Update the MIDI clip in the pool (new model: clips are stored centrally in the pool)
+        eprintln!("[MIDI_RECORDING] Looking for clip {} in midi_clip_pool", clip_id);
+        if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
+            eprintln!("[MIDI_RECORDING] Found clip in pool, clearing and adding {} notes", note_count);
             // Clear existing events
             clip.events.clear();
 
@@ -1850,11 +1901,18 @@ impl Engine {
             // Sort events by timestamp (using partial_cmp for f64)
             clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
             eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, note_count, clip.events.len());
-            } else {
-                eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found on track!", clip_id);
+
+            // Also update the clip instance's internal_end and external_duration to match the recording duration
+            if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
+                if let Some(instance) = track.clip_instances.iter_mut().find(|i| i.clip_id == clip_id) {
+                    instance.internal_end = recording_duration;
+                    instance.external_duration = recording_duration;
+                    eprintln!("[MIDI_RECORDING] Updated clip instance timing: internal_end={:.3}s, external_duration={:.3}s",
+                        instance.internal_end, instance.external_duration);
+                }
             }
         } else {
-            eprintln!("[MIDI_RECORDING] ERROR: Track {} not found or not a MIDI track!", track_id);
+            eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found in pool!", clip_id);
         }
 
         // Send event to UI
@@ -1941,13 +1999,20 @@ impl EngineController {
         let _ = self.command_tx.push(Command::SetTrackSolo(track_id, solo));
     }
 
-    /// Move a clip to a new timeline position
+    /// Move a clip to a new timeline position (changes external_start)
     pub fn move_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64) {
         let _ = self.command_tx.push(Command::MoveClip(track_id, clip_id, new_start_time));
     }
 
-    pub fn trim_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64, new_duration: f64, new_offset: f64) {
-        let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset));
+    /// Trim a clip's internal boundaries (changes which portion of the source content is used).
+    /// This also resets external_duration to match the internal duration (disables looping)
+    pub fn trim_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_internal_start: f64, new_internal_end: f64) {
+        let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end));
+    }
+
+    /// Extend or shrink a clip's external duration (enables looping if > internal duration)
+    pub fn extend_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_external_duration: f64) {
+        let _ = self.command_tx.push(Command::ExtendClip(track_id, clip_id, new_external_duration));
     }
 
     /// Send a generic command to the audio thread
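On the UI side these controller methods compose naturally; a hypothetical call sequence (IDs and times invented):

// Sketch only: how a UI might drive the new commands.
controller.move_clip(track_id, clip_id, 8.0);        // place clip at t = 8 s
controller.trim_clip(track_id, clip_id, 0.5, 2.5);   // keep source seconds 0.5..2.5
controller.extend_clip(track_id, clip_id, 6.0);      // 6 s on timeline -> loops 3x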
@@ -2071,9 +2136,9 @@ impl EngineController {
         let _ = self.command_tx.push(Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration));
     }
 
-    /// Add a pre-loaded MIDI clip to a track
-    pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) {
-        let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip));
+    /// Add a pre-loaded MIDI clip to a track at the given timeline position
+    pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) {
+        let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip, start_time));
     }
 
     /// Update all notes in a MIDI clip
@@ -1,4 +1,5 @@
 use super::buffer_pool::BufferPool;
+use super::midi_pool::MidiClipPool;
 use super::pool::AudioPool;
 use super::project::Project;
 use std::path::Path;
@@ -61,11 +62,12 @@ impl Default for ExportSettings {
 pub fn export_audio<P: AsRef<Path>>(
     project: &mut Project,
     pool: &AudioPool,
+    midi_pool: &MidiClipPool,
     settings: &ExportSettings,
     output_path: P,
 ) -> Result<(), String> {
     // Render the project to memory
-    let samples = render_to_memory(project, pool, settings)?;
+    let samples = render_to_memory(project, pool, midi_pool, settings)?;
 
     // Write to file based on format
     match settings.format {
@@ -80,6 +82,7 @@ pub fn export_audio<P: AsRef<Path>>(
 fn render_to_memory(
     project: &mut Project,
     pool: &AudioPool,
+    midi_pool: &MidiClipPool,
     settings: &ExportSettings,
 ) -> Result<Vec<f32>, String> {
     // Calculate total number of frames
@@ -113,6 +116,7 @@ fn render_to_memory(
     project.render(
         &mut render_buffer,
         pool,
+        midi_pool,
         &mut buffer_pool,
         playhead,
         settings.sample_rate,
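Callers of export_audio now pass the MIDI pool explicitly; a hedged usage sketch (the output path is invented, and the pool reference is assumed to be obtained without conflicting with the &mut Project borrow, e.g. via the pointer pattern shown earlier in the engine):

// Sketch only: exporting with the new five-argument signature.
let settings = ExportSettings::default();
if let Err(e) = export_audio(&mut project, &audio_pool, midi_pool_ref, &settings, "mixdown.wav") {
    eprintln!("export failed: {}", e);
}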
@@ -63,73 +63,216 @@ impl MidiEvent {
     }
 }
 
-/// MIDI clip ID type
+/// MIDI clip ID type (for clips stored in the pool)
 pub type MidiClipId = u32;
 
-/// MIDI clip containing a sequence of MIDI events
+/// MIDI clip instance ID type (for instances placed on tracks)
+pub type MidiClipInstanceId = u32;
+
+/// MIDI clip content - stores the actual MIDI events
+///
+/// This represents the content data stored in the MidiClipPool.
+/// Events have timestamps relative to the start of the clip (0.0 = clip beginning).
 #[derive(Debug, Clone)]
 pub struct MidiClip {
     pub id: MidiClipId,
     pub events: Vec<MidiEvent>,
-    pub start_time: f64, // Position on timeline in seconds
-    pub duration: f64,   // Clip duration in seconds
-    pub loop_enabled: bool,
+    pub duration: f64, // Total content duration in seconds
+    pub name: String,
 }
 
 impl MidiClip {
-    /// Create a new MIDI clip
-    pub fn new(id: MidiClipId, start_time: f64, duration: f64) -> Self {
+    /// Create a new MIDI clip with content
+    pub fn new(id: MidiClipId, events: Vec<MidiEvent>, duration: f64, name: String) -> Self {
+        let mut clip = Self {
+            id,
+            events,
+            duration,
+            name,
+        };
+        // Sort events by timestamp
+        clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
+        clip
+    }
+
+    /// Create an empty MIDI clip
+    pub fn empty(id: MidiClipId, duration: f64, name: String) -> Self {
         Self {
             id,
             events: Vec::new(),
-            start_time,
             duration,
-            loop_enabled: false,
+            name,
         }
     }
 
     /// Add a MIDI event to the clip
     pub fn add_event(&mut self, event: MidiEvent) {
         self.events.push(event);
-        // Keep events sorted by timestamp (using partial_cmp for f64)
+        // Keep events sorted by timestamp
         self.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
     }
 
-    /// Get the end time of the clip
-    pub fn end_time(&self) -> f64 {
-        self.start_time + self.duration
+    /// Get events within a time range (relative to clip start)
+    /// This is used by MidiClipInstance to fetch events for a given portion
+    pub fn get_events_in_range(&self, start: f64, end: f64) -> Vec<MidiEvent> {
+        self.events
+            .iter()
+            .filter(|e| e.timestamp >= start && e.timestamp < end)
+            .copied()
+            .collect()
+    }
+}
+
+/// MIDI clip instance - a reference to MidiClip content with timeline positioning
+///
+/// ## Timing Model
+/// - `internal_start` / `internal_end`: Define the region of the source clip to play (trimming)
+/// - `external_start` / `external_duration`: Define where the instance appears on the timeline and for how long
+///
+/// ## Looping
+/// If `external_duration` is greater than `internal_end - internal_start`,
+/// the instance will seamlessly loop back to `internal_start` when it reaches `internal_end`.
+#[derive(Debug, Clone)]
+pub struct MidiClipInstance {
+    pub id: MidiClipInstanceId,
+    pub clip_id: MidiClipId, // Reference to MidiClip in pool
+
+    /// Start position within the clip content (seconds)
+    pub internal_start: f64,
+    /// End position within the clip content (seconds)
+    pub internal_end: f64,
+
+    /// Start position on the timeline (seconds)
+    pub external_start: f64,
+    /// Duration on the timeline (seconds) - can be longer than the internal duration for looping
+    pub external_duration: f64,
+}
+
+impl MidiClipInstance {
+    /// Create a new MIDI clip instance
+    pub fn new(
+        id: MidiClipInstanceId,
+        clip_id: MidiClipId,
+        internal_start: f64,
+        internal_end: f64,
+        external_start: f64,
+        external_duration: f64,
+    ) -> Self {
+        Self {
+            id,
+            clip_id,
+            internal_start,
+            internal_end,
+            external_start,
+            external_duration,
+        }
     }
 
-    /// Get events that should be triggered in a given time range
+    /// Create an instance that uses the full clip content (no trimming, no looping)
+    pub fn from_full_clip(
+        id: MidiClipInstanceId,
+        clip_id: MidiClipId,
+        clip_duration: f64,
+        external_start: f64,
+    ) -> Self {
+        Self {
+            id,
+            clip_id,
+            internal_start: 0.0,
+            internal_end: clip_duration,
+            external_start,
+            external_duration: clip_duration,
+        }
+    }
+
+    /// Get the internal (content) duration
+    pub fn internal_duration(&self) -> f64 {
+        self.internal_end - self.internal_start
+    }
+
+    /// Get the end time on the timeline
+    pub fn external_end(&self) -> f64 {
+        self.external_start + self.external_duration
+    }
+
+    /// Check if this instance loops
+    pub fn is_looping(&self) -> bool {
+        self.external_duration > self.internal_duration()
+    }
+
+    /// Get the end time on the timeline (for backwards compatibility)
+    pub fn end_time(&self) -> f64 {
+        self.external_end()
+    }
+
+    /// Get the start time on the timeline (for backwards compatibility)
+    pub fn start_time(&self) -> f64 {
+        self.external_start
+    }
+
+    /// Check if this instance overlaps with a time range
+    pub fn overlaps_range(&self, range_start: f64, range_end: f64) -> bool {
+        self.external_start < range_end && self.external_end() > range_start
+    }
+
+    /// Get events that should be triggered in a given timeline range
     ///
-    /// Returns events along with their absolute timestamps in samples
+    /// This handles:
+    /// - Trimming (internal_start/internal_end)
+    /// - Looping (when external duration > internal duration)
+    /// - Time mapping from timeline to clip content
+    ///
+    /// Returns events with timestamps adjusted to timeline time (not clip-relative)
     pub fn get_events_in_range(
         &self,
+        clip: &MidiClip,
         range_start_seconds: f64,
         range_end_seconds: f64,
-        _sample_rate: u32,
     ) -> Vec<MidiEvent> {
         let mut result = Vec::new();
 
-        // Check if clip overlaps with the range
-        if range_start_seconds >= self.end_time() || range_end_seconds <= self.start_time {
+        // Check if instance overlaps with the range
+        if !self.overlaps_range(range_start_seconds, range_end_seconds) {
             return result;
         }
 
-        // Calculate the intersection
-        let play_start = range_start_seconds.max(self.start_time);
-        let play_end = range_end_seconds.min(self.end_time());
+        let internal_duration = self.internal_duration();
+        if internal_duration <= 0.0 {
+            return result;
+        }
 
-        // Position within the clip
-        let clip_position_seconds = play_start - self.start_time;
-        let clip_end_seconds = play_end - self.start_time;
+        // Calculate how many complete loops fit in the external duration
+        let num_loops = if self.external_duration > internal_duration {
+            (self.external_duration / internal_duration).ceil() as usize
+        } else {
+            1
+        };
 
-        // Find events in this range
-        // Note: event.timestamp is now in seconds relative to clip start
-        // Use half-open interval [start, end) to avoid triggering events twice
-        for event in &self.events {
-            if event.timestamp >= clip_position_seconds && event.timestamp < clip_end_seconds {
-                result.push(*event);
+        let external_end = self.external_end();
+
+        for loop_idx in 0..num_loops {
+            let loop_offset = loop_idx as f64 * internal_duration;
+
+            // Get events from the clip that fall within the internal range
+            for event in &clip.events {
+                // Skip events outside the trimmed region
+                if event.timestamp < self.internal_start || event.timestamp >= self.internal_end {
+                    continue;
+                }
+
+                // Convert to timeline time
+                let relative_content_time = event.timestamp - self.internal_start;
+                let timeline_time = self.external_start + loop_offset + relative_content_time;
+
+                // Check if within the current buffer range and instance bounds
+                if timeline_time >= range_start_seconds
+                    && timeline_time < range_end_seconds
+                    && timeline_time < external_end
+                {
+                    let mut adjusted_event = *event;
+                    adjusted_event.timestamp = timeline_time;
+                    result.push(adjusted_event);
+                }
             }
         }
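The loop unrolling is easiest to verify with concrete numbers; a sketch (invented note and times) of a 2 s clip looped to 4 s on the timeline:

// Sketch only: a 2 s MIDI clip looped to 4 s on the timeline starting at t = 1 s.
let note_on = MidiEvent::note_on(0.5, 0, 60, 100); // 0.5 s into the clip content
let clip = MidiClip::new(1, vec![note_on], 2.0, "Riff".to_string());

let instance = MidiClipInstance::new(1, clip.id, 0.0, 2.0, 1.0, 4.0);
let events = instance.get_events_in_range(&clip, 0.0, 10.0);

// The note fires once per loop pass, mapped to timeline time:
// pass 0 -> 1.0 + 0.5 = 1.5 s, pass 1 -> 1.0 + 2.0 + 0.5 = 3.5 s
assert_eq!(events.len(), 2);
assert_eq!(events[0].timestamp, 1.5);
assert_eq!(events[1].timestamp, 3.5);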
@@ -0,0 +1,101 @@
+use std::collections::HashMap;
+use super::midi::{MidiClip, MidiClipId, MidiEvent};
+
+/// Pool for storing MIDI clip content
+/// Similar to AudioClipPool but for MIDI data
+pub struct MidiClipPool {
+    clips: HashMap<MidiClipId, MidiClip>,
+    next_id: MidiClipId,
+}
+
+impl MidiClipPool {
+    /// Create a new empty MIDI clip pool
+    pub fn new() -> Self {
+        Self {
+            clips: HashMap::new(),
+            next_id: 1, // Start at 1 so 0 can indicate "no clip"
+        }
+    }
+
+    /// Add a new clip to the pool with the given events and duration
+    /// Returns the ID of the newly created clip
+    pub fn add_clip(&mut self, events: Vec<MidiEvent>, duration: f64, name: String) -> MidiClipId {
+        let id = self.next_id;
+        self.next_id += 1;
+
+        let clip = MidiClip::new(id, events, duration, name);
+        self.clips.insert(id, clip);
+        id
+    }
+
+    /// Add an existing clip to the pool (used when loading projects)
+    /// The clip's ID is preserved
+    pub fn add_existing_clip(&mut self, clip: MidiClip) {
+        // Update next_id to avoid collisions
+        if clip.id >= self.next_id {
+            self.next_id = clip.id + 1;
+        }
+        self.clips.insert(clip.id, clip);
+    }
+
+    /// Get a clip by ID
+    pub fn get_clip(&self, id: MidiClipId) -> Option<&MidiClip> {
+        self.clips.get(&id)
+    }
+
+    /// Get a mutable clip by ID
+    pub fn get_clip_mut(&mut self, id: MidiClipId) -> Option<&mut MidiClip> {
+        self.clips.get_mut(&id)
+    }
+
+    /// Remove a clip from the pool
+    pub fn remove_clip(&mut self, id: MidiClipId) -> Option<MidiClip> {
+        self.clips.remove(&id)
+    }
+
+    /// Duplicate a clip, returning the new clip's ID
+    pub fn duplicate_clip(&mut self, id: MidiClipId) -> Option<MidiClipId> {
+        let clip = self.clips.get(&id)?;
+        let new_id = self.next_id;
+        self.next_id += 1;
+
+        let mut new_clip = clip.clone();
+        new_clip.id = new_id;
+        new_clip.name = format!("{} (copy)", clip.name);
+
+        self.clips.insert(new_id, new_clip);
+        Some(new_id)
+    }
+
+    /// Get all clip IDs in the pool
+    pub fn clip_ids(&self) -> Vec<MidiClipId> {
+        self.clips.keys().copied().collect()
+    }
+
+    /// Get the number of clips in the pool
+    pub fn len(&self) -> usize {
+        self.clips.len()
+    }
+
+    /// Check if the pool is empty
+    pub fn is_empty(&self) -> bool {
+        self.clips.is_empty()
+    }
+
+    /// Clear all clips from the pool
+    pub fn clear(&mut self) {
+        self.clips.clear();
+        self.next_id = 1;
+    }
+
+    /// Get an iterator over all clips
+    pub fn iter(&self) -> impl Iterator<Item = (&MidiClipId, &MidiClip)> {
+        self.clips.iter()
+    }
+}
+
+impl Default for MidiClipPool {
+    fn default() -> Self {
+        Self::new()
+    }
+}
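A short usage sketch of the new pool (invented values): the point of the pool/instance split is that one clip's content can back many placements, so editing it once updates every instance:

// Sketch only: content lives once in the pool; instances refer to it by ID.
let mut pool = MidiClipPool::new();
let clip_id = pool.add_clip(Vec::new(), 4.0, "Bassline".to_string());

// Two instances of the same content at different timeline positions.
let a = MidiClipInstance::from_full_clip(1, clip_id, 4.0, 0.0);
let b = MidiClipInstance::from_full_clip(2, clip_id, 4.0, 16.0);

// Editing the pooled clip is reflected in every instance that references it.
if let Some(clip) = pool.get_clip_mut(clip_id) {
    clip.add_event(MidiEvent::note_on(0.0, 0, 48, 110));
}
assert_eq!(pool.len(), 1);
assert!(a.clip_id == b.clip_id);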
@@ -6,6 +6,7 @@ pub mod engine;
 pub mod export;
 pub mod metronome;
 pub mod midi;
+pub mod midi_pool;
 pub mod node_graph;
 pub mod pool;
 pub mod project;
@@ -15,12 +16,13 @@ pub mod track;
 
 pub use automation::{AutomationLane, AutomationLaneId, AutomationPoint, CurveType, ParameterId};
 pub use buffer_pool::BufferPool;
-pub use clip::{Clip, ClipId};
+pub use clip::{AudioClipInstance, AudioClipInstanceId, Clip, ClipId};
 pub use engine::{Engine, EngineController};
 pub use export::{export_audio, ExportFormat, ExportSettings};
 pub use metronome::Metronome;
-pub use midi::{MidiClip, MidiClipId, MidiEvent};
-pub use pool::{AudioFile as PoolAudioFile, AudioPool};
+pub use midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
+pub use midi_pool::MidiClipPool;
+pub use pool::{AudioClipPool, AudioFile as PoolAudioFile, AudioPool};
 pub use project::Project;
 pub use recording::RecordingState;
 pub use sample_loader::{load_audio_file, SampleData};
@@ -119,13 +119,16 @@ impl AudioFile {
     }
 }
 
-/// Pool of shared audio files
-pub struct AudioPool {
+/// Pool of shared audio files (audio clip content)
+pub struct AudioClipPool {
     files: Vec<AudioFile>,
 }
 
-impl AudioPool {
-    /// Create a new empty audio pool
+/// Type alias for backwards compatibility
+pub type AudioPool = AudioClipPool;
+
+impl AudioClipPool {
+    /// Create a new empty audio clip pool
     pub fn new() -> Self {
         Self {
             files: Vec::new(),
@@ -301,7 +304,7 @@ impl AudioPool {
     }
 }
 
-impl Default for AudioPool {
+impl Default for AudioClipPool {
     fn default() -> Self {
         Self::new()
     }
@@ -335,8 +338,8 @@ pub struct AudioPoolEntry {
     pub embedded_data: Option<EmbeddedAudioData>,
 }
 
-impl AudioPool {
-    /// Serialize the audio pool for project saving
+impl AudioClipPool {
+    /// Serialize the audio clip pool for project saving
     ///
     /// Files smaller than 10MB are embedded as base64.
     /// Larger files are stored as relative paths to the project file.
@ -1,19 +1,27 @@
|
||||||
use super::buffer_pool::BufferPool;
|
use super::buffer_pool::BufferPool;
|
||||||
use super::clip::Clip;
|
use super::clip::Clip;
|
||||||
use super::midi::{MidiClip, MidiEvent};
|
use super::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
|
||||||
use super::pool::AudioPool;
|
use super::midi_pool::MidiClipPool;
|
||||||
|
use super::pool::AudioClipPool;
|
||||||
use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode};
|
use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
/// Project manages the hierarchical track structure
|
/// Project manages the hierarchical track structure and clip pools
|
||||||
///
|
///
|
||||||
/// Tracks are stored in a flat HashMap but can be organized into groups,
|
/// Tracks are stored in a flat HashMap but can be organized into groups,
|
||||||
/// forming a tree structure. Groups render their children recursively.
|
/// forming a tree structure. Groups render their children recursively.
|
||||||
|
///
|
||||||
|
/// Clip content is stored in pools (MidiClipPool), while tracks store
|
||||||
|
/// clip instances that reference the pool content.
|
||||||
pub struct Project {
|
pub struct Project {
|
||||||
tracks: HashMap<TrackId, TrackNode>,
|
tracks: HashMap<TrackId, TrackNode>,
|
||||||
next_track_id: TrackId,
|
next_track_id: TrackId,
|
||||||
root_tracks: Vec<TrackId>, // Top-level tracks (not in any group)
|
root_tracks: Vec<TrackId>, // Top-level tracks (not in any group)
|
||||||
sample_rate: u32, // System sample rate
|
sample_rate: u32, // System sample rate
|
||||||
|
/// Pool for MIDI clip content
|
||||||
|
pub midi_clip_pool: MidiClipPool,
|
||||||
|
/// Next MIDI clip instance ID (for generating unique IDs)
|
||||||
|
next_midi_clip_instance_id: MidiClipInstanceId,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Project {
|
impl Project {
|
||||||
|
|
@ -24,6 +32,8 @@ impl Project {
|
||||||
next_track_id: 0,
|
next_track_id: 0,
|
||||||
root_tracks: Vec::new(),
|
root_tracks: Vec::new(),
|
||||||
sample_rate,
|
sample_rate,
|
||||||
|
midi_clip_pool: MidiClipPool::new(),
|
||||||
|
next_midi_clip_instance_id: 1,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -241,21 +251,81 @@ impl Project {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add a MIDI clip to a MIDI track
|
/// Add a MIDI clip instance to a MIDI track
|
||||||
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> {
|
/// The clip content should already exist in the midi_clip_pool
|
||||||
|
pub fn add_midi_clip_instance(&mut self, track_id: TrackId, instance: MidiClipInstance) -> Result<(), &'static str> {
|
||||||
if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
|
if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
|
||||||
track.add_clip(clip);
|
track.add_clip_instance(instance);
|
||||||
Ok(())
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
Err("Track not found or is not a MIDI track")
|
Err("Track not found or is not a MIDI track")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Create a new MIDI clip in the pool and add an instance to a track
|
||||||
|
/// Returns (clip_id, instance_id) on success
|
||||||
|
pub fn create_midi_clip_with_instance(
|
||||||
|
&mut self,
|
||||||
|
track_id: TrackId,
|
||||||
|
events: Vec<MidiEvent>,
|
||||||
|
duration: f64,
|
||||||
|
name: String,
|
||||||
|
external_start: f64,
|
||||||
|
) -> Result<(MidiClipId, MidiClipInstanceId), &'static str> {
|
||||||
|
// Verify track exists and is a MIDI track
|
||||||
|
if !matches!(self.tracks.get(&track_id), Some(TrackNode::Midi(_))) {
|
||||||
|
return Err("Track not found or is not a MIDI track");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create clip in pool
|
||||||
|
let clip_id = self.midi_clip_pool.add_clip(events, duration, name);
|
||||||
|
|
||||||
|
// Create instance
|
||||||
|
let instance_id = self.next_midi_clip_instance_id;
|
||||||
|
self.next_midi_clip_instance_id += 1;
|
||||||
|
|
||||||
|
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, external_start);
|
||||||
|
|
||||||
|
// Add instance to track
|
||||||
|
if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
|
||||||
|
track.add_clip_instance(instance);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((clip_id, instance_id))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a new unique MIDI clip instance ID
|
||||||
|
pub fn next_midi_clip_instance_id(&mut self) -> MidiClipInstanceId {
|
||||||
|
let id = self.next_midi_clip_instance_id;
|
||||||
|
self.next_midi_clip_instance_id += 1;
|
||||||
|
id
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Legacy method for backwards compatibility - creates clip and instance from old MidiClip format
|
||||||
|
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> {
|
||||||
|
self.add_midi_clip_at(track_id, clip, 0.0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a MIDI clip to the pool and create an instance at the given timeline position
|
||||||
|
pub fn add_midi_clip_at(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) -> Result<(), &'static str> {
|
||||||
|
// Add the clip to the pool (it already has events and duration)
|
||||||
|
let duration = clip.duration;
|
||||||
|
let clip_id = clip.id;
|
||||||
|
self.midi_clip_pool.add_existing_clip(clip);
|
||||||
|
|
||||||
|
// Create an instance that uses the full clip at the given position
|
||||||
|
let instance_id = self.next_midi_clip_instance_id();
|
||||||
|
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
|
||||||
|
|
||||||
|
self.add_midi_clip_instance(track_id, instance)
|
||||||
|
}
|
||||||
|
|
||||||
/// Render all root tracks into the output buffer
|
/// Render all root tracks into the output buffer
|
||||||
pub fn render(
|
pub fn render(
|
||||||
&mut self,
|
&mut self,
|
||||||
output: &mut [f32],
|
output: &mut [f32],
|
||||||
pool: &AudioPool,
|
audio_pool: &AudioClipPool,
|
||||||
|
midi_pool: &MidiClipPool,
|
||||||
buffer_pool: &mut BufferPool,
|
buffer_pool: &mut BufferPool,
|
||||||
playhead_seconds: f64,
|
playhead_seconds: f64,
|
||||||
sample_rate: u32,
|
sample_rate: u32,
|
||||||
|
|
@ -278,7 +348,8 @@ impl Project {
|
||||||
self.render_track(
|
self.render_track(
|
||||||
track_id,
|
track_id,
|
||||||
output,
|
output,
|
||||||
pool,
|
audio_pool,
|
||||||
|
midi_pool,
|
||||||
buffer_pool,
|
buffer_pool,
|
||||||
ctx,
|
ctx,
|
||||||
any_solo,
|
any_solo,
|
||||||
|
|
@ -292,7 +363,8 @@ impl Project {
|
||||||
&mut self,
|
&mut self,
|
||||||
track_id: TrackId,
|
track_id: TrackId,
|
||||||
output: &mut [f32],
|
output: &mut [f32],
|
||||||
pool: &AudioPool,
|
audio_pool: &AudioClipPool,
|
||||||
|
midi_pool: &MidiClipPool,
|
||||||
buffer_pool: &mut BufferPool,
|
buffer_pool: &mut BufferPool,
|
||||||
ctx: RenderContext,
|
ctx: RenderContext,
|
||||||
any_solo: bool,
|
any_solo: bool,
|
||||||
|
|
@@ -336,11 +408,11 @@ impl Project {
         match self.tracks.get_mut(&track_id) {
             Some(TrackNode::Audio(track)) => {
                 // Render audio track directly into output
-                track.render(output, pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
+                track.render(output, audio_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
             }
             Some(TrackNode::Midi(track)) => {
                 // Render MIDI track directly into output
-                track.render(output, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
+                track.render(output, midi_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
             }
             Some(TrackNode::Group(group)) => {
                 // Get children IDs, check if this group is soloed, and transform context
@@ -360,7 +432,8 @@ impl Project {
                     self.render_track(
                         child_id,
                         &mut group_buffer,
-                        pool,
+                        audio_pool,
+                        midi_pool,
                         buffer_pool,
                         child_ctx,
                         any_solo,
@@ -1,9 +1,10 @@
 use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
-use super::clip::Clip;
+use super::clip::AudioClipInstance;
-use super::midi::{MidiClip, MidiEvent};
+use super::midi::{MidiClipInstance, MidiEvent};
+use super::midi_pool::MidiClipPool;
 use super::node_graph::AudioGraph;
 use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
-use super::pool::AudioPool;
+use super::pool::AudioClipPool;
 use std::collections::HashMap;

 /// Track ID type
@@ -285,11 +286,12 @@ impl Metatrack {
         }
     }
 }

-/// MIDI track with MIDI clips and a node-based instrument
+/// MIDI track with MIDI clip instances and a node-based instrument
 pub struct MidiTrack {
     pub id: TrackId,
     pub name: String,
-    pub clips: Vec<MidiClip>,
+    /// Clip instances placed on this track (reference clips in the MidiClipPool)
+    pub clip_instances: Vec<MidiClipInstance>,
     pub instrument_graph: AudioGraph,
     pub volume: f32,
     pub muted: bool,
@@ -310,7 +312,7 @@ impl MidiTrack {
         Self {
             id,
             name,
-            clips: Vec::new(),
+            clip_instances: Vec::new(),
             instrument_graph: AudioGraph::new(sample_rate, default_buffer_size),
             volume: 1.0,
             muted: false,
@@ -346,9 +348,9 @@ impl MidiTrack {
         self.automation_lanes.remove(&lane_id).is_some()
     }

-    /// Add a MIDI clip to this track
-    pub fn add_clip(&mut self, clip: MidiClip) {
-        self.clips.push(clip);
+    /// Add a MIDI clip instance to this track
+    pub fn add_clip_instance(&mut self, instance: MidiClipInstance) {
+        self.clip_instances.push(instance);
     }

     /// Set track volume
@@ -420,6 +422,7 @@ impl MidiTrack {
     pub fn render(
         &mut self,
         output: &mut [f32],
+        midi_pool: &MidiClipPool,
         playhead_seconds: f64,
         sample_rate: u32,
         channels: u32,
@@ -427,18 +430,19 @@ impl MidiTrack {
         let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
         let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;

-        // Collect MIDI events from all clips that overlap with current time range
+        // Collect MIDI events from all clip instances that overlap with current time range
         let mut midi_events = Vec::new();
-        for clip in &self.clips {
-            let events = clip.get_events_in_range(
-                playhead_seconds,
-                buffer_end_seconds,
-                sample_rate,
-            );
-
-            // Events now have timestamps in seconds relative to clip start
-            midi_events.extend(events);
-        }
+        for instance in &self.clip_instances {
+            // Get the clip content from the pool
+            if let Some(clip) = midi_pool.get_clip(instance.clip_id) {
+                let events = instance.get_events_in_range(
+                    clip,
+                    playhead_seconds,
+                    buffer_end_seconds,
+                );
+                midi_events.extend(events);
+            }
+        }

         // Add live MIDI events (from virtual keyboard or MIDI controllers)
         // This allows real-time input to be heard during playback/recording
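Because instances no longer own their events, `get_events_in_range` has to combine the pooled content with the instance's placement. A standalone, hedged sketch of the window mapping such a method needs (names are illustrative; the real method on `MidiClipInstance` also applies the internal trim and loop offsets):

    /// Clamp a render window (timeline seconds) to an instance's extent and
    /// express it in clip-local time; None means no overlap this buffer.
    fn clip_local_window(buf_start: f64, buf_end: f64, external_start: f64, external_duration: f64) -> Option<(f64, f64)> {
        let start = (buf_start - external_start).max(0.0);
        let end = (buf_end - external_start).min(external_duration);
        if start < end { Some((start, end)) } else { None }
    }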
@@ -480,11 +484,12 @@ impl MidiTrack {
         }
     }
 }

-/// Audio track with clips
+/// Audio track with audio clip instances
 pub struct AudioTrack {
     pub id: TrackId,
     pub name: String,
-    pub clips: Vec<Clip>,
+    /// Audio clip instances (reference content in the AudioClipPool)
+    pub clips: Vec<AudioClipInstance>,
     pub volume: f32,
     pub muted: bool,
     pub solo: bool,
@@ -560,8 +565,8 @@ impl AudioTrack {
         self.automation_lanes.remove(&lane_id).is_some()
     }

-    /// Add a clip to this track
-    pub fn add_clip(&mut self, clip: Clip) {
+    /// Add an audio clip instance to this track
+    pub fn add_clip(&mut self, clip: AudioClipInstance) {
         self.clips.push(clip);
     }
@@ -590,7 +595,7 @@ impl AudioTrack {
     pub fn render(
         &mut self,
         output: &mut [f32],
-        pool: &AudioPool,
+        pool: &AudioClipPool,
         playhead_seconds: f64,
         sample_rate: u32,
         channels: u32,
@@ -602,10 +607,10 @@ impl AudioTrack {
         let mut clip_buffer = vec![0.0f32; output.len()];
         let mut rendered = 0;

-        // Render all active clips into the temporary buffer
+        // Render all active clip instances into the temporary buffer
         for clip in &self.clips {
             // Check if clip overlaps with current buffer time range
-            if clip.start_time < buffer_end_seconds && clip.end_time() > playhead_seconds {
+            if clip.external_start < buffer_end_seconds && clip.external_end() > playhead_seconds {
                 rendered += self.render_clip(
                     clip,
                     &mut clip_buffer,
@@ -667,12 +672,13 @@ impl AudioTrack {
         volume
     }

-    /// Render a single clip into the output buffer
+    /// Render a single audio clip instance into the output buffer
+    /// Handles looping when external_duration > internal_duration
     fn render_clip(
         &self,
-        clip: &Clip,
+        clip: &AudioClipInstance,
         output: &mut [f32],
-        pool: &AudioPool,
+        pool: &AudioClipPool,
         playhead_seconds: f64,
         sample_rate: u32,
         channels: u32,
@@ -680,46 +686,94 @@ impl AudioTrack {
         let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
         let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;

-        // Determine the time range we need to render (intersection of buffer and clip)
-        let render_start_seconds = playhead_seconds.max(clip.start_time);
-        let render_end_seconds = buffer_end_seconds.min(clip.end_time());
+        // Determine the time range we need to render (intersection of buffer and clip external bounds)
+        let render_start_seconds = playhead_seconds.max(clip.external_start);
+        let render_end_seconds = buffer_end_seconds.min(clip.external_end());

         // If no overlap, return early
         if render_start_seconds >= render_end_seconds {
             return 0;
         }

-        // Calculate offset into the output buffer (in interleaved samples)
-        let output_offset_seconds = render_start_seconds - playhead_seconds;
-        let output_offset_samples = (output_offset_seconds * sample_rate as f64 * channels as f64) as usize;
-
-        // Calculate position within the clip's audio file (in seconds)
-        let clip_position_seconds = render_start_seconds - clip.start_time + clip.offset;
-
-        // Calculate how many samples to render in the output
-        let render_duration_seconds = render_end_seconds - render_start_seconds;
-        let samples_to_render = (render_duration_seconds * sample_rate as f64 * channels as f64) as usize;
-        let samples_to_render = samples_to_render.min(output.len() - output_offset_samples);
-
-        // Get the slice of output buffer to write to
-        if output_offset_samples + samples_to_render > output.len() {
-            return 0;
-        }
-
-        let output_slice = &mut output[output_offset_samples..output_offset_samples + samples_to_render];
+        let internal_duration = clip.internal_duration();
+        if internal_duration <= 0.0 {
+            return 0;
+        }

         // Calculate combined gain
         let combined_gain = clip.gain * self.volume;

-        // Render from pool with sample rate conversion
-        // Pass the time position in seconds, let the pool handle sample rate conversion
-        pool.render_from_file(
-            clip.audio_pool_index,
-            output_slice,
-            clip_position_seconds,
-            combined_gain,
-            sample_rate,
-            channels,
-        )
+        let mut total_rendered = 0;
+
+        // Process the render range sample by sample (or in chunks for efficiency)
+        // For looping clips, we need to handle wrap-around at the loop boundary
+        let samples_per_second = sample_rate as f64 * channels as f64;
+
+        // For now, render in a simpler way - iterate through the timeline range
+        // and use get_content_position for each sample position
+        let output_start_offset = ((render_start_seconds - playhead_seconds) * samples_per_second) as usize;
+        let output_end_offset = ((render_end_seconds - playhead_seconds) * samples_per_second) as usize;
+
+        if output_end_offset > output.len() || output_start_offset > output.len() {
+            return 0;
+        }
+
+        // If not looping, we can render in one chunk (more efficient)
+        if !clip.is_looping() {
+            // Simple case: no looping
+            let content_start = clip.get_content_position(render_start_seconds).unwrap_or(clip.internal_start);
+            let output_len = output.len();
+            let output_slice = &mut output[output_start_offset..output_end_offset.min(output_len)];
+
+            total_rendered = pool.render_from_file(
+                clip.audio_pool_index,
+                output_slice,
+                content_start,
+                combined_gain,
+                sample_rate,
+                channels,
+            );
+        } else {
+            // Looping case: need to handle wrap-around at loop boundaries
+            // Render in segments, one per loop iteration
+            let mut timeline_pos = render_start_seconds;
+            let mut output_offset = output_start_offset;
+
+            while timeline_pos < render_end_seconds && output_offset < output.len() {
+                // Calculate position within the loop
+                let relative_pos = timeline_pos - clip.external_start;
+                let loop_offset = relative_pos % internal_duration;
+                let content_pos = clip.internal_start + loop_offset;
+
+                // Calculate how much we can render before hitting the loop boundary
+                let time_to_loop_end = internal_duration - loop_offset;
+                let time_to_render_end = render_end_seconds - timeline_pos;
+                let chunk_duration = time_to_loop_end.min(time_to_render_end);
+
+                let chunk_samples = (chunk_duration * samples_per_second) as usize;
+                let chunk_samples = chunk_samples.min(output.len() - output_offset);
+
+                if chunk_samples == 0 {
+                    break;
+                }
+
+                let output_slice = &mut output[output_offset..output_offset + chunk_samples];
+
+                let rendered = pool.render_from_file(
+                    clip.audio_pool_index,
+                    output_slice,
+                    content_pos,
+                    combined_gain,
+                    sample_rate,
+                    channels,
+                );
+
+                total_rendered += rendered;
+                output_offset += chunk_samples;
+                timeline_pos += chunk_duration;
+            }
+        }
+
+        total_rendered
     }
 }
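The chunked loop above comes down to one piece of modular arithmetic: a timeline position maps to a source position by wrapping within the trimmed region. A self-contained sketch (illustrative, not the crate's API):

    /// Map a timeline position to a source-content position under the looping rule.
    fn content_position(timeline_pos: f64, external_start: f64, internal_start: f64, internal_duration: f64) -> f64 {
        let relative = timeline_pos - external_start;
        internal_start + relative % internal_duration
    }

    fn main() {
        // A 2 s region starting 1 s into the source, placed at 10 s on the
        // timeline: a playhead at 14.5 s is 4.5 s into the clip, which wraps
        // to 0.5 s into the region (4.5 % 2.0), i.e. 1.5 s in the source.
        assert!((content_position(14.5, 10.0, 1.0, 2.0) - 1.5).abs() < 1e-9);
    }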
@@ -28,10 +28,14 @@ pub enum Command {
     SetTrackSolo(TrackId, bool),

     // Clip management commands
-    /// Move a clip to a new timeline position
+    /// Move a clip to a new timeline position (track_id, clip_id, new_external_start)
     MoveClip(TrackId, ClipId, f64),
-    /// Trim a clip (track_id, clip_id, new_start_time, new_duration, new_offset)
-    TrimClip(TrackId, ClipId, f64, f64, f64),
+    /// Trim a clip's internal boundaries (track_id, clip_id, new_internal_start, new_internal_end)
+    /// This changes which portion of the source content is used
+    TrimClip(TrackId, ClipId, f64, f64),
+    /// Extend/shrink a clip's external duration (track_id, clip_id, new_external_duration)
+    /// If duration > internal duration, the clip will loop
+    ExtendClip(TrackId, ClipId, f64),

     // Metatrack management commands
     /// Create a new metatrack with a name
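Since trimming and extending are now independent commands, how many times a clip loops is derived from the two values rather than stored. A standalone sketch of that arithmetic (illustrative only):

    fn loop_iterations(internal_start: f64, internal_end: f64, external_duration: f64) -> f64 {
        let internal = internal_end - internal_start;
        if internal <= 0.0 { return 0.0; }
        external_duration / internal
    }

    fn main() {
        // TrimClip(track, clip, 2.0, 3.5) keeps a 1.5 s slice of the source;
        // ExtendClip(track, clip, 6.0) stretches it to 6 s on the timeline,
        // so the slice plays back 4 times.
        assert_eq!(loop_iterations(2.0, 3.5, 6.0), 4.0);
    }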
@@ -67,8 +71,8 @@ pub enum Command {
     CreateMidiClip(TrackId, f64, f64),
     /// Add a MIDI note to a clip (track_id, clip_id, time_offset, note, velocity, duration)
     AddMidiNote(TrackId, MidiClipId, f64, u8, u8, f64),
-    /// Add a pre-loaded MIDI clip to a track
-    AddLoadedMidiClip(TrackId, MidiClip),
+    /// Add a pre-loaded MIDI clip to a track (track_id, clip, start_time)
+    AddLoadedMidiClip(TrackId, MidiClip, f64),
     /// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>)
     /// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes
     UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>),
@@ -157,9 +157,8 @@ pub fn load_midi_file<P: AsRef<Path>>(
         (final_delta_ticks as f64 / ticks_per_beat) * (microseconds_per_beat / 1_000_000.0);
     let duration_seconds = accumulated_time + final_delta_time;

-    // Create the MIDI clip
-    let mut clip = MidiClip::new(clip_id, 0.0, duration_seconds);
-    clip.events = events;
+    // Create the MIDI clip (content only, positioning happens when creating instance)
+    let clip = MidiClip::new(clip_id, events, duration_seconds, "Imported MIDI".to_string());

     Ok(clip)
 }
@@ -847,8 +847,7 @@ fn execute_command(

             // Load the MIDI file
             match load_midi_file(file_path, app.next_clip_id, 48000) {
-                Ok(mut midi_clip) => {
-                    midi_clip.start_time = start_time;
+                Ok(midi_clip) => {
                     let clip_id = midi_clip.id;
                     let duration = midi_clip.duration;
                     let event_count = midi_clip.events.len();
@@ -882,8 +881,8 @@ fn execute_command(
                     app.add_clip(track_id, clip_id, start_time, duration, file_path.to_string(), notes);
                     app.next_clip_id += 1;

-                    // Send to audio engine
-                    controller.add_loaded_midi_clip(track_id, midi_clip);
+                    // Send to audio engine with the start_time (clip content is separate from timeline position)
+                    controller.add_loaded_midi_clip(track_id, midi_clip, start_time);

                     app.set_status(format!("Loaded {} ({} events, {:.2}s) to track {} at {:.2}s",
                         file_path, event_count, duration, track_id, start_time));
@@ -1,5 +1,6 @@
 use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
 use daw_backend::audio::pool::AudioPoolEntry;
+use ffmpeg_next::ffi::FF_LOSS_COLORQUANT;
 use std::sync::{Arc, Mutex};
 use std::collections::HashMap;
 use std::path::Path;
@@ -406,13 +407,28 @@ pub async fn audio_trim_clip(
     state: tauri::State<'_, Arc<Mutex<AudioState>>>,
     track_id: u32,
     clip_id: u32,
-    new_start_time: f64,
-    new_duration: f64,
-    new_offset: f64,
+    internal_start: f64,
+    internal_end: f64,
 ) -> Result<(), String> {
     let mut audio_state = state.lock().unwrap();
     if let Some(controller) = &mut audio_state.controller {
-        controller.trim_clip(track_id, clip_id, new_start_time, new_duration, new_offset);
+        controller.trim_clip(track_id, clip_id, internal_start, internal_end);
+        Ok(())
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
+#[tauri::command]
+pub async fn audio_extend_clip(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    clip_id: u32,
+    new_external_duration: f64,
+) -> Result<(), String> {
+    let mut audio_state = state.lock().unwrap();
+    if let Some(controller) = &mut audio_state.controller {
+        controller.extend_clip(track_id, clip_id, new_external_duration);
         Ok(())
     } else {
         Err("Audio not initialized".to_string())
@@ -601,11 +617,8 @@ pub async fn audio_load_midi_file(
     let sample_rate = audio_state.sample_rate;

     if let Some(controller) = &mut audio_state.controller {
-        // Load and parse the MIDI file
-        let mut clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;
-
-        // Set the start time
-        clip.start_time = start_time;
+        // Load and parse the MIDI file (clip content only, no positioning)
+        let clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;
         let duration = clip.duration;

         // Extract note data from MIDI events
@@ -631,8 +644,8 @@ pub async fn audio_load_midi_file(
             }
         }

-        // Add the loaded MIDI clip to the track
-        controller.add_loaded_midi_clip(track_id, clip);
+        // Add the loaded MIDI clip to the track at the specified start_time
+        controller.add_loaded_midi_clip(track_id, clip, start_time);

         Ok(MidiFileMetadata {
             duration,
@@ -256,6 +256,7 @@ pub fn run() {
             audio::audio_add_clip,
             audio::audio_move_clip,
             audio::audio_trim_clip,
+            audio::audio_extend_clip,
             audio::audio_start_recording,
             audio::audio_stop_recording,
             audio::audio_pause_recording,
src/main.js
@@ -1622,6 +1622,7 @@ async function toggleRecording() {
         name: 'Recording...',
         startTime: startTime,
         duration: clipDuration,
+        offset: 0,
         notes: [],
         loading: true
     });
@@ -1818,12 +1819,28 @@ async function _save(path) {
     // Serialize current layout structure (panes, splits, sizes)
     const serializedLayout = serializeLayout(rootPane);

+    // Serialize timeline state
+    let timelineState = null;
+    if (context.timelineWidget?.timelineState) {
+        const ts = context.timelineWidget.timelineState;
+        timelineState = {
+            timeFormat: ts.timeFormat,
+            framerate: ts.framerate,
+            bpm: ts.bpm,
+            timeSignature: ts.timeSignature,
+            pixelsPerSecond: ts.pixelsPerSecond,
+            viewportStartTime: ts.viewportStartTime,
+            snapToFrames: ts.snapToFrames,
+        };
+    }
+
     const fileData = {
         version: "2.0.0",
         width: config.fileWidth,
         height: config.fileHeight,
         fps: config.framerate,
         layoutState: serializedLayout, // Save current layout structure
+        timelineState: timelineState, // Save timeline settings
         actions: undoStack,
         json: root.toJSON(),
         // Audio pool at the end for human readability
@@ -2275,6 +2292,44 @@ async function _open(path, returnJson = false) {
         console.log('[JS] Skipping layout restoration');
     }

+    // Restore timeline state if saved
+    if (file.timelineState && context.timelineWidget?.timelineState) {
+        const ts = context.timelineWidget.timelineState;
+        const saved = file.timelineState;
+        console.log('[JS] Restoring timeline state:', saved);
+
+        if (saved.timeFormat) ts.timeFormat = saved.timeFormat;
+        if (saved.framerate) ts.framerate = saved.framerate;
+        if (saved.bpm) ts.bpm = saved.bpm;
+        if (saved.timeSignature) ts.timeSignature = saved.timeSignature;
+        if (saved.pixelsPerSecond) ts.pixelsPerSecond = saved.pixelsPerSecond;
+        if (saved.viewportStartTime !== undefined) ts.viewportStartTime = saved.viewportStartTime;
+        if (saved.snapToFrames !== undefined) ts.snapToFrames = saved.snapToFrames;
+
+        // Update metronome button visibility based on restored time format
+        if (context.metronomeGroup) {
+            context.metronomeGroup.style.display = ts.timeFormat === 'measures' ? '' : 'none';
+        }
+
+        // Update time display
+        if (context.updateTimeDisplay) {
+            context.updateTimeDisplay();
+        }
+
+        // Update snap checkbox if it exists
+        const snapCheckbox = document.getElementById('snap-checkbox');
+        if (snapCheckbox) {
+            snapCheckbox.checked = ts.snapToFrames;
+        }
+
+        // Trigger timeline redraw
+        if (context.timelineWidget.requestRedraw) {
+            context.timelineWidget.requestRedraw();
+        }
+
+        console.log('[JS] Timeline state restored successfully');
+    }
+
     // Restore audio tracks and clips to the Rust backend
     // The fromJSON method only creates JavaScript objects,
     // but doesn't initialize them in the audio engine
@@ -5074,6 +5129,35 @@ function timeline() {

     controls.push(timeDisplay);

+    // Snap checkbox
+    const snapGroup = document.createElement("div");
+    snapGroup.className = "playback-controls-group";
+    snapGroup.style.display = "flex";
+    snapGroup.style.alignItems = "center";
+    snapGroup.style.gap = "4px";
+
+    const snapCheckbox = document.createElement("input");
+    snapCheckbox.type = "checkbox";
+    snapCheckbox.id = "snap-checkbox";
+    snapCheckbox.checked = timelineWidget.timelineState.snapToFrames;
+    snapCheckbox.style.cursor = "pointer";
+    snapCheckbox.addEventListener("change", () => {
+        timelineWidget.timelineState.snapToFrames = snapCheckbox.checked;
+        console.log('Snapping', snapCheckbox.checked ? 'enabled' : 'disabled');
+    });
+
+    const snapLabel = document.createElement("label");
+    snapLabel.htmlFor = "snap-checkbox";
+    snapLabel.textContent = "Snap";
+    snapLabel.style.cursor = "pointer";
+    snapLabel.style.fontSize = "12px";
+    snapLabel.style.color = "var(--text-secondary)";
+
+    snapGroup.appendChild(snapCheckbox);
+    snapGroup.appendChild(snapLabel);
+
+    controls.push(snapGroup);
+
     return controls;
 };

@@ -1178,12 +1178,12 @@ class AudioTrack {
             name: clip.name,
             startTime: clip.startTime,
             duration: clip.duration,
+            offset: clip.offset || 0, // Default to 0 if not present
         };

         // Restore audio-specific fields
         if (clip.poolIndex !== undefined) {
             clipData.poolIndex = clip.poolIndex;
-            clipData.offset = clip.offset;
         }

         // Restore MIDI-specific fields
@@ -97,7 +97,7 @@ export let config = {
     currentLayout: "animation", // Current active layout key
     defaultLayout: "animation", // Default layout for new files
     showStartScreen: false, // Show layout picker on startup (disabled for now)
-    restoreLayoutFromFile: false, // Restore layout when opening files
+    restoreLayoutFromFile: true, // Restore layout when opening files
    customLayouts: [] // User-saved custom layouts
 };
@@ -24,7 +24,7 @@ class TimelineState {
        this.rulerHeight = 30 // Height of time ruler in pixels

        // Snapping (Phase 5)
-       this.snapToFrames = false // Whether to snap keyframes to frame boundaries
+       this.snapToFrames = true // Whether to snap keyframes to frame boundaries (default: on)
    }

    /**
src/widgets.js
@@ -582,6 +582,54 @@ class TimelineWindowV2 extends Widget {
        this.automationNameCache = new Map()
    }

+    /**
+     * Quantize a time value to the nearest beat/measure division based on zoom level.
+     * Only applies when in measures mode and snapping is enabled.
+     * @param {number} time - The time value to quantize (in seconds)
+     * @returns {number} - The quantized time value
+     */
+    quantizeTime(time) {
+        // Only quantize in measures mode with snapping enabled
+        if (this.timelineState.timeFormat !== 'measures' || !this.timelineState.snapToFrames) {
+            return time
+        }
+
+        const bpm = this.timelineState.bpm || 120
+        const beatsPerSecond = bpm / 60
+        const beatDuration = 1 / beatsPerSecond // Duration of one beat in seconds
+        const beatsPerMeasure = this.timelineState.timeSignature?.numerator || 4
+
+        // Calculate beat width in pixels
+        const beatWidth = beatDuration * this.timelineState.pixelsPerSecond
+
+        // Base threshold for zoom level detection (adjustable)
+        const zoomThreshold = 30
+
+        // Determine quantization level based on zoom (beat width in pixels)
+        // When zoomed out (small beat width), quantize to measures
+        // When zoomed in (large beat width), quantize to smaller divisions
+        let quantizeDuration
+        if (beatWidth < zoomThreshold * 0.5) {
+            // Very zoomed out: quantize to whole measures
+            quantizeDuration = beatDuration * beatsPerMeasure
+        } else if (beatWidth < zoomThreshold) {
+            // Zoomed out: quantize to half measures (2 beats in 4/4)
+            quantizeDuration = beatDuration * (beatsPerMeasure / 2)
+        } else if (beatWidth < zoomThreshold * 2) {
+            // Medium zoom: quantize to beats
+            quantizeDuration = beatDuration
+        } else if (beatWidth < zoomThreshold * 4) {
+            // Zoomed in: quantize to half beats (eighth notes in 4/4)
+            quantizeDuration = beatDuration / 2
+        } else {
+            // Very zoomed in: quantize to quarter beats (sixteenth notes in 4/4)
+            quantizeDuration = beatDuration / 4
+        }
+
+        // Round time to nearest quantization unit
+        return Math.round(time / quantizeDuration) * quantizeDuration
+    }
+
    draw(ctx) {
        ctx.save()
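To make the bands concrete: at 120 BPM in 4/4 a beat lasts 0.5 s, so at pixelsPerSecond = 40 a beat spans 20 px — inside the 15–30 px band, giving a half-measure grid (1.0 s), and a drag to 3.7 s snaps to 4.0 s. Zooming in to pixelsPerSecond = 80 widens beats to 40 px, which selects the single-beat grid (0.5 s), so the same drag snaps to 3.5 s instead.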
@@ -594,9 +642,6 @@ class TimelineWindowV2 extends Widget {
        ctx.fillStyle = backgroundColor
        ctx.fillRect(0, 0, this.width, this.height)

-        // Draw snapping checkbox in ruler header area (Phase 5)
-        this.drawSnappingCheckbox(ctx)
-
        // Draw time ruler at top, offset by track header width
        ctx.save()
        ctx.translate(this.trackHeaderWidth, 0)

@@ -659,33 +704,6 @@ class TimelineWindowV2 extends Widget {
        ctx.restore()
    }

-    /**
-     * Draw snapping checkbox in ruler header area (Phase 5)
-     */
-    drawSnappingCheckbox(ctx) {
-        const checkboxSize = 14
-        const checkboxX = 10
-        const checkboxY = (this.ruler.height - checkboxSize) / 2
-
-        // Draw checkbox border
-        ctx.strokeStyle = foregroundColor
-        ctx.lineWidth = 1
-        ctx.strokeRect(checkboxX, checkboxY, checkboxSize, checkboxSize)
-
-        // Fill if snapping is enabled
-        if (this.timelineState.snapToFrames) {
-            ctx.fillStyle = foregroundColor
-            ctx.fillRect(checkboxX + 2, checkboxY + 2, checkboxSize - 4, checkboxSize - 4)
-        }
-
-        // Draw label
-        ctx.fillStyle = labelColor
-        ctx.font = '11px sans-serif'
-        ctx.textAlign = 'left'
-        ctx.textBaseline = 'middle'
-        ctx.fillText('Snap', checkboxX + checkboxSize + 6, this.ruler.height / 2)
-    }
-
    /**
     * Draw fixed track headers on the left (names, expand/collapse)
     */
@@ -1430,36 +1448,62 @@ class TimelineWindowV2 extends Widget {
            const availableHeight = trackHeight - 10 - (verticalPadding * 2)
            const noteHeight = availableHeight / 12

-            // Calculate visible time range within the clip
+            // Get clip trim boundaries (internal_start = offset, internal_end depends on source)
+            const clipOffset = clip.offset || 0
+            // Use stored internalDuration if available (set when trimming), otherwise calculate from notes
+            let internalDuration
+            if (clip.internalDuration !== undefined) {
+                internalDuration = clip.internalDuration
+            } else {
+                // Fallback: calculate from actual notes (for clips that haven't been trimmed)
+                let contentEndTime = clipOffset
+                for (const note of clip.notes) {
+                    const noteEnd = note.start_time + note.duration
+                    if (noteEnd > contentEndTime) {
+                        contentEndTime = noteEnd
+                    }
+                }
+                internalDuration = contentEndTime - clipOffset
+            }
+            const contentEndTime = clipOffset + internalDuration
+            // If clip.duration exceeds internal duration, we're looping
+            const isLooping = clip.duration > internalDuration && internalDuration > 0
+
+            // Calculate visible time range within the clip (in clip-local time)
            const clipEndX = startX + clipWidth
            const visibleStartTime = this.timelineState.pixelToTime(Math.max(startX, 0)) - clip.startTime
            const visibleEndTime = this.timelineState.pixelToTime(Math.min(clipEndX, this.width)) - clip.startTime

-            // Binary search to find first visible note
-            let firstVisibleIdx = 0
-            let left = 0
-            let right = clip.notes.length - 1
-            while (left <= right) {
-                const mid = Math.floor((left + right) / 2)
-                const noteEndTime = clip.notes[mid].start_time + clip.notes[mid].duration
-
-                if (noteEndTime < visibleStartTime) {
-                    left = mid + 1
-                    firstVisibleIdx = left
-                } else {
-                    right = mid - 1
-                }
-            }
-
-            // Draw visible notes only
-            ctx.fillStyle = '#6fdc6f' // Bright green for note bars
-
-            for (let i = firstVisibleIdx; i < clip.notes.length; i++) {
-                const note = clip.notes[i]
-
-                // Exit early if note starts after visible range
-                if (note.start_time > visibleEndTime) {
-                    break
-                }
+            // Helper function to draw notes for a given loop iteration
+            const drawNotesForIteration = (loopOffset, opacity) => {
+                ctx.fillStyle = opacity < 1 ? `rgba(111, 220, 111, ${opacity})` : '#6fdc6f'
+
+                for (let i = 0; i < clip.notes.length; i++) {
+                    const note = clip.notes[i]
+                    const noteEndTime = note.start_time + note.duration
+
+                    // Skip notes that are outside the trimmed region
+                    if (noteEndTime <= clipOffset || note.start_time >= contentEndTime) {
+                        continue
+                    }
+
+                    // Calculate note position in this loop iteration
+                    const noteDisplayStart = note.start_time - clipOffset + loopOffset
+                    const noteDisplayEnd = noteEndTime - clipOffset + loopOffset
+
+                    // Skip if this iteration's note is beyond clip duration
+                    if (noteDisplayStart >= clip.duration) {
+                        continue
+                    }
+
+                    // Exit early if note starts after visible range
+                    if (noteDisplayStart > visibleEndTime) {
+                        continue
+                    }
+
+                    // Skip if note ends before visible range
+                    if (noteDisplayEnd < visibleStartTime) {
+                        continue
+                    }

                // Calculate note position (pitch mod 12 for chromatic representation)
@@ -1468,8 +1512,8 @@ class TimelineWindowV2 extends Widget {
                const noteY = y + 5 + ((11 - pitchClass) * noteHeight)

                // Calculate note timing on timeline
-                const noteStartX = this.timelineState.timeToPixel(clip.startTime + note.start_time)
-                const noteEndX = this.timelineState.timeToPixel(clip.startTime + note.start_time + note.duration)
+                const noteStartX = this.timelineState.timeToPixel(clip.startTime + noteDisplayStart)
+                let noteEndX = this.timelineState.timeToPixel(clip.startTime + Math.min(noteDisplayEnd, clip.duration))

                // Clip to visible bounds
                const visibleStartX = Math.max(noteStartX, startX + 2)
@@ -1486,6 +1530,19 @@ class TimelineWindowV2 extends Widget {
                )
            }
            }
+            }
+
+            // Draw primary notes at full opacity
+            drawNotesForIteration(0, 1.0)
+
+            // Draw looped iterations at 50% opacity
+            if (isLooping) {
+                let loopOffset = internalDuration
+                while (loopOffset < clip.duration) {
+                    drawNotesForIteration(loopOffset, 0.5)
+                    loopOffset += internalDuration
+                }
+            }
        } else if (!isMIDI && clip.waveform && clip.waveform.length > 0) {
            // Draw waveform for audio clips
            ctx.fillStyle = 'rgba(255, 255, 255, 0.3)'
@@ -1986,22 +2043,6 @@ class TimelineWindowV2 extends Widget {
    }

    mousedown(x, y) {
-        // Check if clicking on snapping checkbox (Phase 5)
-        if (y <= this.ruler.height && x < this.trackHeaderWidth) {
-            const checkboxSize = 14
-            const checkboxX = 10
-            const checkboxY = (this.ruler.height - checkboxSize) / 2
-
-            if (x >= checkboxX && x <= checkboxX + checkboxSize &&
-                y >= checkboxY && y <= checkboxY + checkboxSize) {
-                // Toggle snapping
-                this.timelineState.snapToFrames = !this.timelineState.snapToFrames
-                console.log('Snapping', this.timelineState.snapToFrames ? 'enabled' : 'disabled')
-                if (this.requestRedraw) this.requestRedraw()
-                return true
-            }
-        }
-
        // Check if clicking in ruler area (after track headers)
        if (y <= this.ruler.height && x >= this.trackHeaderWidth) {
            // Adjust x for ruler (remove track header offset)
@@ -2233,6 +2274,36 @@ class TimelineWindowV2 extends Widget {
                    return true
                }

+                // Check if clicking on loop corner (top-right) to extend/loop clip
+                const loopCornerInfo = this.getAudioClipLoopCornerAtPoint(track, adjustedX, adjustedY)
+                if (loopCornerInfo) {
+                    // Skip if right-clicking (button 2)
+                    if (this.lastClickEvent?.button === 2) {
+                        return false
+                    }
+
+                    // Select the track
+                    this.selectTrack(track)
+
+                    // Start loop corner dragging
+                    this.draggingLoopCorner = {
+                        track: track,
+                        clip: loopCornerInfo.clip,
+                        clipIndex: loopCornerInfo.clipIndex,
+                        audioTrack: loopCornerInfo.audioTrack,
+                        isMIDI: loopCornerInfo.isMIDI,
+                        initialDuration: loopCornerInfo.clip.duration
+                    }
+
+                    // Enable global mouse events for dragging
+                    this._globalEvents.add("mousemove")
+                    this._globalEvents.add("mouseup")
+
+                    console.log('Started dragging loop corner')
+                    if (this.requestRedraw) this.requestRedraw()
+                    return true
+                }
+
                // Check if clicking on audio clip edge to start trimming
                const audioEdgeInfo = this.getAudioClipEdgeAtPoint(track, adjustedX, adjustedY)
                if (audioEdgeInfo) {
@@ -2934,6 +3005,47 @@ class TimelineWindowV2 extends Widget {
        return null
    }

+    /**
+     * Check if hovering over the loop corner (top-right) of an audio/MIDI clip
+     * Returns clip info if in the loop corner zone
+     */
+    getAudioClipLoopCornerAtPoint(track, x, y) {
+        if (track.type !== 'audio') return null
+
+        const trackIndex = this.trackHierarchy.tracks.indexOf(track)
+        if (trackIndex === -1) return null
+
+        const trackY = this.trackHierarchy.getTrackY(trackIndex)
+        const trackHeight = this.trackHierarchy.trackHeight
+        const clipTop = trackY + 5
+        const cornerSize = 12 // Size of the corner hot zone in pixels
+
+        // Check if y is in the top portion of the clip
+        if (y < clipTop || y > clipTop + cornerSize) return null
+
+        const clickTime = this.timelineState.pixelToTime(x)
+        const audioTrack = track.object
+
+        // Check each clip
+        for (let i = 0; i < audioTrack.clips.length; i++) {
+            const clip = audioTrack.clips[i]
+            const clipEnd = clip.startTime + clip.duration
+            const clipEndX = this.timelineState.timeToPixel(clipEnd)
+
+            // Check if x is near the right edge (within corner zone)
+            if (x >= clipEndX - cornerSize && x <= clipEndX) {
+                return {
+                    clip: clip,
+                    clipIndex: i,
+                    audioTrack: audioTrack,
+                    isMIDI: audioTrack.type === 'midi'
+                }
+            }
+        }
+
+        return null
+    }
+
    getVideoClipAtPoint(track, x, y) {
        if (track.type !== 'video') return null
@@ -3848,19 +3960,23 @@ class TimelineWindowV2 extends Widget {
        // Handle audio clip edge dragging (trimming)
        if (this.draggingAudioClipEdge) {
            const adjustedX = x - this.trackHeaderWidth
-            const newTime = this.timelineState.pixelToTime(adjustedX)
+            const rawTime = this.timelineState.pixelToTime(adjustedX)
            const minClipDuration = this.context.config.minClipDuration

            if (this.draggingAudioClipEdge.edge === 'left') {
                // Dragging left edge - adjust startTime and offset
                const initialEnd = this.draggingAudioClipEdge.initialClipStart + this.draggingAudioClipEdge.initialClipDuration
                const maxStartTime = initialEnd - minClipDuration
-                const newStartTime = Math.max(0, Math.min(newTime, maxStartTime))
+                // Quantize the new start time
+                let newStartTime = Math.max(0, Math.min(rawTime, maxStartTime))
+                newStartTime = this.quantizeTime(newStartTime)
                const startTimeDelta = newStartTime - this.draggingAudioClipEdge.initialClipStart

                this.draggingAudioClipEdge.clip.startTime = newStartTime
                this.draggingAudioClipEdge.clip.offset = this.draggingAudioClipEdge.initialClipOffset + startTimeDelta
                this.draggingAudioClipEdge.clip.duration = this.draggingAudioClipEdge.initialClipDuration - startTimeDelta
+                // Also update internalDuration when trimming (this is the content length before looping)
+                this.draggingAudioClipEdge.clip.internalDuration = this.draggingAudioClipEdge.initialClipDuration - startTimeDelta

                // Also trim linked video clip if it exists
                if (this.draggingAudioClipEdge.clip.linkedVideoClip) {
@@ -3872,14 +3988,21 @@ class TimelineWindowV2 extends Widget {
            } else {
                // Dragging right edge - adjust duration
                const minEndTime = this.draggingAudioClipEdge.initialClipStart + minClipDuration
-                const newEndTime = Math.max(minEndTime, newTime)
+                // Quantize the new end time
+                let newEndTime = Math.max(minEndTime, rawTime)
+                newEndTime = this.quantizeTime(newEndTime)
                let newDuration = newEndTime - this.draggingAudioClipEdge.clip.startTime

-                // Constrain duration to not exceed source file duration minus offset
-                const maxAvailableDuration = this.draggingAudioClipEdge.clip.sourceDuration - this.draggingAudioClipEdge.clip.offset
-                newDuration = Math.min(newDuration, maxAvailableDuration)
+                // Constrain duration to not exceed source file duration minus offset (for audio clips only)
+                // MIDI clips don't have sourceDuration and can be extended freely
+                if (this.draggingAudioClipEdge.clip.sourceDuration !== undefined) {
+                    const maxAvailableDuration = this.draggingAudioClipEdge.clip.sourceDuration - (this.draggingAudioClipEdge.clip.offset || 0)
+                    newDuration = Math.min(newDuration, maxAvailableDuration)
+                }

                this.draggingAudioClipEdge.clip.duration = newDuration
+                // Also update internalDuration when trimming (this is the content length before looping)
+                this.draggingAudioClipEdge.clip.internalDuration = newDuration

                // Also trim linked video clip if it exists
                if (this.draggingAudioClipEdge.clip.linkedVideoClip) {
@@ -3893,6 +4016,25 @@ class TimelineWindowV2 extends Widget {
            return true
        }

+        // Handle loop corner dragging (extending/looping clip)
+        if (this.draggingLoopCorner) {
+            const adjustedX = x - this.trackHeaderWidth
+            const newTime = this.timelineState.pixelToTime(adjustedX)
+            const minClipDuration = this.context.config.minClipDuration
+
+            // Calculate new end time and quantize it
+            let newEndTime = Math.max(this.draggingLoopCorner.clip.startTime + minClipDuration, newTime)
+            newEndTime = this.quantizeTime(newEndTime)
+            const newDuration = newEndTime - this.draggingLoopCorner.clip.startTime
+
+            // Update clip duration (no maximum constraint - allows looping)
+            this.draggingLoopCorner.clip.duration = newDuration
+
+            // Trigger timeline redraw
+            if (this.requestRedraw) this.requestRedraw()
+            return true
+        }
+
        // Handle audio clip dragging
        if (this.draggingAudioClip) {
            // Adjust coordinates to timeline area
@@ -4046,7 +4188,8 @@ class TimelineWindowV2 extends Widget {
        // Update cursor based on hover position (when not dragging)
        if (!this.draggingAudioClip && !this.draggingVideoClip &&
            !this.draggingAudioClipEdge && !this.draggingVideoClipEdge &&
-            !this.draggingKeyframe && !this.draggingPlayhead && !this.draggingSegment) {
+            !this.draggingKeyframe && !this.draggingPlayhead && !this.draggingSegment &&
+            !this.draggingLoopCorner) {
            const trackY = y - this.ruler.height
            if (trackY >= 0 && x >= this.trackHeaderWidth) {
                const adjustedY = trackY - this.trackScrollOffset
@@ -4054,6 +4197,16 @@ class TimelineWindowV2 extends Widget {
                const track = this.trackHierarchy.getTrackAtY(adjustedY)

                if (track) {
+                    // Check for audio/MIDI clip loop corner (top-right) - must check before edge detection
+                    if (track.type === 'audio') {
+                        const loopCornerInfo = this.getAudioClipLoopCornerAtPoint(track, adjustedX, adjustedY)
+                        if (loopCornerInfo) {
+                            // Use the same rotate cursor as the transform tool corner handles
+                            this.cursor = "url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='24' height='24' fill='currentColor' viewBox='0 0 16 16'%3E%3Cpath fill-rule='evenodd' d='M8 3a5 5 0 1 1-4.546 2.914.5.5 0 0 0-.908-.417A6 6 0 1 0 8 2z'/%3E%3Cpath d='M8 4.466V.534a.25.25 0 0 0-.41-.192L5.23 2.308a.25.25 0 0 0 0 .384l2.36 1.966A.25.25 0 0 0 8 4.466'/%3E%3C/svg%3E\") 12 12, auto"
+                            return false
+                        }
+                    }
+
                    // Check for audio clip edge
                    if (track.type === 'audio') {
                        const audioEdgeInfo = this.getAudioClipEdgeAtPoint(track, adjustedX, adjustedY)
@@ -4142,13 +4295,28 @@ class TimelineWindowV2 extends Widget {
        if (this.draggingAudioClipEdge) {
            console.log('Finished trimming audio clip edge')

-            // Update backend with new clip trim
+            const clip = this.draggingAudioClipEdge.clip
+            const trackId = this.draggingAudioClipEdge.audioTrack.audioTrackId
+            const clipId = clip.clipId
+
+            // If dragging left edge, also move the clip's timeline position
+            if (this.draggingAudioClipEdge.edge === 'left') {
+                invoke('audio_move_clip', {
+                    trackId: trackId,
+                    clipId: clipId,
+                    newStartTime: clip.startTime
+                }).catch(error => {
+                    console.error('Failed to move audio clip in backend:', error)
+                })
+            }
+
+            // Update the internal trim boundaries
+            // internal_start = offset, internal_end = offset + duration (content region)
            invoke('audio_trim_clip', {
-                trackId: this.draggingAudioClipEdge.audioTrack.audioTrackId,
-                clipId: this.draggingAudioClipEdge.clip.clipId,
-                newStartTime: this.draggingAudioClipEdge.clip.startTime,
-                newDuration: this.draggingAudioClipEdge.clip.duration,
-                newOffset: this.draggingAudioClipEdge.clip.offset
+                trackId: trackId,
+                clipId: clipId,
+                internalStart: clip.offset,
+                internalEnd: clip.offset + clip.duration
            }).catch(error => {
                console.error('Failed to trim audio clip in backend:', error)
            })
@@ -4168,6 +4336,33 @@ class TimelineWindowV2 extends Widget {
            return true
        }

+        // Complete loop corner dragging (extending/looping clip)
+        if (this.draggingLoopCorner) {
+            console.log('Finished extending clip via loop corner')
+
+            const clip = this.draggingLoopCorner.clip
+            const trackId = this.draggingLoopCorner.audioTrack.audioTrackId
+            const clipId = clip.clipId
+
+            // Call audio_extend_clip to update the external duration in the backend
+            invoke('audio_extend_clip', {
+                trackId: trackId,
+                clipId: clipId,
+                newExternalDuration: clip.duration
+            }).catch(error => {
+                console.error('Failed to extend audio clip in backend:', error)
+            })
+
+            // Clean up dragging state
+            this.draggingLoopCorner = null
+            this._globalEvents.delete("mousemove")
+            this._globalEvents.delete("mouseup")
+
+            // Final redraw
+            if (this.requestRedraw) this.requestRedraw()
+            return true
+        }
+
        // Complete video clip edge dragging (trimming)
        if (this.draggingVideoClipEdge) {
            console.log('Finished trimming video clip edge')
@@ -4177,12 +4372,26 @@ class TimelineWindowV2 extends Widget {
            const linkedAudioClip = this.draggingVideoClipEdge.clip.linkedAudioClip
            const audioTrack = this.draggingVideoClipEdge.videoLayer.linkedAudioTrack
            if (audioTrack) {
+                const trackId = audioTrack.audioTrackId
+                const clipId = linkedAudioClip.clipId
+
+                // If dragging left edge, also move the clip's timeline position
+                if (this.draggingVideoClipEdge.edge === 'left') {
+                    invoke('audio_move_clip', {
+                        trackId: trackId,
+                        clipId: clipId,
+                        newStartTime: linkedAudioClip.startTime
+                    }).catch(error => {
+                        console.error('Failed to move linked audio clip in backend:', error)
+                    })
+                }
+
+                // Update the internal trim boundaries
                invoke('audio_trim_clip', {
-                    trackId: audioTrack.audioTrackId,
-                    clipId: linkedAudioClip.clipId,
-                    newStartTime: linkedAudioClip.startTime,
-                    newDuration: linkedAudioClip.duration,
-                    newOffset: linkedAudioClip.offset
+                    trackId: trackId,
+                    clipId: clipId,
+                    internalStart: linkedAudioClip.offset,
+                    internalEnd: linkedAudioClip.offset + linkedAudioClip.duration
                }).catch(error => {
                    console.error('Failed to trim linked audio clip in backend:', error)
                })