Compare commits

...

3 Commits

Author SHA1 Message Date
Skyler Lehmkuhl a1ad0b44b1 Add support for audio trimming and looping 2025-11-28 06:53:28 -05:00
Skyler Lehmkuhl 422f97382b piano roll improvements 2025-11-24 11:11:10 -05:00
Skyler Lehmkuhl 1fcefab966 fix compilation warnings 2025-11-24 11:10:47 -05:00
20 changed files with 1866 additions and 438 deletions

View File

@ -1,21 +1,68 @@
/// Clip ID type
pub type ClipId = u32;
/// Audio clip instance ID type
pub type AudioClipInstanceId = u32;
/// Audio clip that references data in the AudioPool
/// Type alias for backwards compatibility
pub type ClipId = AudioClipInstanceId;
/// Audio clip instance that references content in the AudioClipPool
///
/// This represents a placed instance of audio content on the timeline.
/// The actual audio data is stored in the AudioClipPool and referenced by `audio_pool_index`.
///
/// ## Timing Model
/// - `internal_start` / `internal_end`: Define the region of the source audio to play (trimming)
/// - `external_start` / `external_duration`: Define where the clip appears on the timeline and how long
///
/// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`,
/// the clip will seamlessly loop back to `internal_start` when it reaches `internal_end`.
#[derive(Debug, Clone)]
pub struct Clip {
pub id: ClipId,
pub struct AudioClipInstance {
pub id: AudioClipInstanceId,
pub audio_pool_index: usize,
pub start_time: f64, // Position on timeline in seconds
pub duration: f64, // Clip duration in seconds
pub offset: f64, // Offset into audio file in seconds
pub gain: f32, // Clip-level gain
/// Start position within the audio content (seconds)
pub internal_start: f64,
/// End position within the audio content (seconds)
pub internal_end: f64,
/// Start position on the timeline (seconds)
pub external_start: f64,
/// Duration on the timeline (seconds) - can be longer than internal duration for looping
pub external_duration: f64,
/// Clip-level gain
pub gain: f32,
}
impl Clip {
/// Create a new clip
/// Type alias for backwards compatibility
pub type Clip = AudioClipInstance;
impl AudioClipInstance {
/// Create a new audio clip instance
pub fn new(
id: ClipId,
id: AudioClipInstanceId,
audio_pool_index: usize,
internal_start: f64,
internal_end: f64,
external_start: f64,
external_duration: f64,
) -> Self {
Self {
id,
audio_pool_index,
internal_start,
internal_end,
external_start,
external_duration,
gain: 1.0,
}
}
/// Create a clip instance from legacy parameters (for backwards compatibility)
/// Maps old start_time/duration/offset to new timing model
pub fn from_legacy(
id: AudioClipInstanceId,
audio_pool_index: usize,
start_time: f64,
duration: f64,
@ -24,22 +71,64 @@ impl Clip {
Self {
id,
audio_pool_index,
start_time,
duration,
offset,
internal_start: offset,
internal_end: offset + duration,
external_start: start_time,
external_duration: duration,
gain: 1.0,
}
}
/// Check if this clip is active at a given timeline position
/// Check if this clip instance is active at a given timeline position
pub fn is_active_at(&self, time_seconds: f64) -> bool {
let clip_end = self.start_time + self.duration;
time_seconds >= self.start_time && time_seconds < clip_end
time_seconds >= self.external_start && time_seconds < self.external_end()
}
/// Get the end time of this clip on the timeline
/// Get the end time of this clip instance on the timeline
pub fn external_end(&self) -> f64 {
self.external_start + self.external_duration
}
/// Get the end time of this clip instance on the timeline
/// (Alias for external_end(), for backwards compatibility)
pub fn end_time(&self) -> f64 {
self.start_time + self.duration
self.external_end()
}
/// Get the start time on the timeline
/// (Alias for external_start, for backwards compatibility)
pub fn start_time(&self) -> f64 {
self.external_start
}
/// Get the internal (content) duration
pub fn internal_duration(&self) -> f64 {
self.internal_end - self.internal_start
}
/// Check if this clip instance loops
pub fn is_looping(&self) -> bool {
self.external_duration > self.internal_duration()
}
/// Get the position within the audio content for a given timeline position
/// Returns None if the timeline position is outside this clip instance
/// Handles looping automatically
pub fn get_content_position(&self, timeline_pos: f64) -> Option<f64> {
if timeline_pos < self.external_start || timeline_pos >= self.external_end() {
return None;
}
let relative_pos = timeline_pos - self.external_start;
let internal_duration = self.internal_duration();
if internal_duration <= 0.0 {
return None;
}
// Wrap around for looping
let content_offset = relative_pos % internal_duration;
Some(self.internal_start + content_offset)
}
/// Set clip gain

View File

@ -1,9 +1,9 @@
use crate::audio::buffer_pool::BufferPool;
use crate::audio::clip::ClipId;
use crate::audio::clip::{AudioClipInstance, ClipId};
use crate::audio::metronome::Metronome;
use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
use crate::audio::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiEvent};
use crate::audio::node_graph::{nodes::*, AudioGraph};
use crate::audio::pool::AudioPool;
use crate::audio::pool::AudioClipPool;
use crate::audio::project::Project;
use crate::audio::recording::{MidiRecordingState, RecordingState};
use crate::audio::track::{Track, TrackId, TrackNode};
@ -16,7 +16,7 @@ use std::sync::Arc;
/// Audio engine for Phase 6: hierarchical tracks with groups
pub struct Engine {
project: Project,
audio_pool: AudioPool,
audio_pool: AudioClipPool,
buffer_pool: BufferPool,
playhead: u64, // Playhead position in samples
sample_rate: u32,
@ -78,7 +78,7 @@ impl Engine {
Self {
project: Project::new(sample_rate),
audio_pool: AudioPool::new(),
audio_pool: AudioClipPool::new(),
buffer_pool: BufferPool::new(8, buffer_size), // 8 buffers should handle deep nesting
playhead: 0,
sample_rate,
@ -164,12 +164,12 @@ impl Engine {
}
/// Get mutable reference to audio pool
pub fn audio_pool_mut(&mut self) -> &mut AudioPool {
pub fn audio_pool_mut(&mut self) -> &mut AudioClipPool {
&mut self.audio_pool
}
/// Get reference to audio pool
pub fn audio_pool(&self) -> &AudioPool {
pub fn audio_pool(&self) -> &AudioClipPool {
&self.audio_pool
}
@ -240,9 +240,15 @@ impl Engine {
let playhead_seconds = self.playhead as f64 / self.sample_rate as f64;
// Render the entire project hierarchy into the mix buffer
// Note: We need to use a raw pointer to avoid borrow checker issues
// The midi_clip_pool is part of project, so we extract a reference before mutable borrow
let midi_pool_ptr = &self.project.midi_clip_pool as *const _;
// SAFETY: The midi_clip_pool is not mutated during render, only read
let midi_pool_ref = unsafe { &*midi_pool_ptr };
self.project.render(
&mut self.mix_buffer,
&self.audio_pool,
midi_pool_ref,
&mut self.buffer_pool,
playhead_seconds,
self.sample_rate,
@ -314,10 +320,12 @@ impl Engine {
let clip_id = recording.clip_id;
let track_id = recording.track_id;
// Update clip duration in project
// Update clip duration in project as recording progresses
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.duration = duration;
// Update both internal_end and external_duration as recording progresses
clip.internal_end = clip.internal_start + duration;
clip.external_duration = duration;
}
}
@ -384,33 +392,58 @@ impl Engine {
}
}
Command::MoveClip(track_id, clip_id, new_start_time) => {
// Moving just changes external_start, external_duration stays the same
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.start_time = new_start_time;
clip.external_start = new_start_time;
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.start_time = new_start_time;
// Note: clip_id here is the pool clip ID, not instance ID
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
instance.external_start = new_start_time;
}
}
_ => {}
}
}
Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset) => {
Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end) => {
// Trim changes which portion of the source content is used
// Also updates external_duration to match internal duration (no looping after trim)
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.start_time = new_start_time;
clip.duration = new_duration;
clip.offset = new_offset;
clip.internal_start = new_internal_start;
clip.internal_end = new_internal_end;
// By default, trimming sets external_duration to match internal duration
clip.external_duration = new_internal_end - new_internal_start;
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
// Note: clip_id here is the pool clip ID, not instance ID
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
instance.internal_start = new_internal_start;
instance.internal_end = new_internal_end;
// By default, trimming sets external_duration to match internal duration
instance.external_duration = new_internal_end - new_internal_start;
}
}
_ => {}
}
}
Command::ExtendClip(track_id, clip_id, new_external_duration) => {
// Extend changes the external duration (enables looping if > internal duration)
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.start_time = new_start_time;
clip.duration = new_duration;
clip.external_duration = new_external_duration;
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
// Note: clip_id here is the pool clip ID, not instance ID
if let Some(instance) = track.clip_instances.iter_mut().find(|c| c.clip_id == clip_id) {
instance.external_duration = new_external_duration;
}
}
_ => {}
@ -475,10 +508,10 @@ impl Engine {
pool_index, pool_size);
}
// Create a new clip with unique ID
// Create a new clip instance with unique ID using legacy parameters
let clip_id = self.next_clip_id;
self.next_clip_id += 1;
let clip = crate::audio::clip::Clip::new(
let clip = AudioClipInstance::from_legacy(
clip_id,
pool_index,
start_time,
@ -504,37 +537,57 @@ impl Engine {
Command::CreateMidiClip(track_id, start_time, duration) => {
// Get the next MIDI clip ID from the atomic counter
let clip_id = self.next_midi_clip_id_atomic.fetch_add(1, Ordering::Relaxed);
let clip = MidiClip::new(clip_id, start_time, duration);
let _ = self.project.add_midi_clip(track_id, clip);
// Notify UI about the new clip with its ID
// Create clip content in the pool
let clip = MidiClip::empty(clip_id, duration, format!("MIDI Clip {}", clip_id));
self.project.midi_clip_pool.add_existing_clip(clip);
// Create an instance for this clip on the track
let instance_id = self.project.next_midi_clip_instance_id();
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
track.clip_instances.push(instance);
}
// Notify UI about the new clip with its ID (using clip_id for now)
let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id));
}
Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration) => {
// Add a MIDI note event to the specified clip
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
// Add a MIDI note event to the specified clip in the pool
// Note: clip_id here refers to the clip in the pool, not the instance
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
// Timestamp is now in seconds (sample-rate independent)
let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
clip.events.push(note_on);
clip.add_event(note_on);
// Add note off event
let note_off_time = time_offset + duration;
let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
clip.events.push(note_off);
// Sort events by timestamp (using partial_cmp for f64)
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
}
}
Command::AddLoadedMidiClip(track_id, clip) => {
// Add a pre-loaded MIDI clip to the track
let _ = self.project.add_midi_clip(track_id, clip);
}
Command::UpdateMidiClipNotes(track_id, clip_id, notes) => {
// Update all notes in a MIDI clip
clip.add_event(note_off);
} else {
// Try legacy behavior: look for instance on track and find its clip
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
if let Some(instance) = track.clip_instances.iter().find(|c| c.clip_id == clip_id) {
let actual_clip_id = instance.clip_id;
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(actual_clip_id) {
let note_on = MidiEvent::note_on(time_offset, 0, note, velocity);
clip.add_event(note_on);
let note_off_time = time_offset + duration;
let note_off = MidiEvent::note_off(note_off_time, 0, note, 64);
clip.add_event(note_off);
}
}
}
}
}
Command::AddLoadedMidiClip(track_id, clip, start_time) => {
// Add a pre-loaded MIDI clip to the track with the given start time
let _ = self.project.add_midi_clip_at(track_id, clip, start_time);
}
Command::UpdateMidiClipNotes(_track_id, clip_id, notes) => {
// Update all notes in a MIDI clip (directly in the pool)
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
// Clear existing events
clip.events.clear();
@ -554,7 +607,6 @@ impl Engine {
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
}
}
Command::RequestBufferPoolStats => {
// Send buffer pool statistics back to UI
let stats = self.buffer_pool.stats();
@ -728,7 +780,7 @@ impl Engine {
self.project = Project::new(self.sample_rate);
// Clear audio pool
self.audio_pool = AudioPool::new();
self.audio_pool = AudioClipPool::new();
// Reset buffer pool (recreate with same settings)
let buffer_size = 512 * self.channels as usize;
@ -1439,19 +1491,16 @@ impl Engine {
))),
}
}
Query::GetMidiClip(track_id, clip_id) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
if let Some(clip) = track.clips.iter().find(|c| c.id == clip_id) {
Query::GetMidiClip(_track_id, clip_id) => {
// Get MIDI clip data from the pool
if let Some(clip) = self.project.midi_clip_pool.get_clip(clip_id) {
use crate::command::MidiClipData;
QueryResponse::MidiClipData(Ok(MidiClipData {
duration: clip.duration,
events: clip.events.clone(),
}))
} else {
QueryResponse::MidiClipData(Err(format!("Clip {} not found in track {}", clip_id, track_id)))
}
} else {
QueryResponse::MidiClipData(Err(format!("Track {} not found or is not a MIDI track", track_id)))
QueryResponse::MidiClipData(Err(format!("Clip {} not found in pool", clip_id)))
}
}
@ -1622,7 +1671,10 @@ impl Engine {
Query::ExportAudio(settings, output_path) => {
// Perform export directly - this will block the audio thread but that's okay
// since we're exporting and not playing back anyway
match crate::audio::export_audio(&mut self.project, &self.audio_pool, &settings, &output_path) {
// Use raw pointer to get midi_pool reference before mutable borrow of project
let midi_pool_ptr: *const _ = &self.project.midi_clip_pool;
let midi_pool_ref = unsafe { &*midi_pool_ptr };
match crate::audio::export_audio(&mut self.project, &self.audio_pool, midi_pool_ref, &settings, &output_path) {
Ok(()) => QueryResponse::AudioExported(Ok(())),
Err(e) => QueryResponse::AudioExported(Err(e)),
}
@ -1658,9 +1710,10 @@ impl Engine {
let clip = crate::audio::clip::Clip::new(
clip_id,
0, // Temporary pool index, will be updated on finalization
start_time,
0.0, // Duration starts at 0, will be updated during recording
0.0,
0.0, // internal_start
0.0, // internal_end - Duration starts at 0, will be updated during recording
start_time, // external_start (timeline position)
start_time, // external_end - will be updated during recording
);
// Add clip to track
@ -1819,12 +1872,10 @@ impl Engine {
eprintln!("[MIDI_RECORDING] Stopping MIDI recording for clip_id={}, track_id={}, captured {} notes, duration={:.3}s",
clip_id, track_id, note_count, recording_duration);
// Update the MIDI clip using the existing UpdateMidiClipNotes logic
eprintln!("[MIDI_RECORDING] Looking for track {} to update clip", track_id);
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
eprintln!("[MIDI_RECORDING] Found MIDI track, looking for clip {}", clip_id);
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
eprintln!("[MIDI_RECORDING] Found clip, clearing and adding {} notes", note_count);
// Update the MIDI clip in the pool (new model: clips are stored centrally in the pool)
eprintln!("[MIDI_RECORDING] Looking for clip {} in midi_clip_pool", clip_id);
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
eprintln!("[MIDI_RECORDING] Found clip in pool, clearing and adding {} notes", note_count);
// Clear existing events
clip.events.clear();
@ -1850,11 +1901,18 @@ impl Engine {
// Sort events by timestamp (using partial_cmp for f64)
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
eprintln!("[MIDI_RECORDING] Updated clip {} with {} notes ({} events)", clip_id, note_count, clip.events.len());
} else {
eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found on track!", clip_id);
// Also update the clip instance's internal_end and external_duration to match the recording duration
if let Some(crate::audio::track::TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(instance) = track.clip_instances.iter_mut().find(|i| i.clip_id == clip_id) {
instance.internal_end = recording_duration;
instance.external_duration = recording_duration;
eprintln!("[MIDI_RECORDING] Updated clip instance timing: internal_end={:.3}s, external_duration={:.3}s",
instance.internal_end, instance.external_duration);
}
}
} else {
eprintln!("[MIDI_RECORDING] ERROR: Track {} not found or not a MIDI track!", track_id);
eprintln!("[MIDI_RECORDING] ERROR: Clip {} not found in pool!", clip_id);
}
// Send event to UI
@ -1941,13 +1999,20 @@ impl EngineController {
let _ = self.command_tx.push(Command::SetTrackSolo(track_id, solo));
}
/// Move a clip to a new timeline position
/// Move a clip to a new timeline position (changes external_start)
pub fn move_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64) {
let _ = self.command_tx.push(Command::MoveClip(track_id, clip_id, new_start_time));
}
pub fn trim_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_start_time: f64, new_duration: f64, new_offset: f64) {
let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_start_time, new_duration, new_offset));
/// Trim a clip's internal boundaries (changes which portion of source content is used)
/// This also resets external_duration to match internal duration (disables looping)
pub fn trim_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_internal_start: f64, new_internal_end: f64) {
let _ = self.command_tx.push(Command::TrimClip(track_id, clip_id, new_internal_start, new_internal_end));
}
/// Extend or shrink a clip's external duration (enables looping if > internal duration)
pub fn extend_clip(&mut self, track_id: TrackId, clip_id: ClipId, new_external_duration: f64) {
let _ = self.command_tx.push(Command::ExtendClip(track_id, clip_id, new_external_duration));
}
/// Send a generic command to the audio thread
@ -2071,9 +2136,9 @@ impl EngineController {
let _ = self.command_tx.push(Command::AddMidiNote(track_id, clip_id, time_offset, note, velocity, duration));
}
/// Add a pre-loaded MIDI clip to a track
pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) {
let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip));
/// Add a pre-loaded MIDI clip to a track at the given timeline position
pub fn add_loaded_midi_clip(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) {
let _ = self.command_tx.push(Command::AddLoadedMidiClip(track_id, clip, start_time));
}
/// Update all notes in a MIDI clip

View File

@ -1,4 +1,5 @@
use super::buffer_pool::BufferPool;
use super::midi_pool::MidiClipPool;
use super::pool::AudioPool;
use super::project::Project;
use std::path::Path;
@ -61,11 +62,12 @@ impl Default for ExportSettings {
pub fn export_audio<P: AsRef<Path>>(
project: &mut Project,
pool: &AudioPool,
midi_pool: &MidiClipPool,
settings: &ExportSettings,
output_path: P,
) -> Result<(), String> {
// Render the project to memory
let samples = render_to_memory(project, pool, settings)?;
let samples = render_to_memory(project, pool, midi_pool, settings)?;
// Write to file based on format
match settings.format {
@ -80,6 +82,7 @@ pub fn export_audio<P: AsRef<Path>>(
fn render_to_memory(
project: &mut Project,
pool: &AudioPool,
midi_pool: &MidiClipPool,
settings: &ExportSettings,
) -> Result<Vec<f32>, String> {
// Calculate total number of frames
@ -113,6 +116,7 @@ fn render_to_memory(
project.render(
&mut render_buffer,
pool,
midi_pool,
&mut buffer_pool,
playhead,
settings.sample_rate,

View File

@ -63,73 +63,216 @@ impl MidiEvent {
}
}
/// MIDI clip ID type
/// MIDI clip ID type (for clips stored in the pool)
pub type MidiClipId = u32;
/// MIDI clip containing a sequence of MIDI events
/// MIDI clip instance ID type (for instances placed on tracks)
pub type MidiClipInstanceId = u32;
/// MIDI clip content - stores the actual MIDI events
///
/// This represents the content data stored in the MidiClipPool.
/// Events have timestamps relative to the start of the clip (0.0 = clip beginning).
#[derive(Debug, Clone)]
pub struct MidiClip {
pub id: MidiClipId,
pub events: Vec<MidiEvent>,
pub start_time: f64, // Position on timeline in seconds
pub duration: f64, // Clip duration in seconds
pub loop_enabled: bool,
pub duration: f64, // Total content duration in seconds
pub name: String,
}
impl MidiClip {
/// Create a new MIDI clip
pub fn new(id: MidiClipId, start_time: f64, duration: f64) -> Self {
/// Create a new MIDI clip with content
pub fn new(id: MidiClipId, events: Vec<MidiEvent>, duration: f64, name: String) -> Self {
let mut clip = Self {
id,
events,
duration,
name,
};
// Sort events by timestamp
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
clip
}
/// Create an empty MIDI clip
pub fn empty(id: MidiClipId, duration: f64, name: String) -> Self {
Self {
id,
events: Vec::new(),
start_time,
duration,
loop_enabled: false,
name,
}
}
/// Add a MIDI event to the clip
pub fn add_event(&mut self, event: MidiEvent) {
self.events.push(event);
// Keep events sorted by timestamp (using partial_cmp for f64)
// Keep events sorted by timestamp
self.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
/// Get the end time of the clip
pub fn end_time(&self) -> f64 {
self.start_time + self.duration
/// Get events within a time range (relative to clip start)
/// This is used by MidiClipInstance to fetch events for a given portion
pub fn get_events_in_range(&self, start: f64, end: f64) -> Vec<MidiEvent> {
self.events
.iter()
.filter(|e| e.timestamp >= start && e.timestamp < end)
.copied()
.collect()
}
}
/// MIDI clip instance - a reference to MidiClip content with timeline positioning
///
/// ## Timing Model
/// - `internal_start` / `internal_end`: Define the region of the source clip to play (trimming)
/// - `external_start` / `external_duration`: Define where the instance appears on the timeline and how long
///
/// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`,
/// the instance will seamlessly loop back to `internal_start` when it reaches `internal_end`.
#[derive(Debug, Clone)]
pub struct MidiClipInstance {
pub id: MidiClipInstanceId,
pub clip_id: MidiClipId, // Reference to MidiClip in pool
/// Start position within the clip content (seconds)
pub internal_start: f64,
/// End position within the clip content (seconds)
pub internal_end: f64,
/// Start position on the timeline (seconds)
pub external_start: f64,
/// Duration on the timeline (seconds) - can be longer than internal duration for looping
pub external_duration: f64,
}
impl MidiClipInstance {
/// Create a new MIDI clip instance
pub fn new(
id: MidiClipInstanceId,
clip_id: MidiClipId,
internal_start: f64,
internal_end: f64,
external_start: f64,
external_duration: f64,
) -> Self {
Self {
id,
clip_id,
internal_start,
internal_end,
external_start,
external_duration,
}
}
/// Get events that should be triggered in a given time range
/// Create an instance that uses the full clip content (no trimming, no looping)
pub fn from_full_clip(
id: MidiClipInstanceId,
clip_id: MidiClipId,
clip_duration: f64,
external_start: f64,
) -> Self {
Self {
id,
clip_id,
internal_start: 0.0,
internal_end: clip_duration,
external_start,
external_duration: clip_duration,
}
}
/// Get the internal (content) duration
pub fn internal_duration(&self) -> f64 {
self.internal_end - self.internal_start
}
/// Get the end time on the timeline
pub fn external_end(&self) -> f64 {
self.external_start + self.external_duration
}
/// Check if this instance loops
pub fn is_looping(&self) -> bool {
self.external_duration > self.internal_duration()
}
/// Get the end time on the timeline (for backwards compatibility)
pub fn end_time(&self) -> f64 {
self.external_end()
}
/// Get the start time on the timeline (for backwards compatibility)
pub fn start_time(&self) -> f64 {
self.external_start
}
/// Check if this instance overlaps with a time range
pub fn overlaps_range(&self, range_start: f64, range_end: f64) -> bool {
self.external_start < range_end && self.external_end() > range_start
}
/// Get events that should be triggered in a given timeline range
///
/// Returns events along with their absolute timestamps in samples
/// This handles:
/// - Trimming (internal_start/internal_end)
/// - Looping (when external duration > internal duration)
/// - Time mapping from timeline to clip content
///
/// Returns events with timestamps adjusted to timeline time (not clip-relative)
pub fn get_events_in_range(
&self,
clip: &MidiClip,
range_start_seconds: f64,
range_end_seconds: f64,
_sample_rate: u32,
) -> Vec<MidiEvent> {
let mut result = Vec::new();
// Check if clip overlaps with the range
if range_start_seconds >= self.end_time() || range_end_seconds <= self.start_time {
// Check if instance overlaps with the range
if !self.overlaps_range(range_start_seconds, range_end_seconds) {
return result;
}
// Calculate the intersection
let play_start = range_start_seconds.max(self.start_time);
let play_end = range_end_seconds.min(self.end_time());
let internal_duration = self.internal_duration();
if internal_duration <= 0.0 {
return result;
}
// Position within the clip
let clip_position_seconds = play_start - self.start_time;
let clip_end_seconds = play_end - self.start_time;
// Calculate how many complete loops fit in the external duration
let num_loops = if self.external_duration > internal_duration {
(self.external_duration / internal_duration).ceil() as usize
} else {
1
};
// Find events in this range
// Note: event.timestamp is now in seconds relative to clip start
// Use half-open interval [start, end) to avoid triggering events twice
for event in &self.events {
if event.timestamp >= clip_position_seconds && event.timestamp < clip_end_seconds {
result.push(*event);
let external_end = self.external_end();
for loop_idx in 0..num_loops {
let loop_offset = loop_idx as f64 * internal_duration;
// Get events from the clip that fall within the internal range
for event in &clip.events {
// Skip events outside the trimmed region
if event.timestamp < self.internal_start || event.timestamp >= self.internal_end {
continue;
}
// Convert to timeline time
let relative_content_time = event.timestamp - self.internal_start;
let timeline_time = self.external_start + loop_offset + relative_content_time;
// Check if within current buffer range and instance bounds
if timeline_time >= range_start_seconds
&& timeline_time < range_end_seconds
&& timeline_time < external_end
{
let mut adjusted_event = *event;
adjusted_event.timestamp = timeline_time;
result.push(adjusted_event);
}
}
}

View File

@ -0,0 +1,101 @@
use std::collections::HashMap;
use super::midi::{MidiClip, MidiClipId, MidiEvent};
/// Pool for storing MIDI clip content
/// Similar to AudioClipPool but for MIDI data
pub struct MidiClipPool {
    // Clip content records, keyed by their MidiClipId.
    clips: HashMap<MidiClipId, MidiClip>,
    // Next ID to hand out; starts at 1 so 0 can indicate "no clip".
    next_id: MidiClipId,
}
impl MidiClipPool {
/// Create a new empty MIDI clip pool
pub fn new() -> Self {
Self {
clips: HashMap::new(),
next_id: 1, // Start at 1 so 0 can indicate "no clip"
}
}
/// Add a new clip to the pool with the given events and duration
/// Returns the ID of the newly created clip
pub fn add_clip(&mut self, events: Vec<MidiEvent>, duration: f64, name: String) -> MidiClipId {
let id = self.next_id;
self.next_id += 1;
let clip = MidiClip::new(id, events, duration, name);
self.clips.insert(id, clip);
id
}
/// Add an existing clip to the pool (used when loading projects)
/// The clip's ID is preserved
pub fn add_existing_clip(&mut self, clip: MidiClip) {
// Update next_id to avoid collisions
if clip.id >= self.next_id {
self.next_id = clip.id + 1;
}
self.clips.insert(clip.id, clip);
}
/// Get a clip by ID
pub fn get_clip(&self, id: MidiClipId) -> Option<&MidiClip> {
self.clips.get(&id)
}
/// Get a mutable clip by ID
pub fn get_clip_mut(&mut self, id: MidiClipId) -> Option<&mut MidiClip> {
self.clips.get_mut(&id)
}
/// Remove a clip from the pool
pub fn remove_clip(&mut self, id: MidiClipId) -> Option<MidiClip> {
self.clips.remove(&id)
}
/// Duplicate a clip, returning the new clip's ID
pub fn duplicate_clip(&mut self, id: MidiClipId) -> Option<MidiClipId> {
let clip = self.clips.get(&id)?;
let new_id = self.next_id;
self.next_id += 1;
let mut new_clip = clip.clone();
new_clip.id = new_id;
new_clip.name = format!("{} (copy)", clip.name);
self.clips.insert(new_id, new_clip);
Some(new_id)
}
/// Get all clip IDs in the pool
pub fn clip_ids(&self) -> Vec<MidiClipId> {
self.clips.keys().copied().collect()
}
/// Get the number of clips in the pool
pub fn len(&self) -> usize {
self.clips.len()
}
/// Check if the pool is empty
pub fn is_empty(&self) -> bool {
self.clips.is_empty()
}
/// Clear all clips from the pool
pub fn clear(&mut self) {
self.clips.clear();
self.next_id = 1;
}
/// Get an iterator over all clips
pub fn iter(&self) -> impl Iterator<Item = (&MidiClipId, &MidiClip)> {
self.clips.iter()
}
}
impl Default for MidiClipPool {
fn default() -> Self {
Self::new()
}
}

View File

@ -6,6 +6,7 @@ pub mod engine;
pub mod export;
pub mod metronome;
pub mod midi;
pub mod midi_pool;
pub mod node_graph;
pub mod pool;
pub mod project;
@ -15,12 +16,13 @@ pub mod track;
pub use automation::{AutomationLane, AutomationLaneId, AutomationPoint, CurveType, ParameterId};
pub use buffer_pool::BufferPool;
pub use clip::{Clip, ClipId};
pub use clip::{AudioClipInstance, AudioClipInstanceId, Clip, ClipId};
pub use engine::{Engine, EngineController};
pub use export::{export_audio, ExportFormat, ExportSettings};
pub use metronome::Metronome;
pub use midi::{MidiClip, MidiClipId, MidiEvent};
pub use pool::{AudioFile as PoolAudioFile, AudioPool};
pub use midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
pub use midi_pool::MidiClipPool;
pub use pool::{AudioClipPool, AudioFile as PoolAudioFile, AudioPool};
pub use project::Project;
pub use recording::RecordingState;
pub use sample_loader::{load_audio_file, SampleData};

View File

@ -119,13 +119,16 @@ impl AudioFile {
}
}
/// Pool of shared audio files
pub struct AudioPool {
/// Pool of shared audio files (audio clip content)
pub struct AudioClipPool {
files: Vec<AudioFile>,
}
impl AudioPool {
/// Create a new empty audio pool
/// Type alias for backwards compatibility
pub type AudioPool = AudioClipPool;
impl AudioClipPool {
/// Create a new empty audio clip pool
pub fn new() -> Self {
Self {
files: Vec::new(),
@ -301,7 +304,7 @@ impl AudioPool {
}
}
impl Default for AudioPool {
impl Default for AudioClipPool {
fn default() -> Self {
Self::new()
}
@ -335,8 +338,8 @@ pub struct AudioPoolEntry {
pub embedded_data: Option<EmbeddedAudioData>,
}
impl AudioPool {
/// Serialize the audio pool for project saving
impl AudioClipPool {
/// Serialize the audio clip pool for project saving
///
/// Files smaller than 10MB are embedded as base64.
/// Larger files are stored as relative paths to the project file.

View File

@ -1,19 +1,27 @@
use super::buffer_pool::BufferPool;
use super::clip::Clip;
use super::midi::{MidiClip, MidiEvent};
use super::pool::AudioPool;
use super::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
use super::midi_pool::MidiClipPool;
use super::pool::AudioClipPool;
use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode};
use std::collections::HashMap;
/// Project manages the hierarchical track structure
/// Project manages the hierarchical track structure and clip pools
///
/// Tracks are stored in a flat HashMap but can be organized into groups,
/// forming a tree structure. Groups render their children recursively.
///
/// Clip content is stored in pools (MidiClipPool), while tracks store
/// clip instances that reference the pool content.
pub struct Project {
tracks: HashMap<TrackId, TrackNode>,
next_track_id: TrackId,
root_tracks: Vec<TrackId>, // Top-level tracks (not in any group)
sample_rate: u32, // System sample rate
/// Pool for MIDI clip content
pub midi_clip_pool: MidiClipPool,
/// Next MIDI clip instance ID (for generating unique IDs)
next_midi_clip_instance_id: MidiClipInstanceId,
}
impl Project {
@ -24,6 +32,8 @@ impl Project {
next_track_id: 0,
root_tracks: Vec::new(),
sample_rate,
midi_clip_pool: MidiClipPool::new(),
next_midi_clip_instance_id: 1,
}
}
@ -241,21 +251,81 @@ impl Project {
}
}
/// Add a MIDI clip to a MIDI track
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> {
/// Add a MIDI clip instance to a MIDI track
/// The clip content should already exist in the midi_clip_pool
pub fn add_midi_clip_instance(&mut self, track_id: TrackId, instance: MidiClipInstance) -> Result<(), &'static str> {
if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
track.add_clip(clip);
track.add_clip_instance(instance);
Ok(())
} else {
Err("Track not found or is not a MIDI track")
}
}
/// Create a new MIDI clip in the pool and add an instance to a track
/// Returns (clip_id, instance_id) on success
pub fn create_midi_clip_with_instance(
&mut self,
track_id: TrackId,
events: Vec<MidiEvent>,
duration: f64,
name: String,
external_start: f64,
) -> Result<(MidiClipId, MidiClipInstanceId), &'static str> {
// Verify track exists and is a MIDI track
if !matches!(self.tracks.get(&track_id), Some(TrackNode::Midi(_))) {
return Err("Track not found or is not a MIDI track");
}
// Create clip in pool
let clip_id = self.midi_clip_pool.add_clip(events, duration, name);
// Create instance
let instance_id = self.next_midi_clip_instance_id;
self.next_midi_clip_instance_id += 1;
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, external_start);
// Add instance to track
if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
track.add_clip_instance(instance);
}
Ok((clip_id, instance_id))
}
/// Generate a new unique MIDI clip instance ID
pub fn next_midi_clip_instance_id(&mut self) -> MidiClipInstanceId {
let id = self.next_midi_clip_instance_id;
self.next_midi_clip_instance_id += 1;
id
}
/// Legacy method for backwards compatibility - creates clip and instance from old MidiClip format
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> {
self.add_midi_clip_at(track_id, clip, 0.0)
}
/// Add a MIDI clip to the pool and create an instance at the given timeline position
pub fn add_midi_clip_at(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) -> Result<(), &'static str> {
// Add the clip to the pool (it already has events and duration)
let duration = clip.duration;
let clip_id = clip.id;
self.midi_clip_pool.add_existing_clip(clip);
// Create an instance that uses the full clip at the given position
let instance_id = self.next_midi_clip_instance_id();
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
self.add_midi_clip_instance(track_id, instance)
}
/// Render all root tracks into the output buffer
pub fn render(
&mut self,
output: &mut [f32],
pool: &AudioPool,
audio_pool: &AudioClipPool,
midi_pool: &MidiClipPool,
buffer_pool: &mut BufferPool,
playhead_seconds: f64,
sample_rate: u32,
@ -278,7 +348,8 @@ impl Project {
self.render_track(
track_id,
output,
pool,
audio_pool,
midi_pool,
buffer_pool,
ctx,
any_solo,
@ -292,7 +363,8 @@ impl Project {
&mut self,
track_id: TrackId,
output: &mut [f32],
pool: &AudioPool,
audio_pool: &AudioClipPool,
midi_pool: &MidiClipPool,
buffer_pool: &mut BufferPool,
ctx: RenderContext,
any_solo: bool,
@ -336,11 +408,11 @@ impl Project {
match self.tracks.get_mut(&track_id) {
Some(TrackNode::Audio(track)) => {
// Render audio track directly into output
track.render(output, pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
track.render(output, audio_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
}
Some(TrackNode::Midi(track)) => {
// Render MIDI track directly into output
track.render(output, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
track.render(output, midi_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
}
Some(TrackNode::Group(group)) => {
// Get children IDs, check if this group is soloed, and transform context
@ -360,7 +432,8 @@ impl Project {
self.render_track(
child_id,
&mut group_buffer,
pool,
audio_pool,
midi_pool,
buffer_pool,
child_ctx,
any_solo,

View File

@ -1,9 +1,10 @@
use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
use super::clip::Clip;
use super::midi::{MidiClip, MidiEvent};
use super::clip::AudioClipInstance;
use super::midi::{MidiClipInstance, MidiEvent};
use super::midi_pool::MidiClipPool;
use super::node_graph::AudioGraph;
use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
use super::pool::AudioPool;
use super::pool::AudioClipPool;
use std::collections::HashMap;
/// Track ID type
@ -285,11 +286,12 @@ impl Metatrack {
}
}
/// MIDI track with MIDI clips and a node-based instrument
/// MIDI track with MIDI clip instances and a node-based instrument
pub struct MidiTrack {
pub id: TrackId,
pub name: String,
pub clips: Vec<MidiClip>,
/// Clip instances placed on this track (reference clips in the MidiClipPool)
pub clip_instances: Vec<MidiClipInstance>,
pub instrument_graph: AudioGraph,
pub volume: f32,
pub muted: bool,
@ -310,7 +312,7 @@ impl MidiTrack {
Self {
id,
name,
clips: Vec::new(),
clip_instances: Vec::new(),
instrument_graph: AudioGraph::new(sample_rate, default_buffer_size),
volume: 1.0,
muted: false,
@ -346,9 +348,9 @@ impl MidiTrack {
self.automation_lanes.remove(&lane_id).is_some()
}
/// Add a MIDI clip to this track
pub fn add_clip(&mut self, clip: MidiClip) {
self.clips.push(clip);
/// Add a MIDI clip instance to this track
pub fn add_clip_instance(&mut self, instance: MidiClipInstance) {
self.clip_instances.push(instance);
}
/// Set track volume
@ -420,6 +422,7 @@ impl MidiTrack {
pub fn render(
&mut self,
output: &mut [f32],
midi_pool: &MidiClipPool,
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
@ -427,18 +430,19 @@ impl MidiTrack {
let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;
// Collect MIDI events from all clips that overlap with current time range
// Collect MIDI events from all clip instances that overlap with current time range
let mut midi_events = Vec::new();
for clip in &self.clips {
let events = clip.get_events_in_range(
for instance in &self.clip_instances {
// Get the clip content from the pool
if let Some(clip) = midi_pool.get_clip(instance.clip_id) {
let events = instance.get_events_in_range(
clip,
playhead_seconds,
buffer_end_seconds,
sample_rate,
);
// Events now have timestamps in seconds relative to clip start
midi_events.extend(events);
}
}
// Add live MIDI events (from virtual keyboard or MIDI controllers)
// This allows real-time input to be heard during playback/recording
@ -480,11 +484,12 @@ impl MidiTrack {
}
}
/// Audio track with clips
/// Audio track with audio clip instances
pub struct AudioTrack {
pub id: TrackId,
pub name: String,
pub clips: Vec<Clip>,
/// Audio clip instances (reference content in the AudioClipPool)
pub clips: Vec<AudioClipInstance>,
pub volume: f32,
pub muted: bool,
pub solo: bool,
@ -560,8 +565,8 @@ impl AudioTrack {
self.automation_lanes.remove(&lane_id).is_some()
}
/// Add a clip to this track
pub fn add_clip(&mut self, clip: Clip) {
/// Add an audio clip instance to this track
pub fn add_clip(&mut self, clip: AudioClipInstance) {
self.clips.push(clip);
}
@ -590,7 +595,7 @@ impl AudioTrack {
pub fn render(
&mut self,
output: &mut [f32],
pool: &AudioPool,
pool: &AudioClipPool,
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
@ -602,10 +607,10 @@ impl AudioTrack {
let mut clip_buffer = vec![0.0f32; output.len()];
let mut rendered = 0;
// Render all active clips into the temporary buffer
// Render all active clip instances into the temporary buffer
for clip in &self.clips {
// Check if clip overlaps with current buffer time range
if clip.start_time < buffer_end_seconds && clip.end_time() > playhead_seconds {
if clip.external_start < buffer_end_seconds && clip.external_end() > playhead_seconds {
rendered += self.render_clip(
clip,
&mut clip_buffer,
@ -667,12 +672,13 @@ impl AudioTrack {
volume
}
/// Render a single clip into the output buffer
/// Render a single audio clip instance into the output buffer
/// Handles looping when external_duration > internal_duration
fn render_clip(
&self,
clip: &Clip,
clip: &AudioClipInstance,
output: &mut [f32],
pool: &AudioPool,
pool: &AudioClipPool,
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
@ -680,46 +686,94 @@ impl AudioTrack {
let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;
// Determine the time range we need to render (intersection of buffer and clip)
let render_start_seconds = playhead_seconds.max(clip.start_time);
let render_end_seconds = buffer_end_seconds.min(clip.end_time());
// Determine the time range we need to render (intersection of buffer and clip external bounds)
let render_start_seconds = playhead_seconds.max(clip.external_start);
let render_end_seconds = buffer_end_seconds.min(clip.external_end());
// If no overlap, return early
if render_start_seconds >= render_end_seconds {
return 0;
}
// Calculate offset into the output buffer (in interleaved samples)
let output_offset_seconds = render_start_seconds - playhead_seconds;
let output_offset_samples = (output_offset_seconds * sample_rate as f64 * channels as f64) as usize;
// Calculate position within the clip's audio file (in seconds)
let clip_position_seconds = render_start_seconds - clip.start_time + clip.offset;
// Calculate how many samples to render in the output
let render_duration_seconds = render_end_seconds - render_start_seconds;
let samples_to_render = (render_duration_seconds * sample_rate as f64 * channels as f64) as usize;
let samples_to_render = samples_to_render.min(output.len() - output_offset_samples);
// Get the slice of output buffer to write to
if output_offset_samples + samples_to_render > output.len() {
let internal_duration = clip.internal_duration();
if internal_duration <= 0.0 {
return 0;
}
let output_slice = &mut output[output_offset_samples..output_offset_samples + samples_to_render];
// Calculate combined gain
let combined_gain = clip.gain * self.volume;
// Render from pool with sample rate conversion
// Pass the time position in seconds, let the pool handle sample rate conversion
pool.render_from_file(
let mut total_rendered = 0;
// Process the render range sample by sample (or in chunks for efficiency)
// For looping clips, we need to handle wrap-around at the loop boundary
let samples_per_second = sample_rate as f64 * channels as f64;
// For now, render in a simpler way - iterate through the timeline range
// and use get_content_position for each sample position
let output_start_offset = ((render_start_seconds - playhead_seconds) * samples_per_second) as usize;
let output_end_offset = ((render_end_seconds - playhead_seconds) * samples_per_second) as usize;
if output_end_offset > output.len() || output_start_offset > output.len() {
return 0;
}
// If not looping, we can render in one chunk (more efficient)
if !clip.is_looping() {
// Simple case: no looping
let content_start = clip.get_content_position(render_start_seconds).unwrap_or(clip.internal_start);
let output_len = output.len();
let output_slice = &mut output[output_start_offset..output_end_offset.min(output_len)];
total_rendered = pool.render_from_file(
clip.audio_pool_index,
output_slice,
clip_position_seconds,
content_start,
combined_gain,
sample_rate,
channels,
)
);
} else {
// Looping case: need to handle wrap-around at loop boundaries
// Render in segments, one per loop iteration
let mut timeline_pos = render_start_seconds;
let mut output_offset = output_start_offset;
while timeline_pos < render_end_seconds && output_offset < output.len() {
// Calculate position within the loop
let relative_pos = timeline_pos - clip.external_start;
let loop_offset = relative_pos % internal_duration;
let content_pos = clip.internal_start + loop_offset;
// Calculate how much we can render before hitting the loop boundary
let time_to_loop_end = internal_duration - loop_offset;
let time_to_render_end = render_end_seconds - timeline_pos;
let chunk_duration = time_to_loop_end.min(time_to_render_end);
let chunk_samples = (chunk_duration * samples_per_second) as usize;
let chunk_samples = chunk_samples.min(output.len() - output_offset);
if chunk_samples == 0 {
break;
}
let output_slice = &mut output[output_offset..output_offset + chunk_samples];
let rendered = pool.render_from_file(
clip.audio_pool_index,
output_slice,
content_pos,
combined_gain,
sample_rate,
channels,
);
total_rendered += rendered;
output_offset += chunk_samples;
timeline_pos += chunk_duration;
}
}
total_rendered
}
}

View File

@ -28,10 +28,14 @@ pub enum Command {
SetTrackSolo(TrackId, bool),
// Clip management commands
/// Move a clip to a new timeline position
/// Move a clip to a new timeline position (track_id, clip_id, new_external_start)
MoveClip(TrackId, ClipId, f64),
/// Trim a clip (track_id, clip_id, new_start_time, new_duration, new_offset)
TrimClip(TrackId, ClipId, f64, f64, f64),
/// Trim a clip's internal boundaries (track_id, clip_id, new_internal_start, new_internal_end)
/// This changes which portion of the source content is used
TrimClip(TrackId, ClipId, f64, f64),
/// Extend/shrink a clip's external duration (track_id, clip_id, new_external_duration)
/// If duration > internal duration, the clip will loop
ExtendClip(TrackId, ClipId, f64),
// Metatrack management commands
/// Create a new metatrack with a name
@ -67,8 +71,8 @@ pub enum Command {
CreateMidiClip(TrackId, f64, f64),
/// Add a MIDI note to a clip (track_id, clip_id, time_offset, note, velocity, duration)
AddMidiNote(TrackId, MidiClipId, f64, u8, u8, f64),
/// Add a pre-loaded MIDI clip to a track
AddLoadedMidiClip(TrackId, MidiClip),
/// Add a pre-loaded MIDI clip to a track (track_id, clip, start_time)
AddLoadedMidiClip(TrackId, MidiClip, f64),
/// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>)
/// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes
UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>),

View File

@ -157,9 +157,8 @@ pub fn load_midi_file<P: AsRef<Path>>(
(final_delta_ticks as f64 / ticks_per_beat) * (microseconds_per_beat / 1_000_000.0);
let duration_seconds = accumulated_time + final_delta_time;
// Create the MIDI clip
let mut clip = MidiClip::new(clip_id, 0.0, duration_seconds);
clip.events = events;
// Create the MIDI clip (content only, positioning happens when creating instance)
let clip = MidiClip::new(clip_id, events, duration_seconds, "Imported MIDI".to_string());
Ok(clip)
}

View File

@ -146,18 +146,9 @@ impl MidiInputManager {
connection,
});
println!("MIDI: Connected to: {}", port_name);
// Need to recreate MidiInput for next iteration
let _midi_in = MidiInput::new("Lightningbeam")
.map_err(|e| format!("Failed to recreate MIDI input: {}", e))?;
midi_in = _midi_in;
}
Err(e) => {
eprintln!("MIDI: Failed to connect to {}: {}", port_name, e);
// Recreate MidiInput to continue with other ports
let _midi_in = MidiInput::new("Lightningbeam")
.map_err(|e| format!("Failed to recreate MIDI input: {}", e))?;
midi_in = _midi_in;
}
}
}

View File

@ -847,8 +847,7 @@ fn execute_command(
// Load the MIDI file
match load_midi_file(file_path, app.next_clip_id, 48000) {
Ok(mut midi_clip) => {
midi_clip.start_time = start_time;
Ok(midi_clip) => {
let clip_id = midi_clip.id;
let duration = midi_clip.duration;
let event_count = midi_clip.events.len();
@ -882,8 +881,8 @@ fn execute_command(
app.add_clip(track_id, clip_id, start_time, duration, file_path.to_string(), notes);
app.next_clip_id += 1;
// Send to audio engine
controller.add_loaded_midi_clip(track_id, midi_clip);
// Send to audio engine with the start_time (clip content is separate from timeline position)
controller.add_loaded_midi_clip(track_id, midi_clip, start_time);
app.set_status(format!("Loaded {} ({} events, {:.2}s) to track {} at {:.2}s",
file_path, event_count, duration, track_id, start_time));

View File

@ -1,5 +1,6 @@
use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
use daw_backend::audio::pool::AudioPoolEntry;
use ffmpeg_next::ffi::FF_LOSS_COLORQUANT;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use std::path::Path;
@ -406,13 +407,28 @@ pub async fn audio_trim_clip(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
clip_id: u32,
new_start_time: f64,
new_duration: f64,
new_offset: f64,
internal_start: f64,
internal_end: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.trim_clip(track_id, clip_id, new_start_time, new_duration, new_offset);
controller.trim_clip(track_id, clip_id, internal_start, internal_end);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_extend_clip(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
clip_id: u32,
new_external_duration: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.extend_clip(track_id, clip_id, new_external_duration);
Ok(())
} else {
Err("Audio not initialized".to_string())
@ -601,11 +617,8 @@ pub async fn audio_load_midi_file(
let sample_rate = audio_state.sample_rate;
if let Some(controller) = &mut audio_state.controller {
// Load and parse the MIDI file
let mut clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;
// Set the start time
clip.start_time = start_time;
// Load and parse the MIDI file (clip content only, no positioning)
let clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;
let duration = clip.duration;
// Extract note data from MIDI events
@ -631,8 +644,8 @@ pub async fn audio_load_midi_file(
}
}
// Add the loaded MIDI clip to the track
controller.add_loaded_midi_clip(track_id, clip);
// Add the loaded MIDI clip to the track at the specified start_time
controller.add_loaded_midi_clip(track_id, clip, start_time);
Ok(MidiFileMetadata {
duration,

View File

@ -256,6 +256,7 @@ pub fn run() {
audio::audio_add_clip,
audio::audio_move_clip,
audio::audio_trim_clip,
audio::audio_extend_clip,
audio::audio_start_recording,
audio::audio_stop_recording,
audio::audio_pause_recording,

View File

@ -1622,6 +1622,7 @@ async function toggleRecording() {
name: 'Recording...',
startTime: startTime,
duration: clipDuration,
offset: 0,
notes: [],
loading: true
});
@ -1818,12 +1819,28 @@ async function _save(path) {
// Serialize current layout structure (panes, splits, sizes)
const serializedLayout = serializeLayout(rootPane);
// Serialize timeline state
let timelineState = null;
if (context.timelineWidget?.timelineState) {
const ts = context.timelineWidget.timelineState;
timelineState = {
timeFormat: ts.timeFormat,
framerate: ts.framerate,
bpm: ts.bpm,
timeSignature: ts.timeSignature,
pixelsPerSecond: ts.pixelsPerSecond,
viewportStartTime: ts.viewportStartTime,
snapToFrames: ts.snapToFrames,
};
}
const fileData = {
version: "2.0.0",
width: config.fileWidth,
height: config.fileHeight,
fps: config.framerate,
layoutState: serializedLayout, // Save current layout structure
timelineState: timelineState, // Save timeline settings
actions: undoStack,
json: root.toJSON(),
// Audio pool at the end for human readability
@ -2275,6 +2292,44 @@ async function _open(path, returnJson = false) {
console.log('[JS] Skipping layout restoration');
}
// Restore timeline state if saved
if (file.timelineState && context.timelineWidget?.timelineState) {
const ts = context.timelineWidget.timelineState;
const saved = file.timelineState;
console.log('[JS] Restoring timeline state:', saved);
if (saved.timeFormat) ts.timeFormat = saved.timeFormat;
if (saved.framerate) ts.framerate = saved.framerate;
if (saved.bpm) ts.bpm = saved.bpm;
if (saved.timeSignature) ts.timeSignature = saved.timeSignature;
if (saved.pixelsPerSecond) ts.pixelsPerSecond = saved.pixelsPerSecond;
if (saved.viewportStartTime !== undefined) ts.viewportStartTime = saved.viewportStartTime;
if (saved.snapToFrames !== undefined) ts.snapToFrames = saved.snapToFrames;
// Update metronome button visibility based on restored time format
if (context.metronomeGroup) {
context.metronomeGroup.style.display = ts.timeFormat === 'measures' ? '' : 'none';
}
// Update time display
if (context.updateTimeDisplay) {
context.updateTimeDisplay();
}
// Update snap checkbox if it exists
const snapCheckbox = document.getElementById('snap-checkbox');
if (snapCheckbox) {
snapCheckbox.checked = ts.snapToFrames;
}
// Trigger timeline redraw
if (context.timelineWidget.requestRedraw) {
context.timelineWidget.requestRedraw();
}
console.log('[JS] Timeline state restored successfully');
}
// Restore audio tracks and clips to the Rust backend
// The fromJSON method only creates JavaScript objects,
// but doesn't initialize them in the audio engine
@ -5074,6 +5129,35 @@ function timeline() {
controls.push(timeDisplay);
// Snap checkbox
const snapGroup = document.createElement("div");
snapGroup.className = "playback-controls-group";
snapGroup.style.display = "flex";
snapGroup.style.alignItems = "center";
snapGroup.style.gap = "4px";
const snapCheckbox = document.createElement("input");
snapCheckbox.type = "checkbox";
snapCheckbox.id = "snap-checkbox";
snapCheckbox.checked = timelineWidget.timelineState.snapToFrames;
snapCheckbox.style.cursor = "pointer";
snapCheckbox.addEventListener("change", () => {
timelineWidget.timelineState.snapToFrames = snapCheckbox.checked;
console.log('Snapping', snapCheckbox.checked ? 'enabled' : 'disabled');
});
const snapLabel = document.createElement("label");
snapLabel.htmlFor = "snap-checkbox";
snapLabel.textContent = "Snap";
snapLabel.style.cursor = "pointer";
snapLabel.style.fontSize = "12px";
snapLabel.style.color = "var(--text-secondary)";
snapGroup.appendChild(snapCheckbox);
snapGroup.appendChild(snapLabel);
controls.push(snapGroup);
return controls;
};
@ -10508,11 +10592,108 @@ function piano() {
}
function pianoRoll() {
// Create container for piano roll and properties panel
let container = document.createElement("div");
container.className = "piano-roll-container";
container.style.position = "relative";
container.style.width = "100%";
container.style.height = "100%";
container.style.display = "flex";
let canvas = document.createElement("canvas");
canvas.className = "piano-roll";
canvas.style.flex = "1";
// Create properties panel
let propertiesPanel = document.createElement("div");
propertiesPanel.className = "piano-roll-properties";
propertiesPanel.style.display = "flex";
propertiesPanel.style.gap = "15px";
propertiesPanel.style.padding = "10px";
propertiesPanel.style.backgroundColor = "#1e1e1e";
propertiesPanel.style.borderLeft = "1px solid #333";
propertiesPanel.style.alignItems = "center";
propertiesPanel.style.fontSize = "12px";
propertiesPanel.style.color = "#ccc";
// Create property sections
const createPropertySection = (label, isEditable = false) => {
const section = document.createElement("div");
section.style.display = "flex";
section.style.flexDirection = "column";
section.style.gap = "5px";
const labelEl = document.createElement("label");
labelEl.textContent = label;
labelEl.style.fontSize = "11px";
labelEl.style.color = "#999";
section.appendChild(labelEl);
if (isEditable) {
const inputContainer = document.createElement("div");
inputContainer.style.display = "flex";
inputContainer.style.gap = "5px";
inputContainer.style.alignItems = "center";
const input = document.createElement("input");
input.type = "number";
input.style.width = "45px";
input.style.padding = "3px";
input.style.backgroundColor = "#2a2a2a";
input.style.border = "1px solid #444";
input.style.borderRadius = "3px";
input.style.color = "#ccc";
input.style.fontSize = "12px";
input.style.boxSizing = "border-box";
inputContainer.appendChild(input);
const slider = document.createElement("input");
slider.type = "range";
slider.style.flex = "1";
slider.style.minWidth = "80px";
inputContainer.appendChild(slider);
section.appendChild(inputContainer);
return { section, input, slider };
} else {
const value = document.createElement("span");
value.style.color = "#fff";
value.textContent = "-";
section.appendChild(value);
return { section, value };
}
};
const pitchSection = createPropertySection("Pitch");
const velocitySection = createPropertySection("Velocity", true);
const modulationSection = createPropertySection("Modulation", true);
// Configure velocity slider
velocitySection.input.min = 1;
velocitySection.input.max = 127;
velocitySection.slider.min = 1;
velocitySection.slider.max = 127;
// Configure modulation slider
modulationSection.input.min = 0;
modulationSection.input.max = 127;
modulationSection.slider.min = 0;
modulationSection.slider.max = 127;
propertiesPanel.appendChild(pitchSection.section);
propertiesPanel.appendChild(velocitySection.section);
propertiesPanel.appendChild(modulationSection.section);
container.appendChild(canvas);
container.appendChild(propertiesPanel);
// Create the piano roll editor widget
canvas.pianoRollEditor = new PianoRollEditor(0, 0, 0, 0);
canvas.pianoRollEditor.propertiesPanel = {
pitch: pitchSection.value,
velocity: { input: velocitySection.input, slider: velocitySection.slider },
modulation: { input: modulationSection.input, slider: modulationSection.slider }
};
function updateCanvasSize() {
const canvasStyles = window.getComputedStyle(canvas);
@ -10533,6 +10714,30 @@ function pianoRoll() {
// Render the piano roll
canvas.pianoRollEditor.draw(ctx);
// Update properties panel layout based on aspect ratio
const containerWidth = container.offsetWidth;
const containerHeight = container.offsetHeight;
const isWide = containerWidth > containerHeight;
if (isWide) {
// Side layout
container.style.flexDirection = "row";
propertiesPanel.style.flexDirection = "column";
propertiesPanel.style.width = "240px";
propertiesPanel.style.height = "auto";
propertiesPanel.style.borderLeft = "1px solid #333";
propertiesPanel.style.borderTop = "none";
propertiesPanel.style.alignItems = "stretch";
} else {
// Bottom layout
container.style.flexDirection = "column";
propertiesPanel.style.flexDirection = "row";
propertiesPanel.style.width = "auto";
propertiesPanel.style.height = "60px";
propertiesPanel.style.borderLeft = "none";
propertiesPanel.style.borderTop = "1px solid #333";
}
}
// Store references in context for global access and playback updates
@ -10543,7 +10748,7 @@ function pianoRoll() {
const resizeObserver = new ResizeObserver(() => {
updateCanvasSize();
});
resizeObserver.observe(canvas);
resizeObserver.observe(container);
// Pointer event handlers (works with mouse and touch)
canvas.addEventListener("pointerdown", (e) => {
@ -10585,7 +10790,69 @@ function pianoRoll() {
// Prevent text selection
canvas.addEventListener("selectstart", (e) => e.preventDefault());
return canvas;
// Add event handlers for velocity and modulation inputs/sliders
const syncInputSlider = (input, slider) => {
input.addEventListener("input", () => {
const value = parseInt(input.value);
if (!isNaN(value)) {
slider.value = value;
}
});
slider.addEventListener("input", () => {
input.value = slider.value;
});
};
syncInputSlider(velocitySection.input, velocitySection.slider);
syncInputSlider(modulationSection.input, modulationSection.slider);
// Handle property changes
const updateNoteProperty = (property, value) => {
const clipData = canvas.pianoRollEditor.getSelectedClip();
if (!clipData || !clipData.clip || !clipData.clip.notes) return;
if (canvas.pianoRollEditor.selectedNotes.size === 0) return;
for (const noteIndex of canvas.pianoRollEditor.selectedNotes) {
if (noteIndex >= 0 && noteIndex < clipData.clip.notes.length) {
const note = clipData.clip.notes[noteIndex];
if (property === "velocity") {
note.velocity = value;
} else if (property === "modulation") {
note.modulation = value;
}
}
}
canvas.pianoRollEditor.syncNotesToBackend(clipData);
updateCanvasSize();
};
velocitySection.input.addEventListener("change", (e) => {
const value = parseInt(e.target.value);
if (!isNaN(value) && value >= 1 && value <= 127) {
updateNoteProperty("velocity", value);
}
});
velocitySection.slider.addEventListener("change", (e) => {
const value = parseInt(e.target.value);
updateNoteProperty("velocity", value);
});
modulationSection.input.addEventListener("change", (e) => {
const value = parseInt(e.target.value);
if (!isNaN(value) && value >= 0 && value <= 127) {
updateNoteProperty("modulation", value);
}
});
modulationSection.slider.addEventListener("change", (e) => {
const value = parseInt(e.target.value);
updateNoteProperty("modulation", value);
});
return container;
}
function presetBrowser() {

View File

@ -1178,12 +1178,12 @@ class AudioTrack {
name: clip.name,
startTime: clip.startTime,
duration: clip.duration,
offset: clip.offset || 0, // Default to 0 if not present
};
// Restore audio-specific fields
if (clip.poolIndex !== undefined) {
clipData.poolIndex = clip.poolIndex;
clipData.offset = clip.offset;
}
// Restore MIDI-specific fields

View File

@ -97,7 +97,7 @@ export let config = {
currentLayout: "animation", // Current active layout key
defaultLayout: "animation", // Default layout for new files
showStartScreen: false, // Show layout picker on startup (disabled for now)
restoreLayoutFromFile: false, // Restore layout when opening files
restoreLayoutFromFile: true, // Restore layout when opening files
customLayouts: [] // User-saved custom layouts
};

View File

@ -24,7 +24,7 @@ class TimelineState {
this.rulerHeight = 30 // Height of time ruler in pixels
// Snapping (Phase 5)
this.snapToFrames = false // Whether to snap keyframes to frame boundaries
this.snapToFrames = true // Whether to snap keyframes to frame boundaries (default: on)
}
/**

File diff suppressed because it is too large Load Diff