add metatracks

This commit is contained in:
Skyler Lehmkuhl 2025-10-18 22:56:38 -04:00
parent 242f494219
commit f9e2d36f3a
9 changed files with 481 additions and 122 deletions

View File

@ -334,19 +334,34 @@ impl Engine {
Command::ClearEffects(track_id) => {
let _ = self.project.clear_effects(track_id);
}
Command::CreateGroup(name) => {
Command::CreateMetatrack(name) => {
let track_id = self.project.add_group_track(name.clone(), None);
// Notify UI about the new group
// Notify UI about the new metatrack
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, true, name));
}
Command::AddToGroup(track_id, group_id) => {
// Move the track to the new group (Project handles removing from old parent)
self.project.move_to_group(track_id, group_id);
Command::AddToMetatrack(track_id, metatrack_id) => {
// Move the track to the new metatrack (Project handles removing from old parent)
self.project.move_to_group(track_id, metatrack_id);
}
Command::RemoveFromGroup(track_id) => {
Command::RemoveFromMetatrack(track_id) => {
// Move to root level (None as parent)
self.project.move_to_root(track_id);
}
Command::SetTimeStretch(track_id, stretch) => {
if let Some(crate::audio::track::TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.time_stretch = stretch.max(0.01); // Prevent zero or negative stretch
}
}
Command::SetOffset(track_id, offset) => {
if let Some(crate::audio::track::TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.offset = offset;
}
}
Command::SetPitchShift(track_id, semitones) => {
if let Some(crate::audio::track::TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.pitch_shift = semitones;
}
}
Command::CreateMidiTrack(name) => {
let track_id = self.project.add_midi_track(name.clone(), None);
// Notify UI about the new MIDI track
@ -483,19 +498,36 @@ impl EngineController {
samples as f64 / (self.sample_rate as f64 * self.channels as f64)
}
/// Create a new group track
pub fn create_group(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateGroup(name));
/// Create a new metatrack
///
/// Sends `Command::CreateMetatrack` to the engine; the engine responds with an
/// `AudioEvent::TrackCreated` carrying the new track's ID once it is created.
/// If the command queue is full, the command is silently dropped (`let _`).
pub fn create_metatrack(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateMetatrack(name));
}
/// Add a track to a group
pub fn add_to_group(&mut self, track_id: TrackId, group_id: TrackId) {
let _ = self.command_tx.push(Command::AddToGroup(track_id, group_id));
/// Add a track to a metatrack
///
/// The engine moves `track_id` under `metatrack_id`, detaching it from any
/// previous parent. If the command queue is full, the command is silently
/// dropped (`let _`).
pub fn add_to_metatrack(&mut self, track_id: TrackId, metatrack_id: TrackId) {
let _ = self.command_tx.push(Command::AddToMetatrack(track_id, metatrack_id));
}
/// Remove a track from its parent group
pub fn remove_from_group(&mut self, track_id: TrackId) {
let _ = self.command_tx.push(Command::RemoveFromGroup(track_id));
/// Remove a track from its parent metatrack
///
/// The engine moves the track back to the root level of the project.
/// If the command queue is full, the command is silently dropped (`let _`).
pub fn remove_from_metatrack(&mut self, track_id: TrackId) {
let _ = self.command_tx.push(Command::RemoveFromMetatrack(track_id));
}
/// Set metatrack time stretch factor
/// 0.5 = half speed, 1.0 = normal, 2.0 = double speed
///
/// The engine clamps the factor to a minimum of 0.01, so zero/negative values
/// are not applied as-is. Has no effect if `track_id` is not a metatrack.
/// If the command queue is full, the command is silently dropped (`let _`).
pub fn set_time_stretch(&mut self, track_id: TrackId, stretch: f32) {
let _ = self.command_tx.push(Command::SetTimeStretch(track_id, stretch));
}
/// Set metatrack time offset in seconds
/// Positive = shift content later, negative = shift earlier
///
/// Has no effect if `track_id` is not a metatrack. If the command queue is
/// full, the command is silently dropped (`let _`).
pub fn set_offset(&mut self, track_id: TrackId, offset: f64) {
let _ = self.command_tx.push(Command::SetOffset(track_id, offset));
}
/// Set metatrack pitch shift in semitones (for future use)
///
/// The engine stores the value on the metatrack but does not yet apply it
/// during rendering. Has no effect if `track_id` is not a metatrack.
/// If the command queue is full, the command is silently dropped (`let _`).
pub fn set_pitch_shift(&mut self, track_id: TrackId, semitones: f32) {
let _ = self.command_tx.push(Command::SetPitchShift(track_id, semitones));
}
/// Create a new MIDI track

View File

@ -12,4 +12,4 @@ pub use engine::{Engine, EngineController};
pub use midi::{MidiClip, MidiClipId, MidiEvent};
pub use pool::{AudioFile as PoolAudioFile, AudioPool};
pub use project::Project;
pub use track::{AudioTrack, GroupTrack, MidiTrack, Track, TrackId, TrackNode};
pub use track::{AudioTrack, Metatrack, MidiTrack, RenderContext, Track, TrackId, TrackNode};

View File

@ -1,5 +1,19 @@
use std::path::PathBuf;
/// Cubic Hermite (Catmull-Rom) interpolation for smooth resampling.
///
/// `p0`..`p3` are four consecutive samples; `x` is the fractional position
/// between `p1` and `p2` (0.0 to 1.0). Returns exactly `p1` at `x = 0.0` and
/// exactly `p2` at `x = 1.0`.
#[inline]
fn hermite_interpolate(p0: f32, p1: f32, p2: f32, p3: f32, x: f32) -> f32 {
    // Catmull-Rom coefficients: value and slope at p1, plus the two
    // higher-order terms that carry the curve through p2.
    let constant = p1;
    let linear = 0.5 * (p2 - p0);
    let quadratic = p0 - 2.5 * p1 + 2.0 * p2 - 0.5 * p3;
    let cubic = 0.5 * (p3 - p0) + 1.5 * (p1 - p2);
    // Horner evaluation of cubic*x^3 + quadratic*x^2 + linear*x + constant.
    ((cubic * x + quadratic) * x + linear) * x + constant
}
/// Audio file stored in the pool
#[derive(Debug, Clone)]
pub struct AudioFile {
@ -103,43 +117,61 @@ impl AudioPool {
break;
}
// Linear interpolation for better quality
let frac = src_frame_pos - src_frame_idx as f64;
let next_frame_idx = src_frame_idx + 1;
let next_sample_idx = next_frame_idx * src_channels as usize;
let can_interpolate = next_sample_idx + src_channels as usize <= audio_file.data.len() && frac > 0.0;
// Cubic Hermite interpolation for high-quality time stretching
let frac = (src_frame_pos - src_frame_idx as f64) as f32;
// We need 4 points for cubic interpolation: p0, p1, p2, p3
// where we interpolate between p1 and p2
let p1_frame = src_frame_idx;
let p0_frame = if p1_frame > 0 { p1_frame - 1 } else { p1_frame };
let p2_frame = p1_frame + 1;
let p3_frame = p1_frame + 2;
let p0_idx = p0_frame * src_channels as usize;
let p1_idx = p1_frame * src_channels as usize;
let p2_idx = p2_frame * src_channels as usize;
let p3_idx = p3_frame * src_channels as usize;
let can_interpolate = p3_idx + src_channels as usize <= audio_file.data.len();
// Read and convert channels
for dst_ch in 0..dst_channels {
let sample = if src_channels == dst_channels {
// Same number of channels - direct mapping
let ch = dst_ch as usize;
let s0 = audio_file.data[src_sample_idx + ch];
if can_interpolate {
let s1 = audio_file.data[next_sample_idx + ch];
s0 + (s1 - s0) * frac as f32
if can_interpolate && frac > 0.0 {
let p0 = audio_file.data[p0_idx + ch];
let p1 = audio_file.data[p1_idx + ch];
let p2 = audio_file.data[p2_idx + ch];
let p3 = audio_file.data[p3_idx + ch];
hermite_interpolate(p0, p1, p2, p3, frac)
} else {
s0
audio_file.data[p1_idx + ch]
}
} else if src_channels == 1 && dst_channels > 1 {
// Mono to multi-channel - duplicate to all channels
let s0 = audio_file.data[src_sample_idx];
if can_interpolate {
let s1 = audio_file.data[next_sample_idx];
s0 + (s1 - s0) * frac as f32
if can_interpolate && frac > 0.0 {
let p0 = audio_file.data[p0_idx];
let p1 = audio_file.data[p1_idx];
let p2 = audio_file.data[p2_idx];
let p3 = audio_file.data[p3_idx];
hermite_interpolate(p0, p1, p2, p3, frac)
} else {
s0
audio_file.data[p1_idx]
}
} else if src_channels > 1 && dst_channels == 1 {
// Multi-channel to mono - average all source channels
let mut sum = 0.0f32;
for src_ch in 0..src_channels {
let s0 = audio_file.data[src_sample_idx + src_ch as usize];
let s = if can_interpolate {
let s1 = audio_file.data[next_sample_idx + src_ch as usize];
s0 + (s1 - s0) * frac as f32
let ch = src_ch as usize;
let s = if can_interpolate && frac > 0.0 {
let p0 = audio_file.data[p0_idx + ch];
let p1 = audio_file.data[p1_idx + ch];
let p2 = audio_file.data[p2_idx + ch];
let p3 = audio_file.data[p3_idx + ch];
hermite_interpolate(p0, p1, p2, p3, frac)
} else {
s0
audio_file.data[p1_idx + ch]
};
sum += s;
}
@ -147,12 +179,14 @@ impl AudioPool {
} else {
// Mismatched channels - use modulo for simple mapping
let src_ch = (dst_ch % src_channels) as usize;
let s0 = audio_file.data[src_sample_idx + src_ch];
if can_interpolate {
let s1 = audio_file.data[next_sample_idx + src_ch];
s0 + (s1 - s0) * frac as f32
if can_interpolate && frac > 0.0 {
let p0 = audio_file.data[p0_idx + src_ch];
let p1 = audio_file.data[p1_idx + src_ch];
let p2 = audio_file.data[p2_idx + src_ch];
let p3 = audio_file.data[p3_idx + src_ch];
hermite_interpolate(p0, p1, p2, p3, frac)
} else {
s0
audio_file.data[p1_idx + src_ch]
}
};

View File

@ -2,7 +2,7 @@ use super::buffer_pool::BufferPool;
use super::clip::Clip;
use super::midi::MidiClip;
use super::pool::AudioPool;
use super::track::{AudioTrack, GroupTrack, MidiTrack, TrackId, TrackNode};
use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode};
use crate::effects::Effect;
use std::collections::HashMap;
@ -69,7 +69,7 @@ impl Project {
/// The new group's ID
pub fn add_group_track(&mut self, name: String, parent_id: Option<TrackId>) -> TrackId {
let id = self.next_id();
let group = GroupTrack::new(id, name);
let group = Metatrack::new(id, name);
self.tracks.insert(id, TrackNode::Group(group));
if let Some(parent) = parent_id {
@ -285,6 +285,14 @@ impl Project {
let any_solo = self.any_solo();
// Create initial render context
let ctx = RenderContext::new(
playhead_seconds,
sample_rate,
channels,
output.len(),
);
// Render each root track
for &track_id in &self.root_tracks.clone() {
self.render_track(
@ -292,9 +300,7 @@ impl Project {
output,
pool,
buffer_pool,
playhead_seconds,
sample_rate,
channels,
ctx,
any_solo,
false, // root tracks are not inside a soloed parent
);
@ -308,9 +314,7 @@ impl Project {
output: &mut [f32],
pool: &AudioPool,
buffer_pool: &mut BufferPool,
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
ctx: RenderContext,
any_solo: bool,
parent_is_soloed: bool,
) {
@ -352,16 +356,17 @@ impl Project {
match self.tracks.get_mut(&track_id) {
Some(TrackNode::Audio(track)) => {
// Render audio track directly into output
track.render(output, pool, playhead_seconds, sample_rate, channels);
track.render(output, pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
}
Some(TrackNode::Midi(track)) => {
// Render MIDI track directly into output
track.render(output, playhead_seconds, sample_rate, channels);
track.render(output, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
}
Some(TrackNode::Group(group)) => {
// Get children IDs and check if this group is soloed
// Get children IDs, check if this group is soloed, and transform context
let children: Vec<TrackId> = group.children.clone();
let this_group_is_soloed = group.solo;
let child_ctx = group.transform_context(ctx);
// Acquire a temporary buffer for the group mix
let mut group_buffer = buffer_pool.acquire();
@ -377,9 +382,7 @@ impl Project {
&mut group_buffer,
pool,
buffer_pool,
playhead_seconds,
sample_rate,
channels,
child_ctx,
any_solo,
children_parent_soloed,
);
@ -388,7 +391,7 @@ impl Project {
// Apply group effects
if let Some(TrackNode::Group(group)) = self.tracks.get_mut(&track_id) {
for effect in &mut group.effects {
effect.process(&mut group_buffer, channels as usize, sample_rate);
effect.process(&mut group_buffer, ctx.channels as usize, ctx.sample_rate);
}
// Apply group volume and mix into output

View File

@ -9,11 +9,56 @@ pub type TrackId = u32;
/// Type alias for backwards compatibility
pub type Track = AudioTrack;
/// Node in the track hierarchy - can be an audio track, MIDI track, or a group
/// Rendering context that carries timing information through the track hierarchy.
///
/// Metatracks transform a copy of this context (time stretch, offset) before
/// handing it to their children, so time manipulation composes across nesting.
#[derive(Debug, Clone, Copy)]
pub struct RenderContext {
    /// Current playhead position in seconds (in transformed time)
    pub playhead_seconds: f64,
    /// Audio sample rate
    pub sample_rate: u32,
    /// Number of channels
    pub channels: u32,
    /// Size of the buffer being rendered (in interleaved samples)
    pub buffer_size: usize,
    /// Accumulated time stretch factor (1.0 = normal, 0.5 = half speed, 2.0 = double speed)
    pub time_stretch: f32,
}

impl RenderContext {
    /// Build a root-level context with no accumulated time stretch.
    pub fn new(
        playhead_seconds: f64,
        sample_rate: u32,
        channels: u32,
        buffer_size: usize,
    ) -> Self {
        RenderContext {
            playhead_seconds,
            sample_rate,
            channels,
            buffer_size,
            time_stretch: 1.0,
        }
    }

    /// Duration of the buffer in seconds (interleaved samples divided by
    /// samples-per-second across all channels).
    pub fn buffer_duration(&self) -> f64 {
        let samples_per_second = self.sample_rate as f64 * self.channels as f64;
        self.buffer_size as f64 / samples_per_second
    }

    /// Time at which this buffer ends (start plus duration).
    pub fn buffer_end(&self) -> f64 {
        self.playhead_seconds + self.buffer_duration()
    }
}
/// Node in the track hierarchy - can be an audio track, MIDI track, or a metatrack
pub enum TrackNode {
Audio(AudioTrack),
Midi(MidiTrack),
Group(GroupTrack),
Group(Metatrack),
}
impl TrackNode {
@ -81,8 +126,8 @@ impl TrackNode {
}
}
/// Group track that contains other tracks (audio or groups)
pub struct GroupTrack {
/// Metatrack that contains other tracks with time transformation capabilities
pub struct Metatrack {
pub id: TrackId,
pub name: String,
pub children: Vec<TrackId>,
@ -90,10 +135,16 @@ pub struct GroupTrack {
pub volume: f32,
pub muted: bool,
pub solo: bool,
/// Time stretch factor (0.5 = half speed, 1.0 = normal, 2.0 = double speed)
pub time_stretch: f32,
/// Pitch shift in semitones (for future implementation)
pub pitch_shift: f32,
/// Time offset in seconds (shift content forward/backward in time)
pub offset: f64,
}
impl GroupTrack {
/// Create a new group track
impl Metatrack {
/// Create a new metatrack
pub fn new(id: TrackId, name: String) -> Self {
Self {
id,
@ -103,6 +154,9 @@ impl GroupTrack {
volume: 1.0,
muted: false,
solo: false,
time_stretch: 1.0,
pitch_shift: 0.0,
offset: 0.0,
}
}
@ -147,6 +201,32 @@ impl GroupTrack {
/// Whether this metatrack should be rendered: never while muted, and — when
/// any track in the project is soloed — only if this metatrack is soloed too.
pub fn is_active(&self, any_solo: bool) -> bool {
    if self.muted {
        return false;
    }
    self.solo || !any_solo
}
/// Transform a render context for this metatrack's children.
///
/// Applies the offset first, then the time stretch:
/// - Offset shifts content in parent time: with offset = 2.0s, the children's
///   t = 0 content is heard when the parent playhead reaches 2.0s (before
///   that, the children see negative time).
/// - Stretch scales how fast children are read: with stretch = 0.5, parent
///   time 2.0s reads the children at 1.0s (slower); with 2.0, at 4.0s
///   (faster). Like varispeed, this also shifts pitch — true time stretching
///   would require resampling.
pub fn transform_context(&self, ctx: RenderContext) -> RenderContext {
    let mut child = ctx;
    // 1. Remove the offset (positive offset = content appears later).
    let local_time = ctx.playhead_seconds - self.offset;
    // 2. Scale by the stretch factor to get the children's read position.
    child.playhead_seconds = local_time * self.time_stretch as f64;
    // Nested metatracks compound: accumulate the stretch for descendants.
    child.time_stretch = ctx.time_stretch * self.time_stretch;
    child
}
}
/// MIDI track with MIDI clips and a virtual instrument

View File

@ -35,13 +35,23 @@ pub enum Command {
/// Clear all effects from a track
ClearEffects(TrackId),
// Group management commands
/// Create a new group track with a name
CreateGroup(String),
/// Add a track to a group (track_id, group_id)
AddToGroup(TrackId, TrackId),
/// Remove a track from its parent group
RemoveFromGroup(TrackId),
// Metatrack management commands
/// Create a new metatrack with a name
CreateMetatrack(String),
/// Add a track to a metatrack (track_id, metatrack_id)
AddToMetatrack(TrackId, TrackId),
/// Remove a track from its parent metatrack
RemoveFromMetatrack(TrackId),
// Metatrack transformation commands
/// Set metatrack time stretch factor (track_id, stretch_factor)
/// 0.5 = half speed, 1.0 = normal, 2.0 = double speed
SetTimeStretch(TrackId, f32),
/// Set metatrack time offset in seconds (track_id, offset)
/// Positive = shift content later, negative = shift earlier
SetOffset(TrackId, f64),
/// Set metatrack pitch shift in semitones (track_id, semitones) - for future use
SetPitchShift(TrackId, f32),
// MIDI commands
/// Create a new MIDI track with a name
@ -63,6 +73,6 @@ pub enum AudioEvent {
PlaybackStopped,
/// Audio buffer underrun detected
BufferUnderrun,
/// A new track was created (track_id, is_group, name)
/// A new track was created (track_id, is_metatrack, name)
TrackCreated(TrackId, bool, String),
}

View File

@ -13,7 +13,7 @@ pub mod io;
// Re-export commonly used types
pub use audio::{
AudioPool, AudioTrack, BufferPool, Clip, ClipId, Engine, EngineController,
GroupTrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, PoolAudioFile, Project, Track, TrackId, TrackNode,
Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, PoolAudioFile, Project, RenderContext, Track, TrackId, TrackNode,
};
pub use command::{AudioEvent, Command};
pub use effects::{Effect, GainEffect, PanEffect, SimpleEQ, SimpleSynth};

View File

@ -197,10 +197,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
AudioEvent::BufferUnderrun => {
eprintln!("\nWarning: Buffer underrun detected");
}
AudioEvent::TrackCreated(track_id, is_group, name) => {
AudioEvent::TrackCreated(track_id, is_metatrack, name) => {
print!("\r\x1b[K");
if is_group {
println!("Group {} created: '{}' (ID: {})", track_id, name, track_id);
if is_metatrack {
println!("Metatrack {} created: '{}' (ID: {})", track_id, name, track_id);
} else {
println!("Track {} created: '{}' (ID: {})", track_id, name, track_id);
}
@ -470,35 +470,35 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
} else {
println!("Usage: clearfx <track_id>");
}
} else if input.starts_with("group ") {
// Parse: group <name>
let name = input[6..].trim().to_string();
} else if input.starts_with("meta ") {
// Parse: meta <name>
let name = input[5..].trim().to_string();
if !name.is_empty() {
controller.create_group(name.clone());
println!("Created group '{}'", name);
controller.create_metatrack(name.clone());
println!("Created metatrack '{}'", name);
} else {
println!("Usage: group <name>");
println!("Usage: meta <name>");
}
} else if input.starts_with("addtogroup ") {
// Parse: addtogroup <track_id> <group_id>
} else if input.starts_with("addtometa ") {
// Parse: addtometa <track_id> <metatrack_id>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(group_id)) = (parts[1].parse::<u32>(), parts[2].parse::<u32>()) {
controller.add_to_group(track_id, group_id);
println!("Added track {} to group {}", track_id, group_id);
if let (Ok(track_id), Ok(metatrack_id)) = (parts[1].parse::<u32>(), parts[2].parse::<u32>()) {
controller.add_to_metatrack(track_id, metatrack_id);
println!("Added track {} to metatrack {}", track_id, metatrack_id);
} else {
println!("Invalid format. Usage: addtogroup <track_id> <group_id>");
println!("Invalid format. Usage: addtometa <track_id> <metatrack_id>");
}
} else {
println!("Usage: addtogroup <track_id> <group_id>");
println!("Usage: addtometa <track_id> <metatrack_id>");
}
} else if input.starts_with("removefromgroup ") {
// Parse: removefromgroup <track_id>
if let Ok(track_id) = input[16..].trim().parse::<u32>() {
controller.remove_from_group(track_id);
println!("Removed track {} from its group", track_id);
} else if input.starts_with("removefrommeta ") {
// Parse: removefrommeta <track_id>
if let Ok(track_id) = input[15..].trim().parse::<u32>() {
controller.remove_from_metatrack(track_id);
println!("Removed track {} from its metatrack", track_id);
} else {
println!("Usage: removefromgroup <track_id>");
println!("Usage: removefrommeta <track_id>");
}
} else if input.starts_with("midi ") {
// Parse: midi <name>
@ -592,6 +592,58 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
} else {
println!("Usage: loadmidi <track_id> <file_path> [start_time]");
}
} else if input.starts_with("stretch ") {
// Parse: stretch <track_id> <factor>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(stretch)) = (parts[1].parse::<u32>(), parts[2].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_time_stretch(track_id, stretch);
let speed = if stretch < 0.99 {
format!("{:.0}% speed (slower)", stretch * 100.0)
} else if stretch > 1.01 {
format!("{:.0}% speed (faster)", stretch * 100.0)
} else {
"normal speed".to_string()
};
println!("Set time stretch on track {} to {:.2}x ({})", track_id, stretch, speed);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: stretch <track_id> <factor>");
}
} else {
println!("Usage: stretch <track_id> <factor> (0.5=half speed, 1.0=normal, 2.0=double speed)");
}
} else if input.starts_with("offset ") {
// Parse: offset <track_id> <seconds>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(offset)) = (parts[1].parse::<u32>(), parts[2].parse::<f64>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_offset(track_id, offset);
let direction = if offset > 0.01 {
format!("{:.2}s later", offset)
} else if offset < -0.01 {
format!("{:.2}s earlier", -offset)
} else {
"no offset".to_string()
};
println!("Set time offset on track {} to {:.2}s (content shifted {})", track_id, offset, direction);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: offset <track_id> <seconds>");
}
} else {
println!("Usage: offset <track_id> <seconds> (positive=later, negative=earlier)");
}
} else if input == "help" || input == "h" {
print_help();
} else {
@ -630,10 +682,12 @@ fn print_help() {
println!(" eq <id> <l> <m> <h> - Add/update 3-band EQ (low, mid, high in dB)");
println!(" (e.g. 'eq 0 3.0 0.0 -2.0')");
println!(" clearfx <id> - Clear all effects from a track");
println!("\nGroup Commands:");
println!(" group <name> - Create a new group track");
println!(" addtogroup <t> <g> - Add track to group (e.g. 'addtogroup 0 2')");
println!(" removefromgroup <t> - Remove track from its parent group");
println!("\nMetatrack Commands:");
println!(" meta <name> - Create a new metatrack");
println!(" addtometa <t> <m> - Add track to metatrack (e.g. 'addtometa 0 2')");
println!(" removefrommeta <t> - Remove track from its parent metatrack");
println!(" stretch <id> <f> - Set time stretch (0.5=half speed, 1.0=normal, 2.0=double)");
println!(" offset <id> <s> - Set time offset in seconds (positive=later, negative=earlier)");
println!("\nMIDI Commands:");
println!(" midi <name> - Create a new MIDI track");
println!(" midiclip <t> <s> <d> - Create MIDI clip on track (start, duration)");

View File

@ -507,7 +507,7 @@ let actions = {
// Increment zOrder for all existing shapes
for (let existingShape of layer.shapes) {
if (existingShape !== newShape) {
let existingZOrderCurve = layer.animationData.curves[`shape.${existingShape.idx}.zOrder`];
let existingZOrderCurve = layer.animationData.curves[`shape.${existingShape.shapeId}.zOrder`];
if (existingZOrderCurve) {
// Find keyframe at this time and increment it
for (let kf of existingZOrderCurve.keyframes) {
@ -3216,20 +3216,70 @@ class Layer extends Widget {
// Get all shapes that exist at the given time
getVisibleShapes(time) {
const visibleShapes = [];
// Calculate tolerance based on framerate (half a frame)
const halfFrameDuration = 0.5 / config.framerate;
// Group shapes by shapeId
const shapesByShapeId = new Map();
for (let shape of this.shapes) {
if (shape instanceof TempShape) continue;
if (!shapesByShapeId.has(shape.shapeId)) {
shapesByShapeId.set(shape.shapeId, []);
}
shapesByShapeId.get(shape.shapeId).push(shape);
}
// Check if shape exists at current time
let existsValue = this.animationData.interpolate(`shape.${shape.shapeId}.exists`, time);
if (existsValue && existsValue > 0) {
// For each logical shape (shapeId), determine which version to return for EDITING
for (let [shapeId, shapes] of shapesByShapeId) {
// Check if this logical shape exists at current time
let existsValue = this.animationData.interpolate(`shape.${shapeId}.exists`, time);
if (existsValue === null || existsValue <= 0) continue;
// Get shapeIndex curve
const shapeIndexCurve = this.animationData.getCurve(`shape.${shapeId}.shapeIndex`);
if (!shapeIndexCurve || !shapeIndexCurve.keyframes || shapeIndexCurve.keyframes.length === 0) {
// No shapeIndex curve, return shape with index 0
const shape = shapes.find(s => s.shapeIndex === 0);
if (shape) {
visibleShapes.push(shape);
}
continue;
}
// Find bracketing keyframes
const { prev: prevKf, next: nextKf } = shapeIndexCurve.getBracketingKeyframes(time);
// Get interpolated shapeIndex value
let shapeIndexValue = shapeIndexCurve.interpolate(time);
if (shapeIndexValue === null) shapeIndexValue = 0;
// Check if we're at a keyframe (within half a frame)
const atPrevKeyframe = prevKf && Math.abs(shapeIndexValue - prevKf.value) < halfFrameDuration;
const atNextKeyframe = nextKf && Math.abs(shapeIndexValue - nextKf.value) < halfFrameDuration;
if (atPrevKeyframe) {
// At previous keyframe - return that version for editing
const shape = shapes.find(s => s.shapeIndex === prevKf.value);
if (shape) visibleShapes.push(shape);
} else if (atNextKeyframe) {
// At next keyframe - return that version for editing
const shape = shapes.find(s => s.shapeIndex === nextKf.value);
if (shape) visibleShapes.push(shape);
} else if (prevKf && prevKf.interpolation === 'hold') {
// Between keyframes but using "hold" interpolation - no morphing
// Return the previous keyframe's shape since that's what's shown
const shape = shapes.find(s => s.shapeIndex === prevKf.value);
if (shape) visibleShapes.push(shape);
}
// Otherwise: between keyframes with morphing, return nothing (can't edit a morph)
}
return visibleShapes;
}
draw(ctx) {
console.log(`[Layer.draw] CALLED - shapes:`, this.shapes ? this.shapes.length : 0);
// super.draw(ctx)
if (!this.visible) return;
let frameInfo = this.getFrameValue(this.frameNum);
@ -3260,11 +3310,8 @@ class Layer extends Widget {
// Process each logical shape (shapeId)
let visibleShapes = [];
for (let [shapeId, shapes] of shapesByShapeId) {
console.log(`[Layer.draw] Processing shapeId ${shapeId}, have ${shapes.length} versions:`, shapes.map(s => ({idx: s.idx, shapeIndex: s.shapeIndex})));
// Check if this logical shape exists at current time
let existsValue = this.animationData.interpolate(`shape.${shapeId}.exists`, currentTime);
console.log(`[Layer.draw] existsValue for ${shapeId} at time ${currentTime}:`, existsValue);
if (existsValue === null || existsValue <= 0) continue;
// Get z-order
@ -3283,12 +3330,10 @@ class Layer extends Widget {
// Find surrounding keyframes
const { prev: prevKf, next: nextKf } = getKeyframesSurrounding(shapeIndexCurve.keyframes, currentTime);
console.log(`[Layer.draw] Keyframes for ${shapeId}: prev=`, prevKf, 'next=', nextKf);
// Get interpolated value
let shapeIndexValue = shapeIndexCurve.interpolate(currentTime);
if (shapeIndexValue === null) shapeIndexValue = 0;
console.log(`[Layer.draw] shapeIndexValue at time ${currentTime}:`, shapeIndexValue);
// Sort shape versions by shapeIndex
shapes.sort((a, b) => a.shapeIndex - b.shapeIndex);
@ -3297,18 +3342,13 @@ class Layer extends Widget {
// Check if we're at either the previous or next keyframe value (no morphing needed)
const atPrevKeyframe = prevKf && Math.abs(shapeIndexValue - prevKf.value) < 0.001;
const atNextKeyframe = nextKf && Math.abs(shapeIndexValue - nextKf.value) < 0.001;
console.log(`[Layer.draw] atPrevKeyframe=${atPrevKeyframe}, atNextKeyframe=${atNextKeyframe}`);
if (atPrevKeyframe || atNextKeyframe) {
// No morphing - display the shape at the keyframe value
const targetValue = atNextKeyframe ? nextKf.value : prevKf.value;
console.log(`[Layer.draw] Showing single shape with shapeIndex=${targetValue}`);
const shape = shapes.find(s => s.shapeIndex === targetValue);
if (shape) {
console.log(`[Layer.draw] Found shape with idx=${shape.idx}, shapeIndex=${shape.shapeIndex}`);
visibleShapes.push({ shape, zOrder: zOrder || 0, selected: context.shapeselection.includes(shape) });
} else {
console.warn(`[Layer.draw] Could not find shape with shapeIndex=${targetValue}`);
}
} else if (prevKf && nextKf && prevKf.value !== nextKf.value) {
// Morph between shapes specified by surrounding keyframes
@ -4692,16 +4732,122 @@ class GraphicsObject extends Widget {
layer.activeShape.draw(cxt);
}
// NEW: Use AnimationData system to draw shapes
// NEW: Use AnimationData system to draw shapes with shape tweening/morphing
let currentTime = this.currentTime || 0;
let visibleShapes = [];
// Group shapes by shapeId (multiple Shape objects can share a shapeId for tweening)
const shapesByShapeId = new Map();
for (let shape of layer.shapes) {
if (shape instanceof TempShape) continue;
let existsValue = layer.animationData.interpolate(`shape.${shape.shapeId}.exists`, currentTime);
if (existsValue !== null && existsValue > 0) {
let zOrder = layer.animationData.interpolate(`shape.${shape.shapeId}.zOrder`, currentTime);
visibleShapes.push({ shape, zOrder: zOrder || 0 });
if (!shapesByShapeId.has(shape.shapeId)) {
shapesByShapeId.set(shape.shapeId, []);
}
shapesByShapeId.get(shape.shapeId).push(shape);
}
// Process each logical shape (shapeId) and determine what to draw
let visibleShapes = [];
for (let [shapeId, shapes] of shapesByShapeId) {
// Check if this logical shape exists at current time
const existsCurveKey = `shape.${shapeId}.exists`;
let existsValue = layer.animationData.interpolate(existsCurveKey, currentTime);
console.log(`[Widget.draw] Checking shape ${shapeId} at time ${currentTime}: existsValue=${existsValue}, curve=${layer.animationData.curves[existsCurveKey] ? 'exists' : 'missing'}`);
if (layer.animationData.curves[existsCurveKey]) {
console.log(`[Widget.draw] Curve keyframes:`, layer.animationData.curves[existsCurveKey].keyframes);
}
if (existsValue === null || existsValue <= 0) {
console.log(`[Widget.draw] Skipping shape ${shapeId} - not visible`);
continue;
}
// Get z-order
let zOrder = layer.animationData.interpolate(`shape.${shapeId}.zOrder`, currentTime);
// Get shapeIndex curve and surrounding keyframes
const shapeIndexCurve = layer.animationData.getCurve(`shape.${shapeId}.shapeIndex`);
console.log(`[Widget.draw] shapeIndexCurve for ${shapeId}:`, shapeIndexCurve ? 'exists' : 'missing', 'keyframes:', shapeIndexCurve?.keyframes?.length);
console.log(`[Widget.draw] Available shapes for ${shapeId}:`, shapes.map(s => ({idx: s.idx, shapeIndex: s.shapeIndex})));
if (!shapeIndexCurve || !shapeIndexCurve.keyframes || shapeIndexCurve.keyframes.length === 0) {
// No shapeIndex curve, just show shape with index 0
const shape = shapes.find(s => s.shapeIndex === 0);
console.log(`[Widget.draw] No shapeIndex curve - looking for shape with index 0:`, shape ? 'found' : 'NOT FOUND');
if (shape) {
console.log(`[Widget.draw] Adding shape to visibleShapes`);
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
continue;
}
// Find surrounding keyframes using AnimationCurve's built-in method
const { prev: prevKf, next: nextKf } = shapeIndexCurve.getBracketingKeyframes(currentTime);
console.log(`[Widget.draw] Keyframes: prevKf=${JSON.stringify(prevKf)}, nextKf=${JSON.stringify(nextKf)}`);
// Get interpolated value
let shapeIndexValue = shapeIndexCurve.interpolate(currentTime);
if (shapeIndexValue === null) shapeIndexValue = 0;
console.log(`[Widget.draw] shapeIndexValue=${shapeIndexValue}`);
// Sort shape versions by shapeIndex
shapes.sort((a, b) => a.shapeIndex - b.shapeIndex);
console.log(`[Widget.draw] Sorted shapes:`, shapes.map(s => `idx=${s.idx.substring(0,8)} shapeIndex=${s.shapeIndex}`));
// Determine whether to morph based on whether interpolated value equals a keyframe value
const atPrevKeyframe = prevKf && Math.abs(shapeIndexValue - prevKf.value) < 0.001;
const atNextKeyframe = nextKf && Math.abs(shapeIndexValue - nextKf.value) < 0.001;
console.log(`[Widget.draw] atPrevKeyframe=${atPrevKeyframe}, atNextKeyframe=${atNextKeyframe}`);
if (atPrevKeyframe || atNextKeyframe) {
// No morphing - display the shape at the keyframe value
const targetValue = atNextKeyframe ? nextKf.value : prevKf.value;
const shape = shapes.find(s => s.shapeIndex === targetValue);
if (shape) {
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
} else if (prevKf && nextKf && prevKf.value !== nextKf.value) {
// Morph between shapes specified by surrounding keyframes
const shape1 = shapes.find(s => s.shapeIndex === prevKf.value);
const shape2 = shapes.find(s => s.shapeIndex === nextKf.value);
if (shape1 && shape2) {
// Calculate t based on time position between keyframes
const t = (currentTime - prevKf.time) / (nextKf.time - prevKf.time);
const morphedShape = shape1.lerpShape(shape2, t);
visibleShapes.push({
shape: morphedShape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape1) || context.shapeselection.includes(shape2)
});
} else if (shape1) {
visibleShapes.push({
shape: shape1,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape1)
});
} else if (shape2) {
visibleShapes.push({
shape: shape2,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape2)
});
}
} else if (nextKf) {
// Only next keyframe exists, show that shape
const shape = shapes.find(s => s.shapeIndex === nextKf.value);
if (shape) {
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
}
}
@ -4709,9 +4855,9 @@ class GraphicsObject extends Widget {
visibleShapes.sort((a, b) => a.zOrder - b.zOrder);
// Draw sorted shapes
for (let { shape } of visibleShapes) {
for (let { shape, selected } of visibleShapes) {
let cxt = {...context}
if (context.shapeselection.indexOf(shape) >= 0) {
if (selected) {
cxt.selected = true
}
shape.draw(cxt);