Compare commits

...

3 Commits

Author SHA1 Message Date
Skyler Lehmkuhl 469849a0d6 Add nested audio tracks 2026-02-21 03:56:07 -05:00
Skyler Lehmkuhl 70855963cb Stack clips only on overlap 2026-02-21 01:22:48 -05:00
Skyler Lehmkuhl 1892f970c4 Initial work on movie clips 2026-02-21 00:54:38 -05:00
27 changed files with 1737 additions and 379 deletions

View File

@ -658,8 +658,8 @@ impl Engine {
_ => {}
}
}
Command::CreateMetatrack(name) => {
let track_id = self.project.add_group_track(name.clone(), None);
Command::CreateMetatrack(name, parent_id) => {
let track_id = self.project.add_group_track(name.clone(), parent_id);
// Notify UI about the new metatrack
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, true, name));
}
@ -686,8 +686,18 @@ impl Engine {
metatrack.pitch_shift = semitones;
}
}
Command::CreateAudioTrack(name) => {
let track_id = self.project.add_audio_track(name.clone(), None);
Command::SetTrimStart(track_id, trim_start) => {
if let Some(crate::audio::track::TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.trim_start = trim_start.max(0.0);
}
}
Command::SetTrimEnd(track_id, trim_end) => {
if let Some(crate::audio::track::TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.trim_end = trim_end.map(|t| t.max(0.0));
}
}
Command::CreateAudioTrack(name, parent_id) => {
let track_id = self.project.add_audio_track(name.clone(), parent_id);
// Notify UI about the new audio track
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
}
@ -793,8 +803,8 @@ impl Engine {
eprintln!("[Engine] ERROR: Track {} not found or is not an audio track", track_id);
}
}
Command::CreateMidiTrack(name) => {
let track_id = self.project.add_midi_track(name.clone(), None);
Command::CreateMidiTrack(name, parent_id) => {
let track_id = self.project.add_midi_track(name.clone(), parent_id);
// Notify UI about the new MIDI track
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
}
@ -2348,20 +2358,24 @@ impl Engine {
QueryResponse::TrackGraphLoaded(result)
}
Query::CreateAudioTrackSync(name) => {
let track_id = self.project.add_audio_track(name.clone(), None);
eprintln!("[Engine] Created audio track '{}' with ID {}", name, track_id);
// Notify UI about the new audio track
Query::CreateAudioTrackSync(name, parent_id) => {
let track_id = self.project.add_audio_track(name.clone(), parent_id);
eprintln!("[Engine] Created audio track '{}' with ID {} (parent: {:?})", name, track_id, parent_id);
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
QueryResponse::TrackCreated(Ok(track_id))
}
Query::CreateMidiTrackSync(name) => {
let track_id = self.project.add_midi_track(name.clone(), None);
eprintln!("[Engine] Created MIDI track '{}' with ID {}", name, track_id);
// Notify UI about the new MIDI track
Query::CreateMidiTrackSync(name, parent_id) => {
let track_id = self.project.add_midi_track(name.clone(), parent_id);
eprintln!("[Engine] Created MIDI track '{}' with ID {} (parent: {:?})", name, track_id, parent_id);
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
QueryResponse::TrackCreated(Ok(track_id))
}
Query::CreateMetatrackSync(name, parent_id) => {
let track_id = self.project.add_group_track(name.clone(), parent_id);
eprintln!("[Engine] Created metatrack '{}' with ID {} (parent: {:?})", name, track_id, parent_id);
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, true, name));
QueryResponse::TrackCreated(Ok(track_id))
}
Query::GetPoolWaveform(pool_index, target_peaks) => {
match self.audio_pool.generate_waveform(pool_index, target_peaks) {
Some(waveform) => QueryResponse::PoolWaveform(Ok(waveform)),
@ -2930,7 +2944,7 @@ impl EngineController {
/// Create a new metatrack
pub fn create_metatrack(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateMetatrack(name));
let _ = self.command_tx.push(Command::CreateMetatrack(name, None));
}
/// Add a track to a metatrack
@ -2960,9 +2974,19 @@ impl EngineController {
let _ = self.command_tx.push(Command::SetPitchShift(track_id, semitones));
}
/// Set metatrack trim start in seconds
pub fn set_trim_start(&mut self, track_id: TrackId, trim_start: f64) {
let _ = self.command_tx.push(Command::SetTrimStart(track_id, trim_start));
}
/// Set metatrack trim end in seconds (None = no end trim)
pub fn set_trim_end(&mut self, track_id: TrackId, trim_end: Option<f64>) {
let _ = self.command_tx.push(Command::SetTrimEnd(track_id, trim_end));
}
/// Create a new audio track
pub fn create_audio_track(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateAudioTrack(name));
let _ = self.command_tx.push(Command::CreateAudioTrack(name, None));
}
/// Add an audio file to the pool (must be called from non-audio thread with pre-loaded data)
@ -3012,7 +3036,7 @@ impl EngineController {
/// Create a new MIDI track
pub fn create_midi_track(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateMidiTrack(name));
let _ = self.command_tx.push(Command::CreateMidiTrack(name, None));
}
/// Add a MIDI clip to the pool without placing it on any track
@ -3022,8 +3046,8 @@ impl EngineController {
}
/// Create a new audio track synchronously (waits for creation to complete)
pub fn create_audio_track_sync(&mut self, name: String) -> Result<TrackId, String> {
if let Err(_) = self.query_tx.push(Query::CreateAudioTrackSync(name)) {
pub fn create_audio_track_sync(&mut self, name: String, parent: Option<TrackId>) -> Result<TrackId, String> {
if let Err(_) = self.query_tx.push(Query::CreateAudioTrackSync(name, parent)) {
return Err("Failed to send track creation query".to_string());
}
@ -3042,8 +3066,8 @@ impl EngineController {
}
/// Create a new MIDI track synchronously (waits for creation to complete)
pub fn create_midi_track_sync(&mut self, name: String) -> Result<TrackId, String> {
if let Err(_) = self.query_tx.push(Query::CreateMidiTrackSync(name)) {
pub fn create_midi_track_sync(&mut self, name: String, parent: Option<TrackId>) -> Result<TrackId, String> {
if let Err(_) = self.query_tx.push(Query::CreateMidiTrackSync(name, parent)) {
return Err("Failed to send track creation query".to_string());
}
@ -3061,6 +3085,25 @@ impl EngineController {
Err("Track creation timeout".to_string())
}
/// Create a new metatrack/group synchronously (waits for creation to complete)
pub fn create_group_track_sync(&mut self, name: String, parent: Option<TrackId>) -> Result<TrackId, String> {
if let Err(_) = self.query_tx.push(Query::CreateMetatrackSync(name, parent)) {
return Err("Failed to send metatrack creation query".to_string());
}
let start = std::time::Instant::now();
let timeout = std::time::Duration::from_secs(2);
while start.elapsed() < timeout {
if let Ok(QueryResponse::TrackCreated(result)) = self.query_response_rx.pop() {
return result;
}
std::thread::sleep(std::time::Duration::from_millis(1));
}
Err("Metatrack creation timeout".to_string())
}
/// Create a new MIDI clip on a track
pub fn create_midi_clip(&mut self, track_id: TrackId, start_time: f64, duration: f64) -> MidiClipId {
// Peek at the next clip ID that will be used
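For readers unfamiliar with the synchronous query pattern used by the new `create_group_track_sync` above: the controller pushes a query, then polls the response channel with a 2-second timeout instead of blocking. The sketch below mirrors that flow in a self-contained program; `std::sync::mpsc` channels and the `u32` track ids stand in for the engine's lock-free ring buffers and real `TrackId`/`Query`/`QueryResponse` types, and the "engine" thread is a fake responder written only for illustration.

```rust
use std::sync::mpsc;
use std::time::{Duration, Instant};

enum Query {
    CreateMetatrackSync(String, Option<u32>),
}

enum QueryResponse {
    TrackCreated(Result<u32, String>),
}

// Push a query, then poll for the matching response with a timeout,
// mirroring the shape of create_group_track_sync above.
fn create_group_track_sync(
    query_tx: &mpsc::Sender<Query>,
    response_rx: &mpsc::Receiver<QueryResponse>,
    name: String,
    parent: Option<u32>,
) -> Result<u32, String> {
    query_tx
        .send(Query::CreateMetatrackSync(name, parent))
        .map_err(|_| "Failed to send metatrack creation query".to_string())?;

    let start = Instant::now();
    while start.elapsed() < Duration::from_secs(2) {
        // Poll instead of blocking so a stalled engine only costs the timeout.
        if let Ok(QueryResponse::TrackCreated(result)) = response_rx.try_recv() {
            return result;
        }
        std::thread::sleep(Duration::from_millis(1));
    }
    Err("Metatrack creation timeout".to_string())
}

fn main() {
    let (query_tx, query_rx) = mpsc::channel();
    let (response_tx, response_rx) = mpsc::channel();

    // Stand-in "engine" thread: answers every query with a fresh track id.
    std::thread::spawn(move || {
        let mut next_id = 0u32;
        while query_rx.recv().is_ok() {
            let _ = response_tx.send(QueryResponse::TrackCreated(Ok(next_id)));
            next_id += 1;
        }
    });

    let id = create_group_track_sync(&query_tx, &response_rx, "Movie Clip Audio".into(), None);
    println!("created metatrack: {:?}", id);
}
```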

View File

@ -82,7 +82,7 @@ impl Project {
/// The new group's ID
pub fn add_group_track(&mut self, name: String, parent_id: Option<TrackId>) -> TrackId {
let id = self.next_id();
let group = Metatrack::new(id, name);
let group = Metatrack::new(id, name, self.sample_rate);
self.tracks.insert(id, TrackNode::Group(group));
if let Some(parent) = parent_id {
@ -450,6 +450,11 @@ impl Project {
track.render(output, &self.midi_clip_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
}
Some(TrackNode::Group(group)) => {
// Skip rendering if playhead is outside the metatrack's trim window
if !group.is_active_at_time(ctx.playhead_seconds) {
return;
}
// Read group properties and transform context (index-based child iteration to avoid clone)
let num_children = group.children.len();
let this_group_is_soloed = group.solo;
@ -479,14 +484,37 @@ impl Project {
);
}
// Apply group volume and mix into output
// Route children's mix through metatrack's audio graph
if let Some(TrackNode::Group(group)) = self.tracks.get_mut(&track_id) {
for (out_sample, group_sample) in output.iter_mut().zip(group_buffer.iter()) {
*out_sample += group_sample * group.volume;
// Inject children's mix into audio graph's input node
let node_indices: Vec<_> = group.audio_graph.node_indices().collect();
for node_idx in node_indices {
if let Some(graph_node) = group.audio_graph.get_graph_node_mut(node_idx) {
if graph_node.node.node_type() == "AudioInput" {
if let Some(input_node) = graph_node.node.as_any_mut()
.downcast_mut::<super::node_graph::nodes::AudioInputNode>()
{
input_node.inject_audio(&group_buffer);
break;
}
}
}
}
// Process through the audio graph into a fresh buffer
let mut graph_output = buffer_pool.acquire();
graph_output.resize(output.len(), 0.0);
graph_output.fill(0.0);
group.audio_graph.process(&mut graph_output, &[], ctx.playhead_seconds);
// Apply group volume and mix into output
for (out_sample, graph_sample) in output.iter_mut().zip(graph_output.iter()) {
*out_sample += graph_sample * group.volume;
}
buffer_pool.release(graph_output);
}
// Release buffer back to pool
// Release children mix buffer back to pool
buffer_pool.release(group_buffer);
}
None => {}
@ -581,8 +609,8 @@ impl Project {
TrackNode::Midi(midi_track) => {
midi_track.prepare_for_save();
}
TrackNode::Group(_) => {
// Groups don't have audio graphs
TrackNode::Group(group) => {
group.prepare_for_save();
}
}
}
@ -604,8 +632,8 @@ impl Project {
TrackNode::Midi(midi_track) => {
midi_track.rebuild_audio_graph(self.sample_rate, buffer_size)?;
}
TrackNode::Group(_) => {
// Groups don't have audio graphs
TrackNode::Group(group) => {
group.rebuild_audio_graph(self.sample_rate, buffer_size)?;
}
}
}
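A compact sketch of the new group render path: the children's pre-mixed buffer is injected at the input of the metatrack's effects processing, the processed result is scaled by the group volume, and the product is accumulated into the parent mix. The `EffectChain` below is a deliberately tiny stand-in for the real `AudioGraph` (which routes through an `AudioInputNode`); only the inject/process/mix arithmetic matches the diff above.

```rust
/// Stand-in for the metatrack's audio graph: a list of per-buffer effects
/// applied to whatever was injected at the "input" end.
struct EffectChain {
    effects: Vec<Box<dyn Fn(&mut [f32])>>,
}

impl EffectChain {
    /// Inject the children's mix, run the effects, return the processed buffer.
    fn process(&self, injected: &[f32]) -> Vec<f32> {
        let mut buf = injected.to_vec();
        for effect in &self.effects {
            effect(&mut buf);
        }
        buf
    }
}

fn render_group(output: &mut [f32], children_mix: &[f32], chain: &EffectChain, volume: f32) {
    // Children's mix -> graph input -> processed buffer -> scaled into the
    // parent output, matching the steps in Project::render above.
    let processed = chain.process(children_mix);
    for (out, s) in output.iter_mut().zip(processed.iter()) {
        *out += s * volume;
    }
}

fn main() {
    // One "effect": a simple 6 dB cut.
    let halve: Box<dyn Fn(&mut [f32])> = Box::new(|buf| {
        for s in buf.iter_mut() {
            *s *= 0.5;
        }
    });
    let chain = EffectChain { effects: vec![halve] };

    let mut parent = vec![0.0f32; 4];
    render_group(&mut parent, &[1.0, 1.0, 1.0, 1.0], &chain, 0.8);
    println!("{:?}", parent); // each sample: 1.0 * 0.5 * 0.8 = 0.4
}
```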

View File

@ -153,7 +153,7 @@ impl TrackNode {
}
/// Metatrack that contains other tracks with time transformation capabilities
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct Metatrack {
pub id: TrackId,
pub name: String,
@ -167,14 +167,50 @@ pub struct Metatrack {
pub pitch_shift: f32,
/// Time offset in seconds (shift content forward/backward in time)
pub offset: f64,
/// Trim start: offset into the metatrack's internal content (seconds)
/// Children will see time starting from this point
pub trim_start: f64,
/// Trim end: offset into the metatrack's internal content (seconds)
/// None means no end trim (play until content ends)
pub trim_end: Option<f64>,
/// Automation lanes for this metatrack
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId,
/// Audio node graph for effects processing (input → output)
#[serde(skip, default = "default_audio_graph")]
pub audio_graph: AudioGraph,
/// Saved graph preset for serialization
audio_graph_preset: Option<GraphPreset>,
}
impl Clone for Metatrack {
fn clone(&self) -> Self {
Self {
id: self.id,
name: self.name.clone(),
children: self.children.clone(),
volume: self.volume,
muted: self.muted,
solo: self.solo,
time_stretch: self.time_stretch,
pitch_shift: self.pitch_shift,
offset: self.offset,
trim_start: self.trim_start,
trim_end: self.trim_end,
automation_lanes: self.automation_lanes.clone(),
next_automation_id: self.next_automation_id,
audio_graph: default_audio_graph(), // Create fresh graph, not cloned
audio_graph_preset: self.audio_graph_preset.clone(),
}
}
}
impl Metatrack {
/// Create a new metatrack
pub fn new(id: TrackId, name: String) -> Self {
/// Create a new metatrack with an audio graph (input → output)
pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self {
let default_buffer_size = 8192;
let audio_graph = Self::create_default_graph(sample_rate, default_buffer_size);
Self {
id,
name,
@ -185,11 +221,52 @@ impl Metatrack {
time_stretch: 1.0,
pitch_shift: 0.0,
offset: 0.0,
trim_start: 0.0,
trim_end: None,
automation_lanes: HashMap::new(),
next_automation_id: 0,
audio_graph,
audio_graph_preset: None,
}
}
/// Create a default audio graph with AudioInput -> AudioOutput
fn create_default_graph(sample_rate: u32, buffer_size: usize) -> AudioGraph {
let mut graph = AudioGraph::new(sample_rate, buffer_size);
let input_node = Box::new(AudioInputNode::new("Audio Input"));
let input_id = graph.add_node(input_node);
graph.set_node_position(input_id, 100.0, 150.0);
let output_node = Box::new(AudioOutputNode::new("Audio Output"));
let output_id = graph.add_node(output_node);
graph.set_node_position(output_id, 500.0, 150.0);
let _ = graph.connect(input_id, 0, output_id, 0);
graph.set_output_node(Some(output_id));
graph
}
/// Prepare for serialization by saving the audio graph as a preset
pub fn prepare_for_save(&mut self) {
self.audio_graph_preset = Some(self.audio_graph.to_preset("Metatrack Graph"));
}
/// Rebuild the audio graph from preset after deserialization
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
if let Some(preset) = &self.audio_graph_preset {
if !preset.nodes.is_empty() && preset.output_node.is_some() {
self.audio_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
} else {
self.audio_graph = Self::create_default_graph(sample_rate, buffer_size);
}
} else {
self.audio_graph = Self::create_default_graph(sample_rate, buffer_size);
}
Ok(())
}
/// Add an automation lane to this metatrack
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;
@ -282,11 +359,27 @@ impl Metatrack {
!self.muted && (!any_solo || self.solo)
}
/// Check whether this metatrack should produce audio at the given parent time.
/// Returns false if the playhead is outside the trim window.
pub fn is_active_at_time(&self, parent_playhead: f64) -> bool {
let local_time = (parent_playhead - self.offset) * self.time_stretch as f64;
if local_time < self.trim_start {
return false;
}
if let Some(end) = self.trim_end {
if local_time >= end {
return false;
}
}
true
}
/// Transform a render context for this metatrack's children
///
/// Applies time stretching and offset transformations.
/// Applies time stretching, offset, and trim transformations.
/// Time stretch affects how fast content plays: 0.5 = half speed, 2.0 = double speed
/// Offset shifts content forward/backward in time
/// Trim start offsets into the internal content
pub fn transform_context(&self, ctx: RenderContext) -> RenderContext {
let mut transformed = ctx;
@ -300,7 +393,11 @@ impl Metatrack {
// With stretch=0.5, when parent time is 2.0s, child reads from 1.0s (plays slower, pitches down)
// With stretch=2.0, when parent time is 2.0s, child reads from 4.0s (plays faster, pitches up)
// Note: This creates pitch shift as well - true time stretching would require resampling
transformed.playhead_seconds = adjusted_playhead * self.time_stretch as f64;
let stretched = adjusted_playhead * self.time_stretch as f64;
// 3. Add trim_start so children see time starting from the trim point
// If trim_start=2.0, children start seeing time 2.0 when parent reaches offset
transformed.playhead_seconds = stretched + self.trim_start;
// Accumulate time stretch for nested metatracks
transformed.time_stretch *= self.time_stretch;
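The interaction of offset, time stretch, and trim is easiest to check numerically. The stand-alone sketch below reproduces the formulas from `is_active_at_time` and `transform_context` above (offset first, then stretch, then `trim_start`), assuming `adjusted_playhead` is the parent playhead minus the offset as the surrounding comments describe; `GroupTiming` is a stripped-down stand-in for `Metatrack`, not the real type.

```rust
/// Stripped-down stand-in for Metatrack's timing fields.
struct GroupTiming {
    offset: f64,       // shift on the parent timeline (seconds)
    time_stretch: f64, // 0.5 = half speed, 2.0 = double speed
    trim_start: f64,   // skip this much internal content (seconds)
    trim_end: Option<f64>,
}

impl GroupTiming {
    /// Same check as Metatrack::is_active_at_time above.
    fn is_active_at_time(&self, parent_playhead: f64) -> bool {
        let local = (parent_playhead - self.offset) * self.time_stretch;
        local >= self.trim_start && self.trim_end.map_or(true, |end| local < end)
    }

    /// Same mapping as Metatrack::transform_context above:
    /// offset, then stretch, then add trim_start.
    fn child_playhead(&self, parent_playhead: f64) -> f64 {
        (parent_playhead - self.offset) * self.time_stretch + self.trim_start
    }
}

fn main() {
    let g = GroupTiming { offset: 4.0, time_stretch: 2.0, trim_start: 1.0, trim_end: Some(6.0) };
    // Parent time 5.0 -> local (5.0 - 4.0) * 2.0 = 2.0, inside [1.0, 6.0): active.
    assert!(g.is_active_at_time(5.0));
    // Children read content time 2.0 + 1.0 = 3.0 at that moment.
    assert_eq!(g.child_playhead(5.0), 3.0);
    // Parent time 7.5 -> local 7.0 >= trim_end 6.0: the group is silent.
    assert!(!g.is_active_at_time(7.5));
    println!("timing example ok");
}
```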

View File

@ -38,8 +38,8 @@ pub enum Command {
ExtendClip(TrackId, ClipId, f64),
// Metatrack management commands
/// Create a new metatrack with a name
CreateMetatrack(String),
/// Create a new metatrack with a name and optional parent group
CreateMetatrack(String, Option<TrackId>),
/// Add a track to a metatrack (track_id, metatrack_id)
AddToMetatrack(TrackId, TrackId),
/// Remove a track from its parent metatrack
@ -54,10 +54,16 @@ pub enum Command {
SetOffset(TrackId, f64),
/// Set metatrack pitch shift in semitones (track_id, semitones) - for future use
SetPitchShift(TrackId, f32),
/// Set metatrack trim start in seconds (track_id, trim_start)
/// Children won't hear content before this point
SetTrimStart(TrackId, f64),
/// Set metatrack trim end in seconds (track_id, trim_end)
/// None means no end trim
SetTrimEnd(TrackId, Option<f64>),
// Audio track commands
/// Create a new audio track with a name
CreateAudioTrack(String),
/// Create a new audio track with a name and optional parent group
CreateAudioTrack(String, Option<TrackId>),
/// Add an audio file to the pool (path, data, channels, sample_rate)
/// Returns the pool index via an AudioEvent
AddAudioFile(String, Vec<f32>, u32, u32),
@ -65,8 +71,8 @@ pub enum Command {
AddAudioClip(TrackId, usize, f64, f64, f64),
// MIDI commands
/// Create a new MIDI track with a name
CreateMidiTrack(String),
/// Create a new MIDI track with a name and optional parent group
CreateMidiTrack(String, Option<TrackId>),
/// Add a MIDI clip to the pool without placing it on a track
AddMidiClipToPool(MidiClip),
/// Create a new MIDI clip on a track (track_id, start_time, duration)
@ -361,10 +367,12 @@ pub enum Query {
SerializeTrackGraph(TrackId, std::path::PathBuf),
/// Load a track's effects/instrument graph (track_id, preset_json, project_path)
LoadTrackGraph(TrackId, String, std::path::PathBuf),
/// Create a new audio track (name) - returns track ID synchronously
CreateAudioTrackSync(String),
/// Create a new MIDI track (name) - returns track ID synchronously
CreateMidiTrackSync(String),
/// Create a new audio track (name, parent) - returns track ID synchronously
CreateAudioTrackSync(String, Option<TrackId>),
/// Create a new MIDI track (name, parent) - returns track ID synchronously
CreateMidiTrackSync(String, Option<TrackId>),
/// Create a new metatrack/group (name, parent) - returns track ID synchronously
CreateMetatrackSync(String, Option<TrackId>),
/// Get waveform data from audio pool (pool_index, target_peaks)
GetPoolWaveform(usize, usize),
/// Get file info from audio pool (pool_index) - returns (duration, sample_rate, channels)

View File

@ -43,6 +43,9 @@ pub struct BackendContext<'a> {
/// Mapping from document clip instance UUIDs to backend clip instance IDs
pub clip_instance_to_backend_map: &'a mut HashMap<Uuid, BackendClipInstanceId>,
/// Mapping from movie clip UUIDs to backend metatrack (group track) TrackIds
pub clip_to_metatrack_map: &'a HashMap<Uuid, daw_backend::TrackId>,
// Future: pub video_controller: Option<&'a mut VideoController>,
}
@ -133,7 +136,9 @@ pub struct ActionExecutor {
impl ActionExecutor {
/// Create a new action executor with the given document
pub fn new(document: Document) -> Self {
pub fn new(mut document: Document) -> Self {
// Rebuild transient lookup maps (not serialized)
document.rebuild_layer_to_clip_map();
Self {
document: Arc::new(document),
undo_stack: Vec::new(),

View File

@ -12,6 +12,9 @@ pub struct AddLayerAction {
/// The layer to add
layer: AnyLayer,
/// If Some, add to this VectorClip's layers instead of root
target_clip_id: Option<Uuid>,
/// ID of the created layer (set after execution)
created_layer_id: Option<Uuid>,
}
@ -26,6 +29,7 @@ impl AddLayerAction {
let layer = VectorLayer::new(name);
Self {
layer: AnyLayer::Vector(layer),
target_clip_id: None,
created_layer_id: None,
}
}
@ -38,10 +42,17 @@ impl AddLayerAction {
pub fn new(layer: AnyLayer) -> Self {
Self {
layer,
target_clip_id: None,
created_layer_id: None,
}
}
/// Set the target clip for this action (add layer inside a movie clip)
pub fn with_target_clip(mut self, clip_id: Option<Uuid>) -> Self {
self.target_clip_id = clip_id;
self
}
/// Get the ID of the created layer (after execution)
pub fn created_layer_id(&self) -> Option<Uuid> {
self.created_layer_id
@ -50,8 +61,19 @@ impl AddLayerAction {
impl Action for AddLayerAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
// Add layer to the document's root
let layer_id = document.root_mut().add_child(self.layer.clone());
let layer_id = if let Some(clip_id) = self.target_clip_id {
// Add layer inside a vector clip (movie clip)
let clip = document.vector_clips.get_mut(&clip_id)
.ok_or_else(|| format!("Target clip {} not found", clip_id))?;
let id = self.layer.id();
clip.layers.add_root(self.layer.clone());
// Register in layer_to_clip_map for O(1) lookup
document.layer_to_clip_map.insert(id, clip_id);
id
} else {
// Add layer to the document's root
document.root_mut().add_child(self.layer.clone())
};
// Store the ID for rollback
self.created_layer_id = Some(layer_id);
@ -62,7 +84,15 @@ impl Action for AddLayerAction {
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
// Remove the created layer if it exists
if let Some(layer_id) = self.created_layer_id {
document.root_mut().remove_child(&layer_id);
if let Some(clip_id) = self.target_clip_id {
// Remove from vector clip
if let Some(clip) = document.vector_clips.get_mut(&clip_id) {
clip.layers.roots.retain(|node| node.data.id() != layer_id);
}
document.layer_to_clip_map.remove(&layer_id);
} else {
document.root_mut().remove_child(&layer_id);
}
// Clear the stored ID
self.created_layer_id = None;

View File

@ -0,0 +1,244 @@
//! Convert to Movie Clip action
//!
//! Wraps selected shapes and/or clip instances into a new VectorClip
//! with is_group = false, giving it a real internal timeline.
//! Works with 1+ selected items (unlike Group which requires 2+).
use crate::action::Action;
use crate::animation::{AnimationCurve, AnimationTarget, Keyframe, TransformProperty};
use crate::clip::{ClipInstance, VectorClip};
use crate::document::Document;
use crate::layer::{AnyLayer, VectorLayer};
use crate::shape::Shape;
use uuid::Uuid;
use vello::kurbo::{Rect, Shape as KurboShape};
pub struct ConvertToMovieClipAction {
layer_id: Uuid,
time: f64,
shape_ids: Vec<Uuid>,
clip_instance_ids: Vec<Uuid>,
instance_id: Uuid,
created_clip_id: Option<Uuid>,
removed_shapes: Vec<Shape>,
removed_clip_instances: Vec<ClipInstance>,
}
impl ConvertToMovieClipAction {
pub fn new(
layer_id: Uuid,
time: f64,
shape_ids: Vec<Uuid>,
clip_instance_ids: Vec<Uuid>,
instance_id: Uuid,
) -> Self {
Self {
layer_id,
time,
shape_ids,
clip_instance_ids,
instance_id,
created_clip_id: None,
removed_shapes: Vec::new(),
removed_clip_instances: Vec::new(),
}
}
}
impl Action for ConvertToMovieClipAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
let layer = document
.get_layer(&self.layer_id)
.ok_or_else(|| format!("Layer {} not found", self.layer_id))?;
let vl = match layer {
AnyLayer::Vector(vl) => vl,
_ => return Err("Convert to Movie Clip is only supported on vector layers".to_string()),
};
// Collect shapes
let shapes_at_time = vl.shapes_at_time(self.time);
let mut collected_shapes: Vec<Shape> = Vec::new();
for id in &self.shape_ids {
if let Some(shape) = shapes_at_time.iter().find(|s| &s.id == id) {
collected_shapes.push(shape.clone());
}
}
// Collect clip instances
let mut collected_clip_instances: Vec<ClipInstance> = Vec::new();
for id in &self.clip_instance_ids {
if let Some(ci) = vl.clip_instances.iter().find(|ci| &ci.id == id) {
collected_clip_instances.push(ci.clone());
}
}
let total_items = collected_shapes.len() + collected_clip_instances.len();
if total_items < 1 {
return Err("Need at least 1 item to convert to movie clip".to_string());
}
// Compute combined bounding box
let mut combined_bbox: Option<Rect> = None;
for shape in &collected_shapes {
let local_bbox = shape.path().bounding_box();
let transform = shape.transform.to_affine();
let transformed_bbox = transform.transform_rect_bbox(local_bbox);
combined_bbox = Some(match combined_bbox {
Some(existing) => existing.union(transformed_bbox),
None => transformed_bbox,
});
}
for ci in &collected_clip_instances {
let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&ci.clip_id) {
let clip_time = ((self.time - ci.timeline_start) * ci.playback_speed) + ci.trim_start;
vector_clip.calculate_content_bounds(document, clip_time)
} else if let Some(video_clip) = document.get_video_clip(&ci.clip_id) {
Rect::new(0.0, 0.0, video_clip.width, video_clip.height)
} else {
continue;
};
let ci_transform = ci.transform.to_affine();
let transformed_bbox = ci_transform.transform_rect_bbox(content_bounds);
combined_bbox = Some(match combined_bbox {
Some(existing) => existing.union(transformed_bbox),
None => transformed_bbox,
});
}
let bbox = combined_bbox.ok_or("Could not compute bounding box")?;
let center_x = (bbox.x0 + bbox.x1) / 2.0;
let center_y = (bbox.y0 + bbox.y1) / 2.0;
// Offset shapes relative to center
let mut clip_shapes: Vec<Shape> = collected_shapes.clone();
for shape in &mut clip_shapes {
shape.transform.x -= center_x;
shape.transform.y -= center_y;
}
let mut clip_instances_inside: Vec<ClipInstance> = collected_clip_instances.clone();
for ci in &mut clip_instances_inside {
ci.transform.x -= center_x;
ci.transform.y -= center_y;
}
// Create VectorClip with real timeline duration
let mut clip = VectorClip::new("Movie Clip", bbox.width(), bbox.height(), document.duration);
// is_group defaults to false — movie clips have real timelines
let clip_id = clip.id;
let mut inner_layer = VectorLayer::new("Layer 1");
for shape in clip_shapes {
inner_layer.add_shape_to_keyframe(shape, 0.0);
}
for ci in clip_instances_inside {
inner_layer.clip_instances.push(ci);
}
clip.layers.add_root(AnyLayer::Vector(inner_layer));
document.add_vector_clip(clip);
self.created_clip_id = Some(clip_id);
// Remove originals from the layer
let layer = document.get_layer_mut(&self.layer_id).unwrap();
let vl = match layer {
AnyLayer::Vector(vl) => vl,
_ => unreachable!(),
};
self.removed_shapes.clear();
for id in &self.shape_ids {
if let Some(shape) = vl.remove_shape_from_keyframe(id, self.time) {
self.removed_shapes.push(shape);
}
}
self.removed_clip_instances.clear();
for id in &self.clip_instance_ids {
if let Some(pos) = vl.clip_instances.iter().position(|ci| &ci.id == id) {
self.removed_clip_instances.push(vl.clip_instances.remove(pos));
}
}
// Place the new ClipInstance
let instance = ClipInstance::with_id(self.instance_id, clip_id)
.with_position(center_x, center_y)
.with_name("Movie Clip");
vl.clip_instances.push(instance);
// Create default animation curves
let props_and_values = [
(TransformProperty::X, center_x),
(TransformProperty::Y, center_y),
(TransformProperty::Rotation, 0.0),
(TransformProperty::ScaleX, 1.0),
(TransformProperty::ScaleY, 1.0),
(TransformProperty::SkewX, 0.0),
(TransformProperty::SkewY, 0.0),
(TransformProperty::Opacity, 1.0),
];
for (prop, value) in props_and_values {
let target = AnimationTarget::Object {
id: self.instance_id,
property: prop,
};
let mut curve = AnimationCurve::new(target.clone(), value);
curve.set_keyframe(Keyframe::linear(0.0, value));
vl.layer.animation_data.set_curve(curve);
}
Ok(())
}
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
let layer = document
.get_layer_mut(&self.layer_id)
.ok_or_else(|| format!("Layer {} not found", self.layer_id))?;
if let AnyLayer::Vector(vl) = layer {
// Remove animation curves
for prop in &[
TransformProperty::X, TransformProperty::Y,
TransformProperty::Rotation,
TransformProperty::ScaleX, TransformProperty::ScaleY,
TransformProperty::SkewX, TransformProperty::SkewY,
TransformProperty::Opacity,
] {
let target = AnimationTarget::Object {
id: self.instance_id,
property: *prop,
};
vl.layer.animation_data.remove_curve(&target);
}
// Remove the clip instance
vl.clip_instances.retain(|ci| ci.id != self.instance_id);
// Re-insert removed shapes
for shape in self.removed_shapes.drain(..) {
vl.add_shape_to_keyframe(shape, self.time);
}
// Re-insert removed clip instances
for ci in self.removed_clip_instances.drain(..) {
vl.clip_instances.push(ci);
}
}
// Remove the VectorClip from the document
if let Some(clip_id) = self.created_clip_id.take() {
document.remove_vector_clip(&clip_id);
}
Ok(())
}
fn description(&self) -> String {
let count = self.shape_ids.len() + self.clip_instance_ids.len();
format!("Convert {} object(s) to Movie Clip", count)
}
}
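The new action follows the crate's execute/rollback symmetry: everything removed during `execute` is stashed on the action (`removed_shapes`, `removed_clip_instances`, `created_clip_id`) so `rollback` can restore the document exactly. The sketch below shows that pattern in miniature with plain `Vec<String>` state; the `Action` trait and `ConvertToGroup` type here only loosely mirror the real ones and exist purely for illustration.

```rust
trait Action {
    fn execute(&mut self, doc: &mut Vec<String>) -> Result<(), String>;
    fn rollback(&mut self, doc: &mut Vec<String>) -> Result<(), String>;
}

/// Moves selected items out of the document into a named group placeholder,
/// remembering what it removed so rollback can put everything back.
struct ConvertToGroup {
    selected: Vec<String>,
    removed: Vec<String>,    // filled during execute, drained during rollback
    created: Option<String>, // the placeholder inserted in place of the items
}

impl Action for ConvertToGroup {
    fn execute(&mut self, doc: &mut Vec<String>) -> Result<(), String> {
        self.removed.clear();
        doc.retain(|item| {
            if self.selected.contains(item) {
                self.removed.push(item.clone());
                false
            } else {
                true
            }
        });
        if self.removed.is_empty() {
            return Err("Need at least 1 item to convert".into());
        }
        let placeholder = format!("group({})", self.removed.len());
        doc.push(placeholder.clone());
        self.created = Some(placeholder);
        Ok(())
    }

    fn rollback(&mut self, doc: &mut Vec<String>) -> Result<(), String> {
        if let Some(placeholder) = self.created.take() {
            doc.retain(|item| item != &placeholder);
        }
        doc.append(&mut self.removed); // restore the originals
        Ok(())
    }
}

fn main() {
    let mut doc = vec!["a".to_string(), "b".to_string(), "c".to_string()];
    let mut action = ConvertToGroup {
        selected: vec!["a".into(), "c".into()],
        removed: Vec::new(),
        created: None,
    };
    action.execute(&mut doc).unwrap();
    assert_eq!(doc, vec!["b".to_string(), "group(2)".to_string()]);
    action.rollback(&mut doc).unwrap();
    assert_eq!(doc, vec!["b".to_string(), "a".to_string(), "c".to_string()]);
    println!("undo round-trip ok");
}
```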

View File

@ -30,6 +30,7 @@ pub mod remove_clip_instances;
pub mod remove_shapes;
pub mod set_keyframe;
pub mod group_shapes;
pub mod convert_to_movie_clip;
pub use add_clip_instance::AddClipInstanceAction;
pub use add_effect::AddEffectAction;
@ -58,3 +59,4 @@ pub use remove_clip_instances::RemoveClipInstancesAction;
pub use remove_shapes::RemoveShapesAction;
pub use set_keyframe::SetKeyframeAction;
pub use group_shapes::GroupAction;
pub use convert_to_movie_clip::ConvertToMovieClipAction;

View File

@ -190,6 +190,21 @@ impl Action for MoveClipInstancesAction {
let layer = document.get_layer(layer_id)
.ok_or_else(|| format!("Layer {} not found", layer_id))?;
// Handle vector layers: update metatrack offset for movie clips with audio
if let AnyLayer::Vector(vl) = layer {
for (instance_id, _old_start, new_start) in moves {
if let Some(instance) = vl.clip_instances.iter().find(|ci| ci.id == *instance_id) {
// Check if this clip has a metatrack
if let Some(&metatrack_id) = backend.clip_to_metatrack_map.get(&instance.clip_id) {
controller.set_offset(metatrack_id, *new_start);
controller.set_trim_start(metatrack_id, instance.trim_start);
controller.set_trim_end(metatrack_id, instance.trim_end);
}
}
}
continue;
}
// Only process audio layers
if !matches!(layer, AnyLayer::Audio(_)) {
continue;
@ -260,6 +275,20 @@ impl Action for MoveClipInstancesAction {
let layer = document.get_layer(layer_id)
.ok_or_else(|| format!("Layer {} not found", layer_id))?;
// Handle vector layers: restore metatrack offset for movie clips with audio
if let AnyLayer::Vector(vl) = layer {
for (instance_id, old_start, _new_start) in moves {
if let Some(instance) = vl.clip_instances.iter().find(|ci| ci.id == *instance_id) {
if let Some(&metatrack_id) = backend.clip_to_metatrack_map.get(&instance.clip_id) {
controller.set_offset(metatrack_id, *old_start);
controller.set_trim_start(metatrack_id, instance.trim_start);
controller.set_trim_end(metatrack_id, instance.trim_end);
}
}
}
continue;
}
// Only process audio layers
if !matches!(layer, AnyLayer::Audio(_)) {
continue;

View File

@ -76,8 +76,8 @@ impl SetLayerPropertiesAction {
impl Action for SetLayerPropertiesAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
for (i, &layer_id) in self.layer_ids.iter().enumerate() {
// Find the layer in the document
if let Some(layer) = document.root_mut().get_child_mut(&layer_id) {
// Find the layer in the document (searches root + inside movie clips)
if let Some(layer) = document.get_layer_mut(&layer_id) {
// Store old value if not already stored
if self.old_values[i].is_none() {
self.old_values[i] = Some(match &self.property {
@ -106,8 +106,8 @@ impl Action for SetLayerPropertiesAction {
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
for (i, &layer_id) in self.layer_ids.iter().enumerate() {
// Find the layer in the document
if let Some(layer) = document.root_mut().get_child_mut(&layer_id) {
// Find the layer in the document (searches root + inside movie clips)
if let Some(layer) = document.get_layer_mut(&layer_id) {
// Restore old value if we have one
if let Some(old_value) = &self.old_values[i] {
match old_value {

View File

@ -355,6 +355,21 @@ impl Action for TrimClipInstancesAction {
let layer = document.get_layer(layer_id)
.ok_or_else(|| format!("Layer {} not found", layer_id))?;
// Handle vector layers: update metatrack trim for movie clips with audio
if let AnyLayer::Vector(vl) = layer {
for (instance_id, _trim_type, _old, _new) in trims {
if let Some(instance) = vl.clip_instances.iter().find(|ci| ci.id == *instance_id) {
if let Some(&metatrack_id) = backend.clip_to_metatrack_map.get(&instance.clip_id) {
// Instance already has new values after execute()
controller.set_offset(metatrack_id, instance.timeline_start);
controller.set_trim_start(metatrack_id, instance.trim_start);
controller.set_trim_end(metatrack_id, instance.trim_end);
}
}
}
continue;
}
// Only process audio layers
if !matches!(layer, AnyLayer::Audio(_)) {
continue;
@ -430,6 +445,21 @@ impl Action for TrimClipInstancesAction {
let layer = document.get_layer(layer_id)
.ok_or_else(|| format!("Layer {} not found", layer_id))?;
// Handle vector layers: restore metatrack trim for movie clips with audio
if let AnyLayer::Vector(vl) = layer {
for (instance_id, _trim_type, _old, _new) in trims {
if let Some(instance) = vl.clip_instances.iter().find(|ci| ci.id == *instance_id) {
if let Some(&metatrack_id) = backend.clip_to_metatrack_map.get(&instance.clip_id) {
// Instance already has old values after rollback()
controller.set_offset(metatrack_id, instance.timeline_start);
controller.set_trim_start(metatrack_id, instance.trim_start);
controller.set_trim_end(metatrack_id, instance.trim_end);
}
}
}
continue;
}
// Only process audio layers
if !matches!(layer, AnyLayer::Audio(_)) {
continue;

View File

@ -90,6 +90,65 @@ impl VectorClip {
}
}
/// Calculate the duration of this clip based on its internal content.
///
/// Considers:
/// - Vector layer keyframes (last keyframe time + one frame)
/// - Audio/video/effect layer clip instances (timeline_start + effective duration)
///
/// Falls back to the stored `duration` field if no content exists.
/// This variant cannot resolve referenced clip durations; use
/// `content_duration_with` and pass a resolver closure when those matter.
pub fn content_duration(&self, framerate: f64) -> f64 {
self.content_duration_with(framerate, |_| None)
}
/// Like `content_duration`, but with a closure that resolves clip durations
/// for audio/video/effect clip instances inside this movie clip.
pub fn content_duration_with(&self, framerate: f64, clip_duration_fn: impl Fn(&Uuid) -> Option<f64>) -> f64 {
let frame_duration = 1.0 / framerate;
let mut last_time: Option<f64> = None;
for layer_node in self.layers.iter() {
// Check clip instances on ALL layer types (vector, audio, video, effect)
let clip_instances: &[ClipInstance] = match &layer_node.data {
AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
};
for ci in clip_instances {
let end = if let Some(td) = ci.timeline_duration {
ci.timeline_start + td
} else if let Some(te) = ci.trim_end {
ci.timeline_start + (te - ci.trim_start).max(0.0)
} else if let Some(clip_dur) = clip_duration_fn(&ci.clip_id) {
ci.timeline_start + (clip_dur - ci.trim_start).max(0.0)
} else {
continue;
};
last_time = Some(match last_time {
Some(t) => t.max(end),
None => end,
});
}
// Also check vector layer keyframes
if let AnyLayer::Vector(vector_layer) = &layer_node.data {
if let Some(last_kf) = vector_layer.keyframes.last() {
last_time = Some(match last_time {
Some(t) => t.max(last_kf.time),
None => last_kf.time,
});
}
}
}
match last_time {
Some(t) => t + frame_duration,
None => self.duration,
}
}
/// Calculate the bounding box of all content in this clip at a specific time
///
/// This recursively calculates the union of all shape and nested clip bounding boxes
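The core of `content_duration_with` is computing each clip instance's end on the internal timeline, trying `timeline_duration` first, then `trim_end`, then the resolver closure, and skipping instances whose length cannot be determined. The stand-alone sketch below reproduces just that fold with a simplified `ClipInstance` and numeric clip ids; it is an illustration of the logic above, not the real types.

```rust
/// Simplified clip instance: only the fields the duration fold needs.
struct ClipInstance {
    clip_id: u32,
    timeline_start: f64,
    timeline_duration: Option<f64>,
    trim_start: f64,
    trim_end: Option<f64>,
}

/// Mirror of the end-time logic in content_duration_with: timeline_duration,
/// then trim_end, then the resolver closure; None if the length is unknown.
fn instance_end(ci: &ClipInstance, resolve: impl Fn(u32) -> Option<f64>) -> Option<f64> {
    if let Some(td) = ci.timeline_duration {
        Some(ci.timeline_start + td)
    } else if let Some(te) = ci.trim_end {
        Some(ci.timeline_start + (te - ci.trim_start).max(0.0))
    } else if let Some(clip_dur) = resolve(ci.clip_id) {
        Some(ci.timeline_start + (clip_dur - ci.trim_start).max(0.0))
    } else {
        None
    }
}

fn main() {
    // Pretend clip 7 is a 3.5s audio clip in the library.
    let resolve = |id: u32| if id == 7 { Some(3.5) } else { None };

    let instances = [
        ClipInstance { clip_id: 7, timeline_start: 1.0, timeline_duration: None, trim_start: 0.5, trim_end: None },
        ClipInstance { clip_id: 9, timeline_start: 0.0, timeline_duration: Some(2.0), trim_start: 0.0, trim_end: None },
    ];

    // Last content time across all instances; a real caller would then add one
    // frame duration, as content_duration_with does.
    let last = instances
        .iter()
        .filter_map(|ci| instance_end(ci, &resolve))
        .fold(f64::NEG_INFINITY, f64::max);
    assert_eq!(last, 4.0); // clip 7 ends at 1.0 + (3.5 - 0.5) = 4.0
    println!("last content time = {last}");
}
```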

View File

@ -166,6 +166,11 @@ pub struct Document {
/// Current playback time in seconds
#[serde(skip)]
pub current_time: f64,
/// Reverse lookup: layer_id → clip_id for layers inside vector clips.
/// Enables O(1) lookup in get_layer/get_layer_mut instead of scanning all clips.
#[serde(skip)]
pub layer_to_clip_map: HashMap<Uuid, Uuid>,
}
impl Default for Document {
@ -195,6 +200,7 @@ impl Default for Document {
ui_layout: None,
ui_layout_base: None,
current_time: 0.0,
layer_to_clip_map: HashMap::new(),
}
}
}
@ -218,6 +224,27 @@ impl Document {
}
}
/// Rebuild the layer→clip reverse lookup map from all vector clips.
/// Call after deserialization or bulk clip modifications.
pub fn rebuild_layer_to_clip_map(&mut self) {
self.layer_to_clip_map.clear();
for (clip_id, clip) in &self.vector_clips {
for node in &clip.layers.roots {
self.layer_to_clip_map.insert(node.data.id(), *clip_id);
}
}
}
/// Register a layer as belonging to a clip (for O(1) lookup).
pub fn register_layer_in_clip(&mut self, layer_id: Uuid, clip_id: Uuid) {
self.layer_to_clip_map.insert(layer_id, clip_id);
}
/// Unregister a layer from the clip lookup map.
pub fn unregister_layer_from_clip(&mut self, layer_id: &Uuid) {
self.layer_to_clip_map.remove(layer_id);
}
/// Set the background color
pub fn with_background(mut self, color: ShapeColor) -> Self {
self.background_color = color;
@ -343,9 +370,31 @@ impl Document {
.filter(|layer| layer.layer().visible)
}
/// Get a layer by ID
/// Get visible layers for the current editing context
pub fn context_visible_layers(&self, clip_id: Option<&Uuid>) -> Vec<&AnyLayer> {
self.context_layers(clip_id)
.into_iter()
.filter(|layer| layer.layer().visible)
.collect()
}
/// Get a layer by ID (searches root layers, then clip layers via O(1) map lookup)
pub fn get_layer(&self, id: &Uuid) -> Option<&AnyLayer> {
self.root.get_child(id)
// First check root layers
if let Some(layer) = self.root.get_child(id) {
return Some(layer);
}
// O(1) lookup: check if this layer belongs to a clip
if let Some(clip_id) = self.layer_to_clip_map.get(id) {
if let Some(clip) = self.vector_clips.get(clip_id) {
for node in &clip.layers.roots {
if &node.data.id() == id {
return Some(&node.data);
}
}
}
}
None
}
// === MUTATION METHODS (pub(crate) - only accessible to action module) ===
@ -358,12 +407,68 @@ impl Document {
&mut self.root
}
/// Get mutable access to a layer by ID
/// Get mutable access to a layer by ID (searches root layers, then clip layers via O(1) map lookup)
///
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub fn get_layer_mut(&mut self, id: &Uuid) -> Option<&mut AnyLayer> {
self.root.get_child_mut(id)
// First check root layers
if self.root.get_child(id).is_some() {
return self.root.get_child_mut(id);
}
// O(1) lookup: check if this layer belongs to a clip
if let Some(clip_id) = self.layer_to_clip_map.get(id).copied() {
if let Some(clip) = self.vector_clips.get_mut(&clip_id) {
for node in &mut clip.layers.roots {
if &node.data.id() == id {
return Some(&mut node.data);
}
}
}
}
None
}
// === EDITING CONTEXT METHODS ===
/// Get the layers for the current editing context.
/// When `clip_id` is None, returns root layers. When Some, returns the clip's layers.
pub fn context_layers(&self, clip_id: Option<&Uuid>) -> Vec<&AnyLayer> {
match clip_id {
None => self.root.children.iter().collect(),
Some(id) => self.vector_clips.get(id)
.map(|clip| clip.layers.root_data())
.unwrap_or_default(),
}
}
/// Get mutable layers for the current editing context.
pub fn context_layers_mut(&mut self, clip_id: Option<&Uuid>) -> Vec<&mut AnyLayer> {
match clip_id {
None => self.root.children.iter_mut().collect(),
Some(id) => self.vector_clips.get_mut(id)
.map(|clip| clip.layers.root_data_mut())
.unwrap_or_default(),
}
}
/// Look up a layer by ID within an editing context.
pub fn get_layer_in_context(&self, clip_id: Option<&Uuid>, layer_id: &Uuid) -> Option<&AnyLayer> {
self.context_layers(clip_id).into_iter().find(|l| &l.id() == layer_id)
}
/// Look up a mutable layer by ID within an editing context.
pub fn get_layer_in_context_mut(&mut self, clip_id: Option<&Uuid>, layer_id: &Uuid) -> Option<&mut AnyLayer> {
self.context_layers_mut(clip_id).into_iter().find(|l| &l.id() == layer_id)
}
/// Get all layers across the entire document (root + inside all vector clips).
pub fn all_layers(&self) -> Vec<&AnyLayer> {
let mut layers: Vec<&AnyLayer> = self.root.children.iter().collect();
for clip in self.vector_clips.values() {
layers.extend(clip.layers.root_data());
}
layers
}
// === CLIP LIBRARY METHODS ===
@ -371,6 +476,10 @@ impl Document {
/// Add a vector clip to the library
pub fn add_vector_clip(&mut self, clip: VectorClip) -> Uuid {
let id = clip.id;
// Register all layers in the clip for O(1) reverse lookup
for node in &clip.layers.roots {
self.layer_to_clip_map.insert(node.data.id(), id);
}
self.vector_clips.insert(id, clip);
id
}
@ -439,7 +548,15 @@ impl Document {
/// Remove a vector clip from the library
pub fn remove_vector_clip(&mut self, id: &Uuid) -> Option<VectorClip> {
self.vector_clips.remove(id)
if let Some(clip) = self.vector_clips.remove(id) {
// Unregister all layers from the reverse lookup map
for node in &clip.layers.roots {
self.layer_to_clip_map.remove(&node.data.id());
}
Some(clip)
} else {
None
}
}
/// Remove a video clip from the library
@ -534,7 +651,25 @@ impl Document {
/// have infinite internal duration.
pub fn get_clip_duration(&self, clip_id: &Uuid) -> Option<f64> {
if let Some(clip) = self.vector_clips.get(clip_id) {
Some(clip.duration)
if clip.is_group {
Some(clip.duration)
} else {
Some(clip.content_duration_with(self.framerate, |id| {
// Resolve nested clip durations (audio, video, other vector clips)
if let Some(vc) = self.vector_clips.get(id) {
// Limit recursion to one level: nested vector clips use content_duration(),
// which resolves no further clip references
Some(vc.content_duration(self.framerate))
} else if let Some(ac) = self.audio_clips.get(id) {
Some(ac.duration)
} else if let Some(vc) = self.video_clips.get(id) {
Some(vc.duration)
} else if self.effect_definitions.contains_key(id) {
Some(crate::effect::EFFECT_DURATION)
} else {
None
}
}))
}
} else if let Some(clip) = self.video_clips.get(clip_id) {
Some(clip.duration)
} else if let Some(clip) = self.audio_clips.get(clip_id) {
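The new `layer_to_clip_map` is a transient reverse index (layer id to owning clip id) that has to be kept in sync at three points: rebuilt after deserialization, populated when a clip is added, and cleared out when a clip is removed, which is exactly what `rebuild_layer_to_clip_map`, `add_vector_clip`, and `remove_vector_clip` do above. The stand-alone sketch below shows the same bookkeeping with `u32` ids and a toy `Clip`; these are simplified stand-ins, not the real `Document` types.

```rust
use std::collections::HashMap;

type Id = u32;

struct Clip {
    layer_ids: Vec<Id>,
}

struct Library {
    clips: HashMap<Id, Clip>,
    /// Transient reverse index: layer id -> owning clip id (not serialized).
    layer_to_clip: HashMap<Id, Id>,
}

impl Library {
    /// Rebuild the whole index, e.g. right after deserialization.
    fn rebuild_index(&mut self) {
        self.layer_to_clip.clear();
        for (&clip_id, clip) in &self.clips {
            for &layer_id in &clip.layer_ids {
                self.layer_to_clip.insert(layer_id, clip_id);
            }
        }
    }

    /// Adding a clip registers all of its layers in the index.
    fn add_clip(&mut self, clip_id: Id, clip: Clip) {
        for &layer_id in &clip.layer_ids {
            self.layer_to_clip.insert(layer_id, clip_id);
        }
        self.clips.insert(clip_id, clip);
    }

    /// Removing a clip unregisters its layers again.
    fn remove_clip(&mut self, clip_id: Id) -> Option<Clip> {
        let clip = self.clips.remove(&clip_id)?;
        for layer_id in &clip.layer_ids {
            self.layer_to_clip.remove(layer_id);
        }
        Some(clip)
    }

    /// O(1) lookup of the clip owning a layer, instead of scanning every clip.
    fn clip_of_layer(&self, layer_id: Id) -> Option<Id> {
        self.layer_to_clip.get(&layer_id).copied()
    }
}

fn main() {
    let mut lib = Library { clips: HashMap::new(), layer_to_clip: HashMap::new() };
    lib.add_clip(100, Clip { layer_ids: vec![1, 2] });
    assert_eq!(lib.clip_of_layer(2), Some(100));
    lib.remove_clip(100).expect("clip was in the library");
    assert_eq!(lib.clip_of_layer(2), None);
    lib.rebuild_index(); // no-op here, but this is what runs after loading a file
    println!("reverse index ok");
}
```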

View File

@ -60,6 +60,10 @@ pub struct SerializedAudioBackend {
/// Preserves the connection between UI layers and audio engine tracks across save/load
#[serde(default)]
pub layer_to_track_map: std::collections::HashMap<uuid::Uuid, u32>,
/// Mapping from movie clip UUIDs to backend metatrack (group track) TrackIds
#[serde(default)]
pub clip_to_metatrack_map: std::collections::HashMap<uuid::Uuid, u32>,
}
/// Settings for saving a project
@ -96,6 +100,9 @@ pub struct LoadedProject {
/// Mapping from UI layer UUIDs to backend TrackIds (empty for old files)
pub layer_to_track_map: std::collections::HashMap<uuid::Uuid, u32>,
/// Mapping from movie clip UUIDs to backend metatrack TrackIds (empty for old files)
pub clip_to_metatrack_map: std::collections::HashMap<uuid::Uuid, u32>,
/// Loaded audio pool entries
pub audio_pool_entries: Vec<AudioPoolEntry>,
@ -147,6 +154,7 @@ pub fn save_beam(
audio_project: &mut AudioProject,
audio_pool_entries: Vec<AudioPoolEntry>,
layer_to_track_map: &std::collections::HashMap<uuid::Uuid, u32>,
clip_to_metatrack_map: &std::collections::HashMap<uuid::Uuid, u32>,
_settings: &SaveSettings,
) -> Result<(), String> {
let fn_start = std::time::Instant::now();
@ -375,6 +383,7 @@ pub fn save_beam(
project: audio_project.clone(),
audio_pool_entries: modified_entries,
layer_to_track_map: layer_to_track_map.clone(),
clip_to_metatrack_map: clip_to_metatrack_map.clone(),
},
};
eprintln!("📊 [SAVE_BEAM] Step 5: Build BeamProject structure took {:.2}ms", step5_start.elapsed().as_secs_f64() * 1000.0);
@ -462,6 +471,7 @@ pub fn load_beam(path: &Path) -> Result<LoadedProject, String> {
let mut audio_project = beam_project.audio_backend.project;
let audio_pool_entries = beam_project.audio_backend.audio_pool_entries;
let layer_to_track_map = beam_project.audio_backend.layer_to_track_map;
let clip_to_metatrack_map = beam_project.audio_backend.clip_to_metatrack_map;
eprintln!("📊 [LOAD_BEAM] Step 5: Extract document and audio state took {:.2}ms", step5_start.elapsed().as_secs_f64() * 1000.0);
// 6. Rebuild AudioGraphs from presets
@ -607,6 +617,7 @@ pub fn load_beam(path: &Path) -> Result<LoadedProject, String> {
document,
audio_project,
layer_to_track_map,
clip_to_metatrack_map,
audio_pool_entries: restored_entries,
missing_files,
})

View File

@ -164,6 +164,13 @@ pub fn hit_test_clip_instances(
timeline_time: f64,
) -> Option<Uuid> {
for clip_instance in clip_instances.iter().rev() {
// Check time bounds: skip clip instances not active at this time
let clip_duration = document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_duration);
if timeline_time < clip_instance.timeline_start || timeline_time >= instance_end {
continue;
}
let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {
@ -196,6 +203,13 @@ pub fn hit_test_clip_instances_in_rect(
let mut hits = Vec::new();
for clip_instance in clip_instances {
// Check time bounds: skip clip instances not active at this time
let clip_duration = document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_duration);
if timeline_time < clip_instance.timeline_start || timeline_time >= instance_end {
continue;
}
let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {
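The new guard skips clip instances that are not active at the timeline time before any geometric hit testing happens. The short sketch below makes the interval test concrete; note that `effective_duration` here is a hypothetical helper written only for this example (the real `ClipInstance::effective_duration` may compute the length differently), and the half-open interval is the part that matches the guard above.

```rust
/// Hypothetical stand-in for ClipInstance::effective_duration, written only
/// for this example: how long the instance occupies the parent timeline.
fn effective_duration(clip_dur: f64, trim_start: f64, trim_end: Option<f64>, playback_speed: f64) -> f64 {
    let content = trim_end.unwrap_or(clip_dur) - trim_start;
    (content / playback_speed).max(0.0)
}

/// The time-bounds guard added in the hit-test functions above: an instance
/// is only hit-testable while the playhead lies inside
/// [timeline_start, timeline_start + effective duration).
fn is_active(timeline_time: f64, timeline_start: f64, instance_len: f64) -> bool {
    timeline_time >= timeline_start && timeline_time < timeline_start + instance_len
}

fn main() {
    // A 10s clip, trimmed to its first 4s, played at normal speed, starting at t = 2s.
    let len = effective_duration(10.0, 0.0, Some(4.0), 1.0);
    assert!(is_active(3.0, 2.0, len));  // inside [2, 6)
    assert!(!is_active(6.0, 2.0, len)); // the end is exclusive
    println!("active window = [2.0, {})", 2.0 + len);
}
```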

View File

@ -328,48 +328,6 @@ impl VectorLayer {
// === MUTATION METHODS (pub(crate) - only accessible to action module) ===
/// Add a shape to this layer (internal, for actions only)
///
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn add_shape_internal(&mut self, shape: Shape) -> Uuid {
let id = shape.id;
self.shapes.insert(id, shape);
id
}
/// Add an object to this layer (internal, for actions only)
///
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn add_object_internal(&mut self, object: ShapeInstance) -> Uuid {
let id = object.id;
self.shape_instances.push(object);
id
}
/// Remove a shape from this layer (internal, for actions only)
///
/// Returns the removed shape if found.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn remove_shape_internal(&mut self, id: &Uuid) -> Option<Shape> {
self.shapes.remove(id)
}
/// Remove an object from this layer (internal, for actions only)
///
/// Returns the removed object if found.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn remove_object_internal(&mut self, id: &Uuid) -> Option<ShapeInstance> {
if let Some(index) = self.shape_instances.iter().position(|o| &o.id == id) {
Some(self.shape_instances.remove(index))
} else {
None
}
}
/// Modify an object in place (internal, for actions only)
///
/// Applies the given function to the object if found.

View File

@ -110,6 +110,18 @@ impl<T> LayerTree<T> {
}
}
impl<T> LayerTree<T> {
/// Get flat list of references to all root layer data
pub fn root_data(&self) -> Vec<&T> {
self.roots.iter().map(|n| &n.data).collect()
}
/// Get flat list of mutable references to all root layer data
pub fn root_data_mut(&mut self) -> Vec<&mut T> {
self.roots.iter_mut().map(|n| &mut n.data).collect()
}
}
impl<T> Default for LayerTree<T> {
fn default() -> Self {
Self::new()

View File

@ -373,7 +373,19 @@ pub fn render_document_with_transform(
// 2. Recursively render the root graphics object at current time
let time = document.current_time;
render_graphics_object(document, time, scene, base_transform, image_cache, video_manager, skip_instance_id);
// Check if any layers are soloed
let any_soloed = document.visible_layers().any(|layer| layer.soloed());
for layer in document.visible_layers() {
if any_soloed {
if layer.soloed() {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
} else {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
}
}
/// Draw the document background
@ -392,35 +404,6 @@ fn render_background(document: &Document, scene: &mut Scene, base_transform: Aff
);
}
/// Recursively render the root graphics object and its children
fn render_graphics_object(
document: &Document,
time: f64,
scene: &mut Scene,
base_transform: Affine,
image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) {
// Check if any layers are soloed
let any_soloed = document.visible_layers().any(|layer| layer.soloed());
// Render layers based on solo state
// If any layer is soloed, only render soloed layers
// Otherwise, render all visible layers
// Start with full opacity (1.0)
for layer in document.visible_layers() {
if any_soloed {
// Only render soloed layers when solo is active
if layer.soloed() {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
} else {
// Render all visible layers when no solo is active
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
}
}
/// Render a single layer
fn render_layer(
@ -451,6 +434,42 @@ fn render_layer(
}
}
/// Render a single clip instance by ID to a scene.
/// Used for re-rendering the "focused" clip on top of a dimmed scene when editing inside a clip.
pub fn render_single_clip_instance(
document: &Document,
scene: &mut Scene,
base_transform: Affine,
layer_id: &uuid::Uuid,
instance_id: &uuid::Uuid,
image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
) {
let time = document.current_time;
// Find the layer containing this instance
let Some(layer) = document.get_layer(layer_id) else { return };
let AnyLayer::Vector(vector_layer) = layer else { return };
let layer_opacity = vector_layer.layer.opacity;
// Find the specific clip instance
let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| &ci.id == instance_id) else { return };
// Compute group_end_time if needed
let group_end_time = document.vector_clips.get(&clip_instance.clip_id)
.filter(|vc| vc.is_group)
.map(|_| {
let frame_duration = 1.0 / document.framerate;
vector_layer.group_visibility_end(&clip_instance.id, clip_instance.timeline_start, frame_duration)
});
render_clip_instance(
document, time, clip_instance, layer_opacity, scene, base_transform,
&vector_layer.layer.animation_data, image_cache, video_manager, group_end_time,
);
}
/// Render a clip instance (recursive rendering for nested compositions)
fn render_clip_instance(
document: &Document,
@ -479,7 +498,8 @@ fn render_clip_instance(
}
0.0
} else {
let Some(t) = clip_instance.remap_time(time, vector_clip.duration) else {
let clip_dur = document.get_clip_duration(&vector_clip.id).unwrap_or(vector_clip.duration);
let Some(t) = clip_instance.remap_time(time, clip_dur) else {
return; // Clip instance not active at this time
};
t

View File

@ -384,6 +384,7 @@ enum FileCommand {
path: std::path::PathBuf,
document: lightningbeam_core::document::Document,
layer_to_track_map: std::collections::HashMap<uuid::Uuid, u32>,
clip_to_metatrack_map: std::collections::HashMap<uuid::Uuid, u32>,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
},
Load {
@ -458,8 +459,8 @@ impl FileOperationsWorker {
fn run(self) {
while let Ok(command) = self.command_rx.recv() {
match command {
FileCommand::Save { path, document, layer_to_track_map, progress_tx } => {
self.handle_save(path, document, &layer_to_track_map, progress_tx);
FileCommand::Save { path, document, layer_to_track_map, clip_to_metatrack_map, progress_tx } => {
self.handle_save(path, document, &layer_to_track_map, &clip_to_metatrack_map, progress_tx);
}
FileCommand::Load { path, progress_tx } => {
self.handle_load(path, progress_tx);
@ -474,6 +475,7 @@ impl FileOperationsWorker {
path: std::path::PathBuf,
document: lightningbeam_core::document::Document,
layer_to_track_map: &std::collections::HashMap<uuid::Uuid, u32>,
clip_to_metatrack_map: &std::collections::HashMap<uuid::Uuid, u32>,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
) {
use lightningbeam_core::file_io::{save_beam, SaveSettings};
@ -516,7 +518,7 @@ impl FileOperationsWorker {
let step3_start = std::time::Instant::now();
let settings = SaveSettings::default();
match save_beam(&path, &document, &mut audio_project, audio_pool_entries, layer_to_track_map, &settings) {
match save_beam(&path, &document, &mut audio_project, audio_pool_entries, layer_to_track_map, clip_to_metatrack_map, &settings) {
Ok(()) => {
eprintln!("📊 [SAVE] Step 3: save_beam() took {:.2}ms", step3_start.elapsed().as_secs_f64() * 1000.0);
eprintln!("📊 [SAVE] ✅ Total save time: {:.2}ms", save_start.elapsed().as_secs_f64() * 1000.0);
@ -616,6 +618,51 @@ enum RecordingArmMode {
Manual,
}
/// Entry in the editing context stack — tracks which clip is being edited
#[derive(Clone)]
struct EditingContextEntry {
/// The VectorClip ID being edited
clip_id: Uuid,
/// The ClipInstance ID through which we entered
instance_id: Uuid,
/// The layer ID that contains the instance in the parent context
parent_layer_id: Uuid,
/// Saved playback time from the parent context (restored on exit)
saved_playback_time: f64,
/// Saved active layer ID from the parent context
saved_active_layer_id: Option<Uuid>,
}
/// Editing context stack — tracks which clip (or root) is being edited.
/// Empty stack = editing the document root.
#[derive(Clone, Default)]
struct EditingContext {
stack: Vec<EditingContextEntry>,
}
impl EditingContext {
fn current_clip_id(&self) -> Option<Uuid> {
self.stack.last().map(|e| e.clip_id)
}
fn current_instance_id(&self) -> Option<Uuid> {
self.stack.last().map(|e| e.instance_id)
}
fn current_parent_layer_id(&self) -> Option<Uuid> {
self.stack.last().map(|e| e.parent_layer_id)
}
fn push(&mut self, entry: EditingContextEntry) {
self.stack.push(entry);
}
fn pop(&mut self) -> Option<EditingContextEntry> {
self.stack.pop()
}
}
struct EditorApp {
layouts: Vec<LayoutDefinition>,
current_layout_index: usize,
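A usage sketch of the editing-context stack introduced above: entering a movie clip pushes an entry that remembers how to get back (the instance entered, its parent layer, and the saved playback state), and exiting pops it and restores that state; an empty stack means the document root is being edited. The `u64`s stand in for the `Uuid`s, `Editor` is reduced to the two fields the context actually restores, and the enter/exit methods are an illustration rather than the editor's real code.

```rust
struct EditingContextEntry {
    clip_id: u64,
    instance_id: u64,
    parent_layer_id: u64,
    saved_playback_time: f64,
    saved_active_layer_id: Option<u64>,
}

#[derive(Default)]
struct EditingContext {
    stack: Vec<EditingContextEntry>,
}

/// Reduced editor state: only what entering/exiting a clip touches.
struct Editor {
    context: EditingContext,
    playback_time: f64,
    active_layer_id: Option<u64>,
}

impl Editor {
    /// Entering a movie clip instance: remember where we were, then start
    /// editing the clip's internal timeline from t = 0.
    fn enter_clip(&mut self, clip_id: u64, instance_id: u64, parent_layer_id: u64) {
        self.context.stack.push(EditingContextEntry {
            clip_id,
            instance_id,
            parent_layer_id,
            saved_playback_time: self.playback_time,
            saved_active_layer_id: self.active_layer_id,
        });
        self.playback_time = 0.0;
        self.active_layer_id = None;
    }

    /// Leaving the clip: restore the parent context's playhead and active layer.
    fn exit_clip(&mut self) {
        if let Some(entry) = self.context.stack.pop() {
            self.playback_time = entry.saved_playback_time;
            self.active_layer_id = entry.saved_active_layer_id;
        }
    }

    /// None = editing the document root.
    fn current_clip(&self) -> Option<u64> {
        self.context.stack.last().map(|e| e.clip_id)
    }
}

fn main() {
    let mut ed = Editor { context: EditingContext::default(), playback_time: 12.5, active_layer_id: Some(3) };
    ed.enter_clip(42, 7, 3);
    assert_eq!(ed.current_clip(), Some(42));
    assert_eq!(ed.playback_time, 0.0);
    ed.exit_clip();
    assert_eq!(ed.current_clip(), None);
    assert_eq!(ed.playback_time, 12.5);
    assert_eq!(ed.active_layer_id, Some(3));
    println!("editing context round-trip ok");
}
```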
@ -638,6 +685,7 @@ struct EditorApp {
action_executor: lightningbeam_core::action::ActionExecutor, // Action system for undo/redo
active_layer_id: Option<Uuid>, // Currently active layer for editing
selection: lightningbeam_core::selection::Selection, // Current selection state
editing_context: EditingContext, // Which clip (or root) we're editing
tool_state: lightningbeam_core::tool::ToolState, // Current tool interaction state
// Draw tool configuration
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode, // Current simplification mode for draw tool
@ -658,6 +706,8 @@ struct EditorApp {
// Track ID mapping (Document layer UUIDs <-> daw-backend TrackIds)
layer_to_track_map: HashMap<Uuid, daw_backend::TrackId>,
track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>,
// Movie clip ID -> backend metatrack (group track) mapping
clip_to_metatrack_map: HashMap<Uuid, daw_backend::TrackId>,
/// Generation counter - incremented on project load to force UI components to reload
project_generation: u64,
// Clip instance ID mapping (Document clip instance UUIDs <-> backend clip instance IDs)
@ -874,6 +924,7 @@ impl EditorApp {
action_executor,
active_layer_id: Some(layer_id),
selection: lightningbeam_core::selection::Selection::new(),
editing_context: EditingContext::default(),
tool_state: lightningbeam_core::tool::ToolState::default(),
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves
rdp_tolerance: 10.0, // Default RDP tolerance
@ -889,6 +940,7 @@ impl EditorApp {
)),
layer_to_track_map: HashMap::new(),
track_to_layer_map: HashMap::new(),
clip_to_metatrack_map: HashMap::new(),
project_generation: 0,
clip_instance_to_backend_map: HashMap::new(),
playback_time: 0.0, // Start at beginning
@ -1262,63 +1314,79 @@ impl EditorApp {
fn sync_audio_layers_to_backend(&mut self) {
use lightningbeam_core::layer::{AnyLayer, AudioLayerType};
// Iterate through all layers in the document
// Collect audio layers from root and inside vector clips
// Each entry: (layer_id, layer_name, audio_type, parent_clip_id)
let mut audio_layers_to_sync: Vec<(uuid::Uuid, String, AudioLayerType, Option<uuid::Uuid>)> = Vec::new();
// Root layers
for layer in &self.action_executor.document().root.children {
// Only process Audio layers
if let AnyLayer::Audio(audio_layer) = layer {
let layer_id = audio_layer.layer.id;
let layer_name = &audio_layer.layer.name;
// Skip if already mapped (shouldn't happen, but be defensive)
if self.layer_to_track_map.contains_key(&layer_id) {
continue;
if !self.layer_to_track_map.contains_key(&layer_id) {
audio_layers_to_sync.push((
layer_id,
audio_layer.layer.name.clone(),
audio_layer.audio_layer_type,
None,
));
}
}
}
// Handle both MIDI and Sampled audio tracks
match audio_layer.audio_layer_type {
AudioLayerType::Midi => {
// Create daw-backend MIDI track
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_midi_track_sync(layer_name.clone()) {
Ok(track_id) => {
// Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
// Layers inside vector clips
for (&clip_id, clip) in &self.action_executor.document().vector_clips {
for layer in clip.layers.root_data() {
if let AnyLayer::Audio(audio_layer) = layer {
let layer_id = audio_layer.layer.id;
if !self.layer_to_track_map.contains_key(&layer_id) {
audio_layers_to_sync.push((
layer_id,
audio_layer.layer.name.clone(),
audio_layer.audio_layer_type,
Some(clip_id),
));
}
}
}
}
// Load default instrument
if let Err(e) = default_instrument::load_default_instrument(&mut *controller, track_id) {
eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e);
} else {
println!("✅ Synced MIDI layer '{}' to backend (TrackId: {})", layer_name, track_id);
}
// Now create backend tracks for each
for (layer_id, layer_name, audio_type, parent_clip_id) in audio_layers_to_sync {
// If inside a clip, ensure a metatrack exists
let parent_track = parent_clip_id.and_then(|cid| self.ensure_metatrack_for_clip(cid));
// TODO: Sync any existing clips on this layer to the backend
// This will be implemented when we add clip synchronization
}
Err(e) => {
eprintln!("⚠️ Failed to create daw-backend track for MIDI layer '{}': {}", layer_name, e);
match audio_type {
AudioLayerType::Midi => {
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_midi_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
if let Err(e) = default_instrument::load_default_instrument(&mut *controller, track_id) {
eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e);
} else {
println!("✅ Synced MIDI layer '{}' to backend (TrackId: {}, parent: {:?})", layer_name, track_id, parent_track);
}
}
Err(e) => {
eprintln!("⚠️ Failed to create daw-backend track for MIDI layer '{}': {}", layer_name, e);
}
}
}
AudioLayerType::Sampled => {
// Create daw-backend Audio track
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_audio_track_sync(layer_name.clone()) {
Ok(track_id) => {
// Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
println!("✅ Synced Audio layer '{}' to backend (TrackId: {})", layer_name, track_id);
// TODO: Sync any existing clips on this layer to the backend
// This will be implemented when we add clip synchronization
}
Err(e) => {
eprintln!("⚠️ Failed to create daw-backend audio track for '{}': {}", layer_name, e);
}
}
AudioLayerType::Sampled => {
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_audio_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
println!("✅ Synced Audio layer '{}' to backend (TrackId: {}, parent: {:?})", layer_name, track_id, parent_track);
}
Err(e) => {
eprintln!("⚠️ Failed to create daw-backend audio track for '{}': {}", layer_name, e);
}
}
}
@ -1327,6 +1395,36 @@ impl EditorApp {
}
}
/// Ensure a backend metatrack (group track) exists for a movie clip.
/// Returns the metatrack's TrackId, creating one if needed.
fn ensure_metatrack_for_clip(&mut self, clip_id: Uuid) -> Option<daw_backend::TrackId> {
// Return existing metatrack if already mapped
if let Some(&track_id) = self.clip_to_metatrack_map.get(&clip_id) {
return Some(track_id);
}
// Create a new metatrack in the backend
let clip_name = self.action_executor.document().vector_clips
.get(&clip_id)
.map(|c| c.name.clone())
.unwrap_or_else(|| format!("Clip {}", clip_id));
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_group_track_sync(format!("[{}]", clip_name), None) {
Ok(track_id) => {
self.clip_to_metatrack_map.insert(clip_id, track_id);
println!("✅ Created metatrack for clip '{}' (TrackId: {})", clip_name, track_id);
return Some(track_id);
}
Err(e) => {
eprintln!("⚠️ Failed to create metatrack for clip '{}': {}", clip_name, e);
}
}
}
None
}
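    /// A minimal sketch (not from the commits) of the lazy get-or-create pattern used above.
    /// The mapping is only extended when the backend call succeeds, so a failed creation can
    /// be retried on a later call instead of caching a bogus ID. All names here are illustrative.
    #[allow(dead_code)]
    fn ensure_mapping_sketch<K: std::hash::Hash + Eq + Copy, V: Copy>(
        map: &mut std::collections::HashMap<K, V>,
        key: K,
        create: impl FnOnce() -> Result<V, String>, // stand-in for the fallible backend call
    ) -> Option<V> {
        if let Some(&existing) = map.get(&key) {
            return Some(existing); // already mapped: reuse it
        }
        match create() {
            Ok(value) => {
                map.insert(key, value); // cache only on success
                Some(value)
            }
            Err(e) => {
                eprintln!("creation failed: {}", e);
                None // map stays untouched, so the next call retries
            }
        }
    }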
/// Split clip instances at the current playhead position
///
/// Only splits clips on the active layer, plus any clips linked to them
@ -1440,6 +1538,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
@ -1585,7 +1684,6 @@ impl EditorApp {
/// Delete the current selection (for cut and delete operations)
fn clipboard_delete_selection(&mut self) {
use lightningbeam_core::layer::AnyLayer;
if !self.selection.clip_instances().is_empty() {
let active_layer_id = match self.active_layer_id {
@ -1613,6 +1711,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self
.action_executor
@ -1734,6 +1833,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self
.action_executor
@ -1848,6 +1948,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
eprintln!("Duplicate clip failed: {}", e);
@ -1927,6 +2028,7 @@ impl EditorApp {
// TODO: Add ResetProject command to EngineController
self.layer_to_track_map.clear();
self.track_to_layer_map.clear();
self.clip_to_metatrack_map.clear();
// Clear file path
self.current_file_path = None;
@ -2130,6 +2232,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
match self.action_executor.undo_with_backend(&mut backend_context) {
Ok(true) => {
@ -2165,6 +2268,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
match self.action_executor.redo_with_backend(&mut backend_context) {
Ok(true) => {
@ -2241,6 +2345,28 @@ impl EditorApp {
}
}
}
MenuAction::ConvertToMovieClip => {
if let Some(layer_id) = self.active_layer_id {
let shape_ids: Vec<uuid::Uuid> = self.selection.shape_instances().to_vec();
let clip_ids: Vec<uuid::Uuid> = self.selection.clip_instances().to_vec();
if shape_ids.len() + clip_ids.len() >= 1 {
let instance_id = uuid::Uuid::new_v4();
let action = lightningbeam_core::actions::ConvertToMovieClipAction::new(
layer_id,
self.playback_time,
shape_ids,
clip_ids,
instance_id,
);
if let Err(e) = self.action_executor.execute(Box::new(action)) {
eprintln!("Failed to convert to movie clip: {}", e);
} else {
self.selection.clear();
self.selection.add_clip_instance(instance_id);
}
}
}
}
MenuAction::SendToBack => {
println!("Menu: Send to Back");
// TODO: Implement send to back
@ -2259,58 +2385,71 @@ impl EditorApp {
// Layer menu
MenuAction::AddLayer => {
// Create a new vector layer with a default name
let layer_count = self.action_executor.document().root.children.len();
let editing_clip_id = self.editing_context.current_clip_id();
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
let layer_name = format!("Layer {}", layer_count + 1);
let action = lightningbeam_core::actions::AddLayerAction::new_vector(layer_name);
let action = lightningbeam_core::actions::AddLayerAction::new_vector(layer_name)
.with_target_clip(editing_clip_id);
let _ = self.action_executor.execute(Box::new(action));
// Select the newly created layer (last child in the document)
if let Some(last_layer) = self.action_executor.document().root.children.last() {
// Select the newly created layer (last in context)
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
if let Some(last_layer) = context_layers.last() {
self.active_layer_id = Some(last_layer.id());
}
}
MenuAction::AddVideoLayer => {
println!("Menu: Add Video Layer");
// Create a new video layer with a default name
let layer_number = self.action_executor.document().root.children.len() + 1;
let editing_clip_id = self.editing_context.current_clip_id();
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
let layer_number = context_layers.len() + 1;
let layer_name = format!("Video {}", layer_number);
let new_layer = lightningbeam_core::layer::AnyLayer::Video(
lightningbeam_core::layer::VideoLayer::new(&layer_name)
);
// Add the layer to the document
self.action_executor.document_mut().root.add_child(new_layer.clone());
let action = lightningbeam_core::actions::AddLayerAction::new(new_layer)
.with_target_clip(editing_clip_id);
let _ = self.action_executor.execute(Box::new(action));
// Set it as the active layer
if let Some(last_layer) = self.action_executor.document().root.children.last() {
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
if let Some(last_layer) = context_layers.last() {
self.active_layer_id = Some(last_layer.id());
}
}
MenuAction::AddAudioTrack => {
// Create a new sampled audio layer with a default name
let layer_count = self.action_executor.document().root.children.len();
let editing_clip_id = self.editing_context.current_clip_id();
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
let layer_name = format!("Audio Track {}", layer_count + 1);
// Create audio layer in document
let audio_layer = AudioLayer::new_sampled(layer_name.clone());
let action = lightningbeam_core::actions::AddLayerAction::new(AnyLayer::Audio(audio_layer));
let action = lightningbeam_core::actions::AddLayerAction::new(AnyLayer::Audio(audio_layer))
.with_target_clip(editing_clip_id);
let _ = self.action_executor.execute(Box::new(action));
// Get the newly created layer ID
if let Some(last_layer) = self.action_executor.document().root.children.last() {
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
if let Some(last_layer) = context_layers.last() {
let layer_id = last_layer.id();
self.active_layer_id = Some(layer_id);
// If inside a clip, ensure a metatrack exists for it
let parent_track = editing_clip_id.and_then(|cid| self.ensure_metatrack_for_clip(cid));
// Create corresponding daw-backend audio track
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_audio_track_sync(layer_name.clone()) {
match controller.create_audio_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
// Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
println!("✅ Created {} (backend TrackId: {})", layer_name, track_id);
println!("✅ Created {} (backend TrackId: {}, parent: {:?})", layer_name, track_id, parent_track);
}
Err(e) => {
eprintln!("⚠️ Failed to create daw-backend audio track for {}: {}", layer_name, e);
@ -2322,23 +2461,30 @@ impl EditorApp {
}
MenuAction::AddMidiTrack => {
// Create a new MIDI audio layer with a default name
let layer_count = self.action_executor.document().root.children.len();
let editing_clip_id = self.editing_context.current_clip_id();
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
let layer_name = format!("MIDI Track {}", layer_count + 1);
// Create MIDI layer in document
let midi_layer = AudioLayer::new_midi(layer_name.clone());
let action = lightningbeam_core::actions::AddLayerAction::new(AnyLayer::Audio(midi_layer));
let action = lightningbeam_core::actions::AddLayerAction::new(AnyLayer::Audio(midi_layer))
.with_target_clip(editing_clip_id);
let _ = self.action_executor.execute(Box::new(action));
// Get the newly created layer ID
if let Some(last_layer) = self.action_executor.document().root.children.last() {
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
if let Some(last_layer) = context_layers.last() {
let layer_id = last_layer.id();
self.active_layer_id = Some(layer_id);
// If inside a clip, ensure a metatrack exists for it
let parent_track = editing_clip_id.and_then(|cid| self.ensure_metatrack_for_clip(cid));
// Create corresponding daw-backend MIDI track
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_midi_track_sync(layer_name.clone()) {
match controller.create_midi_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
// Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id);
@ -2559,6 +2705,7 @@ impl EditorApp {
path: path.clone(),
document,
layer_to_track_map: self.layer_to_track_map.clone(),
clip_to_metatrack_map: self.clip_to_metatrack_map.clone(),
progress_tx,
};
@ -2708,6 +2855,15 @@ impl EditorApp {
eprintln!("📊 [APPLY] Step 5: No saved track mappings (old file format)");
}
// Restore clip-to-metatrack mappings
if !loaded_project.clip_to_metatrack_map.is_empty() {
for (&clip_id, &track_id) in &loaded_project.clip_to_metatrack_map {
self.clip_to_metatrack_map.insert(clip_id, track_id);
}
eprintln!("📊 [APPLY] Step 5b: Restored {} clip-to-metatrack mappings",
loaded_project.clip_to_metatrack_map.len());
}
// Sync any audio layers that don't have a mapping yet (new layers, or old file format)
let step6_start = std::time::Instant::now();
self.sync_audio_layers_to_backend();
@ -3249,12 +3405,16 @@ impl EditorApp {
// Update active layer to the new layer
self.active_layer_id = target_layer_id;
// If inside a clip, ensure a metatrack exists for it
let editing_clip_id = self.editing_context.current_clip_id();
let parent_track = editing_clip_id.and_then(|cid| self.ensure_metatrack_for_clip(cid));
// Create a backend audio/MIDI track and add the mapping
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match asset_info.clip_type {
panes::DragClipType::AudioSampled => {
match controller.create_audio_track_sync(layer_name.clone()) {
match controller.create_audio_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
@ -3263,7 +3423,7 @@ impl EditorApp {
}
}
panes::DragClipType::AudioMidi => {
match controller.create_midi_track_sync(layer_name.clone()) {
match controller.create_midi_track_sync(layer_name.clone(), parent_track) {
Ok(track_id) => {
self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id);
@ -3364,6 +3524,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
@ -3409,6 +3570,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(audio_action), &mut backend_context) {
@ -3437,8 +3599,9 @@ impl EditorApp {
let document = self.action_executor.document();
let mut video_instance_info: Option<(uuid::Uuid, uuid::Uuid, f64)> = None; // (layer_id, instance_id, timeline_start)
// Search all layers for a video clip instance with matching clip_id
for layer in &document.root.children {
// Search all layers (root + inside movie clips) for a video clip instance with matching clip_id
let all_layers = document.all_layers();
for layer in &all_layers {
if let AnyLayer::Video(video_layer) = layer {
for instance in &video_layer.clip_instances {
if instance.clip_id == video_clip_id {
@ -3491,6 +3654,7 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(audio_action), &mut backend_context) {
@ -3855,10 +4019,8 @@ impl eframe::App for EditorApp {
let clip_instance = ClipInstance::new(doc_clip_id)
.with_timeline_start(self.recording_start_time);
// Add instance to layer
if let Some(layer) = self.action_executor.document_mut().root.children.iter_mut()
.find(|l| l.id() == layer_id)
{
// Add instance to layer (works for root and inside movie clips)
if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.push(clip_instance);
println!("✅ Created recording clip instance on layer {}", layer_id);
@ -3880,8 +4042,7 @@ impl eframe::App for EditorApp {
// First, find the clip_id from the layer (read-only borrow)
let clip_id = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
@ -3946,8 +4107,7 @@ impl eframe::App for EditorApp {
// First, find the clip instance and clip id
let (clip_id, instance_id, timeline_start, trim_start) = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|instance| {
@ -4025,8 +4185,7 @@ impl eframe::App for EditorApp {
if let Some(layer_id) = self.recording_layer_id {
let doc_clip_id = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
@ -4085,8 +4244,7 @@ impl eframe::App for EditorApp {
if let Some(layer_id) = self.recording_layer_id {
let doc_clip_id = {
let document = self.action_executor.document();
document.root.children.iter()
.find(|l| l.id() == layer_id)
document.get_layer(&layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.last().map(|i| i.clip_id)
@ -4429,6 +4587,10 @@ impl eframe::App for EditorApp {
// Menu actions queued by pane context menus
let mut pending_menu_actions: Vec<MenuAction> = Vec::new();
// Editing context navigation requests from stage pane
let mut pending_enter_clip: Option<(Uuid, Uuid, Uuid)> = None;
let mut pending_exit_clip = false;
// Queue for effect thumbnail requests (collected during rendering)
let mut effect_thumbnail_requests: Vec<Uuid> = Vec::new();
// Empty cache fallback if generator not initialized
@ -4440,6 +4602,7 @@ impl eframe::App for EditorApp {
{
let time = self.playback_time;
let document = self.action_executor.document_mut();
// Bake animation transforms for root layers
for layer in document.root.children.iter_mut() {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
for ci in &mut vl.clip_instances {
@ -4451,6 +4614,20 @@ impl eframe::App for EditorApp {
}
}
}
// Bake animation transforms for layers inside movie clips
for clip in document.vector_clips.values_mut() {
for layer_node in clip.layers.roots.iter_mut() {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = &mut layer_node.data {
for ci in &mut vl.clip_instances {
let (t, opacity) = vl.layer.animation_data.eval_clip_instance_transform(
ci.id, time, &ci.transform, ci.opacity,
);
ci.transform = t;
ci.opacity = opacity;
}
}
}
}
}
// Create render context
@ -4468,6 +4645,11 @@ impl eframe::App for EditorApp {
theme: &self.theme,
action_executor: &mut self.action_executor,
selection: &mut self.selection,
editing_clip_id: self.editing_context.current_clip_id(),
editing_instance_id: self.editing_context.current_instance_id(),
editing_parent_layer_id: self.editing_context.current_parent_layer_id(),
pending_enter_clip: &mut pending_enter_clip,
pending_exit_clip: &mut pending_exit_clip,
active_layer_id: &mut self.active_layer_id,
tool_state: &mut self.tool_state,
pending_actions: &mut pending_actions,
@ -4559,6 +4741,7 @@ impl eframe::App for EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
// Execute action with backend synchronization
@ -4576,6 +4759,34 @@ impl eframe::App for EditorApp {
self.handle_menu_action(action);
}
// Process editing context navigation (enter/exit movie clips)
if let Some((clip_id, instance_id, parent_layer_id)) = pending_enter_clip {
let entry = EditingContextEntry {
clip_id,
instance_id,
parent_layer_id,
saved_playback_time: self.playback_time,
saved_active_layer_id: self.active_layer_id,
};
self.editing_context.push(entry);
self.selection.clear();
// Set active layer to the clip's first layer
let first_layer_id = self.action_executor.document()
.get_vector_clip(&clip_id)
.and_then(|clip| clip.layers.roots.first())
.map(|node| node.data.id());
self.active_layer_id = first_layer_id;
// Reset playback time to 0 when entering a clip
self.playback_time = 0.0;
}
if pending_exit_clip {
if let Some(entry) = self.editing_context.pop() {
self.selection.clear();
self.active_layer_id = entry.saved_active_layer_id;
self.playback_time = entry.saved_playback_time;
}
}
// Set cursor based on hover state
if let Some((_, is_horizontal)) = self.hovered_divider {
if is_horizontal {
@ -4735,6 +4946,11 @@ struct RenderContext<'a> {
theme: &'a Theme,
action_executor: &'a mut lightningbeam_core::action::ActionExecutor,
selection: &'a mut lightningbeam_core::selection::Selection,
editing_clip_id: Option<Uuid>,
editing_instance_id: Option<Uuid>,
editing_parent_layer_id: Option<Uuid>,
pending_enter_clip: &'a mut Option<(Uuid, Uuid, Uuid)>,
pending_exit_clip: &'a mut bool,
active_layer_id: &'a mut Option<Uuid>,
tool_state: &'a mut lightningbeam_core::tool::ToolState,
pending_actions: &'a mut Vec<Box<dyn lightningbeam_core::action::Action>>,
@ -5272,6 +5488,11 @@ fn render_pane(
project_generation: ctx.project_generation,
script_to_edit: ctx.script_to_edit,
script_saved: ctx.script_saved,
editing_clip_id: ctx.editing_clip_id,
editing_instance_id: ctx.editing_instance_id,
editing_parent_layer_id: ctx.editing_parent_layer_id,
pending_enter_clip: ctx.pending_enter_clip,
pending_exit_clip: ctx.pending_exit_clip,
};
pane_instance.render_header(&mut header_ui, &mut shared);
}
@ -5345,6 +5566,11 @@ fn render_pane(
project_generation: ctx.project_generation,
script_to_edit: ctx.script_to_edit,
script_saved: ctx.script_saved,
editing_clip_id: ctx.editing_clip_id,
editing_instance_id: ctx.editing_instance_id,
editing_parent_layer_id: ctx.editing_parent_layer_id,
pending_enter_clip: ctx.pending_enter_clip,
pending_exit_clip: ctx.pending_exit_clip,
};
// Render pane content (header was already rendered above)

View File

@ -163,6 +163,7 @@ pub enum MenuAction {
// Modify menu
Group,
ConvertToMovieClip,
SendToBack,
BringToFront,
SplitClip,
@ -259,6 +260,7 @@ impl MenuItemDef {
// Modify menu items
const GROUP: Self = Self { label: "Group", action: MenuAction::Group, shortcut: Some(Shortcut::new(ShortcutKey::G, CTRL, NO_SHIFT, NO_ALT)) };
const CONVERT_TO_MOVIE_CLIP: Self = Self { label: "Convert to Movie Clip", action: MenuAction::ConvertToMovieClip, shortcut: None };
const SEND_TO_BACK: Self = Self { label: "Send to back", action: MenuAction::SendToBack, shortcut: None };
const BRING_TO_FRONT: Self = Self { label: "Bring to front", action: MenuAction::BringToFront, shortcut: None };
const SPLIT_CLIP: Self = Self { label: "Split Clip", action: MenuAction::SplitClip, shortcut: Some(Shortcut::new(ShortcutKey::K, CTRL, NO_SHIFT, NO_ALT)) };
@ -369,6 +371,7 @@ impl MenuItemDef {
label: "Modify",
children: &[
MenuDef::Item(&Self::GROUP),
MenuDef::Item(&Self::CONVERT_TO_MOVIE_CLIP),
MenuDef::Separator,
MenuDef::Item(&Self::SEND_TO_BACK),
MenuDef::Item(&Self::BRING_TO_FRONT),

View File

@ -1310,8 +1310,8 @@ impl AssetLibraryPane {
/// Check if an asset is currently in use (has clip instances on layers)
fn is_asset_in_use(document: &Document, asset_id: Uuid, category: AssetCategory) -> bool {
// Check all layers for clip instances referencing this asset
for layer in &document.root.children {
// Check all layers (root + inside movie clips) for clip instances referencing this asset
for layer in document.all_layers() {
match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => {
if category == AssetCategory::Vector {

View File

@ -154,6 +154,16 @@ pub struct SharedPaneState<'a> {
pub action_executor: &'a mut lightningbeam_core::action::ActionExecutor,
/// Current selection state (mutable for tools to modify)
pub selection: &'a mut lightningbeam_core::selection::Selection,
/// Which VectorClip is being edited (None = document root)
pub editing_clip_id: Option<uuid::Uuid>,
/// The clip instance ID being edited
pub editing_instance_id: Option<uuid::Uuid>,
/// The parent layer ID containing the clip instance being edited
pub editing_parent_layer_id: Option<uuid::Uuid>,
/// Request to enter a movie clip for editing: (clip_id, instance_id, parent_layer_id)
pub pending_enter_clip: &'a mut Option<(uuid::Uuid, uuid::Uuid, uuid::Uuid)>,
/// Request to exit the current movie clip
pub pending_exit_clip: &'a mut bool,
/// Currently active layer ID
pub active_layer_id: &'a mut Option<uuid::Uuid>,
/// Current tool interaction state (mutable for tools to modify)

View File

@ -636,10 +636,12 @@ impl NodeGraphPane {
let mut controller = audio_controller.lock().unwrap();
// Node graph actions don't use clip instances, so we use an empty map
let mut empty_clip_map = std::collections::HashMap::new();
let empty_metatrack_map = std::collections::HashMap::new();
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut *controller),
layer_to_track_map: shared.layer_to_track_map,
clip_instance_to_backend_map: &mut empty_clip_map,
clip_to_metatrack_map: &empty_metatrack_map,
};
if let Err(e) = shared.action_executor.execute_with_backend(action, &mut backend_context) {
@ -797,7 +799,7 @@ impl NodeGraphPane {
if let Some(path) = rfd::FileDialog::new().pick_folder() {
match crate::sample_import::scan_folder(&path) {
Ok(samples) => {
let scan_result = crate::sample_import::build_import_layers(samples, &path);
let scan_result = crate::sample_import::build_import_layers(samples);
let track_id = backend_track_id;
let dialog = crate::sample_import_dialog::SampleImportDialog::new(
path, scan_result, track_id, backend_node_id, node_id,

View File

@ -380,6 +380,12 @@ struct VelloRenderContext {
shape_editing_cache: Option<ShapeEditingCache>,
/// Surface format for blit pipelines
target_format: wgpu::TextureFormat,
/// Which VectorClip is being edited (None = document root)
editing_clip_id: Option<uuid::Uuid>,
/// The clip instance ID being edited (for skip + re-render)
editing_instance_id: Option<uuid::Uuid>,
/// The parent layer ID containing the clip instance being edited
editing_parent_layer_id: Option<uuid::Uuid>,
}
/// Callback for Vello rendering within egui
@ -436,6 +442,23 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let camera_transform = Affine::translate((self.ctx.pan_offset.x as f64, self.ctx.pan_offset.y as f64))
* Affine::scale(self.ctx.zoom as f64);
// Overlay transform: camera + clip instance transform (for rendering overlays in clip-local space)
let overlay_transform = if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
let clip_affine = self.ctx.document.get_layer(&parent_layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
vl.clip_instances.iter().find(|ci| ci.id == instance_id)
} else {
None
}
})
.map(|ci| ci.transform.to_affine())
.unwrap_or(Affine::IDENTITY);
camera_transform * clip_affine
} else {
camera_transform
};
// Choose rendering path based on HDR compositing flag
let mut scene = if USE_HDR_COMPOSITING {
// HDR Compositing Pipeline: render each layer separately for proper opacity
@ -448,12 +471,19 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
// When editing inside a clip, skip the clip instance in the main pass
// (it will be re-rendered on top after the dim overlay)
let editing_skip_id = self.ctx.editing_clip_id.as_ref().and_then(|_| {
self.ctx.editing_instance_id
});
let effective_skip = skip_instance_id.or(editing_skip_id);
let composite_result = lightningbeam_core::renderer::render_document_for_compositing(
&self.ctx.document,
camera_transform,
&mut image_cache,
&shared.video_manager,
skip_instance_id,
effective_skip,
);
drop(image_cache);
@ -677,6 +707,89 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
drop(effect_processor);
// When editing inside a clip: dim overlay + re-render the clip at full opacity
if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
// 1. Render dim overlay scene
let mut dim_scene = vello::Scene::new();
let doc_rect = vello::kurbo::Rect::new(0.0, 0.0, self.ctx.document.width, self.ctx.document.height);
dim_scene.fill(
vello::peniko::Fill::NonZero,
camera_transform,
vello::peniko::Color::new([0.0, 0.0, 0.0, 0.5]),
None,
&doc_rect,
);
// Composite dim overlay onto HDR texture
let dim_srgb_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, lightningbeam_core::gpu::BufferFormat::Rgba8Srgb));
let dim_hdr_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, BufferFormat::Rgba16Float));
if let (Some(dim_srgb_view), Some(dim_hdr_view), Some(hdr_view)) = (
buffer_pool.get_view(dim_srgb_handle),
buffer_pool.get_view(dim_hdr_handle),
&instance_resources.hdr_texture_view,
) {
let dim_params = vello::RenderParams {
base_color: vello::peniko::Color::TRANSPARENT,
width, height,
antialiasing_method: vello::AaConfig::Msaa16,
};
if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &dim_scene, dim_srgb_view, &dim_params).ok();
}
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("dim_srgb_to_linear") });
shared.srgb_to_linear.convert(device, &mut enc, dim_srgb_view, dim_hdr_view);
queue.submit(Some(enc.finish()));
let dim_layer = lightningbeam_core::gpu::CompositorLayer::normal(dim_hdr_handle, 1.0);
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("dim_composite") });
shared.compositor.composite(device, queue, &mut enc, &[dim_layer], &buffer_pool, hdr_view, None);
queue.submit(Some(enc.finish()));
}
buffer_pool.release(dim_srgb_handle);
buffer_pool.release(dim_hdr_handle);
// 2. Re-render the clip instance at full opacity
let mut clip_scene = vello::Scene::new();
let mut image_cache = shared.image_cache.lock().unwrap();
lightningbeam_core::renderer::render_single_clip_instance(
&self.ctx.document,
&mut clip_scene,
camera_transform,
&parent_layer_id,
&instance_id,
&mut image_cache,
&shared.video_manager,
);
drop(image_cache);
let clip_srgb_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, lightningbeam_core::gpu::BufferFormat::Rgba8Srgb));
let clip_hdr_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, BufferFormat::Rgba16Float));
if let (Some(clip_srgb_view), Some(clip_hdr_view), Some(hdr_view)) = (
buffer_pool.get_view(clip_srgb_handle),
buffer_pool.get_view(clip_hdr_handle),
&instance_resources.hdr_texture_view,
) {
let clip_params = vello::RenderParams {
base_color: vello::peniko::Color::TRANSPARENT,
width, height,
antialiasing_method: vello::AaConfig::Msaa16,
};
if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &clip_scene, clip_srgb_view, &clip_params).ok();
}
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("clip_srgb_to_linear") });
shared.srgb_to_linear.convert(device, &mut enc, clip_srgb_view, clip_hdr_view);
queue.submit(Some(enc.finish()));
let clip_layer = lightningbeam_core::gpu::CompositorLayer::normal(clip_hdr_handle, 1.0);
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("clip_composite") });
shared.compositor.composite(device, queue, &mut enc, &[clip_layer], &buffer_pool, hdr_view, None);
queue.submit(Some(enc.finish()));
}
buffer_pool.release(clip_srgb_handle);
buffer_pool.release(clip_hdr_handle);
}
// Advance frame counter for buffer cleanup
buffer_pool.next_frame();
drop(buffer_pool);
@ -692,14 +805,43 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
let editing_skip_id = self.ctx.editing_clip_id.as_ref().and_then(|_| {
self.ctx.editing_instance_id
});
let effective_skip = skip_instance_id.or(editing_skip_id);
lightningbeam_core::renderer::render_document_with_transform(
&self.ctx.document,
&mut scene,
camera_transform,
&mut image_cache,
&shared.video_manager,
skip_instance_id,
effective_skip,
);
// When editing inside a clip: dim overlay + re-render the clip at full opacity
if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
// Semi-transparent dim overlay
let doc_rect = vello::kurbo::Rect::new(0.0, 0.0, self.ctx.document.width, self.ctx.document.height);
scene.fill(
vello::peniko::Fill::NonZero,
camera_transform,
vello::peniko::Color::new([0.0, 0.0, 0.0, 0.5]),
None,
&doc_rect,
);
// Re-render the clip instance on top
lightningbeam_core::renderer::render_single_clip_instance(
&self.ctx.document,
&mut scene,
camera_transform,
&parent_layer_id,
&instance_id,
&mut image_cache,
&shared.video_manager,
);
}
drop(image_cache);
scene
};
@ -751,7 +893,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
* Affine::rotate(shape.transform.rotation.to_radians())
* Affine::scale_non_uniform(shape.transform.scale_x, shape.transform.scale_y)
* skew_transform;
let combined_transform = camera_transform * object_transform;
let combined_transform = overlay_transform * object_transform;
// Render shape with semi-transparent fill (light blue, 40% opacity)
let alpha_color = Color::from_rgba8(100, 150, 255, 100);
@ -772,7 +914,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
use vello::kurbo::Stroke;
let clip_transform = Affine::translate((new_x, new_y));
let combined_transform = camera_transform * clip_transform;
let combined_transform = overlay_transform * clip_transform;
// Calculate clip bounds for preview
let clip_time = ((self.ctx.playback_time - clip_inst.timeline_start) * clip_inst.playback_speed) + clip_inst.trim_start;
@ -822,7 +964,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Apply object transform and camera transform
let object_transform = Affine::translate((shape.transform.x, shape.transform.y));
let combined_transform = camera_transform * object_transform;
let combined_transform = overlay_transform * object_transform;
// Create selection rectangle
let selection_rect = KurboRect::new(bbox.x0, bbox.y0, bbox.x1, bbox.y1);
@ -868,9 +1010,15 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
}
// Also draw selection outlines for clip instances
let _clip_instance_count = self.ctx.selection.clip_instances().len();
for &clip_id in self.ctx.selection.clip_instances() {
if let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| ci.id == clip_id) {
// Skip clip instances not active at current time
let clip_dur = self.ctx.document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_dur);
if self.ctx.playback_time < clip_instance.timeline_start || self.ctx.playback_time >= instance_end {
continue;
}
// Calculate clip-local time
let clip_time = ((self.ctx.playback_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
@ -886,7 +1034,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Apply clip instance transform and camera transform
let clip_transform = clip_instance.transform.to_affine();
let combined_transform = camera_transform * clip_transform;
let combined_transform = overlay_transform * clip_transform;
// Draw selection outline with different color for clip instances
let clip_selection_color = Color::from_rgb8(255, 120, 0); // Orange
@ -943,7 +1091,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let marquee_fill = Color::from_rgba8(0, 120, 255, 100);
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
marquee_fill,
None,
&marquee_rect,
@ -952,7 +1100,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Border stroke
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
selection_color,
None,
&marquee_rect,
@ -1006,7 +1154,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
if width > 0.0 && height > 0.0 {
let rect = KurboRect::new(0.0, 0.0, width, height);
let preview_transform = camera_transform * Affine::translate((position.x, position.y));
let preview_transform = overlay_transform * Affine::translate((position.x, position.y));
if self.ctx.fill_enabled {
let fill_color = Color::from_rgba8(
@ -1079,7 +1227,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
};
if rx > 0.0 && ry > 0.0 {
let preview_transform = camera_transform * Affine::translate((position.x, position.y));
let preview_transform = overlay_transform * Affine::translate((position.x, position.y));
let fill_color = Color::from_rgba8(
self.ctx.fill_color.r(),
@ -1132,7 +1280,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let line = Line::new(*start_point, *current_point);
scene.stroke(
&Stroke::new(2.0),
camera_transform,
overlay_transform,
stroke_color,
None,
&line,
@ -1151,7 +1299,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let radius = (dx * dx + dy * dy).sqrt();
if radius > 5.0 && num_sides >= 3 {
let preview_transform = camera_transform * Affine::translate((center.x, center.y));
let preview_transform = overlay_transform * Affine::translate((center.x, center.y));
// Use actual fill color (same as final shape)
let fill_color = Color::from_rgba8(
@ -1229,7 +1377,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
);
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
fill_color,
None,
&preview_path,
@ -1245,7 +1393,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke(
&Stroke::new(self.ctx.stroke_width),
camera_transform,
overlay_transform,
stroke_color,
None,
&preview_path,
@ -1261,10 +1409,10 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let preview_path = rebuild_bezpath(&cache.editable_data);
// Get the layer first, then the shape from the layer
if let Some(layer) = (*self.ctx.document).root.get_child(&cache.layer_id) {
if let Some(layer) = (*self.ctx.document).get_layer(&cache.layer_id) {
if let lightningbeam_core::layer::AnyLayer::Vector(vector_layer) = layer {
if let Some(shape) = vector_layer.get_shape_in_keyframe(&cache.shape_id, self.ctx.playback_time) {
let transform = camera_transform * cache.local_to_world;
let transform = overlay_transform * cache.local_to_world;
// Render fill with FULL OPACITY (same as original)
if let Some(fill_color) = &shape.fill_color {
@ -1389,7 +1537,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke(
&Stroke::new(stroke_width),
camera_transform,
overlay_transform,
handle_color,
None,
&bbox_path,
@ -1407,7 +1555,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
handle_color,
None,
&handle_rect,
@ -1416,7 +1564,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&handle_rect,
@ -1437,7 +1585,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
handle_color,
None,
&edge_circle,
@ -1446,7 +1594,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&edge_circle,
@ -1471,7 +1619,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill with different color (green)
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
Color::from_rgb8(50, 200, 50),
None,
&rotation_circle,
@ -1480,7 +1628,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&rotation_circle,
@ -1496,7 +1644,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(50, 200, 50),
None,
&line_path,
@ -1526,7 +1674,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let handle_color = Color::from_rgb8(0, 120, 255);
let rotation_handle_offset = 20.0 / self.ctx.zoom.max(0.5) as f64;
scene.stroke(&Stroke::new(stroke_width), camera_transform, handle_color, None, &bbox);
scene.stroke(&Stroke::new(stroke_width), overlay_transform, handle_color, None, &bbox);
let corners = [
vello::kurbo::Point::new(bbox.x0, bbox.y0),
@ -1540,8 +1688,8 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
corner.x - handle_size / 2.0, corner.y - handle_size / 2.0,
corner.x + handle_size / 2.0, corner.y + handle_size / 2.0,
);
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &handle_rect);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &handle_rect);
scene.fill(Fill::NonZero, overlay_transform, handle_color, None, &handle_rect);
scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &handle_rect);
}
let edges = [
@ -1553,14 +1701,14 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
for edge in &edges {
let edge_circle = Circle::new(*edge, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &edge_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &edge_circle);
scene.fill(Fill::NonZero, overlay_transform, handle_color, None, &edge_circle);
scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &edge_circle);
}
let rotation_handle_pos = vello::kurbo::Point::new(bbox.center().x, bbox.y0 - rotation_handle_offset);
let rotation_circle = Circle::new(rotation_handle_pos, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, Color::from_rgb8(50, 200, 50), None, &rotation_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &rotation_circle);
scene.fill(Fill::NonZero, overlay_transform, Color::from_rgb8(50, 200, 50), None, &rotation_circle);
scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &rotation_circle);
let line_path = {
let mut path = vello::kurbo::BezPath::new();
@ -1568,7 +1716,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
path.line_to(vello::kurbo::Point::new(bbox.center().x, bbox.y0));
path
};
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(50, 200, 50), None, &line_path);
scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(50, 200, 50), None, &line_path);
}
}
}
@ -1660,7 +1808,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke(
&Stroke::new(stroke_width),
camera_transform,
overlay_transform,
handle_color,
None,
&bbox_path,
@ -1678,7 +1826,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
handle_color,
None,
&handle_rect,
@ -1687,7 +1835,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&handle_rect,
@ -1708,7 +1856,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
handle_color,
None,
&edge_circle,
@ -1717,7 +1865,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&edge_circle,
@ -1740,7 +1888,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill with different color (green)
scene.fill(
Fill::NonZero,
camera_transform,
overlay_transform,
Color::from_rgb8(50, 200, 50),
None,
&rotation_circle,
@ -1749,7 +1897,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(255, 255, 255),
None,
&rotation_circle,
@ -1765,7 +1913,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke(
&Stroke::new(1.0),
camera_transform,
overlay_transform,
Color::from_rgb8(50, 200, 50),
None,
&line_path,
@ -2075,6 +2223,50 @@ impl StagePane {
}
}
/// Convert a document-space position to clip-local coordinates when editing inside a clip.
/// Returns the position unchanged when at root level.
fn doc_to_clip_local(&self, doc_pos: egui::Vec2, shared: &SharedPaneState) -> egui::Vec2 {
if let (Some(parent_layer_id), Some(instance_id)) = (shared.editing_parent_layer_id, shared.editing_instance_id) {
let document = shared.action_executor.document();
let clip_affine = document.get_layer(&parent_layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
vl.clip_instances.iter().find(|ci| ci.id == instance_id)
} else {
None
}
})
.map(|ci| ci.transform.to_affine())
.unwrap_or(vello::kurbo::Affine::IDENTITY);
let inv = clip_affine.inverse();
let p = inv * vello::kurbo::Point::new(doc_pos.x as f64, doc_pos.y as f64);
egui::vec2(p.x as f32, p.y as f32)
} else {
doc_pos
}
}
/// Convert a clip-local position back to document-space coordinates.
/// Returns the position unchanged when at root level.
fn clip_local_to_doc(&self, local_pos: vello::kurbo::Point, shared: &SharedPaneState) -> vello::kurbo::Point {
if let (Some(parent_layer_id), Some(instance_id)) = (shared.editing_parent_layer_id, shared.editing_instance_id) {
let document = shared.action_executor.document();
let clip_affine = document.get_layer(&parent_layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
vl.clip_instances.iter().find(|ci| ci.id == instance_id)
} else {
None
}
})
.map(|ci| ci.transform.to_affine())
.unwrap_or(vello::kurbo::Affine::IDENTITY);
clip_affine * local_pos
} else {
local_pos
}
}
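    /// A hedged round-trip sketch (not from the commits): the two helpers above are inverses
    /// of each other via the clip instance's affine. The transform below is hypothetical.
    #[allow(dead_code)]
    fn doc_clip_local_round_trip_sketch() {
        use vello::kurbo::{Affine, Point};
        // Pretend the edited clip instance is scaled 2x and placed at (100, 50) on the stage.
        let clip_affine = Affine::translate((100.0, 50.0)) * Affine::scale(2.0);
        let doc_pos = Point::new(140.0, 90.0);
        let local = clip_affine.inverse() * doc_pos; // doc_to_clip_local: (20.0, 20.0)
        let back = clip_affine * local;              // clip_local_to_doc: (140.0, 90.0)
        assert!((back - doc_pos).hypot() < 1e-9);
    }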
/// Execute a view action with the given parameters
/// Called from main.rs after determining this is the best handler
pub fn execute_view_action(&mut self, action: &crate::menu::MenuAction, zoom_center: egui::Vec2) {
@ -2185,6 +2377,41 @@ impl StagePane {
let point = Point::new(world_pos.x as f64, world_pos.y as f64);
// Double-click: enter/exit movie clip editing
if response.double_clicked() {
// Hit test clip instances at the click position
let document = shared.action_executor.document();
let clip_hit = hit_test::hit_test_clip_instances(
&vector_layer.clip_instances,
document,
point,
Affine::IDENTITY,
*shared.playback_time,
);
if let Some(instance_id) = clip_hit {
// Find the clip instance to get its clip_id
if let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| ci.id == instance_id) {
// Check if this is a movie clip (not a group)
if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {
if !vector_clip.is_group {
// Enter the movie clip
*shared.pending_enter_clip = Some((
clip_instance.clip_id,
instance_id,
active_layer_id,
));
return;
}
}
}
} else if shared.editing_clip_id.is_some() {
// Double-click on empty space while inside a clip: exit
*shared.pending_exit_clip = true;
return;
}
}
// Mouse down: start interaction (check on initial press, not after drag starts)
// Scope this section to drop vector_layer borrow before drag handling
let mouse_pressed = ui.input(|i| i.pointer.primary_pressed());
@ -5419,7 +5646,8 @@ impl StagePane {
// Get last known mouse position (will be at edge if offscreen)
if let Some(mouse_pos) = ui.input(|i| i.pointer.latest_pos()) {
let mouse_canvas_pos = mouse_pos - rect.min;
let world_pos = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_pos_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_pos = self.doc_to_clip_local(world_pos_doc, shared);
let point = Point::new(world_pos.x as f64, world_pos.y as f64);
let delta = point - start_mouse;
@ -5548,7 +5776,9 @@ impl StagePane {
let mouse_canvas_pos = mouse_pos - rect.min;
// Convert screen position to world position (accounting for pan and zoom)
let world_pos = (mouse_canvas_pos - self.pan_offset) / self.zoom;
// When inside a clip, further transform to clip-local coordinates
let world_pos_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_pos = self.doc_to_clip_local(world_pos_doc, shared);
// Handle tool input (only if not using Alt modifier for panning)
if !alt_held {
@ -5678,18 +5908,22 @@ impl StagePane {
_ => return,
};
// Get mouse position in world coordinates
// Get mouse position in world coordinates (clip-local when inside a clip)
let mouse_screen_pos = ui.input(|i| i.pointer.hover_pos()).unwrap_or(rect.center());
let mouse_canvas_pos = mouse_screen_pos - rect.min;
let mouse_world_pos = Point::new(
((mouse_canvas_pos.x - self.pan_offset.x) / self.zoom) as f64,
((mouse_canvas_pos.y - self.pan_offset.y) / self.zoom) as f64,
let mouse_doc_pos = egui::vec2(
(mouse_canvas_pos.x - self.pan_offset.x) / self.zoom,
(mouse_canvas_pos.y - self.pan_offset.y) / self.zoom,
);
let mouse_local = self.doc_to_clip_local(mouse_doc_pos, shared);
let mouse_world_pos = Point::new(mouse_local.x as f64, mouse_local.y as f64);
// Helper to convert world coordinates to screen coordinates
// Helper to convert world coordinates (clip-local) to screen coordinates
let world_to_screen = |world_pos: Point| -> egui::Pos2 {
let screen_x = (world_pos.x as f32 * self.zoom) + self.pan_offset.x + rect.min.x;
let screen_y = (world_pos.y as f32 * self.zoom) + self.pan_offset.y + rect.min.y;
// When inside a clip, first transform from clip-local to document space
let doc_pos = self.clip_local_to_doc(world_pos, shared);
let screen_x = (doc_pos.x as f32 * self.zoom) + self.pan_offset.x + rect.min.x;
let screen_y = (doc_pos.y as f32 * self.zoom) + self.pan_offset.y + rect.min.y;
egui::pos2(screen_x, screen_y)
};
@ -6254,12 +6488,13 @@ impl PaneRenderer for StagePane {
}
}
// Calculate drag delta for preview rendering (world space)
// Calculate drag delta for preview rendering (clip-local space)
let drag_delta = if let lightningbeam_core::tool::ToolState::DraggingSelection { ref start_mouse, .. } = shared.tool_state {
// Get current mouse position in world coordinates
// Get current mouse position in clip-local coordinates (matching start_mouse)
if let Some(mouse_pos) = ui.input(|i| i.pointer.hover_pos()) {
let mouse_canvas_pos = mouse_pos - rect.min;
let world_mouse = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_mouse_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_mouse = self.doc_to_clip_local(world_mouse_doc, shared);
let delta_x = world_mouse.x as f64 - start_mouse.x;
let delta_y = world_mouse.y as f64 - start_mouse.y;
@ -6294,6 +6529,9 @@ impl PaneRenderer for StagePane {
video_manager: shared.video_manager.clone(),
shape_editing_cache: self.shape_editing_cache.clone(),
target_format: shared.target_format,
editing_clip_id: shared.editing_clip_id,
editing_instance_id: shared.editing_instance_id,
editing_parent_layer_id: shared.editing_parent_layer_id,
}};
let cb = egui_wgpu::Callback::new_paint_callback(
@ -6313,6 +6551,63 @@ impl PaneRenderer for StagePane {
egui::Color32::from_gray(200),
);
// Render breadcrumb navigation when inside a movie clip
if shared.editing_clip_id.is_some() {
let document = shared.action_executor.document();
// Build breadcrumb names from the editing context
// We only have the current clip_id, so show "Scene 1 > ClipName"
let clip_name = shared.editing_clip_id
.and_then(|id| document.get_vector_clip(&id))
.map(|c| c.name.clone())
.unwrap_or_else(|| "Unknown".to_string());
let breadcrumb_y = rect.min.y + 30.0;
let breadcrumb_x = rect.min.x + 10.0;
// Background pill
let scene_text = "Scene 1";
let separator = " > ";
let full_text = format!("{}{}{}", scene_text, separator, clip_name);
let font = egui::FontId::proportional(13.0);
let galley = ui.painter().layout_no_wrap(full_text.clone(), font.clone(), egui::Color32::WHITE);
let text_rect = egui::Rect::from_min_size(
egui::pos2(breadcrumb_x, breadcrumb_y),
galley.size() + egui::vec2(16.0, 8.0),
);
ui.painter().rect_filled(
text_rect,
4.0,
egui::Color32::from_rgba_unmultiplied(0, 0, 0, 180),
);
// "Scene 1" as clickable (exit clip)
let scene_galley = ui.painter().layout_no_wrap(
scene_text.to_string(), font.clone(), egui::Color32::from_rgb(120, 180, 255),
);
let scene_rect = egui::Rect::from_min_size(
egui::pos2(breadcrumb_x + 8.0, breadcrumb_y + 4.0),
scene_galley.size(),
);
let scene_response = ui.allocate_rect(scene_rect, egui::Sense::click());
ui.painter().galley(scene_rect.min, scene_galley, egui::Color32::WHITE);
if scene_response.clicked() {
*shared.pending_exit_clip = true;
}
if scene_response.hovered() {
ui.ctx().set_cursor_icon(egui::CursorIcon::PointingHand);
}
// Separator + clip name (not clickable, it's the current level)
let rest_text = format!("{}{}", separator, clip_name);
ui.painter().text(
egui::pos2(scene_rect.max.x, breadcrumb_y + 4.0),
egui::Align2::LEFT_TOP,
rest_text,
font,
egui::Color32::WHITE,
);
}
// Render vector editing overlays (vertices, control points, etc.)
self.render_vector_editing_overlays(ui, rect, shared);

View File

@ -20,18 +20,74 @@ const EDGE_DETECTION_PIXELS: f32 = 8.0; // Distance from edge to detect trim handles
const LOOP_CORNER_SIZE: f32 = 12.0; // Size of loop corner hotzone at top-right of clip
const MIN_CLIP_WIDTH_PX: f32 = 8.0; // Minimum visible width for very short clips (e.g. groups)
/// Calculate vertical bounds for a clip instance within a layer row.
/// For vector layers with multiple clip instances, stacks them vertically.
/// Returns (y_min, y_max) relative to the layer top.
fn clip_instance_y_bounds(
/// Compute stacking row assignments for clip instances on a vector layer.
/// Only clips that overlap in time are stacked; non-overlapping clips share row 0.
/// Returns a Vec of (row, total_rows) for each clip instance.
fn compute_clip_stacking_from_ranges(
ranges: &[(f64, f64)],
) -> Vec<(usize, usize)> {
if ranges.len() <= 1 {
return vec![(0, 1); ranges.len()];
}
// Greedy row assignment: assign each clip to the first row where it doesn't overlap
let mut row_assignments = vec![0usize; ranges.len()];
let mut row_ends: Vec<f64> = Vec::new(); // track the end time of the last clip in each row
// Sort indices by start time for greedy packing
let mut sorted_indices: Vec<usize> = (0..ranges.len()).collect();
sorted_indices.sort_by(|&a, &b| ranges[a].0.partial_cmp(&ranges[b].0).unwrap_or(std::cmp::Ordering::Equal));
for &idx in &sorted_indices {
let (start, end) = ranges[idx];
// Find first row where this clip fits (no overlap)
let mut assigned_row = None;
for (row, row_end) in row_ends.iter_mut().enumerate() {
if start >= *row_end {
*row_end = end;
assigned_row = Some(row);
break;
}
}
if let Some(row) = assigned_row {
row_assignments[idx] = row;
} else {
row_assignments[idx] = row_ends.len();
row_ends.push(end);
}
}
let total_rows = row_ends.len().max(1);
row_assignments.iter().map(|&row| (row, total_rows)).collect()
}
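// Illustrative sketch (not from the commits above): how the greedy first-fit
// packing behaves on a few hand-picked ranges. Assumes
// compute_clip_stacking_from_ranges is in scope; the expected values follow
// directly from the algorithm (clips sorted by start, a new row only on overlap).
#[cfg(test)]
mod stacking_sketch {
    use super::compute_clip_stacking_from_ranges;

    #[test]
    fn overlapping_clips_get_separate_rows() {
        // Clip 1 overlaps clip 0 and is pushed to row 1; clip 2 starts after
        // clip 0 ends and reuses row 0, so two rows are needed in total.
        let ranges = [(0.0, 2.0), (1.0, 3.0), (3.0, 5.0)];
        assert_eq!(
            compute_clip_stacking_from_ranges(&ranges),
            vec![(0, 2), (1, 2), (0, 2)]
        );
    }

    #[test]
    fn back_to_back_clips_share_row_zero() {
        // start >= previous end counts as non-overlapping, so all three clips
        // stay on row 0 and the layer keeps its full height.
        let ranges = [(0.0, 1.0), (1.0, 2.0), (2.0, 3.0)];
        assert_eq!(
            compute_clip_stacking_from_ranges(&ranges),
            vec![(0, 1), (0, 1), (0, 1)]
        );
    }
}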
fn compute_clip_stacking(
document: &lightningbeam_core::document::Document,
layer: &AnyLayer,
clip_index: usize,
clip_count: usize,
) -> (f32, f32) {
if matches!(layer, AnyLayer::Vector(_)) && clip_count > 1 {
clip_instances: &[lightningbeam_core::clip::ClipInstance],
) -> Vec<(usize, usize)> {
if !matches!(layer, AnyLayer::Vector(_)) || clip_instances.len() <= 1 {
return vec![(0, 1); clip_instances.len()];
}
let ranges: Vec<(f64, f64)> = clip_instances.iter().map(|ci| {
let clip_dur = effective_clip_duration(document, layer, ci).unwrap_or(0.0);
let start = ci.effective_start();
let end = start + ci.total_duration(clip_dur);
(start, end)
}).collect();
compute_clip_stacking_from_ranges(&ranges)
}
/// Calculate vertical bounds for a clip instance within a layer row.
/// `row` is the stacking row (0-based), `total_rows` is the total number of rows needed.
/// Returns (y_min, y_max) relative to the layer top.
fn clip_instance_y_bounds(row: usize, total_rows: usize) -> (f32, f32) {
if total_rows > 1 {
let usable_height = LAYER_HEIGHT - 20.0; // 10px padding top/bottom
let row_height = (usable_height / clip_count as f32).min(20.0);
let top = 10.0 + clip_index as f32 * row_height;
let row_height = (usable_height / total_rows as f32).min(20.0);
let top = 10.0 + row as f32 * row_height;
(top, top + row_height - 1.0)
} else {
(10.0, LAYER_HEIGHT - 10.0)
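// Worked example (sketch with an assumed LAYER_HEIGHT of 60.0; the real constant
// is defined elsewhere in this file): with total_rows = 2 the usable height is
// 60 - 20 = 40, row_height = min(40 / 2, 20) = 20, so row 0 spans (10.0, 29.0)
// and row 1 spans (30.0, 49.0) relative to the layer top. With total_rows = 1
// the single-row branch keeps the full (10.0, 50.0) band.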
@ -54,7 +110,8 @@ fn effective_clip_duration(
let end = vl.group_visibility_end(&clip_instance.id, clip_instance.timeline_start, frame_duration);
Some((end - clip_instance.timeline_start).max(0.0))
} else {
Some(vc.duration)
// Movie clips: duration based on all internal content (keyframes + clip instances)
document.get_clip_duration(&clip_instance.clip_id)
}
}
AnyLayer::Audio(_) => document.get_audio_clip(&clip_instance.clip_id).map(|c| c.duration),
@ -130,13 +187,15 @@ fn find_sampled_audio_track_for_clip(
document: &lightningbeam_core::document::Document,
clip_id: uuid::Uuid,
timeline_start: f64,
editing_clip_id: Option<&uuid::Uuid>,
) -> Option<uuid::Uuid> {
// Get the clip duration
let clip_duration = document.get_clip_duration(&clip_id)?;
let clip_end = timeline_start + clip_duration;
// Check each sampled audio layer
for layer in &document.root.children {
let context_layers = document.context_layers(editing_clip_id);
for &layer in &context_layers {
if let AnyLayer::Audio(audio_layer) = layer {
if audio_layer.audio_layer_type == AudioLayerType::Sampled {
// Check if there's any overlap with existing clips on this layer
@ -213,7 +272,8 @@ impl TimelinePane {
// Get layer type (copy it so we can drop the document borrow before mutating)
let layer_type = {
let document = shared.action_executor.document();
let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else {
let context_layers = document.context_layers(shared.editing_clip_id.as_ref());
let Some(layer) = context_layers.iter().copied().find(|l| l.id() == active_layer_id) else {
println!("⚠️ Active layer not found in document");
return;
};
@ -255,9 +315,7 @@ impl TimelinePane {
let clip_instance = ClipInstance::new(doc_clip_id)
.with_timeline_start(start_time);
if let Some(layer) = shared.action_executor.document_mut().root.children.iter_mut()
.find(|l| l.id() == active_layer_id)
{
if let Some(layer) = shared.action_executor.document_mut().get_layer_mut(&active_layer_id) {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
audio_layer.clip_instances.push(clip_instance);
}
@ -295,7 +353,8 @@ impl TimelinePane {
fn stop_recording(&mut self, shared: &mut SharedPaneState) {
// Determine if this is MIDI or audio recording by checking the layer type
let is_midi_recording = if let Some(layer_id) = *shared.recording_layer_id {
shared.action_executor.document().root.children.iter()
let context_layers = shared.action_executor.document().context_layers(shared.editing_clip_id.as_ref());
context_layers.iter().copied()
.find(|l| l.id() == layer_id)
.map(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
@ -337,8 +396,10 @@ impl TimelinePane {
document: &lightningbeam_core::document::Document,
content_rect: egui::Rect,
header_rect: egui::Rect,
editing_clip_id: Option<&uuid::Uuid>,
) -> Option<(ClipDragType, uuid::Uuid)> {
let layer_count = document.root.children.len();
let context_layers = document.context_layers(editing_clip_id);
let layer_count = context_layers.len();
// Check if pointer is in valid area
if pointer_pos.y < header_rect.min.y {
@ -355,8 +416,8 @@ impl TimelinePane {
return None;
}
let layers: Vec<_> = document.root.children.iter().rev().collect();
let layer = layers.get(hovered_layer_index)?;
let rev_layers: Vec<&lightningbeam_core::layer::AnyLayer> = context_layers.iter().rev().copied().collect();
let layer = rev_layers.get(hovered_layer_index)?;
let _layer_data = layer.layer();
let clip_instances = match layer {
@ -367,7 +428,7 @@ impl TimelinePane {
};
// Check each clip instance
let clip_count = clip_instances.len();
let stacking = compute_clip_stacking(document, layer, clip_instances);
for (ci_idx, clip_instance) in clip_instances.iter().enumerate() {
let clip_duration = effective_clip_duration(document, layer, clip_instance)?;
@ -382,7 +443,8 @@ impl TimelinePane {
if mouse_x >= start_x && mouse_x <= end_x {
// Check vertical bounds for stacked vector layer clips
let layer_top = header_rect.min.y + (hovered_layer_index as f32 * LAYER_HEIGHT) - self.viewport_scroll_y;
let (cy_min, cy_max) = clip_instance_y_bounds(layer, ci_idx, clip_count);
let (row, total_rows) = stacking[ci_idx];
let (cy_min, cy_max) = clip_instance_y_bounds(row, total_rows);
let mouse_rel_y = pointer_pos.y - layer_top;
if mouse_rel_y < cy_min || mouse_rel_y > cy_max {
continue;
@ -711,7 +773,8 @@ impl TimelinePane {
theme: &crate::theme::Theme,
active_layer_id: &Option<uuid::Uuid>,
pending_actions: &mut Vec<Box<dyn lightningbeam_core::action::Action>>,
document: &lightningbeam_core::document::Document,
_document: &lightningbeam_core::document::Document,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
) {
// Background for header column
let header_style = theme.style(".timeline-header", ui.ctx());
@ -734,7 +797,8 @@ impl TimelinePane {
let secondary_text_color = egui::Color32::from_gray(150);
// Draw layer headers from document (reversed so newest layers appear on top)
for (i, layer) in document.root.children.iter().rev().enumerate() {
for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area
@ -993,6 +1057,7 @@ impl TimelinePane {
waveform_gpu_dirty: &mut std::collections::HashSet<usize>,
target_format: wgpu::TextureFormat,
waveform_stereo: bool,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
) -> Vec<(egui::Rect, uuid::Uuid, f64, f64)> {
let painter = ui.painter();
@ -1014,7 +1079,8 @@ impl TimelinePane {
}
// Draw layer rows from document (reversed so newest layers appear on top)
for (i, layer) in document.root.children.iter().rev().enumerate() {
for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area
@ -1085,7 +1151,71 @@ impl TimelinePane {
None
};
let clip_instance_count = clip_instances.len();
// Compute stacking using preview positions (with drag offsets) for vector layers
let clip_stacking = if matches!(layer, AnyLayer::Vector(_)) && clip_instances.len() > 1 {
let preview_ranges: Vec<(f64, f64)> = clip_instances.iter().map(|ci| {
let clip_dur = effective_clip_duration(document, layer, ci).unwrap_or(0.0);
let mut start = ci.effective_start();
let mut duration = ci.total_duration(clip_dur);
let is_selected = selection.contains_clip_instance(&ci.id);
let is_linked = if self.clip_drag_state.is_some() {
instance_to_group.get(&ci.id).map_or(false, |group| {
group.members.iter().any(|(_, mid)| *mid != ci.id && selection.contains_clip_instance(mid))
})
} else {
false
};
if let Some(drag_type) = self.clip_drag_state {
if is_selected || is_linked {
match drag_type {
ClipDragType::Move => {
if let Some(offset) = group_move_offset {
start = (ci.effective_start() + offset).max(0.0);
}
}
ClipDragType::TrimLeft => {
let new_trim = (ci.trim_start + self.drag_offset).max(0.0).min(clip_dur);
let offset = new_trim - ci.trim_start;
start = (ci.timeline_start + offset).max(0.0);
duration = (clip_dur - new_trim).max(0.0);
if let Some(trim_end) = ci.trim_end {
duration = (trim_end - new_trim).max(0.0);
}
}
ClipDragType::TrimRight => {
let old_trim_end = ci.trim_end.unwrap_or(clip_dur);
let new_trim_end = (old_trim_end + self.drag_offset).max(ci.trim_start).min(clip_dur);
duration = (new_trim_end - ci.trim_start).max(0.0);
}
ClipDragType::LoopExtendRight => {
let trim_end = ci.trim_end.unwrap_or(clip_dur);
let content_window = (trim_end - ci.trim_start).max(0.0);
let current_right = ci.timeline_duration.unwrap_or(content_window);
let new_right = (current_right + self.drag_offset).max(content_window);
let loop_before = ci.loop_before.unwrap_or(0.0);
duration = loop_before + new_right;
}
ClipDragType::LoopExtendLeft => {
let trim_end = ci.trim_end.unwrap_or(clip_dur);
let content_window = (trim_end - ci.trim_start).max(0.001);
let current_loop_before = ci.loop_before.unwrap_or(0.0);
let desired = (current_loop_before - self.drag_offset).max(0.0);
let snapped = (desired / content_window).round() * content_window;
start = ci.timeline_start - snapped;
duration = snapped + ci.effective_duration(clip_dur);
}
}
}
}
(start, start + duration)
}).collect();
compute_clip_stacking_from_ranges(&preview_ranges)
} else {
compute_clip_stacking(document, layer, clip_instances)
};
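// Worked example of the LoopExtendLeft snapping above (sketch, hypothetical
// numbers): with trim_start = 0.0 and trim_end = Some(2.0) the content window
// is 2.0 s. A drag that requests desired = 3.1 s of pre-loop content snaps to
// round(3.1 / 2.0) * 2.0 = 4.0, so the preview start moves 4.0 s earlier and
// the preview duration grows by the same two whole loop iterations.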
for (clip_instance_index, clip_instance) in clip_instances.iter().enumerate() {
// Get the clip to determine duration
let clip_duration = effective_clip_duration(document, layer, clip_instance);
@ -1303,7 +1433,8 @@ impl TimelinePane {
),
};
let (cy_min, cy_max) = clip_instance_y_bounds(layer, clip_instance_index, clip_instance_count);
let (row, total_rows) = clip_stacking[clip_instance_index];
let (cy_min, cy_max) = clip_instance_y_bounds(row, total_rows);
let clip_rect = egui::Rect::from_min_max(
egui::pos2(rect.min.x + visible_start_x, y + cy_min),
@ -1719,6 +1850,8 @@ impl TimelinePane {
playback_time: &mut f64,
_is_playing: &mut bool,
audio_controller: Option<&std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
editing_clip_id: Option<&uuid::Uuid>,
) {
// Don't allocate the header area for input - let widgets handle it directly
// Only allocate content area (ruler + layers) with click and drag
@ -1761,7 +1894,7 @@ impl TimelinePane {
let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect();
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
let _layer_data = layer.layer();
@ -1774,7 +1907,7 @@ impl TimelinePane {
};
// Check if click is within any clip instance
let click_clip_count = clip_instances.len();
let click_stacking = compute_clip_stacking(document, layer, clip_instances);
let click_layer_top = pos.y - (relative_y % LAYER_HEIGHT);
for (ci_idx, clip_instance) in clip_instances.iter().enumerate() {
let clip_duration = effective_clip_duration(document, layer, clip_instance);
@ -1788,7 +1921,8 @@ impl TimelinePane {
let ci_start_x = self.time_to_x(instance_start);
let ci_end_x = self.time_to_x(instance_end).max(ci_start_x + MIN_CLIP_WIDTH_PX);
let click_x = pos.x - content_rect.min.x;
let (cy_min, cy_max) = clip_instance_y_bounds(layer, ci_idx, click_clip_count);
let (row, total_rows) = click_stacking[ci_idx];
let (cy_min, cy_max) = clip_instance_y_bounds(row, total_rows);
let click_rel_y = pos.y - click_layer_top;
if click_x >= ci_start_x && click_x <= ci_end_x
&& click_rel_y >= cy_min && click_rel_y <= cy_max
@ -1828,7 +1962,7 @@ impl TimelinePane {
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect();
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id());
}
@ -1853,6 +1987,7 @@ impl TimelinePane {
document,
content_rect,
header_rect,
editing_clip_id,
) {
// If this clip is not selected, select it (respecting shift key)
if !selection.contains_clip_instance(&clip_id) {
@ -1886,7 +2021,7 @@ impl TimelinePane {
HashMap::new();
// Iterate through all layers to find selected clip instances
for layer in &document.root.children {
for &layer in context_layers {
let layer_id = layer.id();
// Get clip instances for this layer
@ -1937,7 +2072,7 @@ impl TimelinePane {
> = HashMap::new();
// Iterate through all layers to find selected clip instances
for layer in &document.root.children {
for &layer in context_layers {
let layer_id = layer.id();
let _layer_data = layer.layer();
@ -2078,7 +2213,7 @@ impl TimelinePane {
ClipDragType::LoopExtendRight => {
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for layer in &document.root.children {
for &layer in context_layers {
let layer_id = layer.id();
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
@ -2150,7 +2285,7 @@ impl TimelinePane {
// Extend loop_before (pre-loop region)
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for layer in &document.root.children {
for &layer in context_layers {
let layer_id = layer.id();
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
@ -2242,7 +2377,7 @@ impl TimelinePane {
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect();
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id());
// Clear clip instance selection when clicking on empty layer area
@ -2387,6 +2522,7 @@ impl TimelinePane {
document,
content_rect,
header_rect,
editing_clip_id,
) {
match drag_type {
ClipDragType::TrimLeft | ClipDragType::TrimRight => {
@ -2535,11 +2671,13 @@ impl PaneRenderer for TimelinePane {
// Get document from action executor
let document = shared.action_executor.document();
let layer_count = document.root.children.len();
let editing_clip_id = shared.editing_clip_id;
let context_layers = document.context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
// Calculate project duration from last clip endpoint across all layers
let mut max_endpoint: f64 = 10.0; // Default minimum duration
for layer in &document.root.children {
for &layer in &context_layers {
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
@ -2606,7 +2744,7 @@ impl PaneRenderer for TimelinePane {
// Render layer header column with clipping
ui.set_clip_rect(layer_headers_rect.intersect(original_clip_rect));
self.render_layer_headers(ui, layer_headers_rect, shared.theme, shared.active_layer_id, &mut shared.pending_actions, document);
self.render_layer_headers(ui, layer_headers_rect, shared.theme, shared.active_layer_id, &mut shared.pending_actions, document, &context_layers);
// Render time ruler (clip to ruler rect)
ui.set_clip_rect(ruler_rect.intersect(original_clip_rect));
@ -2614,7 +2752,7 @@ impl PaneRenderer for TimelinePane {
// Render layer rows with clipping
ui.set_clip_rect(content_rect.intersect(original_clip_rect));
let video_clip_hovers = self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.raw_audio_cache, shared.waveform_gpu_dirty, shared.target_format, shared.waveform_stereo);
let video_clip_hovers = self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.raw_audio_cache, shared.waveform_gpu_dirty, shared.target_format, shared.waveform_stereo, &context_layers);
// Render playhead on top (clip to timeline area)
ui.set_clip_rect(timeline_rect.intersect(original_clip_rect));
@ -2638,6 +2776,8 @@ impl PaneRenderer for TimelinePane {
shared.playback_time,
shared.is_playing,
shared.audio_controller,
&context_layers,
editing_clip_id.as_ref(),
);
// Context menu: detect right-click on clips or empty timeline space
@ -2646,7 +2786,7 @@ impl PaneRenderer for TimelinePane {
if secondary_clicked {
if let Some(pos) = ui.input(|i| i.pointer.interact_pos()) {
if content_rect.contains(pos) {
if let Some((_drag_type, clip_id)) = self.detect_clip_at_pointer(pos, document, content_rect, layer_headers_rect) {
if let Some((_drag_type, clip_id)) = self.detect_clip_at_pointer(pos, document, content_rect, layer_headers_rect, editing_clip_id.as_ref()) {
// Right-clicked on a clip
if !shared.selection.contains_clip_instance(&clip_id) {
shared.selection.select_only_clip_instance(clip_id);
@ -2934,7 +3074,7 @@ impl PaneRenderer for TimelinePane {
let hovered_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
let layers: Vec<_> = document.root.children.iter().rev().collect();
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(hovered_layer_index) {
let is_compatible = can_drop_on_layer(layer, dragging.clip_type);
@ -3077,7 +3217,7 @@ impl PaneRenderer for TimelinePane {
// Find or create sampled audio track where the audio won't overlap
let audio_layer_id = {
let doc = shared.action_executor.document();
let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time);
let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time, editing_clip_id.as_ref());
if let Some(id) = result {
eprintln!("DEBUG: Found existing audio track without overlap: {}", id);
} else {
@ -3171,8 +3311,15 @@ impl PaneRenderer for TimelinePane {
let new_layer = super::create_layer_for_clip_type(dragging.clip_type, &layer_name);
let new_layer_id = new_layer.id();
// Add the layer
shared.action_executor.document_mut().root.add_child(new_layer);
// Add the layer to the current editing context
if let Some(clip_id) = shared.editing_clip_id {
if let Some(clip) = shared.action_executor.document_mut().vector_clips.get_mut(&clip_id) {
clip.layers.add_root(new_layer);
}
shared.action_executor.document_mut().layer_to_clip_map.insert(new_layer_id, clip_id);
} else {
shared.action_executor.document_mut().root.add_child(new_layer);
}
// Now add the clip to the new layer
if dragging.clip_type == DragClipType::Effect {

View File

@ -51,17 +51,6 @@ fn parse_note_letter(s: &str) -> Option<(u8, usize)> {
}
}
/// Convert a note name like "C4", "A#3", "Bb2" to a MIDI note number.
pub fn note_name_to_midi(note: &str, octave: i8) -> Option<u8> {
let (semitone, _) = parse_note_letter(note)?;
let midi = (octave as i32 + 1) * 12 + semitone as i32;
if (0..=127).contains(&midi) {
Some(midi as u8)
} else {
None
}
}
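// For reference, the note_name_to_midi helper above and the try_note_octave
// tests further down both follow the convention MIDI = (octave + 1) * 12 +
// semitone: C-1 = 0, C4 = (4 + 1) * 12 + 0 = 60, A4 = 60 + 9 = 69,
// A#3 = 48 + 10 = 58, and G9 = (9 + 1) * 12 + 7 = 127.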
/// Format a MIDI note number as a note name (e.g., 60 → "C4").
pub fn midi_to_note_name(midi: u8) -> String {
const NAMES: [&str; 12] = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"];
@ -214,8 +203,7 @@ fn tokenize(stem: &str) -> Vec<&str> {
}
/// Parse a sample filename to extract note, velocity, round-robin, and loop hint info.
/// `folder_path` is used for loop/articulation context from parent directory names.
pub fn parse_sample_filename(path: &Path, folder_path: &Path) -> ParsedSample {
pub fn parse_sample_filename(path: &Path) -> ParsedSample {
let filename = path.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_default();
@ -390,7 +378,7 @@ pub fn scan_folder(folder_path: &Path) -> std::io::Result<Vec<ParsedSample>> {
collect_audio_files(folder_path, &mut files)?;
let mut samples: Vec<ParsedSample> = files.iter()
.map(|path| parse_sample_filename(path, folder_path))
.map(|path| parse_sample_filename(path))
.collect();
// Percussion pass: for samples with no detected note, try GM drum mapping
@ -467,7 +455,6 @@ pub struct FolderScanResult {
pub loop_mode: LoopMode,
pub velocity_markers: Vec<String>,
pub velocity_ranges: Vec<(String, u8, u8)>,
pub detected_articulation: Option<String>,
}
/// Compute auto key ranges for a sorted list of unique MIDI notes.
@ -533,28 +520,9 @@ fn detect_global_loop_mode(samples: &[ParsedSample]) -> LoopMode {
}
}
/// Detect articulation from folder path.
fn detect_articulation(folder_path: &Path) -> Option<String> {
for component in folder_path.components().rev() {
if let std::path::Component::Normal(name) = component {
let lower = name.to_string_lossy().to_lowercase();
match lower.as_str() {
"sustain" | "vibrato" | "tremolo" | "pizzicato" | "staccato" |
"legato" | "marcato" | "spiccato" | "arco" => {
return Some(name.to_string_lossy().to_string());
}
_ => {}
}
}
}
None
}
/// Build import layers from parsed samples with auto key ranges and velocity mapping.
pub fn build_import_layers(samples: Vec<ParsedSample>, folder_path: &Path) -> FolderScanResult {
pub fn build_import_layers(samples: Vec<ParsedSample>) -> FolderScanResult {
let loop_mode = detect_global_loop_mode(&samples);
let detected_articulation = detect_articulation(folder_path);
// Separate mapped vs unmapped
let mut mapped: Vec<ParsedSample> = Vec::new();
let mut unmapped: Vec<ParsedSample> = Vec::new();
@ -623,7 +591,6 @@ pub fn build_import_layers(samples: Vec<ParsedSample>, folder_path: &Path) -> Fo
loop_mode,
velocity_markers,
velocity_ranges,
detected_articulation,
}
}
@ -662,13 +629,13 @@ mod tests {
use super::*;
#[test]
fn test_note_name_to_midi() {
assert_eq!(note_name_to_midi("C", 4), Some(60));
assert_eq!(note_name_to_midi("A", 4), Some(69));
assert_eq!(note_name_to_midi("A#", 3), Some(58));
assert_eq!(note_name_to_midi("Bb", 2), Some(46));
assert_eq!(note_name_to_midi("C", -1), Some(0));
assert_eq!(note_name_to_midi("G", 9), Some(127));
fn test_try_note_octave() {
assert_eq!(try_note_octave("C4"), Some(60));
assert_eq!(try_note_octave("A4"), Some(69));
assert_eq!(try_note_octave("A#3"), Some(58));
assert_eq!(try_note_octave("Bb2"), Some(46));
assert_eq!(try_note_octave("C-1"), Some(0));
assert_eq!(try_note_octave("G9"), Some(127));
}
#[test]
@ -676,7 +643,6 @@ mod tests {
// Horns: horns-sus-ff-a#2-PB-loop.wav
let p = parse_sample_filename(
Path::new("/samples/horns-sus-ff-a#2-PB-loop.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(46)); // A#2
assert_eq!(p.velocity_marker, Some("ff".to_string()));
@ -685,7 +651,6 @@ mod tests {
// Philharmonia: viola_A#3-staccato-rr1-PB.wav
let p = parse_sample_filename(
Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(58)); // A#3
assert_eq!(p.rr_index, Some(1));
@ -694,7 +659,6 @@ mod tests {
// Bare note: A1.mp3
let p = parse_sample_filename(
Path::new("/samples/A1.mp3"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(33)); // A1
}
@ -704,21 +668,18 @@ mod tests {
// NoBudgetOrch: 2_A-PB.wav
let p = parse_sample_filename(
Path::new("/samples/2_A-PB.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(45)); // A2
// 3_Gb-PB.wav
let p = parse_sample_filename(
Path::new("/samples/3_Gb-PB.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(54)); // Gb3
// 1_Bb.wav
let p = parse_sample_filename(
Path::new("/samples/1_Bb.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(34)); // Bb1
}
@ -728,7 +689,6 @@ mod tests {
// NoBudgetOrch TubularBells: 3_A_f.wav
let p = parse_sample_filename(
Path::new("/samples/3_A_f.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(57)); // A3
assert_eq!(p.velocity_marker, Some("f".to_string()));
@ -736,7 +696,6 @@ mod tests {
// 3_C_p.wav
let p = parse_sample_filename(
Path::new("/samples/3_C_p.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(48)); // C3
assert_eq!(p.velocity_marker, Some("p".to_string()));
@ -747,7 +706,6 @@ mod tests {
// NoBudgetOrch: 5_C_2-PB.wav → C5, rr2
let p = parse_sample_filename(
Path::new("/samples/5_C_2-PB.wav"),
Path::new("/samples"),
);
assert_eq!(p.detected_note, Some(72)); // C5
assert_eq!(p.rr_index, Some(2));
@ -755,7 +713,6 @@ mod tests {
// rr marker: viola_A#3-staccato-rr1-PB.wav
let p = parse_sample_filename(
Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"),
Path::new("/samples"),
);
assert_eq!(p.rr_index, Some(1));
}
@ -764,13 +721,11 @@ mod tests {
fn test_loop_hints_from_folder() {
let p = parse_sample_filename(
Path::new("/libs/Cello/Sustain/2_A.wav"),
Path::new("/libs/Cello/Sustain"),
);
assert_eq!(p.loop_hint, LoopHint::Loop);
let p = parse_sample_filename(
Path::new("/libs/Cello/Pizzicato/2_A-PB.wav"),
Path::new("/libs/Cello/Pizzicato"),
);
assert_eq!(p.loop_hint, LoopHint::OneShot);
}

View File

@ -8,7 +8,7 @@ use egui_node_graph2::NodeId;
use std::path::PathBuf;
use crate::sample_import::{
FolderScanResult, ImportLayer, midi_to_note_name, recalc_key_ranges,
FolderScanResult, midi_to_note_name, recalc_key_ranges,
};
use daw_backend::audio::node_graph::nodes::LoopMode;
@ -234,9 +234,4 @@ impl SampleImportDialog {
!self.should_close
}
/// Get the enabled layers for import.
pub fn enabled_layers(&self) -> Vec<&ImportLayer> {
self.scan_result.layers.iter().filter(|l| l.enabled).collect()
}
}