Initial work on movie clips

This commit is contained in:
Skyler Lehmkuhl 2026-02-21 00:54:38 -05:00
parent 3ba6dcb3d2
commit 1892f970c4
17 changed files with 1005 additions and 220 deletions

View File

@ -133,7 +133,9 @@ pub struct ActionExecutor {
impl ActionExecutor { impl ActionExecutor {
/// Create a new action executor with the given document /// Create a new action executor with the given document
pub fn new(document: Document) -> Self { pub fn new(mut document: Document) -> Self {
// Rebuild transient lookup maps (not serialized)
document.rebuild_layer_to_clip_map();
Self { Self {
document: Arc::new(document), document: Arc::new(document),
undo_stack: Vec::new(), undo_stack: Vec::new(),

View File

@ -0,0 +1,244 @@
//! Convert to Movie Clip action
//!
//! Wraps selected shapes and/or clip instances into a new VectorClip
//! with is_group = false, giving it a real internal timeline.
//! Works with 1+ selected items (unlike Group which requires 2+).
use crate::action::Action;
use crate::animation::{AnimationCurve, AnimationTarget, Keyframe, TransformProperty};
use crate::clip::{ClipInstance, VectorClip};
use crate::document::Document;
use crate::layer::{AnyLayer, VectorLayer};
use crate::shape::Shape;
use uuid::Uuid;
use vello::kurbo::{Rect, Shape as KurboShape};
/// Undoable action that wraps a selection of shapes and/or clip instances
/// into a new `VectorClip` (a movie clip with a real internal timeline).
pub struct ConvertToMovieClipAction {
    /// Layer holding the selected items.
    layer_id: Uuid,
    /// Timeline time at which the selection was made.
    time: f64,
    /// Ids of the selected shapes to wrap (may be empty).
    shape_ids: Vec<Uuid>,
    /// Ids of the selected clip instances to wrap (may be empty).
    clip_instance_ids: Vec<Uuid>,
    /// Pre-chosen id for the replacement `ClipInstance` (stable across redo).
    instance_id: Uuid,
    /// Id of the `VectorClip` created by `execute` (None until executed).
    created_clip_id: Option<Uuid>,
    /// Shapes removed from the source layer, kept for rollback.
    removed_shapes: Vec<Shape>,
    /// Clip instances removed from the source layer, kept for rollback.
    removed_clip_instances: Vec<ClipInstance>,
}
impl ConvertToMovieClipAction {
    /// Build a conversion action targeting `layer_id` at timeline `time`.
    ///
    /// `shape_ids` and `clip_instance_ids` identify the selection to wrap;
    /// `instance_id` is the id the replacement `ClipInstance` will carry.
    /// Undo-bookkeeping fields start empty until `execute` runs.
    pub fn new(
        layer_id: Uuid,
        time: f64,
        shape_ids: Vec<Uuid>,
        clip_instance_ids: Vec<Uuid>,
        instance_id: Uuid,
    ) -> Self {
        Self {
            created_clip_id: None,
            removed_shapes: Vec::new(),
            removed_clip_instances: Vec::new(),
            layer_id,
            time,
            shape_ids,
            clip_instance_ids,
            instance_id,
        }
    }
}
impl Action for ConvertToMovieClipAction {
    /// Execute the conversion: gather the selected items, wrap them in a new
    /// `VectorClip`, and replace them on the source layer with a single
    /// `ClipInstance` positioned at the selection's combined center.
    ///
    /// # Errors
    /// Returns `Err` when the layer is missing, is not a vector layer, the
    /// selection resolves to zero items, or no bounding box can be computed.
    fn execute(&mut self, document: &mut Document) -> Result<(), String> {
        let layer = document
            .get_layer(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;
        let vl = match layer {
            AnyLayer::Vector(vl) => vl,
            _ => return Err("Convert to Movie Clip is only supported on vector layers".to_string()),
        };
        // Collect the selected shapes as they exist at `self.time`.
        // Ids that fail to resolve are silently skipped.
        let shapes_at_time = vl.shapes_at_time(self.time);
        let mut collected_shapes: Vec<Shape> = Vec::new();
        for id in &self.shape_ids {
            if let Some(shape) = shapes_at_time.iter().find(|s| &s.id == id) {
                collected_shapes.push(shape.clone());
            }
        }
        // Collect the selected clip instances.
        let mut collected_clip_instances: Vec<ClipInstance> = Vec::new();
        for id in &self.clip_instance_ids {
            if let Some(ci) = vl.clip_instances.iter().find(|ci| &ci.id == id) {
                collected_clip_instances.push(ci.clone());
            }
        }
        // Unlike Group (which needs 2+), a single item may become a clip.
        let total_items = collected_shapes.len() + collected_clip_instances.len();
        if total_items < 1 {
            return Err("Need at least 1 item to convert to movie clip".to_string());
        }
        // Compute the combined bounding box of everything being wrapped,
        // in layer space (each item's own transform applied).
        let mut combined_bbox: Option<Rect> = None;
        for shape in &collected_shapes {
            let local_bbox = shape.path().bounding_box();
            let transform = shape.transform.to_affine();
            let transformed_bbox = transform.transform_rect_bbox(local_bbox);
            combined_bbox = Some(match combined_bbox {
                Some(existing) => existing.union(transformed_bbox),
                None => transformed_bbox,
            });
        }
        for ci in &collected_clip_instances {
            let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&ci.clip_id) {
                // Map timeline time into the referenced clip's local time.
                let clip_time = ((self.time - ci.timeline_start) * ci.playback_speed) + ci.trim_start;
                vector_clip.calculate_content_bounds(document, clip_time)
            } else if let Some(video_clip) = document.get_video_clip(&ci.clip_id) {
                Rect::new(0.0, 0.0, video_clip.width, video_clip.height)
            } else {
                // Dangling clip reference: contributes nothing to the bbox.
                continue;
            };
            let ci_transform = ci.transform.to_affine();
            let transformed_bbox = ci_transform.transform_rect_bbox(content_bounds);
            combined_bbox = Some(match combined_bbox {
                Some(existing) => existing.union(transformed_bbox),
                None => transformed_bbox,
            });
        }
        let bbox = combined_bbox.ok_or("Could not compute bounding box")?;
        let center_x = (bbox.x0 + bbox.x1) / 2.0;
        let center_y = (bbox.y0 + bbox.y1) / 2.0;
        // Re-center the wrapped items so the new clip's origin is the
        // selection center. The collected vectors are moved, not cloned:
        // they are not used again after this point (was a redundant clone).
        let mut clip_shapes: Vec<Shape> = collected_shapes;
        for shape in &mut clip_shapes {
            shape.transform.x -= center_x;
            shape.transform.y -= center_y;
        }
        let mut clip_instances_inside: Vec<ClipInstance> = collected_clip_instances;
        for ci in &mut clip_instances_inside {
            ci.transform.x -= center_x;
            ci.transform.y -= center_y;
        }
        // Create the VectorClip with a real timeline duration.
        // is_group defaults to false — movie clips have real timelines.
        let mut clip = VectorClip::new("Movie Clip", bbox.width(), bbox.height(), document.duration);
        let clip_id = clip.id;
        let mut inner_layer = VectorLayer::new("Layer 1");
        for shape in clip_shapes {
            inner_layer.add_shape_to_keyframe(shape, 0.0);
        }
        for ci in clip_instances_inside {
            inner_layer.clip_instances.push(ci);
        }
        clip.layers.add_root(AnyLayer::Vector(inner_layer));
        document.add_vector_clip(clip);
        // NOTE(review): a fresh clip id is generated on every execute, so a
        // redo after rollback creates a new (equivalent) library entry.
        self.created_clip_id = Some(clip_id);
        // Remove the originals from the source layer, remembering them
        // so rollback can restore the exact objects.
        let layer = document.get_layer_mut(&self.layer_id).unwrap();
        let vl = match layer {
            AnyLayer::Vector(vl) => vl,
            _ => unreachable!(),
        };
        self.removed_shapes.clear();
        for id in &self.shape_ids {
            if let Some(shape) = vl.remove_shape_from_keyframe(id, self.time) {
                self.removed_shapes.push(shape);
            }
        }
        self.removed_clip_instances.clear();
        for id in &self.clip_instance_ids {
            if let Some(pos) = vl.clip_instances.iter().position(|ci| &ci.id == id) {
                self.removed_clip_instances.push(vl.clip_instances.remove(pos));
            }
        }
        // Place the replacement ClipInstance at the selection center.
        let instance = ClipInstance::with_id(self.instance_id, clip_id)
            .with_position(center_x, center_y)
            .with_name("Movie Clip");
        vl.clip_instances.push(instance);
        // Seed default animation curves so every transform property has a
        // keyframe at t=0 matching the instance's initial state.
        let props_and_values = [
            (TransformProperty::X, center_x),
            (TransformProperty::Y, center_y),
            (TransformProperty::Rotation, 0.0),
            (TransformProperty::ScaleX, 1.0),
            (TransformProperty::ScaleY, 1.0),
            (TransformProperty::SkewX, 0.0),
            (TransformProperty::SkewY, 0.0),
            (TransformProperty::Opacity, 1.0),
        ];
        for (prop, value) in props_and_values {
            let target = AnimationTarget::Object {
                id: self.instance_id,
                property: prop,
            };
            let mut curve = AnimationCurve::new(target.clone(), value);
            curve.set_keyframe(Keyframe::linear(0.0, value));
            vl.layer.animation_data.set_curve(curve);
        }
        Ok(())
    }

    /// Undo the conversion: strip the seeded curves, remove the new instance,
    /// restore the original shapes/instances, and delete the created
    /// `VectorClip` from the document library.
    fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
        let layer = document
            .get_layer_mut(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;
        if let AnyLayer::Vector(vl) = layer {
            // Remove the animation curves seeded by execute().
            for prop in &[
                TransformProperty::X, TransformProperty::Y,
                TransformProperty::Rotation,
                TransformProperty::ScaleX, TransformProperty::ScaleY,
                TransformProperty::SkewX, TransformProperty::SkewY,
                TransformProperty::Opacity,
            ] {
                let target = AnimationTarget::Object {
                    id: self.instance_id,
                    property: *prop,
                };
                vl.layer.animation_data.remove_curve(&target);
            }
            // Remove the clip instance placed by execute().
            vl.clip_instances.retain(|ci| ci.id != self.instance_id);
            // Re-insert the shapes removed by execute().
            for shape in self.removed_shapes.drain(..) {
                vl.add_shape_to_keyframe(shape, self.time);
            }
            // Re-insert the clip instances removed by execute().
            for ci in self.removed_clip_instances.drain(..) {
                vl.clip_instances.push(ci);
            }
        }
        // Remove the created VectorClip from the document library.
        if let Some(clip_id) = self.created_clip_id.take() {
            document.remove_vector_clip(&clip_id);
        }
        Ok(())
    }

    /// Human-readable label for undo-history UI.
    fn description(&self) -> String {
        let count = self.shape_ids.len() + self.clip_instance_ids.len();
        format!("Convert {} object(s) to Movie Clip", count)
    }
}

View File

@ -30,6 +30,7 @@ pub mod remove_clip_instances;
pub mod remove_shapes; pub mod remove_shapes;
pub mod set_keyframe; pub mod set_keyframe;
pub mod group_shapes; pub mod group_shapes;
pub mod convert_to_movie_clip;
pub use add_clip_instance::AddClipInstanceAction; pub use add_clip_instance::AddClipInstanceAction;
pub use add_effect::AddEffectAction; pub use add_effect::AddEffectAction;
@ -58,3 +59,4 @@ pub use remove_clip_instances::RemoveClipInstancesAction;
pub use remove_shapes::RemoveShapesAction; pub use remove_shapes::RemoveShapesAction;
pub use set_keyframe::SetKeyframeAction; pub use set_keyframe::SetKeyframeAction;
pub use group_shapes::GroupAction; pub use group_shapes::GroupAction;
pub use convert_to_movie_clip::ConvertToMovieClipAction;

View File

@ -90,6 +90,30 @@ impl VectorClip {
} }
} }
/// Calculate the duration of this clip based on its internal keyframe content.
/// Returns the time of the last keyframe across all layers, plus one frame.
/// Falls back to the stored `duration` field if no keyframes exist.
pub fn content_duration(&self, framerate: f64) -> f64 {
    let frame_duration = 1.0 / framerate;
    // Latest keyframe time across every vector layer, if any keyframes exist.
    let latest = self
        .layers
        .iter()
        .filter_map(|layer_node| match &layer_node.data {
            AnyLayer::Vector(vector_layer) => vector_layer.keyframes.last().map(|kf| kf.time),
            _ => None,
        })
        .reduce(f64::max);
    match latest {
        Some(t) => t + frame_duration,
        None => self.duration,
    }
}
/// Calculate the bounding box of all content in this clip at a specific time /// Calculate the bounding box of all content in this clip at a specific time
/// ///
/// This recursively calculates the union of all shape and nested clip bounding boxes /// This recursively calculates the union of all shape and nested clip bounding boxes

View File

@ -166,6 +166,11 @@ pub struct Document {
/// Current playback time in seconds /// Current playback time in seconds
#[serde(skip)] #[serde(skip)]
pub current_time: f64, pub current_time: f64,
/// Reverse lookup: layer_id → clip_id for layers inside vector clips.
/// Enables O(1) lookup in get_layer/get_layer_mut instead of scanning all clips.
#[serde(skip)]
pub layer_to_clip_map: HashMap<Uuid, Uuid>,
} }
impl Default for Document { impl Default for Document {
@ -195,6 +200,7 @@ impl Default for Document {
ui_layout: None, ui_layout: None,
ui_layout_base: None, ui_layout_base: None,
current_time: 0.0, current_time: 0.0,
layer_to_clip_map: HashMap::new(),
} }
} }
} }
@ -218,6 +224,27 @@ impl Document {
} }
} }
/// Rebuild the layer→clip reverse lookup map from all vector clips.
/// Call after deserialization or bulk clip modifications.
pub fn rebuild_layer_to_clip_map(&mut self) {
    // Field-level borrows: the map and the clip table are disjoint fields.
    let map = &mut self.layer_to_clip_map;
    map.clear();
    for (clip_id, clip) in self.vector_clips.iter() {
        // Index every root layer of every clip.
        map.extend(clip.layers.roots.iter().map(|node| (node.data.id(), *clip_id)));
    }
}
/// Register a layer as belonging to a clip (for O(1) lookup).
pub fn register_layer_in_clip(&mut self, layer_id: Uuid, clip_id: Uuid) {
    // Overwrites any stale mapping already recorded for this layer id.
    let _previous = self.layer_to_clip_map.insert(layer_id, clip_id);
}
/// Unregister a layer from the clip lookup map.
pub fn unregister_layer_from_clip(&mut self, layer_id: &Uuid) {
    // Removing an id that is not present is a harmless no-op.
    let _removed = self.layer_to_clip_map.remove(layer_id);
}
/// Set the background color /// Set the background color
pub fn with_background(mut self, color: ShapeColor) -> Self { pub fn with_background(mut self, color: ShapeColor) -> Self {
self.background_color = color; self.background_color = color;
@ -343,9 +370,31 @@ impl Document {
.filter(|layer| layer.layer().visible) .filter(|layer| layer.layer().visible)
} }
/// Get a layer by ID /// Get visible layers for the current editing context
/// Get visible layers for the current editing context.
pub fn context_visible_layers(&self, clip_id: Option<&Uuid>) -> Vec<&AnyLayer> {
    let mut visible = Vec::new();
    for candidate in self.context_layers(clip_id) {
        if candidate.layer().visible {
            visible.push(candidate);
        }
    }
    visible
}
/// Get a layer by ID (searches root layers, then clip layers via O(1) map lookup)
pub fn get_layer(&self, id: &Uuid) -> Option<&AnyLayer> { pub fn get_layer(&self, id: &Uuid) -> Option<&AnyLayer> {
self.root.get_child(id) // First check root layers
if let Some(layer) = self.root.get_child(id) {
return Some(layer);
}
// O(1) lookup: check if this layer belongs to a clip
if let Some(clip_id) = self.layer_to_clip_map.get(id) {
if let Some(clip) = self.vector_clips.get(clip_id) {
for node in &clip.layers.roots {
if &node.data.id() == id {
return Some(&node.data);
}
}
}
}
None
} }
// === MUTATION METHODS (pub(crate) - only accessible to action module) === // === MUTATION METHODS (pub(crate) - only accessible to action module) ===
@ -358,12 +407,59 @@ impl Document {
&mut self.root &mut self.root
} }
/// Get mutable access to a layer by ID /// Get mutable access to a layer by ID (searches root layers, then clip layers via O(1) map lookup)
/// ///
/// This method is intentionally `pub(crate)` to ensure mutations /// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system. /// only happen through the action system.
pub fn get_layer_mut(&mut self, id: &Uuid) -> Option<&mut AnyLayer> { pub fn get_layer_mut(&mut self, id: &Uuid) -> Option<&mut AnyLayer> {
self.root.get_child_mut(id) // First check root layers
if self.root.get_child(id).is_some() {
return self.root.get_child_mut(id);
}
// O(1) lookup: check if this layer belongs to a clip
if let Some(clip_id) = self.layer_to_clip_map.get(id).copied() {
if let Some(clip) = self.vector_clips.get_mut(&clip_id) {
for node in &mut clip.layers.roots {
if &node.data.id() == id {
return Some(&mut node.data);
}
}
}
}
None
}
// === EDITING CONTEXT METHODS ===
/// Get the layers for the current editing context.
/// When `clip_id` is None, returns root layers. When Some, returns the clip's layers.
pub fn context_layers(&self, clip_id: Option<&Uuid>) -> Vec<&AnyLayer> {
    if let Some(id) = clip_id {
        // An unknown clip id yields an empty list rather than panicking.
        self.vector_clips
            .get(id)
            .map(|clip| clip.layers.root_data())
            .unwrap_or_default()
    } else {
        self.root.children.iter().collect()
    }
}
/// Get mutable layers for the current editing context.
pub fn context_layers_mut(&mut self, clip_id: Option<&Uuid>) -> Vec<&mut AnyLayer> {
    if let Some(id) = clip_id {
        // An unknown clip id yields an empty list rather than panicking.
        self.vector_clips
            .get_mut(id)
            .map(|clip| clip.layers.root_data_mut())
            .unwrap_or_default()
    } else {
        self.root.children.iter_mut().collect()
    }
}
/// Look up a layer by ID within an editing context.
pub fn get_layer_in_context(&self, clip_id: Option<&Uuid>, layer_id: &Uuid) -> Option<&AnyLayer> {
    let mut layers = self.context_layers(clip_id).into_iter();
    layers.find(|candidate| &candidate.id() == layer_id)
}
/// Look up a mutable layer by ID within an editing context.
pub fn get_layer_in_context_mut(&mut self, clip_id: Option<&Uuid>, layer_id: &Uuid) -> Option<&mut AnyLayer> {
self.context_layers_mut(clip_id).into_iter().find(|l| &l.id() == layer_id)
} }
// === CLIP LIBRARY METHODS === // === CLIP LIBRARY METHODS ===
@ -371,6 +467,10 @@ impl Document {
/// Add a vector clip to the library /// Add a vector clip to the library
pub fn add_vector_clip(&mut self, clip: VectorClip) -> Uuid { pub fn add_vector_clip(&mut self, clip: VectorClip) -> Uuid {
let id = clip.id; let id = clip.id;
// Register all layers in the clip for O(1) reverse lookup
for node in &clip.layers.roots {
self.layer_to_clip_map.insert(node.data.id(), id);
}
self.vector_clips.insert(id, clip); self.vector_clips.insert(id, clip);
id id
} }
@ -439,7 +539,15 @@ impl Document {
/// Remove a vector clip from the library /// Remove a vector clip from the library
pub fn remove_vector_clip(&mut self, id: &Uuid) -> Option<VectorClip> { pub fn remove_vector_clip(&mut self, id: &Uuid) -> Option<VectorClip> {
self.vector_clips.remove(id) if let Some(clip) = self.vector_clips.remove(id) {
// Unregister all layers from the reverse lookup map
for node in &clip.layers.roots {
self.layer_to_clip_map.remove(&node.data.id());
}
Some(clip)
} else {
None
}
} }
/// Remove a video clip from the library /// Remove a video clip from the library
@ -534,7 +642,11 @@ impl Document {
/// have infinite internal duration. /// have infinite internal duration.
pub fn get_clip_duration(&self, clip_id: &Uuid) -> Option<f64> { pub fn get_clip_duration(&self, clip_id: &Uuid) -> Option<f64> {
if let Some(clip) = self.vector_clips.get(clip_id) { if let Some(clip) = self.vector_clips.get(clip_id) {
if clip.is_group {
Some(clip.duration) Some(clip.duration)
} else {
Some(clip.content_duration(self.framerate))
}
} else if let Some(clip) = self.video_clips.get(clip_id) { } else if let Some(clip) = self.video_clips.get(clip_id) {
Some(clip.duration) Some(clip.duration)
} else if let Some(clip) = self.audio_clips.get(clip_id) { } else if let Some(clip) = self.audio_clips.get(clip_id) {

View File

@ -164,6 +164,13 @@ pub fn hit_test_clip_instances(
timeline_time: f64, timeline_time: f64,
) -> Option<Uuid> { ) -> Option<Uuid> {
for clip_instance in clip_instances.iter().rev() { for clip_instance in clip_instances.iter().rev() {
// Check time bounds: skip clip instances not active at this time
let clip_duration = document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_duration);
if timeline_time < clip_instance.timeline_start || timeline_time >= instance_end {
continue;
}
let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start; let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) { let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {
@ -196,6 +203,13 @@ pub fn hit_test_clip_instances_in_rect(
let mut hits = Vec::new(); let mut hits = Vec::new();
for clip_instance in clip_instances { for clip_instance in clip_instances {
// Check time bounds: skip clip instances not active at this time
let clip_duration = document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_duration);
if timeline_time < clip_instance.timeline_start || timeline_time >= instance_end {
continue;
}
let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start; let clip_time = ((timeline_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) { let content_bounds = if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {

View File

@ -328,48 +328,6 @@ impl VectorLayer {
// === MUTATION METHODS (pub(crate) - only accessible to action module) === // === MUTATION METHODS (pub(crate) - only accessible to action module) ===
/// Add a shape to this layer (internal, for actions only).
///
/// Returns the id of the inserted shape.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn add_shape_internal(&mut self, shape: Shape) -> Uuid {
    let shape_id = shape.id;
    self.shapes.insert(shape_id, shape);
    shape_id
}
/// Add an object to this layer (internal, for actions only).
///
/// Returns the id of the appended object.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn add_object_internal(&mut self, object: ShapeInstance) -> Uuid {
    let object_id = object.id;
    self.shape_instances.push(object);
    object_id
}
/// Remove a shape from this layer (internal, for actions only)
///
/// Returns the removed shape if found, `None` when no shape has this id.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn remove_shape_internal(&mut self, id: &Uuid) -> Option<Shape> {
    self.shapes.remove(id)
}
/// Remove an object from this layer (internal, for actions only).
///
/// Returns the removed object if found.
/// This method is intentionally `pub(crate)` to ensure mutations
/// only happen through the action system.
pub(crate) fn remove_object_internal(&mut self, id: &Uuid) -> Option<ShapeInstance> {
    // `?` exits with None when no instance carries the requested id.
    let index = self.shape_instances.iter().position(|o| &o.id == id)?;
    Some(self.shape_instances.remove(index))
}
/// Modify an object in place (internal, for actions only) /// Modify an object in place (internal, for actions only)
/// ///
/// Applies the given function to the object if found. /// Applies the given function to the object if found.

View File

@ -110,6 +110,18 @@ impl<T> LayerTree<T> {
} }
} }
impl<T> LayerTree<T> {
    /// Get flat list of references to all root layer data.
    pub fn root_data(&self) -> Vec<&T> {
        let mut data = Vec::with_capacity(self.roots.len());
        for node in &self.roots {
            data.push(&node.data);
        }
        data
    }

    /// Get flat list of mutable references to all root layer data.
    pub fn root_data_mut(&mut self) -> Vec<&mut T> {
        let mut data = Vec::with_capacity(self.roots.len());
        for node in &mut self.roots {
            data.push(&mut node.data);
        }
        data
    }
}
impl<T> Default for LayerTree<T> { impl<T> Default for LayerTree<T> {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()

View File

@ -373,7 +373,19 @@ pub fn render_document_with_transform(
// 2. Recursively render the root graphics object at current time // 2. Recursively render the root graphics object at current time
let time = document.current_time; let time = document.current_time;
render_graphics_object(document, time, scene, base_transform, image_cache, video_manager, skip_instance_id);
// Check if any layers are soloed
let any_soloed = document.visible_layers().any(|layer| layer.soloed());
for layer in document.visible_layers() {
if any_soloed {
if layer.soloed() {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
} else {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
}
}
} }
/// Draw the document background /// Draw the document background
@ -392,35 +404,6 @@ fn render_background(document: &Document, scene: &mut Scene, base_transform: Aff
); );
} }
/// Recursively render the root graphics object and its children.
fn render_graphics_object(
    document: &Document,
    time: f64,
    scene: &mut Scene,
    base_transform: Affine,
    image_cache: &mut ImageCache,
    video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
    skip_instance_id: Option<uuid::Uuid>,
) {
    // When any layer is soloed, rendering is restricted to soloed layers.
    let solo_active = document.visible_layers().any(|layer| layer.soloed());
    // Walk every visible layer, starting with full opacity (1.0).
    for layer in document.visible_layers() {
        // Guard clause: in solo mode, skip layers that are not soloed.
        if solo_active && !layer.soloed() {
            continue;
        }
        render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
    }
}
/// Render a single layer /// Render a single layer
fn render_layer( fn render_layer(
@ -451,6 +434,42 @@ fn render_layer(
} }
} }
/// Render a single clip instance by ID to a scene.
/// Used for re-rendering the "focused" clip on top of a dimmed scene when editing inside a clip.
///
/// Silently does nothing when the layer or instance cannot be found —
/// callers treat a missing target as "nothing to draw".
pub fn render_single_clip_instance(
    document: &Document,
    scene: &mut Scene,
    base_transform: Affine,
    layer_id: &uuid::Uuid,
    instance_id: &uuid::Uuid,
    image_cache: &mut ImageCache,
    video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
) {
    let time = document.current_time;
    // Find the layer containing this instance; bail out if it is missing
    // or is not a vector layer.
    let Some(layer) = document.get_layer(layer_id) else { return };
    let AnyLayer::Vector(vector_layer) = layer else { return };
    let layer_opacity = vector_layer.layer.opacity;
    // Find the specific clip instance on that layer.
    let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| &ci.id == instance_id) else { return };
    // Compute group_end_time if needed — only group clips (is_group) get a
    // visibility end derived from the parent layer's keyframe spacing.
    let group_end_time = document.vector_clips.get(&clip_instance.clip_id)
        .filter(|vc| vc.is_group)
        .map(|_| {
            let frame_duration = 1.0 / document.framerate;
            vector_layer.group_visibility_end(&clip_instance.id, clip_instance.timeline_start, frame_duration)
        });
    render_clip_instance(
        document, time, clip_instance, layer_opacity, scene, base_transform,
        &vector_layer.layer.animation_data, image_cache, video_manager, group_end_time,
    );
}
/// Render a clip instance (recursive rendering for nested compositions) /// Render a clip instance (recursive rendering for nested compositions)
fn render_clip_instance( fn render_clip_instance(
document: &Document, document: &Document,
@ -479,7 +498,8 @@ fn render_clip_instance(
} }
0.0 0.0
} else { } else {
let Some(t) = clip_instance.remap_time(time, vector_clip.duration) else { let clip_dur = vector_clip.content_duration(document.framerate);
let Some(t) = clip_instance.remap_time(time, clip_dur) else {
return; // Clip instance not active at this time return; // Clip instance not active at this time
}; };
t t

View File

@ -616,6 +616,51 @@ enum RecordingArmMode {
Manual, Manual,
} }
/// Entry in the editing context stack — tracks which clip is being edited
/// plus enough parent-context state to restore the editor on exit.
#[derive(Clone)]
struct EditingContextEntry {
    /// The VectorClip ID being edited
    clip_id: Uuid,
    /// The ClipInstance ID through which we entered
    instance_id: Uuid,
    /// The layer ID that contains the instance in the parent context
    parent_layer_id: Uuid,
    /// Saved playback time from the parent context (restored on exit)
    saved_playback_time: f64,
    /// Saved active layer ID from the parent context
    saved_active_layer_id: Option<Uuid>,
}
/// Editing context stack — tracks which clip (or root) is being edited.
/// Empty stack = editing the document root.
#[derive(Clone, Default)]
struct EditingContext {
    // Innermost (currently edited) clip is the last element.
    stack: Vec<EditingContextEntry>,
}
impl EditingContext {
    /// Clip currently being edited, or `None` when at the document root.
    fn current_clip_id(&self) -> Option<Uuid> {
        let top = self.stack.last()?;
        Some(top.clip_id)
    }

    /// ClipInstance through which the current clip was entered.
    fn current_instance_id(&self) -> Option<Uuid> {
        let top = self.stack.last()?;
        Some(top.instance_id)
    }

    /// Parent-context layer holding the entry instance.
    fn current_parent_layer_id(&self) -> Option<Uuid> {
        let top = self.stack.last()?;
        Some(top.parent_layer_id)
    }

    /// Enter a (possibly nested) clip.
    fn push(&mut self, entry: EditingContextEntry) {
        self.stack.push(entry);
    }

    /// Leave the innermost clip, yielding its saved parent state.
    fn pop(&mut self) -> Option<EditingContextEntry> {
        self.stack.pop()
    }
}
struct EditorApp { struct EditorApp {
layouts: Vec<LayoutDefinition>, layouts: Vec<LayoutDefinition>,
current_layout_index: usize, current_layout_index: usize,
@ -638,6 +683,7 @@ struct EditorApp {
action_executor: lightningbeam_core::action::ActionExecutor, // Action system for undo/redo action_executor: lightningbeam_core::action::ActionExecutor, // Action system for undo/redo
active_layer_id: Option<Uuid>, // Currently active layer for editing active_layer_id: Option<Uuid>, // Currently active layer for editing
selection: lightningbeam_core::selection::Selection, // Current selection state selection: lightningbeam_core::selection::Selection, // Current selection state
editing_context: EditingContext, // Which clip (or root) we're editing
tool_state: lightningbeam_core::tool::ToolState, // Current tool interaction state tool_state: lightningbeam_core::tool::ToolState, // Current tool interaction state
// Draw tool configuration // Draw tool configuration
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode, // Current simplification mode for draw tool draw_simplify_mode: lightningbeam_core::tool::SimplifyMode, // Current simplification mode for draw tool
@ -874,6 +920,7 @@ impl EditorApp {
action_executor, action_executor,
active_layer_id: Some(layer_id), active_layer_id: Some(layer_id),
selection: lightningbeam_core::selection::Selection::new(), selection: lightningbeam_core::selection::Selection::new(),
editing_context: EditingContext::default(),
tool_state: lightningbeam_core::tool::ToolState::default(), tool_state: lightningbeam_core::tool::ToolState::default(),
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves
rdp_tolerance: 10.0, // Default RDP tolerance rdp_tolerance: 10.0, // Default RDP tolerance
@ -1585,7 +1632,6 @@ impl EditorApp {
/// Delete the current selection (for cut and delete operations) /// Delete the current selection (for cut and delete operations)
fn clipboard_delete_selection(&mut self) { fn clipboard_delete_selection(&mut self) {
use lightningbeam_core::layer::AnyLayer;
if !self.selection.clip_instances().is_empty() { if !self.selection.clip_instances().is_empty() {
let active_layer_id = match self.active_layer_id { let active_layer_id = match self.active_layer_id {
@ -2241,6 +2287,28 @@ impl EditorApp {
} }
} }
} }
MenuAction::ConvertToMovieClip => {
if let Some(layer_id) = self.active_layer_id {
let shape_ids: Vec<uuid::Uuid> = self.selection.shape_instances().to_vec();
let clip_ids: Vec<uuid::Uuid> = self.selection.clip_instances().to_vec();
if shape_ids.len() + clip_ids.len() >= 1 {
let instance_id = uuid::Uuid::new_v4();
let action = lightningbeam_core::actions::ConvertToMovieClipAction::new(
layer_id,
self.playback_time,
shape_ids,
clip_ids,
instance_id,
);
if let Err(e) = self.action_executor.execute(Box::new(action)) {
eprintln!("Failed to convert to movie clip: {}", e);
} else {
self.selection.clear();
self.selection.add_clip_instance(instance_id);
}
}
}
}
MenuAction::SendToBack => { MenuAction::SendToBack => {
println!("Menu: Send to Back"); println!("Menu: Send to Back");
// TODO: Implement send to back // TODO: Implement send to back
@ -4429,6 +4497,10 @@ impl eframe::App for EditorApp {
// Menu actions queued by pane context menus // Menu actions queued by pane context menus
let mut pending_menu_actions: Vec<MenuAction> = Vec::new(); let mut pending_menu_actions: Vec<MenuAction> = Vec::new();
// Editing context navigation requests from stage pane
let mut pending_enter_clip: Option<(Uuid, Uuid, Uuid)> = None;
let mut pending_exit_clip = false;
// Queue for effect thumbnail requests (collected during rendering) // Queue for effect thumbnail requests (collected during rendering)
let mut effect_thumbnail_requests: Vec<Uuid> = Vec::new(); let mut effect_thumbnail_requests: Vec<Uuid> = Vec::new();
// Empty cache fallback if generator not initialized // Empty cache fallback if generator not initialized
@ -4468,6 +4540,11 @@ impl eframe::App for EditorApp {
theme: &self.theme, theme: &self.theme,
action_executor: &mut self.action_executor, action_executor: &mut self.action_executor,
selection: &mut self.selection, selection: &mut self.selection,
editing_clip_id: self.editing_context.current_clip_id(),
editing_instance_id: self.editing_context.current_instance_id(),
editing_parent_layer_id: self.editing_context.current_parent_layer_id(),
pending_enter_clip: &mut pending_enter_clip,
pending_exit_clip: &mut pending_exit_clip,
active_layer_id: &mut self.active_layer_id, active_layer_id: &mut self.active_layer_id,
tool_state: &mut self.tool_state, tool_state: &mut self.tool_state,
pending_actions: &mut pending_actions, pending_actions: &mut pending_actions,
@ -4576,6 +4653,34 @@ impl eframe::App for EditorApp {
self.handle_menu_action(action); self.handle_menu_action(action);
} }
// Process editing context navigation (enter/exit movie clips)
if let Some((clip_id, instance_id, parent_layer_id)) = pending_enter_clip {
let entry = EditingContextEntry {
clip_id,
instance_id,
parent_layer_id,
saved_playback_time: self.playback_time,
saved_active_layer_id: self.active_layer_id,
};
self.editing_context.push(entry);
self.selection.clear();
// Set active layer to the clip's first layer
let first_layer_id = self.action_executor.document()
.get_vector_clip(&clip_id)
.and_then(|clip| clip.layers.roots.first())
.map(|node| node.data.id());
self.active_layer_id = first_layer_id;
// Reset playback time to 0 when entering a clip
self.playback_time = 0.0;
}
if pending_exit_clip {
if let Some(entry) = self.editing_context.pop() {
self.selection.clear();
self.active_layer_id = entry.saved_active_layer_id;
self.playback_time = entry.saved_playback_time;
}
}
// Set cursor based on hover state // Set cursor based on hover state
if let Some((_, is_horizontal)) = self.hovered_divider { if let Some((_, is_horizontal)) = self.hovered_divider {
if is_horizontal { if is_horizontal {
@ -4735,6 +4840,11 @@ struct RenderContext<'a> {
theme: &'a Theme, theme: &'a Theme,
action_executor: &'a mut lightningbeam_core::action::ActionExecutor, action_executor: &'a mut lightningbeam_core::action::ActionExecutor,
selection: &'a mut lightningbeam_core::selection::Selection, selection: &'a mut lightningbeam_core::selection::Selection,
editing_clip_id: Option<Uuid>,
editing_instance_id: Option<Uuid>,
editing_parent_layer_id: Option<Uuid>,
pending_enter_clip: &'a mut Option<(Uuid, Uuid, Uuid)>,
pending_exit_clip: &'a mut bool,
active_layer_id: &'a mut Option<Uuid>, active_layer_id: &'a mut Option<Uuid>,
tool_state: &'a mut lightningbeam_core::tool::ToolState, tool_state: &'a mut lightningbeam_core::tool::ToolState,
pending_actions: &'a mut Vec<Box<dyn lightningbeam_core::action::Action>>, pending_actions: &'a mut Vec<Box<dyn lightningbeam_core::action::Action>>,
@ -5272,6 +5382,11 @@ fn render_pane(
project_generation: ctx.project_generation, project_generation: ctx.project_generation,
script_to_edit: ctx.script_to_edit, script_to_edit: ctx.script_to_edit,
script_saved: ctx.script_saved, script_saved: ctx.script_saved,
editing_clip_id: ctx.editing_clip_id,
editing_instance_id: ctx.editing_instance_id,
editing_parent_layer_id: ctx.editing_parent_layer_id,
pending_enter_clip: ctx.pending_enter_clip,
pending_exit_clip: ctx.pending_exit_clip,
}; };
pane_instance.render_header(&mut header_ui, &mut shared); pane_instance.render_header(&mut header_ui, &mut shared);
} }
@ -5345,6 +5460,11 @@ fn render_pane(
project_generation: ctx.project_generation, project_generation: ctx.project_generation,
script_to_edit: ctx.script_to_edit, script_to_edit: ctx.script_to_edit,
script_saved: ctx.script_saved, script_saved: ctx.script_saved,
editing_clip_id: ctx.editing_clip_id,
editing_instance_id: ctx.editing_instance_id,
editing_parent_layer_id: ctx.editing_parent_layer_id,
pending_enter_clip: ctx.pending_enter_clip,
pending_exit_clip: ctx.pending_exit_clip,
}; };
// Render pane content (header was already rendered above) // Render pane content (header was already rendered above)

View File

@ -163,6 +163,7 @@ pub enum MenuAction {
// Modify menu // Modify menu
Group, Group,
ConvertToMovieClip,
SendToBack, SendToBack,
BringToFront, BringToFront,
SplitClip, SplitClip,
@ -259,6 +260,7 @@ impl MenuItemDef {
// Modify menu items // Modify menu items
const GROUP: Self = Self { label: "Group", action: MenuAction::Group, shortcut: Some(Shortcut::new(ShortcutKey::G, CTRL, NO_SHIFT, NO_ALT)) }; const GROUP: Self = Self { label: "Group", action: MenuAction::Group, shortcut: Some(Shortcut::new(ShortcutKey::G, CTRL, NO_SHIFT, NO_ALT)) };
const CONVERT_TO_MOVIE_CLIP: Self = Self { label: "Convert to Movie Clip", action: MenuAction::ConvertToMovieClip, shortcut: None };
const SEND_TO_BACK: Self = Self { label: "Send to back", action: MenuAction::SendToBack, shortcut: None }; const SEND_TO_BACK: Self = Self { label: "Send to back", action: MenuAction::SendToBack, shortcut: None };
const BRING_TO_FRONT: Self = Self { label: "Bring to front", action: MenuAction::BringToFront, shortcut: None }; const BRING_TO_FRONT: Self = Self { label: "Bring to front", action: MenuAction::BringToFront, shortcut: None };
const SPLIT_CLIP: Self = Self { label: "Split Clip", action: MenuAction::SplitClip, shortcut: Some(Shortcut::new(ShortcutKey::K, CTRL, NO_SHIFT, NO_ALT)) }; const SPLIT_CLIP: Self = Self { label: "Split Clip", action: MenuAction::SplitClip, shortcut: Some(Shortcut::new(ShortcutKey::K, CTRL, NO_SHIFT, NO_ALT)) };
@ -369,6 +371,7 @@ impl MenuItemDef {
label: "Modify", label: "Modify",
children: &[ children: &[
MenuDef::Item(&Self::GROUP), MenuDef::Item(&Self::GROUP),
MenuDef::Item(&Self::CONVERT_TO_MOVIE_CLIP),
MenuDef::Separator, MenuDef::Separator,
MenuDef::Item(&Self::SEND_TO_BACK), MenuDef::Item(&Self::SEND_TO_BACK),
MenuDef::Item(&Self::BRING_TO_FRONT), MenuDef::Item(&Self::BRING_TO_FRONT),

View File

@ -154,6 +154,16 @@ pub struct SharedPaneState<'a> {
pub action_executor: &'a mut lightningbeam_core::action::ActionExecutor, pub action_executor: &'a mut lightningbeam_core::action::ActionExecutor,
/// Current selection state (mutable for tools to modify) /// Current selection state (mutable for tools to modify)
pub selection: &'a mut lightningbeam_core::selection::Selection, pub selection: &'a mut lightningbeam_core::selection::Selection,
/// Which VectorClip is being edited (None = document root)
pub editing_clip_id: Option<uuid::Uuid>,
/// The clip instance ID being edited
pub editing_instance_id: Option<uuid::Uuid>,
/// The parent layer ID containing the clip instance being edited
pub editing_parent_layer_id: Option<uuid::Uuid>,
/// Request to enter a movie clip for editing: (clip_id, instance_id, parent_layer_id)
pub pending_enter_clip: &'a mut Option<(uuid::Uuid, uuid::Uuid, uuid::Uuid)>,
/// Request to exit the current movie clip
pub pending_exit_clip: &'a mut bool,
/// Currently active layer ID /// Currently active layer ID
pub active_layer_id: &'a mut Option<uuid::Uuid>, pub active_layer_id: &'a mut Option<uuid::Uuid>,
/// Current tool interaction state (mutable for tools to modify) /// Current tool interaction state (mutable for tools to modify)

View File

@ -797,7 +797,7 @@ impl NodeGraphPane {
if let Some(path) = rfd::FileDialog::new().pick_folder() { if let Some(path) = rfd::FileDialog::new().pick_folder() {
match crate::sample_import::scan_folder(&path) { match crate::sample_import::scan_folder(&path) {
Ok(samples) => { Ok(samples) => {
let scan_result = crate::sample_import::build_import_layers(samples, &path); let scan_result = crate::sample_import::build_import_layers(samples);
let track_id = backend_track_id; let track_id = backend_track_id;
let dialog = crate::sample_import_dialog::SampleImportDialog::new( let dialog = crate::sample_import_dialog::SampleImportDialog::new(
path, scan_result, track_id, backend_node_id, node_id, path, scan_result, track_id, backend_node_id, node_id,

View File

@ -380,6 +380,12 @@ struct VelloRenderContext {
shape_editing_cache: Option<ShapeEditingCache>, shape_editing_cache: Option<ShapeEditingCache>,
/// Surface format for blit pipelines /// Surface format for blit pipelines
target_format: wgpu::TextureFormat, target_format: wgpu::TextureFormat,
/// Which VectorClip is being edited (None = document root)
editing_clip_id: Option<uuid::Uuid>,
/// The clip instance ID being edited (for skip + re-render)
editing_instance_id: Option<uuid::Uuid>,
/// The parent layer ID containing the clip instance being edited
editing_parent_layer_id: Option<uuid::Uuid>,
} }
/// Callback for Vello rendering within egui /// Callback for Vello rendering within egui
@ -436,6 +442,23 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let camera_transform = Affine::translate((self.ctx.pan_offset.x as f64, self.ctx.pan_offset.y as f64)) let camera_transform = Affine::translate((self.ctx.pan_offset.x as f64, self.ctx.pan_offset.y as f64))
* Affine::scale(self.ctx.zoom as f64); * Affine::scale(self.ctx.zoom as f64);
// Overlay transform: camera + clip instance transform (for rendering overlays in clip-local space)
let overlay_transform = if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
let clip_affine = self.ctx.document.get_layer(&parent_layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
vl.clip_instances.iter().find(|ci| ci.id == instance_id)
} else {
None
}
})
.map(|ci| ci.transform.to_affine())
.unwrap_or(Affine::IDENTITY);
camera_transform * clip_affine
} else {
camera_transform
};
// Choose rendering path based on HDR compositing flag // Choose rendering path based on HDR compositing flag
let mut scene = if USE_HDR_COMPOSITING { let mut scene = if USE_HDR_COMPOSITING {
// HDR Compositing Pipeline: render each layer separately for proper opacity // HDR Compositing Pipeline: render each layer separately for proper opacity
@ -448,12 +471,19 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Skip rendering the shape instance being edited (for vector editing preview) // Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id); let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
// When editing inside a clip, skip the clip instance in the main pass
// (it will be re-rendered on top after the dim overlay)
let editing_skip_id = self.ctx.editing_clip_id.as_ref().and_then(|_| {
self.ctx.editing_instance_id
});
let effective_skip = skip_instance_id.or(editing_skip_id);
let composite_result = lightningbeam_core::renderer::render_document_for_compositing( let composite_result = lightningbeam_core::renderer::render_document_for_compositing(
&self.ctx.document, &self.ctx.document,
camera_transform, camera_transform,
&mut image_cache, &mut image_cache,
&shared.video_manager, &shared.video_manager,
skip_instance_id, effective_skip,
); );
drop(image_cache); drop(image_cache);
@ -677,6 +707,89 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
drop(effect_processor); drop(effect_processor);
// When editing inside a clip: dim overlay + re-render the clip at full opacity
if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
// 1. Render dim overlay scene
let mut dim_scene = vello::Scene::new();
let doc_rect = vello::kurbo::Rect::new(0.0, 0.0, self.ctx.document.width, self.ctx.document.height);
dim_scene.fill(
vello::peniko::Fill::NonZero,
camera_transform,
vello::peniko::Color::new([0.0, 0.0, 0.0, 0.5]),
None,
&doc_rect,
);
// Composite dim overlay onto HDR texture
let dim_srgb_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, lightningbeam_core::gpu::BufferFormat::Rgba8Srgb));
let dim_hdr_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, BufferFormat::Rgba16Float));
if let (Some(dim_srgb_view), Some(dim_hdr_view), Some(hdr_view)) = (
buffer_pool.get_view(dim_srgb_handle),
buffer_pool.get_view(dim_hdr_handle),
&instance_resources.hdr_texture_view,
) {
let dim_params = vello::RenderParams {
base_color: vello::peniko::Color::TRANSPARENT,
width, height,
antialiasing_method: vello::AaConfig::Msaa16,
};
if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &dim_scene, dim_srgb_view, &dim_params).ok();
}
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("dim_srgb_to_linear") });
shared.srgb_to_linear.convert(device, &mut enc, dim_srgb_view, dim_hdr_view);
queue.submit(Some(enc.finish()));
let dim_layer = lightningbeam_core::gpu::CompositorLayer::normal(dim_hdr_handle, 1.0);
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("dim_composite") });
shared.compositor.composite(device, queue, &mut enc, &[dim_layer], &buffer_pool, hdr_view, None);
queue.submit(Some(enc.finish()));
}
buffer_pool.release(dim_srgb_handle);
buffer_pool.release(dim_hdr_handle);
// 2. Re-render the clip instance at full opacity
let mut clip_scene = vello::Scene::new();
let mut image_cache = shared.image_cache.lock().unwrap();
lightningbeam_core::renderer::render_single_clip_instance(
&self.ctx.document,
&mut clip_scene,
camera_transform,
&parent_layer_id,
&instance_id,
&mut image_cache,
&shared.video_manager,
);
drop(image_cache);
let clip_srgb_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, lightningbeam_core::gpu::BufferFormat::Rgba8Srgb));
let clip_hdr_handle = buffer_pool.acquire(device, lightningbeam_core::gpu::BufferSpec::new(width, height, BufferFormat::Rgba16Float));
if let (Some(clip_srgb_view), Some(clip_hdr_view), Some(hdr_view)) = (
buffer_pool.get_view(clip_srgb_handle),
buffer_pool.get_view(clip_hdr_handle),
&instance_resources.hdr_texture_view,
) {
let clip_params = vello::RenderParams {
base_color: vello::peniko::Color::TRANSPARENT,
width, height,
antialiasing_method: vello::AaConfig::Msaa16,
};
if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &clip_scene, clip_srgb_view, &clip_params).ok();
}
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("clip_srgb_to_linear") });
shared.srgb_to_linear.convert(device, &mut enc, clip_srgb_view, clip_hdr_view);
queue.submit(Some(enc.finish()));
let clip_layer = lightningbeam_core::gpu::CompositorLayer::normal(clip_hdr_handle, 1.0);
let mut enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("clip_composite") });
shared.compositor.composite(device, queue, &mut enc, &[clip_layer], &buffer_pool, hdr_view, None);
queue.submit(Some(enc.finish()));
}
buffer_pool.release(clip_srgb_handle);
buffer_pool.release(clip_hdr_handle);
}
// Advance frame counter for buffer cleanup // Advance frame counter for buffer cleanup
buffer_pool.next_frame(); buffer_pool.next_frame();
drop(buffer_pool); drop(buffer_pool);
@ -692,14 +805,43 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Skip rendering the shape instance being edited (for vector editing preview) // Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id); let skip_instance_id = self.ctx.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
let editing_skip_id = self.ctx.editing_clip_id.as_ref().and_then(|_| {
self.ctx.editing_instance_id
});
let effective_skip = skip_instance_id.or(editing_skip_id);
lightningbeam_core::renderer::render_document_with_transform( lightningbeam_core::renderer::render_document_with_transform(
&self.ctx.document, &self.ctx.document,
&mut scene, &mut scene,
camera_transform, camera_transform,
&mut image_cache, &mut image_cache,
&shared.video_manager, &shared.video_manager,
skip_instance_id, effective_skip,
); );
// When editing inside a clip: dim overlay + re-render the clip at full opacity
if let (Some(parent_layer_id), Some(instance_id)) = (self.ctx.editing_parent_layer_id, self.ctx.editing_instance_id) {
// Semi-transparent dim overlay
let doc_rect = vello::kurbo::Rect::new(0.0, 0.0, self.ctx.document.width, self.ctx.document.height);
scene.fill(
vello::peniko::Fill::NonZero,
camera_transform,
vello::peniko::Color::new([0.0, 0.0, 0.0, 0.5]),
None,
&doc_rect,
);
// Re-render the clip instance on top
lightningbeam_core::renderer::render_single_clip_instance(
&self.ctx.document,
&mut scene,
camera_transform,
&parent_layer_id,
&instance_id,
&mut image_cache,
&shared.video_manager,
);
}
drop(image_cache); drop(image_cache);
scene scene
}; };
@ -751,7 +893,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
* Affine::rotate(shape.transform.rotation.to_radians()) * Affine::rotate(shape.transform.rotation.to_radians())
* Affine::scale_non_uniform(shape.transform.scale_x, shape.transform.scale_y) * Affine::scale_non_uniform(shape.transform.scale_x, shape.transform.scale_y)
* skew_transform; * skew_transform;
let combined_transform = camera_transform * object_transform; let combined_transform = overlay_transform * object_transform;
// Render shape with semi-transparent fill (light blue, 40% opacity) // Render shape with semi-transparent fill (light blue, 40% opacity)
let alpha_color = Color::from_rgba8(100, 150, 255, 100); let alpha_color = Color::from_rgba8(100, 150, 255, 100);
@ -772,7 +914,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
use vello::kurbo::Stroke; use vello::kurbo::Stroke;
let clip_transform = Affine::translate((new_x, new_y)); let clip_transform = Affine::translate((new_x, new_y));
let combined_transform = camera_transform * clip_transform; let combined_transform = overlay_transform * clip_transform;
// Calculate clip bounds for preview // Calculate clip bounds for preview
let clip_time = ((self.ctx.playback_time - clip_inst.timeline_start) * clip_inst.playback_speed) + clip_inst.trim_start; let clip_time = ((self.ctx.playback_time - clip_inst.timeline_start) * clip_inst.playback_speed) + clip_inst.trim_start;
@ -822,7 +964,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Apply object transform and camera transform // Apply object transform and camera transform
let object_transform = Affine::translate((shape.transform.x, shape.transform.y)); let object_transform = Affine::translate((shape.transform.x, shape.transform.y));
let combined_transform = camera_transform * object_transform; let combined_transform = overlay_transform * object_transform;
// Create selection rectangle // Create selection rectangle
let selection_rect = KurboRect::new(bbox.x0, bbox.y0, bbox.x1, bbox.y1); let selection_rect = KurboRect::new(bbox.x0, bbox.y0, bbox.x1, bbox.y1);
@ -868,9 +1010,15 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
} }
// Also draw selection outlines for clip instances // Also draw selection outlines for clip instances
let _clip_instance_count = self.ctx.selection.clip_instances().len();
for &clip_id in self.ctx.selection.clip_instances() { for &clip_id in self.ctx.selection.clip_instances() {
if let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| ci.id == clip_id) { if let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| ci.id == clip_id) {
// Skip clip instances not active at current time
let clip_dur = self.ctx.document.get_clip_duration(&clip_instance.clip_id).unwrap_or(0.0);
let instance_end = clip_instance.timeline_start + clip_instance.effective_duration(clip_dur);
if self.ctx.playback_time < clip_instance.timeline_start || self.ctx.playback_time >= instance_end {
continue;
}
// Calculate clip-local time // Calculate clip-local time
let clip_time = ((self.ctx.playback_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start; let clip_time = ((self.ctx.playback_time - clip_instance.timeline_start) * clip_instance.playback_speed) + clip_instance.trim_start;
@ -886,7 +1034,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Apply clip instance transform and camera transform // Apply clip instance transform and camera transform
let clip_transform = clip_instance.transform.to_affine(); let clip_transform = clip_instance.transform.to_affine();
let combined_transform = camera_transform * clip_transform; let combined_transform = overlay_transform * clip_transform;
// Draw selection outline with different color for clip instances // Draw selection outline with different color for clip instances
let clip_selection_color = Color::from_rgb8(255, 120, 0); // Orange let clip_selection_color = Color::from_rgb8(255, 120, 0); // Orange
@ -943,7 +1091,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let marquee_fill = Color::from_rgba8(0, 120, 255, 100); let marquee_fill = Color::from_rgba8(0, 120, 255, 100);
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
marquee_fill, marquee_fill,
None, None,
&marquee_rect, &marquee_rect,
@ -952,7 +1100,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Border stroke // Border stroke
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
selection_color, selection_color,
None, None,
&marquee_rect, &marquee_rect,
@ -1006,7 +1154,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
if width > 0.0 && height > 0.0 { if width > 0.0 && height > 0.0 {
let rect = KurboRect::new(0.0, 0.0, width, height); let rect = KurboRect::new(0.0, 0.0, width, height);
let preview_transform = camera_transform * Affine::translate((position.x, position.y)); let preview_transform = overlay_transform * Affine::translate((position.x, position.y));
if self.ctx.fill_enabled { if self.ctx.fill_enabled {
let fill_color = Color::from_rgba8( let fill_color = Color::from_rgba8(
@ -1079,7 +1227,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
}; };
if rx > 0.0 && ry > 0.0 { if rx > 0.0 && ry > 0.0 {
let preview_transform = camera_transform * Affine::translate((position.x, position.y)); let preview_transform = overlay_transform * Affine::translate((position.x, position.y));
let fill_color = Color::from_rgba8( let fill_color = Color::from_rgba8(
self.ctx.fill_color.r(), self.ctx.fill_color.r(),
@ -1132,7 +1280,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let line = Line::new(*start_point, *current_point); let line = Line::new(*start_point, *current_point);
scene.stroke( scene.stroke(
&Stroke::new(2.0), &Stroke::new(2.0),
camera_transform, overlay_transform,
stroke_color, stroke_color,
None, None,
&line, &line,
@ -1151,7 +1299,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let radius = (dx * dx + dy * dy).sqrt(); let radius = (dx * dx + dy * dy).sqrt();
if radius > 5.0 && num_sides >= 3 { if radius > 5.0 && num_sides >= 3 {
let preview_transform = camera_transform * Affine::translate((center.x, center.y)); let preview_transform = overlay_transform * Affine::translate((center.x, center.y));
// Use actual fill color (same as final shape) // Use actual fill color (same as final shape)
let fill_color = Color::from_rgba8( let fill_color = Color::from_rgba8(
@ -1229,7 +1377,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
); );
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
fill_color, fill_color,
None, None,
&preview_path, &preview_path,
@ -1245,7 +1393,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(self.ctx.stroke_width), &Stroke::new(self.ctx.stroke_width),
camera_transform, overlay_transform,
stroke_color, stroke_color,
None, None,
&preview_path, &preview_path,
@ -1261,10 +1409,10 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let preview_path = rebuild_bezpath(&cache.editable_data); let preview_path = rebuild_bezpath(&cache.editable_data);
// Get the layer first, then the shape from the layer // Get the layer first, then the shape from the layer
if let Some(layer) = (*self.ctx.document).root.get_child(&cache.layer_id) { if let Some(layer) = (*self.ctx.document).get_layer(&cache.layer_id) {
if let lightningbeam_core::layer::AnyLayer::Vector(vector_layer) = layer { if let lightningbeam_core::layer::AnyLayer::Vector(vector_layer) = layer {
if let Some(shape) = vector_layer.get_shape_in_keyframe(&cache.shape_id, self.ctx.playback_time) { if let Some(shape) = vector_layer.get_shape_in_keyframe(&cache.shape_id, self.ctx.playback_time) {
let transform = camera_transform * cache.local_to_world; let transform = overlay_transform * cache.local_to_world;
// Render fill with FULL OPACITY (same as original) // Render fill with FULL OPACITY (same as original)
if let Some(fill_color) = &shape.fill_color { if let Some(fill_color) = &shape.fill_color {
@ -1389,7 +1537,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(stroke_width), &Stroke::new(stroke_width),
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&bbox_path, &bbox_path,
@ -1407,7 +1555,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill // Fill
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&handle_rect, &handle_rect,
@ -1416,7 +1564,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&handle_rect, &handle_rect,
@ -1437,7 +1585,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill // Fill
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&edge_circle, &edge_circle,
@ -1446,7 +1594,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&edge_circle, &edge_circle,
@ -1471,7 +1619,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill with different color (green) // Fill with different color (green)
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
Color::from_rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&rotation_circle, &rotation_circle,
@ -1480,7 +1628,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&rotation_circle, &rotation_circle,
@ -1496,7 +1644,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&line_path, &line_path,
@ -1526,7 +1674,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let handle_color = Color::from_rgb8(0, 120, 255); let handle_color = Color::from_rgb8(0, 120, 255);
let rotation_handle_offset = 20.0 / self.ctx.zoom.max(0.5) as f64; let rotation_handle_offset = 20.0 / self.ctx.zoom.max(0.5) as f64;
scene.stroke(&Stroke::new(stroke_width), camera_transform, handle_color, None, &bbox); scene.stroke(&Stroke::new(stroke_width), overlay_transform, handle_color, None, &bbox);
let corners = [ let corners = [
vello::kurbo::Point::new(bbox.x0, bbox.y0), vello::kurbo::Point::new(bbox.x0, bbox.y0),
@ -1540,8 +1688,8 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
corner.x - handle_size / 2.0, corner.y - handle_size / 2.0, corner.x - handle_size / 2.0, corner.y - handle_size / 2.0,
corner.x + handle_size / 2.0, corner.y + handle_size / 2.0, corner.x + handle_size / 2.0, corner.y + handle_size / 2.0,
); );
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &handle_rect); scene.fill(Fill::NonZero, overlay_transform, handle_color, None, &handle_rect);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &handle_rect); scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &handle_rect);
} }
let edges = [ let edges = [
@ -1553,14 +1701,14 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
for edge in &edges { for edge in &edges {
let edge_circle = Circle::new(*edge, handle_size / 2.0); let edge_circle = Circle::new(*edge, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &edge_circle); scene.fill(Fill::NonZero, overlay_transform, handle_color, None, &edge_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &edge_circle); scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &edge_circle);
} }
let rotation_handle_pos = vello::kurbo::Point::new(bbox.center().x, bbox.y0 - rotation_handle_offset); let rotation_handle_pos = vello::kurbo::Point::new(bbox.center().x, bbox.y0 - rotation_handle_offset);
let rotation_circle = Circle::new(rotation_handle_pos, handle_size / 2.0); let rotation_circle = Circle::new(rotation_handle_pos, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, Color::from_rgb8(50, 200, 50), None, &rotation_circle); scene.fill(Fill::NonZero, overlay_transform, Color::from_rgb8(50, 200, 50), None, &rotation_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &rotation_circle); scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(255, 255, 255), None, &rotation_circle);
let line_path = { let line_path = {
let mut path = vello::kurbo::BezPath::new(); let mut path = vello::kurbo::BezPath::new();
@ -1568,7 +1716,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
path.line_to(vello::kurbo::Point::new(bbox.center().x, bbox.y0)); path.line_to(vello::kurbo::Point::new(bbox.center().x, bbox.y0));
path path
}; };
scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(50, 200, 50), None, &line_path); scene.stroke(&Stroke::new(1.0), overlay_transform, Color::from_rgb8(50, 200, 50), None, &line_path);
} }
} }
} }
@ -1660,7 +1808,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(stroke_width), &Stroke::new(stroke_width),
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&bbox_path, &bbox_path,
@ -1678,7 +1826,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill // Fill
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&handle_rect, &handle_rect,
@ -1687,7 +1835,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&handle_rect, &handle_rect,
@ -1708,7 +1856,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill // Fill
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
handle_color, handle_color,
None, None,
&edge_circle, &edge_circle,
@ -1717,7 +1865,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&edge_circle, &edge_circle,
@ -1740,7 +1888,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Fill with different color (green) // Fill with different color (green)
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, overlay_transform,
Color::from_rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&rotation_circle, &rotation_circle,
@ -1749,7 +1897,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// White outline // White outline
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&rotation_circle, &rotation_circle,
@ -1765,7 +1913,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, overlay_transform,
Color::from_rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&line_path, &line_path,
@ -2075,6 +2223,50 @@ impl StagePane {
} }
} }
/// Convert a document-space position to clip-local coordinates when editing inside a clip.
/// Returns the position unchanged when at root level.
fn doc_to_clip_local(&self, doc_pos: egui::Vec2, shared: &SharedPaneState) -> egui::Vec2 {
if let (Some(parent_layer_id), Some(instance_id)) = (shared.editing_parent_layer_id, shared.editing_instance_id) {
let document = shared.action_executor.document();
let clip_affine = document.get_layer(&parent_layer_id)
.and_then(|layer| {
if let lightningbeam_core::layer::AnyLayer::Vector(vl) = layer {
vl.clip_instances.iter().find(|ci| ci.id == instance_id)
} else {
None
}
})
.map(|ci| ci.transform.to_affine())
.unwrap_or(vello::kurbo::Affine::IDENTITY);
let inv = clip_affine.inverse();
let p = inv * vello::kurbo::Point::new(doc_pos.x as f64, doc_pos.y as f64);
egui::vec2(p.x as f32, p.y as f32)
} else {
doc_pos
}
}
/// Convert a clip-local position back to document-space coordinates.
/// Returns the position unchanged when at root level.
fn clip_local_to_doc(&self, local_pos: vello::kurbo::Point, shared: &SharedPaneState) -> vello::kurbo::Point {
    // Only transform when both an editing parent layer and an instance are active.
    let (Some(parent_layer_id), Some(instance_id)) =
        (shared.editing_parent_layer_id, shared.editing_instance_id)
    else {
        return local_pos;
    };
    let document = shared.action_executor.document();
    // Look up the edited instance's transform; fall back to identity if it is gone.
    let instance_transform = document
        .get_layer(&parent_layer_id)
        .and_then(|layer| match layer {
            lightningbeam_core::layer::AnyLayer::Vector(vl) => {
                vl.clip_instances.iter().find(|ci| ci.id == instance_id)
            }
            _ => None,
        })
        .map_or(vello::kurbo::Affine::IDENTITY, |ci| ci.transform.to_affine());
    // Clip-local -> document space is the forward instance transform.
    instance_transform * local_pos
}
/// Execute a view action with the given parameters /// Execute a view action with the given parameters
/// Called from main.rs after determining this is the best handler /// Called from main.rs after determining this is the best handler
pub fn execute_view_action(&mut self, action: &crate::menu::MenuAction, zoom_center: egui::Vec2) { pub fn execute_view_action(&mut self, action: &crate::menu::MenuAction, zoom_center: egui::Vec2) {
@ -2185,6 +2377,41 @@ impl StagePane {
let point = Point::new(world_pos.x as f64, world_pos.y as f64); let point = Point::new(world_pos.x as f64, world_pos.y as f64);
// Double-click: enter/exit movie clip editing
if response.double_clicked() {
// Hit test clip instances at the click position
let document = shared.action_executor.document();
let clip_hit = hit_test::hit_test_clip_instances(
&vector_layer.clip_instances,
document,
point,
Affine::IDENTITY,
*shared.playback_time,
);
if let Some(instance_id) = clip_hit {
// Find the clip instance to get its clip_id
if let Some(clip_instance) = vector_layer.clip_instances.iter().find(|ci| ci.id == instance_id) {
// Check if this is a movie clip (not a group)
if let Some(vector_clip) = document.get_vector_clip(&clip_instance.clip_id) {
if !vector_clip.is_group {
// Enter the movie clip: defer the state change via pending_enter_clip
// (clip to edit, the instance double-clicked, and the layer owning it).
*shared.pending_enter_clip = Some((
clip_instance.clip_id,
instance_id,
active_layer_id,
));
return;
}
// Groups (is_group == true) fall through here: double-clicking a group
// neither enters nor exits a clip.
}
}
} else if shared.editing_clip_id.is_some() {
// Double-click on empty space while inside a clip: exit
// NOTE(review): this exit branch only fires when clip_hit is None, so
// double-clicking a *group* while inside a clip does nothing — confirm intended.
*shared.pending_exit_clip = true;
return;
}
}
// Mouse down: start interaction (check on initial press, not after drag starts) // Mouse down: start interaction (check on initial press, not after drag starts)
// Scope this section to drop vector_layer borrow before drag handling // Scope this section to drop vector_layer borrow before drag handling
let mouse_pressed = ui.input(|i| i.pointer.primary_pressed()); let mouse_pressed = ui.input(|i| i.pointer.primary_pressed());
@ -5419,7 +5646,8 @@ impl StagePane {
// Get last known mouse position (will be at edge if offscreen) // Get last known mouse position (will be at edge if offscreen)
if let Some(mouse_pos) = ui.input(|i| i.pointer.latest_pos()) { if let Some(mouse_pos) = ui.input(|i| i.pointer.latest_pos()) {
let mouse_canvas_pos = mouse_pos - rect.min; let mouse_canvas_pos = mouse_pos - rect.min;
let world_pos = (mouse_canvas_pos - self.pan_offset) / self.zoom; let world_pos_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_pos = self.doc_to_clip_local(world_pos_doc, shared);
let point = Point::new(world_pos.x as f64, world_pos.y as f64); let point = Point::new(world_pos.x as f64, world_pos.y as f64);
let delta = point - start_mouse; let delta = point - start_mouse;
@ -5548,7 +5776,9 @@ impl StagePane {
let mouse_canvas_pos = mouse_pos - rect.min; let mouse_canvas_pos = mouse_pos - rect.min;
// Convert screen position to world position (accounting for pan and zoom) // Convert screen position to world position (accounting for pan and zoom)
let world_pos = (mouse_canvas_pos - self.pan_offset) / self.zoom; // When inside a clip, further transform to clip-local coordinates
let world_pos_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_pos = self.doc_to_clip_local(world_pos_doc, shared);
// Handle tool input (only if not using Alt modifier for panning) // Handle tool input (only if not using Alt modifier for panning)
if !alt_held { if !alt_held {
@ -5678,18 +5908,22 @@ impl StagePane {
_ => return, _ => return,
}; };
// Get mouse position in world coordinates // Get mouse position in world coordinates (clip-local when inside a clip)
let mouse_screen_pos = ui.input(|i| i.pointer.hover_pos()).unwrap_or(rect.center()); let mouse_screen_pos = ui.input(|i| i.pointer.hover_pos()).unwrap_or(rect.center());
let mouse_canvas_pos = mouse_screen_pos - rect.min; let mouse_canvas_pos = mouse_screen_pos - rect.min;
let mouse_world_pos = Point::new( let mouse_doc_pos = egui::vec2(
((mouse_canvas_pos.x - self.pan_offset.x) / self.zoom) as f64, (mouse_canvas_pos.x - self.pan_offset.x) / self.zoom,
((mouse_canvas_pos.y - self.pan_offset.y) / self.zoom) as f64, (mouse_canvas_pos.y - self.pan_offset.y) / self.zoom,
); );
let mouse_local = self.doc_to_clip_local(mouse_doc_pos, shared);
let mouse_world_pos = Point::new(mouse_local.x as f64, mouse_local.y as f64);
// Helper to convert world coordinates to screen coordinates // Helper to convert world coordinates (clip-local) to screen coordinates
let world_to_screen = |world_pos: Point| -> egui::Pos2 { let world_to_screen = |world_pos: Point| -> egui::Pos2 {
let screen_x = (world_pos.x as f32 * self.zoom) + self.pan_offset.x + rect.min.x; // When inside a clip, first transform from clip-local to document space
let screen_y = (world_pos.y as f32 * self.zoom) + self.pan_offset.y + rect.min.y; let doc_pos = self.clip_local_to_doc(world_pos, shared);
let screen_x = (doc_pos.x as f32 * self.zoom) + self.pan_offset.x + rect.min.x;
let screen_y = (doc_pos.y as f32 * self.zoom) + self.pan_offset.y + rect.min.y;
egui::pos2(screen_x, screen_y) egui::pos2(screen_x, screen_y)
}; };
@ -6254,12 +6488,13 @@ impl PaneRenderer for StagePane {
} }
} }
// Calculate drag delta for preview rendering (world space) // Calculate drag delta for preview rendering (clip-local space)
let drag_delta = if let lightningbeam_core::tool::ToolState::DraggingSelection { ref start_mouse, .. } = shared.tool_state { let drag_delta = if let lightningbeam_core::tool::ToolState::DraggingSelection { ref start_mouse, .. } = shared.tool_state {
// Get current mouse position in world coordinates // Get current mouse position in clip-local coordinates (matching start_mouse)
if let Some(mouse_pos) = ui.input(|i| i.pointer.hover_pos()) { if let Some(mouse_pos) = ui.input(|i| i.pointer.hover_pos()) {
let mouse_canvas_pos = mouse_pos - rect.min; let mouse_canvas_pos = mouse_pos - rect.min;
let world_mouse = (mouse_canvas_pos - self.pan_offset) / self.zoom; let world_mouse_doc = (mouse_canvas_pos - self.pan_offset) / self.zoom;
let world_mouse = self.doc_to_clip_local(world_mouse_doc, shared);
let delta_x = world_mouse.x as f64 - start_mouse.x; let delta_x = world_mouse.x as f64 - start_mouse.x;
let delta_y = world_mouse.y as f64 - start_mouse.y; let delta_y = world_mouse.y as f64 - start_mouse.y;
@ -6294,6 +6529,9 @@ impl PaneRenderer for StagePane {
video_manager: shared.video_manager.clone(), video_manager: shared.video_manager.clone(),
shape_editing_cache: self.shape_editing_cache.clone(), shape_editing_cache: self.shape_editing_cache.clone(),
target_format: shared.target_format, target_format: shared.target_format,
editing_clip_id: shared.editing_clip_id,
editing_instance_id: shared.editing_instance_id,
editing_parent_layer_id: shared.editing_parent_layer_id,
}}; }};
let cb = egui_wgpu::Callback::new_paint_callback( let cb = egui_wgpu::Callback::new_paint_callback(
@ -6313,6 +6551,63 @@ impl PaneRenderer for StagePane {
egui::Color32::from_gray(200), egui::Color32::from_gray(200),
); );
// Render breadcrumb navigation when inside a movie clip
if shared.editing_clip_id.is_some() {
let document = shared.action_executor.document();
// Build breadcrumb names from the editing context
// We only have the current clip_id, so show "Scene 1 > ClipName"
let clip_name = shared.editing_clip_id
.and_then(|id| document.get_vector_clip(&id))
.map(|c| c.name.clone())
.unwrap_or_else(|| "Unknown".to_string());
// Breadcrumb anchored near the top-left corner of the stage rect.
let breadcrumb_y = rect.min.y + 30.0;
let breadcrumb_x = rect.min.x + 10.0;
// Background pill
let scene_text = "Scene 1";
let separator = " > ";
let full_text = format!("{}{}{}", scene_text, separator, clip_name);
let font = egui::FontId::proportional(13.0);
// Layout of the full string is only used to size the background pill.
// NOTE(review): full_text is not used after this call — the .clone() looks unnecessary.
let galley = ui.painter().layout_no_wrap(full_text.clone(), font.clone(), egui::Color32::WHITE);
let text_rect = egui::Rect::from_min_size(
egui::pos2(breadcrumb_x, breadcrumb_y),
// 16/8 px padding around the text for the pill background.
galley.size() + egui::vec2(16.0, 8.0),
);
ui.painter().rect_filled(
text_rect,
4.0,
egui::Color32::from_rgba_unmultiplied(0, 0, 0, 180),
);
// "Scene 1" as clickable (exit clip)
let scene_galley = ui.painter().layout_no_wrap(
scene_text.to_string(), font.clone(), egui::Color32::from_rgb(120, 180, 255),
);
let scene_rect = egui::Rect::from_min_size(
egui::pos2(breadcrumb_x + 8.0, breadcrumb_y + 4.0),
scene_galley.size(),
);
let scene_response = ui.allocate_rect(scene_rect, egui::Sense::click());
ui.painter().galley(scene_rect.min, scene_galley, egui::Color32::WHITE);
// Clicking "Scene 1" requests an exit back to the root timeline.
if scene_response.clicked() {
*shared.pending_exit_clip = true;
}
if scene_response.hovered() {
ui.ctx().set_cursor_icon(egui::CursorIcon::PointingHand);
}
// Separator + clip name (not clickable, it's the current level)
let rest_text = format!("{}{}", separator, clip_name);
ui.painter().text(
egui::pos2(scene_rect.max.x, breadcrumb_y + 4.0),
egui::Align2::LEFT_TOP,
rest_text,
font,
egui::Color32::WHITE,
);
}
// Render vector editing overlays (vertices, control points, etc.) // Render vector editing overlays (vertices, control points, etc.)
self.render_vector_editing_overlays(ui, rect, shared); self.render_vector_editing_overlays(ui, rect, shared);

View File

@ -54,7 +54,8 @@ fn effective_clip_duration(
let end = vl.group_visibility_end(&clip_instance.id, clip_instance.timeline_start, frame_duration); let end = vl.group_visibility_end(&clip_instance.id, clip_instance.timeline_start, frame_duration);
Some((end - clip_instance.timeline_start).max(0.0)) Some((end - clip_instance.timeline_start).max(0.0))
} else { } else {
Some(vc.duration) // Movie clips: duration based on internal keyframe content
Some(vc.content_duration(document.framerate))
} }
} }
AnyLayer::Audio(_) => document.get_audio_clip(&clip_instance.clip_id).map(|c| c.duration), AnyLayer::Audio(_) => document.get_audio_clip(&clip_instance.clip_id).map(|c| c.duration),
@ -130,13 +131,15 @@ fn find_sampled_audio_track_for_clip(
document: &lightningbeam_core::document::Document, document: &lightningbeam_core::document::Document,
clip_id: uuid::Uuid, clip_id: uuid::Uuid,
timeline_start: f64, timeline_start: f64,
editing_clip_id: Option<&uuid::Uuid>,
) -> Option<uuid::Uuid> { ) -> Option<uuid::Uuid> {
// Get the clip duration // Get the clip duration
let clip_duration = document.get_clip_duration(&clip_id)?; let clip_duration = document.get_clip_duration(&clip_id)?;
let clip_end = timeline_start + clip_duration; let clip_end = timeline_start + clip_duration;
// Check each sampled audio layer // Check each sampled audio layer
for layer in &document.root.children { let context_layers = document.context_layers(editing_clip_id);
for &layer in &context_layers {
if let AnyLayer::Audio(audio_layer) = layer { if let AnyLayer::Audio(audio_layer) = layer {
if audio_layer.audio_layer_type == AudioLayerType::Sampled { if audio_layer.audio_layer_type == AudioLayerType::Sampled {
// Check if there's any overlap with existing clips on this layer // Check if there's any overlap with existing clips on this layer
@ -213,7 +216,8 @@ impl TimelinePane {
// Get layer type (copy it so we can drop the document borrow before mutating) // Get layer type (copy it so we can drop the document borrow before mutating)
let layer_type = { let layer_type = {
let document = shared.action_executor.document(); let document = shared.action_executor.document();
let Some(layer) = document.root.children.iter().find(|l| l.id() == active_layer_id) else { let context_layers = document.context_layers(shared.editing_clip_id.as_ref());
let Some(layer) = context_layers.iter().copied().find(|l| l.id() == active_layer_id) else {
println!("⚠️ Active layer not found in document"); println!("⚠️ Active layer not found in document");
return; return;
}; };
@ -295,7 +299,8 @@ impl TimelinePane {
fn stop_recording(&mut self, shared: &mut SharedPaneState) { fn stop_recording(&mut self, shared: &mut SharedPaneState) {
// Determine if this is MIDI or audio recording by checking the layer type // Determine if this is MIDI or audio recording by checking the layer type
let is_midi_recording = if let Some(layer_id) = *shared.recording_layer_id { let is_midi_recording = if let Some(layer_id) = *shared.recording_layer_id {
shared.action_executor.document().root.children.iter() let context_layers = shared.action_executor.document().context_layers(shared.editing_clip_id.as_ref());
context_layers.iter().copied()
.find(|l| l.id() == layer_id) .find(|l| l.id() == layer_id)
.map(|layer| { .map(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer { if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
@ -337,8 +342,10 @@ impl TimelinePane {
document: &lightningbeam_core::document::Document, document: &lightningbeam_core::document::Document,
content_rect: egui::Rect, content_rect: egui::Rect,
header_rect: egui::Rect, header_rect: egui::Rect,
editing_clip_id: Option<&uuid::Uuid>,
) -> Option<(ClipDragType, uuid::Uuid)> { ) -> Option<(ClipDragType, uuid::Uuid)> {
let layer_count = document.root.children.len(); let context_layers = document.context_layers(editing_clip_id);
let layer_count = context_layers.len();
// Check if pointer is in valid area // Check if pointer is in valid area
if pointer_pos.y < header_rect.min.y { if pointer_pos.y < header_rect.min.y {
@ -355,8 +362,8 @@ impl TimelinePane {
return None; return None;
} }
let layers: Vec<_> = document.root.children.iter().rev().collect(); let rev_layers: Vec<&lightningbeam_core::layer::AnyLayer> = context_layers.iter().rev().copied().collect();
let layer = layers.get(hovered_layer_index)?; let layer = rev_layers.get(hovered_layer_index)?;
let _layer_data = layer.layer(); let _layer_data = layer.layer();
let clip_instances = match layer { let clip_instances = match layer {
@ -711,7 +718,8 @@ impl TimelinePane {
theme: &crate::theme::Theme, theme: &crate::theme::Theme,
active_layer_id: &Option<uuid::Uuid>, active_layer_id: &Option<uuid::Uuid>,
pending_actions: &mut Vec<Box<dyn lightningbeam_core::action::Action>>, pending_actions: &mut Vec<Box<dyn lightningbeam_core::action::Action>>,
document: &lightningbeam_core::document::Document, _document: &lightningbeam_core::document::Document,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
) { ) {
// Background for header column // Background for header column
let header_style = theme.style(".timeline-header", ui.ctx()); let header_style = theme.style(".timeline-header", ui.ctx());
@ -734,7 +742,8 @@ impl TimelinePane {
let secondary_text_color = egui::Color32::from_gray(150); let secondary_text_color = egui::Color32::from_gray(150);
// Draw layer headers from document (reversed so newest layers appear on top) // Draw layer headers from document (reversed so newest layers appear on top)
for (i, layer) in document.root.children.iter().rev().enumerate() { for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y; let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area // Skip if layer is outside visible area
@ -993,6 +1002,7 @@ impl TimelinePane {
waveform_gpu_dirty: &mut std::collections::HashSet<usize>, waveform_gpu_dirty: &mut std::collections::HashSet<usize>,
target_format: wgpu::TextureFormat, target_format: wgpu::TextureFormat,
waveform_stereo: bool, waveform_stereo: bool,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
) -> Vec<(egui::Rect, uuid::Uuid, f64, f64)> { ) -> Vec<(egui::Rect, uuid::Uuid, f64, f64)> {
let painter = ui.painter(); let painter = ui.painter();
@ -1014,7 +1024,8 @@ impl TimelinePane {
} }
// Draw layer rows from document (reversed so newest layers appear on top) // Draw layer rows from document (reversed so newest layers appear on top)
for (i, layer) in document.root.children.iter().rev().enumerate() { for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y; let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area // Skip if layer is outside visible area
@ -1719,6 +1730,8 @@ impl TimelinePane {
playback_time: &mut f64, playback_time: &mut f64,
_is_playing: &mut bool, _is_playing: &mut bool,
audio_controller: Option<&std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>, audio_controller: Option<&std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
context_layers: &[&lightningbeam_core::layer::AnyLayer],
editing_clip_id: Option<&uuid::Uuid>,
) { ) {
// Don't allocate the header area for input - let widgets handle it directly // Don't allocate the header area for input - let widgets handle it directly
// Only allocate content area (ruler + layers) with click and drag // Only allocate content area (ruler + layers) with click and drag
@ -1761,7 +1774,7 @@ impl TimelinePane {
let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize; let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order) // Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count { if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect(); let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) { if let Some(layer) = layers.get(clicked_layer_index) {
let _layer_data = layer.layer(); let _layer_data = layer.layer();
@ -1828,7 +1841,7 @@ impl TimelinePane {
// Get the layer at this index (accounting for reversed display order) // Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count { if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect(); let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) { if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id()); *active_layer_id = Some(layer.id());
} }
@ -1853,6 +1866,7 @@ impl TimelinePane {
document, document,
content_rect, content_rect,
header_rect, header_rect,
editing_clip_id,
) { ) {
// If this clip is not selected, select it (respecting shift key) // If this clip is not selected, select it (respecting shift key)
if !selection.contains_clip_instance(&clip_id) { if !selection.contains_clip_instance(&clip_id) {
@ -1886,7 +1900,7 @@ impl TimelinePane {
HashMap::new(); HashMap::new();
// Iterate through all layers to find selected clip instances // Iterate through all layers to find selected clip instances
for layer in &document.root.children { for &layer in context_layers {
let layer_id = layer.id(); let layer_id = layer.id();
// Get clip instances for this layer // Get clip instances for this layer
@ -1937,7 +1951,7 @@ impl TimelinePane {
> = HashMap::new(); > = HashMap::new();
// Iterate through all layers to find selected clip instances // Iterate through all layers to find selected clip instances
for layer in &document.root.children { for &layer in context_layers {
let layer_id = layer.id(); let layer_id = layer.id();
let _layer_data = layer.layer(); let _layer_data = layer.layer();
@ -2078,7 +2092,7 @@ impl TimelinePane {
ClipDragType::LoopExtendRight => { ClipDragType::LoopExtendRight => {
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new(); let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for layer in &document.root.children { for &layer in context_layers {
let layer_id = layer.id(); let layer_id = layer.id();
let clip_instances = match layer { let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
@ -2150,7 +2164,7 @@ impl TimelinePane {
// Extend loop_before (pre-loop region) // Extend loop_before (pre-loop region)
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new(); let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for layer in &document.root.children { for &layer in context_layers {
let layer_id = layer.id(); let layer_id = layer.id();
let clip_instances = match layer { let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
@ -2242,7 +2256,7 @@ impl TimelinePane {
// Get the layer at this index (accounting for reversed display order) // Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count { if clicked_layer_index < layer_count {
let layers: Vec<_> = document.root.children.iter().rev().collect(); let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) { if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id()); *active_layer_id = Some(layer.id());
// Clear clip instance selection when clicking on empty layer area // Clear clip instance selection when clicking on empty layer area
@ -2387,6 +2401,7 @@ impl TimelinePane {
document, document,
content_rect, content_rect,
header_rect, header_rect,
editing_clip_id,
) { ) {
match drag_type { match drag_type {
ClipDragType::TrimLeft | ClipDragType::TrimRight => { ClipDragType::TrimLeft | ClipDragType::TrimRight => {
@ -2535,11 +2550,13 @@ impl PaneRenderer for TimelinePane {
// Get document from action executor // Get document from action executor
let document = shared.action_executor.document(); let document = shared.action_executor.document();
let layer_count = document.root.children.len(); let editing_clip_id = shared.editing_clip_id;
let context_layers = document.context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
// Calculate project duration from last clip endpoint across all layers // Calculate project duration from last clip endpoint across all layers
let mut max_endpoint: f64 = 10.0; // Default minimum duration let mut max_endpoint: f64 = 10.0; // Default minimum duration
for layer in &document.root.children { for &layer in &context_layers {
let clip_instances = match layer { let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
@ -2606,7 +2623,7 @@ impl PaneRenderer for TimelinePane {
// Render layer header column with clipping // Render layer header column with clipping
ui.set_clip_rect(layer_headers_rect.intersect(original_clip_rect)); ui.set_clip_rect(layer_headers_rect.intersect(original_clip_rect));
self.render_layer_headers(ui, layer_headers_rect, shared.theme, shared.active_layer_id, &mut shared.pending_actions, document); self.render_layer_headers(ui, layer_headers_rect, shared.theme, shared.active_layer_id, &mut shared.pending_actions, document, &context_layers);
// Render time ruler (clip to ruler rect) // Render time ruler (clip to ruler rect)
ui.set_clip_rect(ruler_rect.intersect(original_clip_rect)); ui.set_clip_rect(ruler_rect.intersect(original_clip_rect));
@ -2614,7 +2631,7 @@ impl PaneRenderer for TimelinePane {
// Render layer rows with clipping // Render layer rows with clipping
ui.set_clip_rect(content_rect.intersect(original_clip_rect)); ui.set_clip_rect(content_rect.intersect(original_clip_rect));
let video_clip_hovers = self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.raw_audio_cache, shared.waveform_gpu_dirty, shared.target_format, shared.waveform_stereo); let video_clip_hovers = self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.raw_audio_cache, shared.waveform_gpu_dirty, shared.target_format, shared.waveform_stereo, &context_layers);
// Render playhead on top (clip to timeline area) // Render playhead on top (clip to timeline area)
ui.set_clip_rect(timeline_rect.intersect(original_clip_rect)); ui.set_clip_rect(timeline_rect.intersect(original_clip_rect));
@ -2638,6 +2655,8 @@ impl PaneRenderer for TimelinePane {
shared.playback_time, shared.playback_time,
shared.is_playing, shared.is_playing,
shared.audio_controller, shared.audio_controller,
&context_layers,
editing_clip_id.as_ref(),
); );
// Context menu: detect right-click on clips or empty timeline space // Context menu: detect right-click on clips or empty timeline space
@ -2646,7 +2665,7 @@ impl PaneRenderer for TimelinePane {
if secondary_clicked { if secondary_clicked {
if let Some(pos) = ui.input(|i| i.pointer.interact_pos()) { if let Some(pos) = ui.input(|i| i.pointer.interact_pos()) {
if content_rect.contains(pos) { if content_rect.contains(pos) {
if let Some((_drag_type, clip_id)) = self.detect_clip_at_pointer(pos, document, content_rect, layer_headers_rect) { if let Some((_drag_type, clip_id)) = self.detect_clip_at_pointer(pos, document, content_rect, layer_headers_rect, editing_clip_id.as_ref()) {
// Right-clicked on a clip // Right-clicked on a clip
if !shared.selection.contains_clip_instance(&clip_id) { if !shared.selection.contains_clip_instance(&clip_id) {
shared.selection.select_only_clip_instance(clip_id); shared.selection.select_only_clip_instance(clip_id);
@ -2934,7 +2953,7 @@ impl PaneRenderer for TimelinePane {
let hovered_layer_index = (relative_y / LAYER_HEIGHT) as usize; let hovered_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order) // Get the layer at this index (accounting for reversed display order)
let layers: Vec<_> = document.root.children.iter().rev().collect(); let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(hovered_layer_index) { if let Some(layer) = layers.get(hovered_layer_index) {
let is_compatible = can_drop_on_layer(layer, dragging.clip_type); let is_compatible = can_drop_on_layer(layer, dragging.clip_type);
@ -3077,7 +3096,7 @@ impl PaneRenderer for TimelinePane {
// Find or create sampled audio track where the audio won't overlap // Find or create sampled audio track where the audio won't overlap
let audio_layer_id = { let audio_layer_id = {
let doc = shared.action_executor.document(); let doc = shared.action_executor.document();
let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time); let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time, editing_clip_id.as_ref());
if let Some(id) = result { if let Some(id) = result {
eprintln!("DEBUG: Found existing audio track without overlap: {}", id); eprintln!("DEBUG: Found existing audio track without overlap: {}", id);
} else { } else {

View File

@ -51,17 +51,6 @@ fn parse_note_letter(s: &str) -> Option<(u8, usize)> {
} }
} }
/// Convert a note name like "C4", "A#3", "Bb2" to a MIDI note number.
///
/// Returns `None` when the note letter cannot be parsed or the resulting
/// value falls outside the valid MIDI range 0..=127.
pub fn note_name_to_midi(note: &str, octave: i8) -> Option<u8> {
    let (semitone, _width) = parse_note_letter(note)?;
    // MIDI convention: C-1 is note 0, so octave is offset by one.
    let midi_number = 12 * (i32::from(octave) + 1) + i32::from(semitone);
    // try_from rejects negatives; the filter rejects 128..=255.
    u8::try_from(midi_number).ok().filter(|&m| m <= 127)
}
/// Format a MIDI note number as a note name (e.g., 60 → "C4"). /// Format a MIDI note number as a note name (e.g., 60 → "C4").
pub fn midi_to_note_name(midi: u8) -> String { pub fn midi_to_note_name(midi: u8) -> String {
const NAMES: [&str; 12] = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"]; const NAMES: [&str; 12] = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"];
@ -214,8 +203,7 @@ fn tokenize(stem: &str) -> Vec<&str> {
} }
/// Parse a sample filename to extract note, velocity, round-robin, and loop hint info. /// Parse a sample filename to extract note, velocity, round-robin, and loop hint info.
/// `folder_path` is used for loop/articulation context from parent directory names. pub fn parse_sample_filename(path: &Path) -> ParsedSample {
pub fn parse_sample_filename(path: &Path, folder_path: &Path) -> ParsedSample {
let filename = path.file_name() let filename = path.file_name()
.map(|n| n.to_string_lossy().to_string()) .map(|n| n.to_string_lossy().to_string())
.unwrap_or_default(); .unwrap_or_default();
@ -390,7 +378,7 @@ pub fn scan_folder(folder_path: &Path) -> std::io::Result<Vec<ParsedSample>> {
collect_audio_files(folder_path, &mut files)?; collect_audio_files(folder_path, &mut files)?;
let mut samples: Vec<ParsedSample> = files.iter() let mut samples: Vec<ParsedSample> = files.iter()
.map(|path| parse_sample_filename(path, folder_path)) .map(|path| parse_sample_filename(path))
.collect(); .collect();
// Percussion pass: for samples with no detected note, try GM drum mapping // Percussion pass: for samples with no detected note, try GM drum mapping
@ -467,7 +455,6 @@ pub struct FolderScanResult {
pub loop_mode: LoopMode, pub loop_mode: LoopMode,
pub velocity_markers: Vec<String>, pub velocity_markers: Vec<String>,
pub velocity_ranges: Vec<(String, u8, u8)>, pub velocity_ranges: Vec<(String, u8, u8)>,
pub detected_articulation: Option<String>,
} }
/// Compute auto key ranges for a sorted list of unique MIDI notes. /// Compute auto key ranges for a sorted list of unique MIDI notes.
@ -533,28 +520,9 @@ fn detect_global_loop_mode(samples: &[ParsedSample]) -> LoopMode {
} }
} }
/// Detect articulation from folder path.
///
/// Walks path components from deepest to shallowest and returns the first
/// one whose lowercased name matches a known articulation keyword, preserving
/// the component's original casing. Returns `None` when nothing matches.
fn detect_articulation(folder_path: &Path) -> Option<String> {
    // Known articulation folder names, compared case-insensitively.
    const KNOWN: [&str; 9] = [
        "sustain", "vibrato", "tremolo", "pizzicato", "staccato",
        "legato", "marcato", "spiccato", "arco",
    ];
    folder_path.components().rev().find_map(|component| {
        let std::path::Component::Normal(name) = component else {
            return None;
        };
        let text = name.to_string_lossy();
        let lowered = text.to_lowercase();
        // Return the component with its original casing intact.
        KNOWN.contains(&lowered.as_str()).then(|| text.into_owned())
    })
}
/// Build import layers from parsed samples with auto key ranges and velocity mapping. /// Build import layers from parsed samples with auto key ranges and velocity mapping.
pub fn build_import_layers(samples: Vec<ParsedSample>, folder_path: &Path) -> FolderScanResult { pub fn build_import_layers(samples: Vec<ParsedSample>) -> FolderScanResult {
let loop_mode = detect_global_loop_mode(&samples); let loop_mode = detect_global_loop_mode(&samples);
let detected_articulation = detect_articulation(folder_path);
// Separate mapped vs unmapped // Separate mapped vs unmapped
let mut mapped: Vec<ParsedSample> = Vec::new(); let mut mapped: Vec<ParsedSample> = Vec::new();
let mut unmapped: Vec<ParsedSample> = Vec::new(); let mut unmapped: Vec<ParsedSample> = Vec::new();
@ -623,7 +591,6 @@ pub fn build_import_layers(samples: Vec<ParsedSample>, folder_path: &Path) -> Fo
loop_mode, loop_mode,
velocity_markers, velocity_markers,
velocity_ranges, velocity_ranges,
detected_articulation,
} }
} }
@ -662,13 +629,13 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_note_name_to_midi() { fn test_try_note_octave() {
assert_eq!(note_name_to_midi("C", 4), Some(60)); assert_eq!(try_note_octave("C4"), Some(60));
assert_eq!(note_name_to_midi("A", 4), Some(69)); assert_eq!(try_note_octave("A4"), Some(69));
assert_eq!(note_name_to_midi("A#", 3), Some(58)); assert_eq!(try_note_octave("A#3"), Some(58));
assert_eq!(note_name_to_midi("Bb", 2), Some(46)); assert_eq!(try_note_octave("Bb2"), Some(46));
assert_eq!(note_name_to_midi("C", -1), Some(0)); assert_eq!(try_note_octave("C-1"), Some(0));
assert_eq!(note_name_to_midi("G", 9), Some(127)); assert_eq!(try_note_octave("G9"), Some(127));
} }
#[test] #[test]
@ -676,7 +643,6 @@ mod tests {
// Horns: horns-sus-ff-a#2-PB-loop.wav // Horns: horns-sus-ff-a#2-PB-loop.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/horns-sus-ff-a#2-PB-loop.wav"), Path::new("/samples/horns-sus-ff-a#2-PB-loop.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(46)); // A#2 assert_eq!(p.detected_note, Some(46)); // A#2
assert_eq!(p.velocity_marker, Some("ff".to_string())); assert_eq!(p.velocity_marker, Some("ff".to_string()));
@ -685,7 +651,6 @@ mod tests {
// Philharmonia: viola_A#3-staccato-rr1-PB.wav // Philharmonia: viola_A#3-staccato-rr1-PB.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"), Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(58)); // A#3 assert_eq!(p.detected_note, Some(58)); // A#3
assert_eq!(p.rr_index, Some(1)); assert_eq!(p.rr_index, Some(1));
@ -694,7 +659,6 @@ mod tests {
// Bare note: A1.mp3 // Bare note: A1.mp3
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/A1.mp3"), Path::new("/samples/A1.mp3"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(33)); // A1 assert_eq!(p.detected_note, Some(33)); // A1
} }
@ -704,21 +668,18 @@ mod tests {
// NoBudgetOrch: 2_A-PB.wav // NoBudgetOrch: 2_A-PB.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/2_A-PB.wav"), Path::new("/samples/2_A-PB.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(45)); // A2 assert_eq!(p.detected_note, Some(45)); // A2
// 3_Gb-PB.wav // 3_Gb-PB.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/3_Gb-PB.wav"), Path::new("/samples/3_Gb-PB.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(54)); // Gb3 assert_eq!(p.detected_note, Some(54)); // Gb3
// 1_Bb.wav // 1_Bb.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/1_Bb.wav"), Path::new("/samples/1_Bb.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(34)); // Bb1 assert_eq!(p.detected_note, Some(34)); // Bb1
} }
@ -728,7 +689,6 @@ mod tests {
// NoBudgetOrch TubularBells: 3_A_f.wav // NoBudgetOrch TubularBells: 3_A_f.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/3_A_f.wav"), Path::new("/samples/3_A_f.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(57)); // A3 assert_eq!(p.detected_note, Some(57)); // A3
assert_eq!(p.velocity_marker, Some("f".to_string())); assert_eq!(p.velocity_marker, Some("f".to_string()));
@ -736,7 +696,6 @@ mod tests {
// 3_C_p.wav // 3_C_p.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/3_C_p.wav"), Path::new("/samples/3_C_p.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(48)); // C3 assert_eq!(p.detected_note, Some(48)); // C3
assert_eq!(p.velocity_marker, Some("p".to_string())); assert_eq!(p.velocity_marker, Some("p".to_string()));
@ -747,7 +706,6 @@ mod tests {
// NoBudgetOrch: 5_C_2-PB.wav → C5, rr2 // NoBudgetOrch: 5_C_2-PB.wav → C5, rr2
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/5_C_2-PB.wav"), Path::new("/samples/5_C_2-PB.wav"),
Path::new("/samples"),
); );
assert_eq!(p.detected_note, Some(72)); // C5 assert_eq!(p.detected_note, Some(72)); // C5
assert_eq!(p.rr_index, Some(2)); assert_eq!(p.rr_index, Some(2));
@ -755,7 +713,6 @@ mod tests {
// rr marker: viola_A#3-staccato-rr1-PB.wav // rr marker: viola_A#3-staccato-rr1-PB.wav
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"), Path::new("/samples/viola_A#3-staccato-rr1-PB.wav"),
Path::new("/samples"),
); );
assert_eq!(p.rr_index, Some(1)); assert_eq!(p.rr_index, Some(1));
} }
@ -764,13 +721,11 @@ mod tests {
fn test_loop_hints_from_folder() { fn test_loop_hints_from_folder() {
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/libs/Cello/Sustain/2_A.wav"), Path::new("/libs/Cello/Sustain/2_A.wav"),
Path::new("/libs/Cello/Sustain"),
); );
assert_eq!(p.loop_hint, LoopHint::Loop); assert_eq!(p.loop_hint, LoopHint::Loop);
let p = parse_sample_filename( let p = parse_sample_filename(
Path::new("/libs/Cello/Pizzicato/2_A-PB.wav"), Path::new("/libs/Cello/Pizzicato/2_A-PB.wav"),
Path::new("/libs/Cello/Pizzicato"),
); );
assert_eq!(p.loop_hint, LoopHint::OneShot); assert_eq!(p.loop_hint, LoopHint::OneShot);
} }

View File

@ -8,7 +8,7 @@ use egui_node_graph2::NodeId;
use std::path::PathBuf; use std::path::PathBuf;
use crate::sample_import::{ use crate::sample_import::{
FolderScanResult, ImportLayer, midi_to_note_name, recalc_key_ranges, FolderScanResult, midi_to_note_name, recalc_key_ranges,
}; };
use daw_backend::audio::node_graph::nodes::LoopMode; use daw_backend::audio::node_graph::nodes::LoopMode;
@ -234,9 +234,4 @@ impl SampleImportDialog {
!self.should_close !self.should_close
} }
/// Get the enabled layers for import.
pub fn enabled_layers(&self) -> Vec<&ImportLayer> {
self.scan_result.layers.iter().filter(|l| l.enabled).collect()
}
} }