use group layers instead of linked tracks

This commit is contained in:
Skyler Lehmkuhl 2026-03-01 09:00:55 -05:00
parent 520776c6e5
commit b87e4325c2
18 changed files with 1040 additions and 164 deletions

View File

@ -101,6 +101,9 @@ impl Action for AddClipInstanceAction {
AnyLayer::Effect(_) => {
return Err("Cannot add clip instances to effect layers".to_string());
}
AnyLayer::Group(_) => {
return Err("Cannot add clip instances directly to group layers".to_string());
}
}
self.executed = true;
@ -136,6 +139,9 @@ impl Action for AddClipInstanceAction {
AnyLayer::Effect(_) => {
// Effect layers don't have clip instances, nothing to rollback
}
AnyLayer::Group(_) => {
// Group layers don't have clip instances, nothing to rollback
}
}
self.executed = false;

View File

@ -15,6 +15,9 @@ pub struct AddLayerAction {
/// If Some, add to this VectorClip's layers instead of root
target_clip_id: Option<Uuid>,
/// If Some, add as a child of this GroupLayer instead of root
target_group_id: Option<Uuid>,
/// ID of the created layer (set after execution)
created_layer_id: Option<Uuid>,
}
@ -30,6 +33,7 @@ impl AddLayerAction {
Self {
layer: AnyLayer::Vector(layer),
target_clip_id: None,
target_group_id: None,
created_layer_id: None,
}
}
@ -43,6 +47,7 @@ impl AddLayerAction {
Self {
layer,
target_clip_id: None,
target_group_id: None,
created_layer_id: None,
}
}
@ -53,6 +58,12 @@ impl AddLayerAction {
self
}
/// Set the target group for this action (add layer inside a group layer)
pub fn with_target_group(mut self, group_id: Uuid) -> Self {
self.target_group_id = Some(group_id);
self
}
/// Get the ID of the created layer (after execution)
pub fn created_layer_id(&self) -> Option<Uuid> {
self.created_layer_id
@ -61,7 +72,18 @@ impl AddLayerAction {
impl Action for AddLayerAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
let layer_id = if let Some(clip_id) = self.target_clip_id {
let layer_id = if let Some(group_id) = self.target_group_id {
// Add layer inside a group layer
let id = self.layer.id();
if let Some(AnyLayer::Group(g)) = document.root.children.iter_mut()
.find(|l| l.id() == group_id)
{
g.add_child(self.layer.clone());
} else {
return Err(format!("Target group {} not found", group_id));
}
id
} else if let Some(clip_id) = self.target_clip_id {
// Add layer inside a vector clip (movie clip)
let clip = document.vector_clips.get_mut(&clip_id)
.ok_or_else(|| format!("Target clip {} not found", clip_id))?;
@ -84,7 +106,14 @@ impl Action for AddLayerAction {
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
// Remove the created layer if it exists
if let Some(layer_id) = self.created_layer_id {
if let Some(clip_id) = self.target_clip_id {
if let Some(group_id) = self.target_group_id {
// Remove from group layer
if let Some(AnyLayer::Group(g)) = document.root.children.iter_mut()
.find(|l| l.id() == group_id)
{
g.children.retain(|l| l.id() != layer_id);
}
} else if let Some(clip_id) = self.target_clip_id {
// Remove from vector clip
if let Some(clip) = document.vector_clips.get_mut(&clip_id) {
clip.layers.roots.retain(|node| node.data.id() != layer_id);
@ -107,6 +136,7 @@ impl Action for AddLayerAction {
AnyLayer::Audio(_) => "Add audio layer",
AnyLayer::Video(_) => "Add video layer",
AnyLayer::Effect(_) => "Add effect layer",
AnyLayer::Group(_) => "Add group layer",
}
.to_string()
}

View File

@ -35,6 +35,7 @@ impl Action for LoopClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
for (instance_id, _old_dur, new_dur, _old_lb, new_lb) in loops {
@ -57,6 +58,7 @@ impl Action for LoopClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
for (instance_id, old_dur, _new_dur, old_lb, _new_lb) in loops {

View File

@ -29,6 +29,7 @@ pub mod set_keyframe;
pub mod group_shapes;
pub mod convert_to_movie_clip;
pub mod region_split;
pub mod toggle_group_expansion;
pub use add_clip_instance::AddClipInstanceAction;
pub use add_effect::AddEffectAction;
@ -56,3 +57,4 @@ pub use set_keyframe::SetKeyframeAction;
pub use group_shapes::GroupAction;
pub use convert_to_movie_clip::ConvertToMovieClipAction;
pub use region_split::RegionSplitAction;
pub use toggle_group_expansion::ToggleGroupExpansionAction;

View File

@ -56,6 +56,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -93,6 +94,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
let group: Vec<(Uuid, f64, f64)> = moves.iter().filter_map(|(id, old_start, _)| {
@ -126,6 +128,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
// Update timeline_start for each clip instance
@ -151,6 +154,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
// Restore original timeline_start for each clip instance

View File

@ -44,6 +44,7 @@ impl Action for RemoveClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
// Find and remove the instance, saving it for rollback
@ -68,6 +69,7 @@ impl Action for RemoveClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
clip_instances.push(instance);

View File

@ -112,6 +112,7 @@ impl Action for SplitClipInstanceAction {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => return Err("Cannot split clip instances on group layers".to_string()),
};
let instance = clip_instances
@ -228,6 +229,9 @@ impl Action for SplitClipInstanceAction {
}
el.clip_instances.push(right_instance);
}
AnyLayer::Group(_) => {
return Err("Cannot split clip instances on group layers".to_string());
}
}
self.executed = true;
@ -283,6 +287,9 @@ impl Action for SplitClipInstanceAction {
inst.timeline_duration = self.original_timeline_duration;
}
}
AnyLayer::Group(_) => {
// Group layers don't have clip instances, nothing to rollback
}
}
self.executed = false;

View File

@ -0,0 +1,62 @@
//! Toggle group layer expansion state (collapsed/expanded in timeline)
use crate::action::Action;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Action that toggles a group layer's expanded/collapsed state
pub struct ToggleGroupExpansionAction {
    /// ID of the target group layer (looked up among the document root's children)
    group_id: Uuid,
    /// Expansion state to apply when the action executes
    new_expanded: bool,
    /// State observed before execution, restored by rollback; `None` until executed
    old_expanded: Option<bool>,
}
impl ToggleGroupExpansionAction {
    /// Create an action that will set the group's expanded state to `expanded`.
    /// The previous state is captured at execution time, not here.
    pub fn new(group_id: Uuid, expanded: bool) -> Self {
        Self {
            group_id,
            new_expanded: expanded,
            old_expanded: None,
        }
    }
}
impl Action for ToggleGroupExpansionAction {
    /// Apply the requested expansion state, remembering the previous one for undo.
    fn execute(&mut self, document: &mut Document) -> Result<(), String> {
        // Locate the target among the root's direct children; the id must
        // belong to a group layer for the toggle to succeed.
        let target = document
            .root
            .children
            .iter_mut()
            .find(|layer| layer.id() == self.group_id);
        match target {
            Some(AnyLayer::Group(group)) => {
                self.old_expanded = Some(group.expanded);
                group.expanded = self.new_expanded;
                Ok(())
            }
            _ => Err(format!("Group layer {} not found", self.group_id)),
        }
    }

    /// Restore the expansion state captured by `execute`; a no-op if the
    /// action never ran or the group has since disappeared.
    fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
        let Some(previous) = self.old_expanded else {
            return Ok(());
        };
        if let Some(AnyLayer::Group(group)) = document
            .root
            .children
            .iter_mut()
            .find(|layer| layer.id() == self.group_id)
        {
            group.expanded = previous;
        }
        Ok(())
    }

    fn description(&self) -> String {
        let label = if self.new_expanded { "Expand group" } else { "Collapse group" };
        label.to_string()
    }
}

View File

@ -99,6 +99,7 @@ impl Action for TransformClipInstancesAction {
}
}
AnyLayer::Effect(_) => {}
AnyLayer::Group(_) => {}
}
Ok(())
}
@ -136,6 +137,7 @@ impl Action for TransformClipInstancesAction {
}
}
AnyLayer::Effect(_) => {}
AnyLayer::Group(_) => {}
}
Ok(())
}

View File

@ -99,6 +99,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -134,6 +135,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -176,6 +178,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
let instance = clip_instances.iter()
@ -267,6 +270,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
// Apply trims
@ -305,6 +309,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue,
};
// Restore original trim values

View File

@ -115,6 +115,7 @@ impl VectorClip {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
for ci in clip_instances {
let end = if let Some(td) = ci.timeline_duration {

View File

@ -31,6 +31,7 @@ impl ClipboardLayerType {
AudioLayerType::Midi => ClipboardLayerType::AudioMidi,
},
AnyLayer::Effect(_) => ClipboardLayerType::Effect,
AnyLayer::Group(_) => ClipboardLayerType::Vector, // Groups don't have a direct clipboard type; treat as vector
}
}

View File

@ -44,14 +44,62 @@ impl GraphicsObject {
id
}
/// Get a child layer by ID (searches direct children and recurses into groups).
///
/// Returns `None` if no layer with `id` exists anywhere in the tree.
pub fn get_child(&self, id: &Uuid) -> Option<&AnyLayer> {
    // Delegate to the shared recursive search so the traversal logic lives in
    // exactly one place (`find_in_group` performs the same id check and
    // group descent this method previously duplicated inline).
    Self::find_in_group(&self.children, id)
}
/// Get a mutable child layer by ID (searches direct children and recurses into groups).
///
/// Returns `None` if no layer with `id` exists anywhere in the tree.
pub fn get_child_mut(&mut self, id: &Uuid) -> Option<&mut AnyLayer> {
    // Delegate to the shared recursive search; duplicating the traversal
    // here risks the two copies drifting apart.
    Self::find_in_group_mut(&mut self.children, id)
}
/// Depth-first search for the layer with `id` within `children`,
/// descending into any nested group layers.
fn find_in_group<'a>(children: &'a [AnyLayer], id: &Uuid) -> Option<&'a AnyLayer> {
    children.iter().find_map(|layer| {
        if &layer.id() == id {
            Some(layer)
        } else if let AnyLayer::Group(group) = layer {
            // Recurse one level down; `find_map` short-circuits on the
            // first match, preserving left-to-right search order.
            Self::find_in_group(&group.children, id)
        } else {
            None
        }
    })
}
/// Mutable counterpart of `find_in_group`: depth-first search for the layer
/// with `id`, descending into nested group layers.
fn find_in_group_mut<'a>(children: &'a mut [AnyLayer], id: &Uuid) -> Option<&'a mut AnyLayer> {
    children.iter_mut().find_map(|layer| {
        if &layer.id() == id {
            Some(layer)
        } else if let AnyLayer::Group(group) = layer {
            // Recurse one level down; short-circuits on the first match.
            Self::find_in_group_mut(&mut group.children, id)
        } else {
            None
        }
    })
}
/// Remove a child layer by ID
@ -371,6 +419,52 @@ impl Document {
}
}
}
crate::layer::AnyLayer::Group(group) => {
// Recurse into group children to find their clip instance endpoints.
//
// Walks `children` depth-first. For every clip instance on a child layer it
// resolves the referenced clip's duration from the matching document map
// (vector/audio/video clips, or the generic lookup for effects) and raises
// `max_end` to the end time computed by `calc_end(instance, duration)`.
// Instances whose clip id is not found in the document are skipped silently.
fn process_group_children(
    children: &[crate::layer::AnyLayer],
    doc: &Document,
    max_end: &mut f64,
    calc_end: &dyn Fn(&ClipInstance, f64) -> f64,
) {
    for child in children {
        match child {
            crate::layer::AnyLayer::Vector(vl) => {
                for inst in &vl.clip_instances {
                    if let Some(clip) = doc.vector_clips.get(&inst.clip_id) {
                        *max_end = max_end.max(calc_end(inst, clip.duration));
                    }
                }
            }
            crate::layer::AnyLayer::Audio(al) => {
                for inst in &al.clip_instances {
                    if let Some(clip) = doc.audio_clips.get(&inst.clip_id) {
                        *max_end = max_end.max(calc_end(inst, clip.duration));
                    }
                }
            }
            crate::layer::AnyLayer::Video(vl) => {
                for inst in &vl.clip_instances {
                    if let Some(clip) = doc.video_clips.get(&inst.clip_id) {
                        *max_end = max_end.max(calc_end(inst, clip.duration));
                    }
                }
            }
            crate::layer::AnyLayer::Effect(el) => {
                for inst in &el.clip_instances {
                    // Effect instances may reference any clip kind, so use the
                    // document's generic duration lookup.
                    if let Some(dur) = doc.get_clip_duration(&inst.clip_id) {
                        *max_end = max_end.max(calc_end(inst, dur));
                    }
                }
            }
            crate::layer::AnyLayer::Group(g) => {
                // Nested group: recurse into its children.
                process_group_children(&g.children, doc, max_end, calc_end);
            }
        }
    }
}
process_group_children(&group.children, self, &mut max_end_time, &calculate_instance_end);
}
}
}
@ -489,7 +583,16 @@ impl Document {
/// Get all layers across the entire document (root + inside all vector clips).
pub fn all_layers(&self) -> Vec<&AnyLayer> {
let mut layers: Vec<&AnyLayer> = self.root.children.iter().collect();
let mut layers: Vec<&AnyLayer> = Vec::new();
fn collect_layers<'a>(list: &'a [AnyLayer], out: &mut Vec<&'a AnyLayer>) {
for layer in list {
out.push(layer);
if let AnyLayer::Group(g) = layer {
collect_layers(&g.children, out);
}
}
}
collect_layers(&self.root.children, &mut layers);
for clip in self.vector_clips.values() {
layers.extend(clip.layers.root_data());
}
@ -718,6 +821,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Group(_) => &[],
};
let instance = instances.iter().find(|inst| &inst.id == instance_id)?;
@ -756,6 +860,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Group(_) => &[],
};
for instance in instances {
@ -799,7 +904,7 @@ impl Document {
let desired_start = desired_start.max(0.0);
// Vector layers don't need overlap adjustment, but still respect timeline start
if matches!(layer, AnyLayer::Vector(_)) {
if matches!(layer, AnyLayer::Vector(_) | AnyLayer::Group(_)) {
return Some(desired_start);
}
@ -816,6 +921,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(_) => return Some(desired_start), // Shouldn't reach here
AnyLayer::Group(_) => return Some(desired_start), // Groups don't have own clips
};
let mut occupied_ranges: Vec<(f64, f64, Uuid)> = Vec::new();
@ -898,7 +1004,7 @@ impl Document {
let Some(layer) = self.get_layer(layer_id) else {
return desired_offset;
};
if matches!(layer, AnyLayer::Vector(_)) {
if matches!(layer, AnyLayer::Vector(_) | AnyLayer::Group(_)) {
return desired_offset;
}
@ -909,6 +1015,7 @@ impl Document {
AnyLayer::Video(v) => &v.clip_instances,
AnyLayer::Effect(e) => &e.clip_instances,
AnyLayer::Vector(v) => &v.clip_instances,
AnyLayer::Group(_) => &[],
};
// Collect non-group clip ranges
@ -966,8 +1073,8 @@ impl Document {
};
// Only check audio, video, and effect layers
if matches!(layer, AnyLayer::Vector(_)) {
return current_timeline_start; // No limit for vector layers
if matches!(layer, AnyLayer::Vector(_) | AnyLayer::Group(_)) {
return current_timeline_start; // No limit for vector/group layers
};
// Find the nearest clip to the left
@ -978,6 +1085,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[],
};
for other in instances {
@ -1015,8 +1123,8 @@ impl Document {
};
// Only check audio, video, and effect layers
if matches!(layer, AnyLayer::Vector(_)) {
return f64::MAX; // No limit for vector layers
if matches!(layer, AnyLayer::Vector(_) | AnyLayer::Group(_)) {
return f64::MAX; // No limit for vector/group layers
}
let instances: &[ClipInstance] = match layer {
@ -1024,6 +1132,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[],
};
let mut nearest_start = f64::MAX;
@ -1060,7 +1169,7 @@ impl Document {
return current_effective_start;
};
if matches!(layer, AnyLayer::Vector(_)) {
if matches!(layer, AnyLayer::Vector(_) | AnyLayer::Group(_)) {
return current_effective_start;
}
@ -1069,6 +1178,7 @@ impl Document {
AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[],
};
let mut nearest_end = 0.0;

View File

@ -25,6 +25,8 @@ pub enum LayerType {
Automation,
/// Visual effects layer
Effect,
/// Group layer containing child layers (e.g. video + audio)
Group,
}
/// Common trait for all layer types
@ -694,6 +696,80 @@ impl VideoLayer {
}
}
/// Group layer containing child layers (e.g. video + audio).
/// Collapsible in the timeline; when collapsed shows a merged clip view.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GroupLayer {
    /// Base layer properties
    pub layer: Layer,
    /// Child layers in this group (typically one VideoLayer + one AudioLayer)
    pub children: Vec<AnyLayer>,
    /// Whether the group is expanded in the timeline
    #[serde(default = "default_true")]
    pub expanded: bool,
}

/// Serde default for `GroupLayer::expanded`: documents saved before this
/// field existed deserialize as expanded.
fn default_true() -> bool {
    true
}
/// A group carries no per-property state of its own; every common layer
/// accessor delegates to the embedded base [`Layer`].
impl LayerTrait for GroupLayer {
    fn id(&self) -> Uuid { self.layer.id }
    fn name(&self) -> &str { &self.layer.name }
    fn set_name(&mut self, name: String) { self.layer.name = name; }
    fn has_custom_name(&self) -> bool { self.layer.has_custom_name }
    fn set_has_custom_name(&mut self, custom: bool) { self.layer.has_custom_name = custom; }
    fn visible(&self) -> bool { self.layer.visible }
    fn set_visible(&mut self, visible: bool) { self.layer.visible = visible; }
    fn opacity(&self) -> f64 { self.layer.opacity }
    fn set_opacity(&mut self, opacity: f64) { self.layer.opacity = opacity; }
    fn volume(&self) -> f64 { self.layer.volume }
    fn set_volume(&mut self, volume: f64) { self.layer.volume = volume; }
    fn muted(&self) -> bool { self.layer.muted }
    fn set_muted(&mut self, muted: bool) { self.layer.muted = muted; }
    fn soloed(&self) -> bool { self.layer.soloed }
    fn set_soloed(&mut self, soloed: bool) { self.layer.soloed = soloed; }
    fn locked(&self) -> bool { self.layer.locked }
    fn set_locked(&mut self, locked: bool) { self.layer.locked = locked; }
}
impl GroupLayer {
    /// Create a new, initially expanded group layer with no children.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            layer: Layer::new(LayerType::Group, name),
            children: Vec::new(),
            expanded: true,
        }
    }

    /// Append a child layer to the end of this group.
    pub fn add_child(&mut self, layer: AnyLayer) {
        self.children.push(layer);
    }

    /// Collect every clip instance owned by this group's children, each paired
    /// with the owning child layer's ID.
    pub fn all_child_clip_instances(&self) -> Vec<(Uuid, &ClipInstance)> {
        self.children
            .iter()
            .flat_map(|child| {
                let owner = child.id();
                let instances: &[ClipInstance] = match child {
                    AnyLayer::Audio(l) => &l.clip_instances,
                    AnyLayer::Video(l) => &l.clip_instances,
                    AnyLayer::Vector(l) => &l.clip_instances,
                    AnyLayer::Effect(l) => &l.clip_instances,
                    // Groups are never nested inside groups, so there is
                    // nothing to collect for this variant.
                    AnyLayer::Group(_) => &[],
                };
                instances.iter().map(move |ci| (owner, ci))
            })
            .collect()
    }
}
/// Unified layer enum for polymorphic handling
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum AnyLayer {
@ -701,6 +777,7 @@ pub enum AnyLayer {
Audio(AudioLayer),
Video(VideoLayer),
Effect(EffectLayer),
Group(GroupLayer),
}
impl LayerTrait for AnyLayer {
@ -710,6 +787,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.id(),
AnyLayer::Video(l) => l.id(),
AnyLayer::Effect(l) => l.id(),
AnyLayer::Group(l) => l.id(),
}
}
@ -719,6 +797,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.name(),
AnyLayer::Video(l) => l.name(),
AnyLayer::Effect(l) => l.name(),
AnyLayer::Group(l) => l.name(),
}
}
@ -728,6 +807,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_name(name),
AnyLayer::Video(l) => l.set_name(name),
AnyLayer::Effect(l) => l.set_name(name),
AnyLayer::Group(l) => l.set_name(name),
}
}
@ -737,6 +817,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.has_custom_name(),
AnyLayer::Video(l) => l.has_custom_name(),
AnyLayer::Effect(l) => l.has_custom_name(),
AnyLayer::Group(l) => l.has_custom_name(),
}
}
@ -746,6 +827,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_has_custom_name(custom),
AnyLayer::Video(l) => l.set_has_custom_name(custom),
AnyLayer::Effect(l) => l.set_has_custom_name(custom),
AnyLayer::Group(l) => l.set_has_custom_name(custom),
}
}
@ -755,6 +837,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.visible(),
AnyLayer::Video(l) => l.visible(),
AnyLayer::Effect(l) => l.visible(),
AnyLayer::Group(l) => l.visible(),
}
}
@ -764,6 +847,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_visible(visible),
AnyLayer::Video(l) => l.set_visible(visible),
AnyLayer::Effect(l) => l.set_visible(visible),
AnyLayer::Group(l) => l.set_visible(visible),
}
}
@ -773,6 +857,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.opacity(),
AnyLayer::Video(l) => l.opacity(),
AnyLayer::Effect(l) => l.opacity(),
AnyLayer::Group(l) => l.opacity(),
}
}
@ -782,6 +867,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_opacity(opacity),
AnyLayer::Video(l) => l.set_opacity(opacity),
AnyLayer::Effect(l) => l.set_opacity(opacity),
AnyLayer::Group(l) => l.set_opacity(opacity),
}
}
@ -791,6 +877,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.volume(),
AnyLayer::Video(l) => l.volume(),
AnyLayer::Effect(l) => l.volume(),
AnyLayer::Group(l) => l.volume(),
}
}
@ -800,6 +887,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_volume(volume),
AnyLayer::Video(l) => l.set_volume(volume),
AnyLayer::Effect(l) => l.set_volume(volume),
AnyLayer::Group(l) => l.set_volume(volume),
}
}
@ -809,6 +897,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.muted(),
AnyLayer::Video(l) => l.muted(),
AnyLayer::Effect(l) => l.muted(),
AnyLayer::Group(l) => l.muted(),
}
}
@ -818,6 +907,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_muted(muted),
AnyLayer::Video(l) => l.set_muted(muted),
AnyLayer::Effect(l) => l.set_muted(muted),
AnyLayer::Group(l) => l.set_muted(muted),
}
}
@ -827,6 +917,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.soloed(),
AnyLayer::Video(l) => l.soloed(),
AnyLayer::Effect(l) => l.soloed(),
AnyLayer::Group(l) => l.soloed(),
}
}
@ -836,6 +927,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_soloed(soloed),
AnyLayer::Video(l) => l.set_soloed(soloed),
AnyLayer::Effect(l) => l.set_soloed(soloed),
AnyLayer::Group(l) => l.set_soloed(soloed),
}
}
@ -845,6 +937,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.locked(),
AnyLayer::Video(l) => l.locked(),
AnyLayer::Effect(l) => l.locked(),
AnyLayer::Group(l) => l.locked(),
}
}
@ -854,6 +947,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Audio(l) => l.set_locked(locked),
AnyLayer::Video(l) => l.set_locked(locked),
AnyLayer::Effect(l) => l.set_locked(locked),
AnyLayer::Group(l) => l.set_locked(locked),
}
}
}
@ -866,6 +960,7 @@ impl AnyLayer {
AnyLayer::Audio(l) => &l.layer,
AnyLayer::Video(l) => &l.layer,
AnyLayer::Effect(l) => &l.layer,
AnyLayer::Group(l) => &l.layer,
}
}
@ -876,6 +971,7 @@ impl AnyLayer {
AnyLayer::Audio(l) => &mut l.layer,
AnyLayer::Video(l) => &mut l.layer,
AnyLayer::Effect(l) => &mut l.layer,
AnyLayer::Group(l) => &mut l.layer,
}
}

View File

@ -295,6 +295,17 @@ pub fn render_layer_isolated(
.collect();
return RenderedLayer::effect_layer(layer_id, opacity, active_effects);
}
AnyLayer::Group(group_layer) => {
// Render each child layer's content into the group's scene
for child in &group_layer.children {
render_layer(
document, time, child, &mut rendered.scene, base_transform,
1.0, // Full opacity - layer opacity handled in compositing
image_cache, video_manager, camera_frame,
);
}
rendered.has_content = !group_layer.children.is_empty();
}
}
rendered
@ -434,6 +445,12 @@ fn render_layer(
AnyLayer::Effect(_) => {
// Effect layers are processed during GPU compositing, not rendered to scene
}
AnyLayer::Group(group_layer) => {
// Render each child layer in the group
for child in &group_layer.children {
render_layer(document, time, child, scene, base_transform, parent_opacity, image_cache, video_manager, camera_frame);
}
}
}
}

View File

@ -1429,6 +1429,36 @@ impl EditorApp {
}
}
// Layers inside group layers (recursive)
fn collect_audio_from_groups(
    layers: &[lightningbeam_core::layer::AnyLayer],
    parent_group_id: Option<uuid::Uuid>,
    existing: &std::collections::HashMap<uuid::Uuid, daw_backend::TrackId>,
    out: &mut Vec<(uuid::Uuid, String, AudioLayerType, Option<uuid::Uuid>)>,
) {
    for layer in layers {
        match layer {
            // Descend into the group, tagging its children with this group's id.
            AnyLayer::Group(group) => {
                collect_audio_from_groups(&group.children, Some(group.layer.id), existing, out);
            }
            // Only audio layers that live inside a group and have no backend
            // track yet need to be synced.
            AnyLayer::Audio(audio) => {
                if parent_group_id.is_some() && !existing.contains_key(&audio.layer.id) {
                    out.push((
                        audio.layer.id,
                        audio.layer.name.clone(),
                        audio.audio_layer_type,
                        parent_group_id,
                    ));
                }
            }
            _ => {}
        }
    }
}
collect_audio_from_groups(
&self.action_executor.document().root.children,
None,
&self.layer_to_track_map,
&mut audio_layers_to_sync,
);
// Layers inside vector clips
for (&clip_id, clip) in &self.action_executor.document().vector_clips {
for layer in clip.layers.root_data() {
@ -1447,9 +1477,9 @@ impl EditorApp {
}
// Now create backend tracks for each
for (layer_id, layer_name, audio_type, parent_clip_id) in audio_layers_to_sync {
// If inside a clip, ensure a metatrack exists
let parent_track = parent_clip_id.and_then(|cid| self.ensure_metatrack_for_clip(cid));
for (layer_id, layer_name, audio_type, parent_id) in audio_layers_to_sync {
// If inside a clip or group, ensure a metatrack exists
let parent_track = parent_id.and_then(|pid| self.ensure_metatrack_for_parent(pid));
match audio_type {
AudioLayerType::Midi => {
@ -1491,6 +1521,53 @@ impl EditorApp {
}
}
/// Ensure a backend metatrack exists for a parent container (VectorClip or GroupLayer).
/// Checks if the ID belongs to a GroupLayer first, then falls back to VectorClip.
fn ensure_metatrack_for_parent(&mut self, parent_id: Uuid) -> Option<daw_backend::TrackId> {
    // Fast path: a metatrack has already been created for this container.
    if let Some(&existing) = self.clip_to_metatrack_map.get(&parent_id) {
        return Some(existing);
    }
    // A top-level group layer with this id means "group"; otherwise the id
    // is treated as a vector clip id.
    let is_group = self
        .action_executor
        .document()
        .root
        .children
        .iter()
        .any(|layer| {
            matches!(layer, lightningbeam_core::layer::AnyLayer::Group(g) if g.layer.id == parent_id)
        });
    if is_group {
        self.ensure_metatrack_for_group(parent_id)
    } else {
        self.ensure_metatrack_for_clip(parent_id)
    }
}
/// Ensure a backend metatrack (group track) exists for a GroupLayer.
fn ensure_metatrack_for_group(&mut self, group_layer_id: Uuid) -> Option<daw_backend::TrackId> {
if let Some(&track_id) = self.clip_to_metatrack_map.get(&group_layer_id) {
return Some(track_id);
}
let group_name = self.action_executor.document().root.children.iter()
.find(|l| l.id() == group_layer_id)
.map(|l| l.name().to_string())
.unwrap_or_else(|| "Group".to_string());
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
match controller.create_group_track_sync(format!("[{}]", group_name), None) {
Ok(track_id) => {
self.clip_to_metatrack_map.insert(group_layer_id, track_id);
println!("✅ Created metatrack for group '{}' (TrackId: {})", group_name, track_id);
return Some(track_id);
}
Err(e) => {
eprintln!("⚠️ Failed to create metatrack for group '{}': {}", group_name, e);
}
}
}
None
}
/// Ensure a backend metatrack (group track) exists for a movie clip.
/// Returns the metatrack's TrackId, creating one if needed.
fn ensure_metatrack_for_clip(&mut self, clip_id: Uuid) -> Option<daw_backend::TrackId> {
@ -1574,6 +1651,7 @@ impl EditorApp {
AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document),
AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document),
AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document),
AnyLayer::Group(_) => vec![],
};
for instance_id in active_layer_clips {
@ -1591,6 +1669,7 @@ impl EditorApp {
AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document),
AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document),
AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document),
AnyLayer::Group(_) => vec![],
};
if member_splittable.contains(member_instance_id) {
clips_to_split.push((*member_layer_id, *member_instance_id));
@ -1696,12 +1775,14 @@ impl EditorApp {
let layer_type = ClipboardLayerType::from_layer(layer);
let instances: Vec<_> = match layer {
let clip_slice: &[lightningbeam_core::clip::ClipInstance] = match layer {
AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
}
AnyLayer::Group(_) => &[],
};
let instances: Vec<_> = clip_slice
.iter()
.filter(|ci| self.selection.contains_clip_instance(&ci.id))
.cloned()
@ -1992,11 +2073,12 @@ impl EditorApp {
Some(l) => l,
None => return,
};
let instances = match layer {
let instances: &[lightningbeam_core::clip::ClipInstance] = match layer {
AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
instances.iter()
.filter(|ci| selection.contains_clip_instance(&ci.id))
@ -3745,57 +3827,91 @@ impl EditorApp {
// Find the video clip instance in the document
let document = self.action_executor.document();
let mut video_instance_info: Option<(uuid::Uuid, uuid::Uuid, f64)> = None; // (layer_id, instance_id, timeline_start)
let mut video_instance_info: Option<(uuid::Uuid, f64, bool)> = None; // (layer_id, timeline_start, already_in_group)
// Search all layers (root + inside movie clips) for a video clip instance with matching clip_id
let all_layers = document.all_layers();
for layer in &all_layers {
if let AnyLayer::Video(video_layer) = layer {
// Search root layers for a video clip instance with matching clip_id
for layer in &document.root.children {
match layer {
AnyLayer::Video(video_layer) => {
for instance in &video_layer.clip_instances {
if instance.clip_id == video_clip_id {
video_instance_info = Some((
video_layer.layer.id,
instance.id,
instance.timeline_start,
));
video_instance_info = Some((video_layer.layer.id, instance.timeline_start, false));
break;
}
}
}
AnyLayer::Group(group) => {
for child in &group.children {
if let AnyLayer::Video(video_layer) = child {
for instance in &video_layer.clip_instances {
if instance.clip_id == video_clip_id {
video_instance_info = Some((video_layer.layer.id, instance.timeline_start, true));
break;
}
}
}
}
}
_ => {}
}
if video_instance_info.is_some() {
break;
}
}
// If we found a video instance, auto-place the audio
if let Some((video_layer_id, video_instance_id, timeline_start)) = video_instance_info {
// Find or create sampled audio track
let audio_layer_id = {
let doc = self.action_executor.document();
panes::find_sampled_audio_track(doc)
}.unwrap_or_else(|| {
// Create new sampled audio layer
let audio_layer = AudioLayer::new_sampled("Audio Track");
self.action_executor.document_mut().root.add_child(
AnyLayer::Audio(audio_layer)
)
});
// If we found a video instance, wrap it in a GroupLayer with a new AudioLayer
if let Some((video_layer_id, timeline_start, already_in_group)) = video_instance_info {
if already_in_group {
// Video is already in a group (shouldn't happen normally, but handle it)
println!(" Video already in a group layer, skipping auto-group");
return;
}
// Sync newly created audio layer with backend BEFORE adding clip instances
// Get video name for the group
let video_name = self.action_executor.document().video_clips
.get(&video_clip_id)
.map(|c| c.name.clone())
.unwrap_or_else(|| "Video".to_string());
// Remove the VideoLayer from root
let video_layer_opt = {
let doc = self.action_executor.document_mut();
let idx = doc.root.children.iter().position(|l| l.id() == video_layer_id);
idx.map(|i| doc.root.children.remove(i))
};
let Some(video_layer) = video_layer_opt else {
eprintln!("❌ Could not find video layer {} in root to move into group", video_layer_id);
return;
};
// Create AudioLayer for the extracted audio
let audio_layer = AudioLayer::new_sampled("Audio");
let audio_layer_id = audio_layer.layer.id;
// Build GroupLayer containing both
let mut group = GroupLayer::new(video_name);
group.expanded = false; // start collapsed
group.add_child(video_layer);
group.add_child(AnyLayer::Audio(audio_layer));
let group_id = group.layer.id;
// Add GroupLayer to root
self.action_executor.document_mut().root.add_child(AnyLayer::Group(group));
// Sync backend (creates metatrack for group + audio track as child)
self.sync_audio_layers_to_backend();
// Create audio clip instance at same timeline position as video
let audio_instance = ClipInstance::new(audio_clip_id)
.with_timeline_start(timeline_start);
let audio_instance_id = audio_instance.id;
// Execute audio action with backend sync
// Execute audio clip placement with backend sync
let audio_action = lightningbeam_core::actions::AddClipInstanceAction::new(
audio_layer_id,
audio_instance,
);
// Execute with backend synchronization
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
let mut backend_context = lightningbeam_core::action::BackendContext {
@ -3806,19 +3922,13 @@ impl EditorApp {
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(audio_action), &mut backend_context) {
eprintln!("❌ Failed to execute extracted audio AddClipInstanceAction with backend: {}", e);
eprintln!("❌ Failed to place extracted audio clip: {}", e);
}
} else {
let _ = self.action_executor.execute(Box::new(audio_action));
}
// Create instance group linking video and audio
let mut group = lightningbeam_core::instance_group::InstanceGroup::new();
group.add_member(video_layer_id, video_instance_id);
group.add_member(audio_layer_id, audio_instance_id);
self.action_executor.document_mut().add_instance_group(group);
println!(" 🔗 Auto-placed audio and linked to video instance");
println!(" 🔗 Created group layer '{}' with video + audio", group_id);
}
}

View File

@ -1262,6 +1262,9 @@ impl AssetLibraryPane {
}
}
}
lightningbeam_core::layer::AnyLayer::Group(_) => {
// Group layers don't have their own clip instances
}
}
}
false

View File

@ -8,7 +8,7 @@
use eframe::egui;
use lightningbeam_core::clip::ClipInstance;
use lightningbeam_core::layer::{AnyLayer, AudioLayerType, LayerTrait};
use lightningbeam_core::layer::{AnyLayer, AudioLayerType, GroupLayer, LayerTrait};
use super::{DragClipType, NodePath, PaneRenderer, SharedPaneState};
const RULER_HEIGHT: f32 = 30.0;
@ -117,6 +117,7 @@ fn effective_clip_duration(
AnyLayer::Audio(_) => document.get_audio_clip(&clip_instance.clip_id).map(|c| c.duration),
AnyLayer::Video(_) => document.get_video_clip(&clip_instance.clip_id).map(|c| c.duration),
AnyLayer::Effect(_) => Some(lightningbeam_core::effect::EFFECT_DURATION),
AnyLayer::Group(_) => None,
}
}
@ -198,6 +199,123 @@ fn can_drop_on_layer(layer: &AnyLayer, clip_type: DragClipType) -> bool {
}
}
/// Represents a single row in the timeline's virtual layer list.
///
/// Expanded groups show their children directly (no separate header row).
/// Collapsed groups show as a single row with merged clips.
/// Rows are built by `build_timeline_rows` and borrow from the document's
/// layer tree for the lifetime `'a`.
#[derive(Clone, Copy)]
#[allow(dead_code)]
enum TimelineRow<'a> {
    /// A normal standalone layer (not in any group)
    Normal(&'a AnyLayer),
    /// A collapsed group -- single row with expand triangle and merged clips
    CollapsedGroup { group: &'a GroupLayer, depth: u32 },
    /// A child layer inside an expanded group
    GroupChild {
        child: &'a AnyLayer,
        group: &'a GroupLayer, // the immediate parent group (for collapse action)
        depth: u32,            // nesting depth (1 = direct child of root group)
        show_collapse: bool,   // true for first visible child -- shows collapse triangle
    },
}
impl<'a> TimelineRow<'a> {
    /// The layer ID this row stands for.
    /// For a collapsed group that is the group layer's own ID.
    fn layer_id(&self) -> uuid::Uuid {
        match self {
            Self::CollapsedGroup { group, .. } => group.layer.id,
            Self::Normal(layer) => layer.id(),
            Self::GroupChild { child, .. } => child.id(),
        }
    }

    /// The single `AnyLayer` behind this row, if there is one.
    /// Collapsed groups aggregate multiple layers and therefore yield `None`.
    fn as_any_layer(&self) -> Option<&'a AnyLayer> {
        match self {
            Self::CollapsedGroup { .. } => None,
            Self::Normal(layer) => Some(layer),
            Self::GroupChild { child, .. } => Some(child),
        }
    }
}
/// Build a flattened list of timeline rows from the reversed context_layers.
/// Expanded groups emit their children directly (no header row).
/// Collapsed groups emit a single CollapsedGroup row.
fn build_timeline_rows<'a>(context_layers: &[&'a AnyLayer]) -> Vec<TimelineRow<'a>> {
    let mut rows = Vec::new();
    // Layers are displayed newest-on-top, hence the reversed iteration.
    context_layers
        .iter()
        .rev()
        .for_each(|&layer| flatten_layer(layer, 0, None, &mut rows));
    rows
}
/// Recursively flatten one layer (and any group descendants) into `rows`.
///
/// * A collapsed group becomes a single `CollapsedGroup` row.
/// * An expanded group emits no row of its own; its children are emitted
///   directly one level deeper, and the first emitted child row is tagged
///   `show_collapse = true` so it draws the collapse triangle for the group.
/// * Any other layer becomes a `GroupChild` row when nested (`depth > 0`)
///   or a `Normal` row at the top level.
fn flatten_layer<'a>(
    layer: &'a AnyLayer,
    depth: u32,
    parent_group: Option<&'a GroupLayer>,
    rows: &mut Vec<TimelineRow<'a>>,
) {
    match layer {
        AnyLayer::Group(g) if !g.expanded => {
            rows.push(TimelineRow::CollapsedGroup { group: g, depth });
        }
        AnyLayer::Group(g) => {
            // Expanded group: no header row, emit children directly.
            // The first emitted row gets the collapse triangle for this group.
            let mut first_emitted = true;
            for child in &g.children {
                let before_len = rows.len();
                flatten_layer(child, depth + 1, Some(g), rows);
                // Mark the first emitted GroupChild row with the collapse triangle.
                // NOTE(review): if the first child emits a CollapsedGroup row
                // (a nested collapsed group), the `if let` below does not match
                // but `first_emitted` is still cleared, so NO row of this group
                // gets the collapse triangle — confirm this is intended.
                if first_emitted && rows.len() > before_len {
                    if let Some(TimelineRow::GroupChild { show_collapse, group, .. }) = rows.get_mut(before_len) {
                        *show_collapse = true;
                        *group = g; // point to THIS group for the collapse action
                    }
                    first_emitted = false;
                }
            }
        }
        _ => {
            if depth > 0 {
                // Invariant: depth > 0 only via the recursive call above, which
                // always passes Some(g); a None parent here silently drops the row.
                if let Some(group) = parent_group {
                    rows.push(TimelineRow::GroupChild {
                        child: layer,
                        group,
                        depth,
                        show_collapse: false,
                    });
                }
            } else {
                rows.push(TimelineRow::Normal(layer));
            }
        }
    }
}
/// Collect all (layer_ref, clip_instances) tuples from context_layers,
/// recursively descending into group children.
/// Returns (&AnyLayer, &[ClipInstance]) so callers have access to both layer info and clips.
fn all_layer_clip_instances<'a>(context_layers: &[&'a AnyLayer]) -> Vec<(&'a AnyLayer, &'a [ClipInstance])> {
    context_layers.iter().fold(Vec::new(), |mut acc, &layer| {
        collect_clip_instances(layer, &mut acc);
        acc
    })
}
/// Append `(layer, clips)` pairs for `layer` to `result`, recursing into
/// group layers (a group itself owns no clips; only its children do).
fn collect_clip_instances<'a>(layer: &'a AnyLayer, result: &mut Vec<(&'a AnyLayer, &'a [ClipInstance])>) {
    let clips: &'a [ClipInstance] = match layer {
        AnyLayer::Group(g) => {
            // Groups contribute only through their children, in order.
            for child in &g.children {
                collect_clip_instances(child, result);
            }
            return;
        }
        AnyLayer::Vector(l) => &l.clip_instances,
        AnyLayer::Audio(l) => &l.clip_instances,
        AnyLayer::Video(l) => &l.clip_instances,
        AnyLayer::Effect(l) => &l.clip_instances,
    };
    result.push((layer, clips));
}
/// Find an existing sampled audio track in the document where a clip can be placed without overlap
/// Returns the layer ID if found, None otherwise
fn find_sampled_audio_track_for_clip(
@ -472,7 +590,8 @@ impl TimelinePane {
editing_clip_id: Option<&uuid::Uuid>,
) -> Option<(ClipDragType, uuid::Uuid)> {
let context_layers = document.context_layers(editing_clip_id);
let layer_count = context_layers.len();
let rows = build_timeline_rows(&context_layers);
let layer_count = rows.len();
// Check if pointer is in valid area
if pointer_pos.y < header_rect.min.y {
@ -489,15 +608,21 @@ impl TimelinePane {
return None;
}
let rev_layers: Vec<&lightningbeam_core::layer::AnyLayer> = context_layers.iter().rev().copied().collect();
let layer = rev_layers.get(hovered_layer_index)?;
let row = &rows[hovered_layer_index];
// Collapsed groups have no directly clickable clips
let layer: &AnyLayer = match row {
TimelineRow::Normal(l) => l,
TimelineRow::GroupChild { child, .. } => child,
TimelineRow::CollapsedGroup { .. } => return None,
};
let _layer_data = layer.layer();
let clip_instances = match layer {
let clip_instances: &[ClipInstance] = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[],
};
// Check each clip instance
@ -555,6 +680,76 @@ impl TimelinePane {
None
}
    /// Detect if the pointer is over a merged span in a collapsed group row.
    /// Returns all child clip instance IDs that contribute to the hit span.
    ///
    /// Returns `None` when the pointer is outside the content area, the
    /// hovered row is not a collapsed group, or no merged span is under it.
    fn detect_collapsed_group_at_pointer(
        &self,
        pointer_pos: egui::Pos2,
        document: &lightningbeam_core::document::Document,
        content_rect: egui::Rect,
        header_rect: egui::Rect,
        editing_clip_id: Option<&uuid::Uuid>,
    ) -> Option<Vec<uuid::Uuid>> {
        let context_layers = document.context_layers(editing_clip_id);
        let rows = build_timeline_rows(&context_layers);
        // Reject pointers above the ruler or left of the clip content area.
        if pointer_pos.y < header_rect.min.y || pointer_pos.x < content_rect.min.x {
            return None;
        }
        // Map y (scroll-adjusted) to a virtual row index; rows are LAYER_HEIGHT tall.
        let relative_y = pointer_pos.y - header_rect.min.y + self.viewport_scroll_y;
        let hovered_index = (relative_y / LAYER_HEIGHT) as usize;
        if hovered_index >= rows.len() {
            return None;
        }
        // Only collapsed-group rows have merged spans.
        let TimelineRow::CollapsedGroup { group, .. } = &rows[hovered_index] else {
            return None;
        };
        // Compute merged spans with the child clip IDs that contribute to each
        let child_clips = group.all_child_clip_instances();
        let mut spans: Vec<(f64, f64, Vec<uuid::Uuid>)> = Vec::new(); // (start, end, clip_ids)
        for (_child_layer_id, ci) in &child_clips {
            // Fall back to the trimmed length when the clip's duration is unknown.
            let clip_dur = document.get_clip_duration(&ci.clip_id).unwrap_or_else(|| {
                ci.trim_end.unwrap_or(1.0) - ci.trim_start
            });
            let start = ci.effective_start();
            let end = start + ci.total_duration(clip_dur);
            spans.push((start, end, vec![ci.id]));
        }
        spans.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));
        // Merge overlapping spans
        let mut merged: Vec<(f64, f64, Vec<uuid::Uuid>)> = Vec::new();
        for (s, e, ids) in spans {
            if let Some(last) = merged.last_mut() {
                if s <= last.1 {
                    // Overlaps (or touches) the previous span: extend it and
                    // absorb this span's contributing clip IDs.
                    last.1 = last.1.max(e);
                    last.2.extend(ids);
                } else {
                    merged.push((s, e, ids));
                }
            } else {
                merged.push((s, e, ids));
            }
        }
        // Check which merged span the pointer is over
        let mouse_x = pointer_pos.x - content_rect.min.x;
        for (s, e, ids) in merged {
            let sx = self.time_to_x(s);
            // Enforce a minimum on-screen width so very short spans stay hittable.
            let ex = self.time_to_x(e).max(sx + MIN_CLIP_WIDTH_PX);
            if mouse_x >= sx && mouse_x <= ex {
                return Some(ids);
            }
        }
        None
    }
/// Zoom in by a fixed increment
pub fn zoom_in(&mut self, center_x: f32) {
self.apply_zoom_at_point(0.2, center_x);
@ -902,9 +1097,11 @@ impl TimelinePane {
let text_color = text_style.text_color.unwrap_or(egui::Color32::from_gray(200));
let secondary_text_color = egui::Color32::from_gray(150);
// Draw layer headers from document (reversed so newest layers appear on top)
for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
// Build virtual row list (accounts for group expansion)
let rows = build_timeline_rows(context_layers);
// Draw layer headers from virtual row list
for (i, row) in rows.iter().enumerate() {
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area
@ -912,13 +1109,55 @@ impl TimelinePane {
continue;
}
// Indent for group children and collapsed groups based on depth
let indent = match row {
TimelineRow::GroupChild { depth, .. } => *depth as f32 * 16.0,
TimelineRow::CollapsedGroup { depth, .. } => *depth as f32 * 16.0,
_ => 0.0,
};
let header_rect = egui::Rect::from_min_size(
egui::pos2(rect.min.x, y),
egui::vec2(LAYER_HEADER_WIDTH, LAYER_HEIGHT),
);
// Determine the AnyLayer or GroupLayer for this row
let (layer_id, layer_name, layer_type, type_color) = match row {
TimelineRow::Normal(layer) => {
let data = layer.layer();
let (lt, tc) = match layer {
AnyLayer::Vector(_) => ("Vector", egui::Color32::from_rgb(255, 180, 100)),
AnyLayer::Audio(al) => match al.audio_layer_type {
AudioLayerType::Midi => ("MIDI", egui::Color32::from_rgb(100, 255, 150)),
AudioLayerType::Sampled => ("Audio", egui::Color32::from_rgb(100, 180, 255)),
},
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
};
(layer.id(), data.name.clone(), lt, tc)
}
TimelineRow::CollapsedGroup { group, .. } => {
(group.layer.id, group.layer.name.clone(), "Group", egui::Color32::from_rgb(0, 180, 180))
}
TimelineRow::GroupChild { child, .. } => {
let data = child.layer();
let (lt, tc) = match child {
AnyLayer::Vector(_) => ("Vector", egui::Color32::from_rgb(255, 180, 100)),
AnyLayer::Audio(al) => match al.audio_layer_type {
AudioLayerType::Midi => ("MIDI", egui::Color32::from_rgb(100, 255, 150)),
AudioLayerType::Sampled => ("Audio", egui::Color32::from_rgb(100, 180, 255)),
},
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
};
(child.id(), data.name.clone(), lt, tc)
}
};
// Active vs inactive background colors
let is_active = active_layer_id.map_or(false, |id| id == layer.id());
let is_active = active_layer_id.map_or(false, |id| id == layer_id);
let bg_color = if is_active {
active_color
} else {
@ -927,39 +1166,106 @@ impl TimelinePane {
ui.painter().rect_filled(header_rect, 0.0, bg_color);
// Get layer info
let layer_data = layer.layer();
let layer_name = &layer_data.name;
let (layer_type, type_color) = match layer {
lightningbeam_core::layer::AnyLayer::Vector(_) => ("Vector", egui::Color32::from_rgb(255, 180, 100)), // Orange
lightningbeam_core::layer::AnyLayer::Audio(audio_layer) => {
match audio_layer.audio_layer_type {
lightningbeam_core::layer::AudioLayerType::Midi => ("MIDI", egui::Color32::from_rgb(100, 255, 150)), // Green
lightningbeam_core::layer::AudioLayerType::Sampled => ("Audio", egui::Color32::from_rgb(100, 180, 255)), // Blue
}
}
lightningbeam_core::layer::AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), // Purple
lightningbeam_core::layer::AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), // Pink
};
// Color indicator bar on the left edge
let indicator_rect = egui::Rect::from_min_size(
// Gutter area (left of indicator) — solid group color, with collapse chevron
if indent > 0.0 {
let gutter_rect = egui::Rect::from_min_size(
header_rect.min,
egui::vec2(indent, LAYER_HEIGHT),
);
// Solid dark group color for the gutter strip
let group_color = match row {
TimelineRow::GroupChild { .. } | TimelineRow::CollapsedGroup { .. } => {
// Solid dark teal for the group gutter
egui::Color32::from_rgb(0, 50, 50)
}
_ => header_bg,
};
ui.painter().rect_filled(gutter_rect, 0.0, group_color);
// Thin colored accent line at right edge of gutter (group color)
let accent_rect = egui::Rect::from_min_size(
egui::pos2(header_rect.min.x + indent - 2.0, y),
egui::vec2(2.0, LAYER_HEIGHT),
);
ui.painter().rect_filled(accent_rect, 0.0, egui::Color32::from_rgb(0, 180, 180));
// Draw collapse triangle on first child row (painted, not text)
if let TimelineRow::GroupChild { show_collapse: true, .. } = row {
let cx = header_rect.min.x + indent * 0.5;
let cy = y + LAYER_HEIGHT * 0.5;
let s = 4.0; // half-size of triangle
// Down-pointing triangle (▼) for collapse
let tri = vec![
egui::pos2(cx - s, cy - s * 0.6),
egui::pos2(cx + s, cy - s * 0.6),
egui::pos2(cx, cy + s * 0.6),
];
ui.painter().add(egui::Shape::convex_polygon(tri, egui::Color32::from_gray(180), egui::Stroke::NONE));
}
// Make the ENTIRE gutter clickable for collapse on any GroupChild row
if let TimelineRow::GroupChild { group, .. } = row {
let gutter_response = ui.scope_builder(egui::UiBuilder::new().max_rect(gutter_rect), |ui| {
ui.allocate_rect(gutter_rect, egui::Sense::click())
}).inner;
if gutter_response.clicked() {
self.layer_control_clicked = true;
pending_actions.push(Box::new(
lightningbeam_core::actions::ToggleGroupExpansionAction::new(group.layer.id, false),
));
}
}
}
// Color indicator bar on the left edge (after gutter)
let indicator_rect = egui::Rect::from_min_size(
header_rect.min + egui::vec2(indent, 0.0),
egui::vec2(4.0, LAYER_HEIGHT),
);
ui.painter().rect_filled(indicator_rect, 0.0, type_color);
// Expand triangle in the header for collapsed groups
let mut name_x_offset = 10.0 + indent;
if let TimelineRow::CollapsedGroup { group, .. } = row {
// Right-pointing triangle (▶) for expand, painted manually
let cx = header_rect.min.x + indent + 14.0;
let cy = y + 17.0;
let s = 4.0;
let tri = vec![
egui::pos2(cx - s * 0.6, cy - s),
egui::pos2(cx - s * 0.6, cy + s),
egui::pos2(cx + s * 0.6, cy),
];
ui.painter().add(egui::Shape::convex_polygon(tri, egui::Color32::from_gray(180), egui::Stroke::NONE));
// Clickable area for expand
let chevron_rect = egui::Rect::from_min_size(
egui::pos2(header_rect.min.x + indent + 4.0, y + 4.0),
egui::vec2(20.0, 24.0),
);
let chevron_response = ui.scope_builder(egui::UiBuilder::new().max_rect(chevron_rect), |ui| {
ui.allocate_rect(chevron_rect, egui::Sense::click())
}).inner;
if chevron_response.clicked() {
self.layer_control_clicked = true;
pending_actions.push(Box::new(
lightningbeam_core::actions::ToggleGroupExpansionAction::new(group.layer.id, true),
));
}
name_x_offset = 10.0 + indent + 18.0;
}
// Layer name
ui.painter().text(
header_rect.min + egui::vec2(10.0, 10.0),
header_rect.min + egui::vec2(name_x_offset, 10.0),
egui::Align2::LEFT_TOP,
layer_name,
&layer_name,
egui::FontId::proportional(14.0),
text_color,
);
// Layer type (smaller text below name with colored background)
let type_text_pos = header_rect.min + egui::vec2(10.0, 28.0);
let type_text_pos = header_rect.min + egui::vec2(name_x_offset, 28.0);
let type_text_galley = ui.painter().layout_no_wrap(
layer_type.to_string(),
egui::FontId::proportional(11.0),
@ -985,6 +1291,18 @@ impl TimelinePane {
secondary_text_color,
);
// Get the AnyLayer reference for controls
let any_layer_for_controls: Option<&AnyLayer> = match row {
TimelineRow::Normal(l) => Some(l),
TimelineRow::CollapsedGroup { group, .. } => {
// We need an AnyLayer ref - find it from context_layers
context_layers.iter().rev().copied().find(|l| l.id() == group.layer.id)
}
TimelineRow::GroupChild { child, .. } => Some(child),
};
let Some(layer_for_controls) = any_layer_for_controls else { continue; };
// Layer controls (mute, solo, lock, volume)
let controls_top = header_rect.min.y + 4.0;
let controls_right = header_rect.max.x - 8.0;
@ -1013,15 +1331,14 @@ impl TimelinePane {
);
// Get layer ID and current property values from the layer we already have
let layer_id = layer.id();
let current_volume = layer.volume();
let is_muted = layer.muted();
let is_soloed = layer.soloed();
let is_locked = layer.locked();
let current_volume = layer_for_controls.volume();
let is_muted = layer_for_controls.muted();
let is_soloed = layer_for_controls.soloed();
let is_locked = layer_for_controls.locked();
// Mute button — or camera toggle for video layers
let is_video_layer = matches!(layer, lightningbeam_core::layer::AnyLayer::Video(_));
let camera_enabled = if let lightningbeam_core::layer::AnyLayer::Video(v) = layer {
let is_video_layer = matches!(layer_for_controls, lightningbeam_core::layer::AnyLayer::Video(_));
let camera_enabled = if let lightningbeam_core::layer::AnyLayer::Video(v) = layer_for_controls {
v.camera_enabled
} else {
false
@ -1213,9 +1530,11 @@ impl TimelinePane {
}
}
// Draw layer rows from document (reversed so newest layers appear on top)
for (i, layer) in context_layers.iter().rev().enumerate() {
let layer = *layer;
// Build virtual row list (accounts for group expansion)
let rows = build_timeline_rows(context_layers);
// Draw layer rows from virtual row list
for (i, row) in rows.iter().enumerate() {
let y = rect.min.y + i as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
// Skip if layer is outside visible area
@ -1228,8 +1547,10 @@ impl TimelinePane {
egui::vec2(rect.width(), LAYER_HEIGHT),
);
let row_layer_id = row.layer_id();
// Active vs inactive background colors
let is_active = active_layer_id.map_or(false, |id| id == layer.id());
let is_active = active_layer_id.map_or(false, |id| id == row_layer_id);
let bg_color = if is_active {
active_color
} else {
@ -1277,12 +1598,98 @@ impl TimelinePane {
}
}
// For collapsed groups, render merged clip spans and skip normal clip rendering
if let TimelineRow::CollapsedGroup { group: g, .. } = row {
// Collect all child clip time ranges (with drag preview offset)
let child_clips = g.all_child_clip_instances();
let is_move_drag = self.clip_drag_state == Some(ClipDragType::Move);
let mut ranges: Vec<(f64, f64)> = Vec::new();
for (_child_layer_id, ci) in &child_clips {
let clip_dur = document.get_clip_duration(&ci.clip_id).unwrap_or_else(|| {
ci.trim_end.unwrap_or(1.0) - ci.trim_start
});
let mut start = ci.effective_start();
let dur = ci.total_duration(clip_dur);
// Apply drag offset for selected clips during move
if is_move_drag && selection.contains_clip_instance(&ci.id) {
start = (start + self.drag_offset).max(0.0);
}
ranges.push((start, start + dur));
}
// Sort and merge overlapping ranges
ranges.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));
let mut merged: Vec<(f64, f64)> = Vec::new();
for (s, e) in ranges {
if let Some(last) = merged.last_mut() {
if s <= last.1 {
last.1 = last.1.max(e);
} else {
merged.push((s, e));
}
} else {
merged.push((s, e));
}
}
// Check if any child clips are selected (for highlight)
let any_selected = child_clips.iter().any(|(_, ci)| selection.contains_clip_instance(&ci.id));
// Draw each merged span as a teal bar (brighter when selected)
let teal = if any_selected {
egui::Color32::from_rgb(30, 190, 190)
} else {
egui::Color32::from_rgb(0, 150, 150)
};
let bright_teal = if any_selected {
egui::Color32::from_rgb(150, 255, 255)
} else {
egui::Color32::from_rgb(100, 220, 220)
};
for (s, e) in &merged {
let sx = self.time_to_x(*s);
let ex = self.time_to_x(*e).max(sx + MIN_CLIP_WIDTH_PX);
if ex >= 0.0 && sx <= rect.width() {
let vsx = sx.max(0.0);
let vex = ex.min(rect.width());
let span_rect = egui::Rect::from_min_max(
egui::pos2(rect.min.x + vsx, y + 10.0),
egui::pos2(rect.min.x + vex, y + LAYER_HEIGHT - 10.0),
);
painter.rect_filled(span_rect, 3.0, teal);
painter.rect_stroke(
span_rect,
3.0,
egui::Stroke::new(1.0, bright_teal),
egui::StrokeKind::Middle,
);
}
}
// Separator line at bottom
painter.line_segment(
[
egui::pos2(layer_rect.min.x, layer_rect.max.y),
egui::pos2(layer_rect.max.x, layer_rect.max.y),
],
egui::Stroke::new(1.0, egui::Color32::from_gray(20)),
);
continue; // Skip normal clip rendering for collapsed groups
}
// Get the AnyLayer for normal rendering (Normal or GroupChild rows)
let layer: &AnyLayer = match row {
TimelineRow::Normal(l) => l,
TimelineRow::GroupChild { child, .. } => child,
TimelineRow::CollapsedGroup { .. } => unreachable!(), // handled above
};
// Draw clip instances for this layer
let clip_instances = match layer {
let clip_instances: &[ClipInstance] = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[],
};
// For moves, precompute the clamped offset so all selected clips move uniformly
@ -1585,6 +1992,10 @@ impl TimelinePane {
egui::Color32::from_rgb(220, 80, 160), // Pink
egui::Color32::from_rgb(255, 120, 200), // Bright pink
),
lightningbeam_core::layer::AnyLayer::Group(_) => (
egui::Color32::from_rgb(0, 150, 150), // Teal
egui::Color32::from_rgb(100, 220, 220), // Bright teal
),
};
let (row, total_rows) = clip_stacking[clip_instance_index];
@ -2046,18 +2457,41 @@ impl TimelinePane {
if pos.y >= header_rect.min.y && pos.x >= content_rect.min.x {
let relative_y = pos.y - header_rect.min.y + self.viewport_scroll_y;
let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
// Get the layer at this index (using virtual rows for group support)
let click_rows = build_timeline_rows(context_layers);
if clicked_layer_index < click_rows.len() {
let click_row = &click_rows[clicked_layer_index];
// Check collapsed groups first (merged spans)
if matches!(click_row, TimelineRow::CollapsedGroup { .. }) {
if let Some(child_ids) = self.detect_collapsed_group_at_pointer(
pos, document, content_rect, header_rect, editing_clip_id,
) {
if !child_ids.is_empty() {
if shift_held {
for id in &child_ids {
selection.add_clip_instance(*id);
}
} else {
selection.clear_clip_instances();
for id in &child_ids {
selection.add_clip_instance(*id);
}
}
*active_layer_id = Some(click_row.layer_id());
clicked_clip_instance = true;
}
}
} else if let Some(layer) = click_row.as_any_layer() {
// Normal or GroupChild rows: check individual clips
let _layer_data = layer.layer();
// Get clip instances for this layer
let clip_instances = match layer {
let clip_instances: &[ClipInstance] = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[],
};
// Check if click is within any clip instance
@ -2114,12 +2548,10 @@ impl TimelinePane {
let relative_y = pos.y - header_rect.min.y + self.viewport_scroll_y;
let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id());
}
// Get the layer at this index (using virtual rows for group support)
let header_rows = build_timeline_rows(context_layers);
if clicked_layer_index < header_rows.len() {
*active_layer_id = Some(header_rows[clicked_layer_index].layer_id());
}
}
}
@ -2155,6 +2587,24 @@ impl TimelinePane {
// Start dragging with the detected drag type
self.clip_drag_state = Some(drag_type);
self.drag_offset = 0.0;
} else if let Some(child_ids) = self.detect_collapsed_group_at_pointer(
mousedown_pos,
document,
content_rect,
header_rect,
editing_clip_id,
) {
// Collapsed group merged span — select all child clips and start Move drag
if !child_ids.is_empty() {
if !shift_held {
selection.clear_clip_instances();
}
for id in &child_ids {
selection.add_clip_instance(*id);
}
self.clip_drag_state = Some(ClipDragType::Move);
self.drag_offset = 0.0;
}
}
}
}
@ -2174,18 +2624,9 @@ impl TimelinePane {
let mut layer_moves: HashMap<uuid::Uuid, Vec<(uuid::Uuid, f64, f64)>> =
HashMap::new();
// Iterate through all layers to find selected clip instances
for &layer in context_layers {
// Iterate through all layers (including group children) to find selected clip instances
for (layer, clip_instances) in all_layer_clip_instances(context_layers) {
let layer_id = layer.id();
// Get clip instances for this layer
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
};
// Find selected clip instances in this layer
for clip_instance in clip_instances {
if selection.contains_clip_instance(&clip_instance.id) {
@ -2225,25 +2666,9 @@ impl TimelinePane {
)>,
> = HashMap::new();
// Iterate through all layers to find selected clip instances
for &layer in context_layers {
// Iterate through all layers (including group children) to find selected clip instances
for (layer, clip_instances) in all_layer_clip_instances(context_layers) {
let layer_id = layer.id();
let _layer_data = layer.layer();
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => {
&vl.clip_instances
}
lightningbeam_core::layer::AnyLayer::Audio(al) => {
&al.clip_instances
}
lightningbeam_core::layer::AnyLayer::Video(vl) => {
&vl.clip_instances
}
lightningbeam_core::layer::AnyLayer::Effect(el) => {
&el.clip_instances
}
};
// Find selected clip instances in this layer
for clip_instance in clip_instances {
@ -2367,14 +2792,8 @@ impl TimelinePane {
ClipDragType::LoopExtendRight => {
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for &layer in context_layers {
for (layer, clip_instances) in all_layer_clip_instances(context_layers) {
let layer_id = layer.id();
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
};
for clip_instance in clip_instances {
if selection.contains_clip_instance(&clip_instance.id) {
@ -2439,14 +2858,8 @@ impl TimelinePane {
// Extend loop_before (pre-loop region)
let mut layer_loops: HashMap<uuid::Uuid, Vec<lightningbeam_core::actions::loop_clip_instances::LoopEntry>> = HashMap::new();
for &layer in context_layers {
for (layer, clip_instances) in all_layer_clip_instances(context_layers) {
let layer_id = layer.id();
let clip_instances = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
};
for clip_instance in clip_instances {
if selection.contains_clip_instance(&clip_instance.id) {
@ -2529,11 +2942,10 @@ impl TimelinePane {
let relative_y = pos.y - header_rect.min.y + self.viewport_scroll_y;
let clicked_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
if clicked_layer_index < layer_count {
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
if let Some(layer) = layers.get(clicked_layer_index) {
*active_layer_id = Some(layer.id());
// Get the layer at this index (using virtual rows for group support)
let empty_click_rows = build_timeline_rows(context_layers);
if clicked_layer_index < empty_click_rows.len() {
*active_layer_id = Some(empty_click_rows[clicked_layer_index].layer_id());
// Clear clip instance selection when clicking on empty layer area
if !shift_held {
selection.clear_clip_instances();
@ -2542,7 +2954,6 @@ impl TimelinePane {
}
}
}
}
// Get mouse position relative to content area
let mouse_pos = response.hover_pos().unwrap_or(content_rect.center());
@ -2909,16 +3320,18 @@ impl PaneRenderer for TimelinePane {
let document = shared.action_executor.document();
let editing_clip_id = shared.editing_clip_id;
let context_layers = document.context_layers(editing_clip_id.as_ref());
let layer_count = context_layers.len();
// Use virtual row count (includes expanded group children) for height calculations
let layer_count = build_timeline_rows(&context_layers).len();
// Calculate project duration from last clip endpoint across all layers
let mut max_endpoint: f64 = 10.0; // Default minimum duration
for &layer in &context_layers {
let clip_instances = match layer {
let clip_instances: &[ClipInstance] = match layer {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[],
};
for clip_instance in clip_instances {
@ -3054,6 +3467,7 @@ impl PaneRenderer for TimelinePane {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
for inst in instances {
if !shared.selection.contains_clip_instance(&inst.id) { continue; }
@ -3083,6 +3497,7 @@ impl PaneRenderer for TimelinePane {
AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[],
};
// Check each selected clip
enabled = instances.iter()
@ -3309,10 +3724,11 @@ impl PaneRenderer for TimelinePane {
let relative_y = pointer_pos.y - content_rect.min.y + self.viewport_scroll_y;
let hovered_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
let layers: Vec<_> = context_layers.iter().rev().copied().collect();
// Get the layer at this index (using virtual rows for group support)
let drop_rows = build_timeline_rows(&context_layers);
if let Some(layer) = layers.get(hovered_layer_index) {
let drop_layer = drop_rows.get(hovered_layer_index).and_then(|r| r.as_any_layer());
if let Some(layer) = drop_layer {
let is_compatible = can_drop_on_layer(layer, dragging.clip_type);
// Visual feedback: highlight compatible tracks