start work on compositing, effects

This commit is contained in:
Skyler Lehmkuhl 2025-12-08 09:30:22 -05:00
parent 420f3bf7b9
commit 7eb61ab0a8
31 changed files with 3172 additions and 150 deletions

View File

@ -98,6 +98,9 @@ impl Action for AddClipInstanceAction {
AnyLayer::Video(video_layer) => { AnyLayer::Video(video_layer) => {
video_layer.clip_instances.push(self.clip_instance.clone()); video_layer.clip_instances.push(self.clip_instance.clone());
} }
AnyLayer::Effect(_) => {
return Err("Cannot add clip instances to effect layers".to_string());
}
} }
self.executed = true; self.executed = true;
@ -130,6 +133,9 @@ impl Action for AddClipInstanceAction {
.clip_instances .clip_instances
.retain(|ci| ci.id != instance_id); .retain(|ci| ci.id != instance_id);
} }
AnyLayer::Effect(_) => {
// Effect layers don't have clip instances, nothing to rollback
}
} }
self.executed = false; self.executed = false;

View File

@ -0,0 +1,232 @@
//! Add effect action
//!
//! Handles adding a new effect instance (as a ClipInstance) to an effect layer.
use crate::action::Action;
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Action that adds an effect instance to an effect layer.
///
/// Effect instances are represented as ClipInstance objects where clip_id
/// references an EffectDefinition embedded in the document.
pub struct AddEffectAction {
    /// ID of the effect layer to add the effect to
    layer_id: Uuid,
    /// The clip instance (effect) to add. `Some` before execution and after
    /// rollback; `None` while the instance lives in the layer (taken on execute).
    instance: Option<ClipInstance>,
    /// Index to insert at (None = append to end)
    insert_index: Option<usize>,
    /// ID of the created effect; set after a successful execute, cleared on rollback
    created_effect_id: Option<Uuid>,
}
impl AddEffectAction {
    /// Create an add-effect action that appends the instance to the end of
    /// the target layer's effect list.
    ///
    /// # Arguments
    ///
    /// * `layer_id` - ID of the effect layer to add the effect to
    /// * `instance` - The clip instance (referencing an effect definition) to add
    pub fn new(layer_id: Uuid, instance: ClipInstance) -> Self {
        Self {
            layer_id,
            instance: Some(instance),
            insert_index: None,
            created_effect_id: None,
        }
    }

    /// Create an add-effect action that inserts the instance at a specific
    /// position in the layer's effect list.
    ///
    /// # Arguments
    ///
    /// * `layer_id` - ID of the effect layer to add the effect to
    /// * `instance` - The clip instance (referencing an effect definition) to add
    /// * `index` - Index to insert at
    pub fn at_index(layer_id: Uuid, instance: ClipInstance, index: usize) -> Self {
        // Same as `new`, but with an explicit insertion point.
        let mut action = Self::new(layer_id, instance);
        action.insert_index = Some(index);
        action
    }

    /// ID of the created effect instance, or `None` if not yet executed.
    pub fn created_effect_id(&self) -> Option<Uuid> {
        self.created_effect_id
    }

    /// ID of the layer this effect is (or will be) added to.
    pub fn layer_id(&self) -> Uuid {
        self.layer_id
    }
}
impl Action for AddEffectAction {
    /// Add the stored effect instance to the target effect layer.
    ///
    /// # Errors
    ///
    /// Returns an error if the layer does not exist, is not an effect layer,
    /// or the action was already executed without an intervening rollback.
    fn execute(&mut self, document: &mut Document) -> Result<(), String> {
        // Validate the target layer BEFORE taking the instance out of `self`.
        // The original code took the instance first, so a failed lookup (or a
        // non-effect layer) dropped the instance and permanently broke the
        // action — every retry would then report the misleading
        // "Effect already added" error.
        let layer = document.get_layer_mut(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;

        // Ensure it's an effect layer
        let effect_layer = match layer {
            AnyLayer::Effect(ref mut el) => el,
            _ => return Err("Layer is not an effect layer".to_string()),
        };

        // Take the instance (can only execute once without rollback)
        let instance = self.instance.take()
            .ok_or_else(|| "Effect already added (call rollback first)".to_string())?;
        let instance_id = instance.id;

        // Add or insert the effect
        match self.insert_index {
            Some(index) => effect_layer.insert_clip_instance(index, instance),
            None => effect_layer.add_clip_instance(instance),
        }

        self.created_effect_id = Some(instance_id);
        Ok(())
    }

    /// Remove the previously added effect instance, storing it back in
    /// `self.instance` so the action can be re-executed (redo).
    ///
    /// # Errors
    ///
    /// Returns an error if the action has not been executed, or the layer or
    /// instance can no longer be found.
    fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
        let instance_id = self.created_effect_id
            .ok_or_else(|| "No effect to remove (not executed yet)".to_string())?;

        // Find the effect layer
        let layer = document.get_layer_mut(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;

        // Ensure it's an effect layer
        let effect_layer = match layer {
            AnyLayer::Effect(ref mut el) => el,
            _ => return Err("Layer is not an effect layer".to_string()),
        };

        // Remove the instance and store it for potential re-execution
        let removed = effect_layer.remove_clip_instance(&instance_id)
            .ok_or_else(|| format!("Effect instance {} not found", instance_id))?;

        self.instance = Some(removed);
        self.created_effect_id = None;
        Ok(())
    }

    fn description(&self) -> String {
        "Add effect".to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::effect::{EffectCategory, EffectDefinition, EffectParameterDef};
    use crate::effect_layer::EffectLayer;
    use crate::layer::AnyLayer;

    /// Build a document containing one empty effect layer and a sample effect
    /// definition. Returns (document, effect-layer id, definition).
    fn create_test_setup() -> (Document, Uuid, EffectDefinition) {
        let mut document = Document::new("Test");
        // Create effect layer
        let effect_layer = EffectLayer::new("Effects");
        let layer_id = effect_layer.layer.id;
        document.root_mut().add_child(AnyLayer::Effect(effect_layer));
        // Create effect definition with a single float parameter
        let def = EffectDefinition::new(
            "Test Effect",
            EffectCategory::Color,
            "// shader code",
            vec![EffectParameterDef::float_range("intensity", "Intensity", 1.0, 0.0, 2.0)],
        );
        (document, layer_id, def)
    }

    /// Executing the action adds the instance to the layer and records its id.
    #[test]
    fn test_add_effect() {
        let (mut document, layer_id, def) = create_test_setup();
        let instance = def.create_instance(0.0, 10.0);
        let instance_id = instance.id;
        let mut action = AddEffectAction::new(layer_id, instance);
        action.execute(&mut document).unwrap();
        // Verify effect was added
        assert_eq!(action.created_effect_id(), Some(instance_id));
        let layer = document.get_layer(&layer_id).unwrap();
        if let AnyLayer::Effect(el) = layer {
            assert_eq!(el.clip_instances.len(), 1);
            assert_eq!(el.clip_instances[0].id, instance_id);
        } else {
            panic!("Expected effect layer");
        }
    }

    /// Rolling back removes the instance the action added.
    #[test]
    fn test_add_effect_rollback() {
        let (mut document, layer_id, def) = create_test_setup();
        let instance = def.create_instance(0.0, 10.0);
        let mut action = AddEffectAction::new(layer_id, instance);
        action.execute(&mut document).unwrap();
        action.rollback(&mut document).unwrap();
        // Verify effect was removed
        let layer = document.get_layer(&layer_id).unwrap();
        if let AnyLayer::Effect(el) = layer {
            assert_eq!(el.clip_instances.len(), 0);
        } else {
            panic!("Expected effect layer");
        }
    }

    /// `at_index` inserts between existing effects, preserving list order.
    #[test]
    fn test_add_effect_at_index() {
        let (mut document, layer_id, def) = create_test_setup();
        // Add first effect
        let instance1 = def.create_instance(0.0, 10.0);
        let id1 = instance1.id;
        let mut action1 = AddEffectAction::new(layer_id, instance1);
        action1.execute(&mut document).unwrap();
        // Add second effect
        let instance2 = def.create_instance(0.0, 10.0);
        let id2 = instance2.id;
        let mut action2 = AddEffectAction::new(layer_id, instance2);
        action2.execute(&mut document).unwrap();
        // Insert third effect at index 1 (between first and second)
        let instance3 = def.create_instance(0.0, 10.0);
        let id3 = instance3.id;
        let mut action3 = AddEffectAction::at_index(layer_id, instance3, 1);
        action3.execute(&mut document).unwrap();
        // Verify order: [id1, id3, id2]
        let layer = document.get_layer(&layer_id).unwrap();
        if let AnyLayer::Effect(el) = layer {
            assert_eq!(el.clip_instances.len(), 3);
            assert_eq!(el.clip_instances[0].id, id1);
            assert_eq!(el.clip_instances[1].id, id3);
            assert_eq!(el.clip_instances[2].id, id2);
        } else {
            panic!("Expected effect layer");
        }
    }
}

View File

@ -76,6 +76,7 @@ impl Action for AddLayerAction {
AnyLayer::Vector(_) => "Add vector layer", AnyLayer::Vector(_) => "Add vector layer",
AnyLayer::Audio(_) => "Add audio layer", AnyLayer::Audio(_) => "Add audio layer",
AnyLayer::Video(_) => "Add video layer", AnyLayer::Video(_) => "Add video layer",
AnyLayer::Effect(_) => "Add effect layer",
} }
.to_string() .to_string()
} }

View File

@ -4,11 +4,13 @@
//! through the action system. //! through the action system.
pub mod add_clip_instance; pub mod add_clip_instance;
pub mod add_effect;
pub mod add_layer; pub mod add_layer;
pub mod add_shape; pub mod add_shape;
pub mod move_clip_instances; pub mod move_clip_instances;
pub mod move_objects; pub mod move_objects;
pub mod paint_bucket; pub mod paint_bucket;
pub mod remove_effect;
pub mod set_document_properties; pub mod set_document_properties;
pub mod set_instance_properties; pub mod set_instance_properties;
pub mod set_layer_properties; pub mod set_layer_properties;
@ -18,11 +20,13 @@ pub mod transform_objects;
pub mod trim_clip_instances; pub mod trim_clip_instances;
pub use add_clip_instance::AddClipInstanceAction; pub use add_clip_instance::AddClipInstanceAction;
pub use add_effect::AddEffectAction;
pub use add_layer::AddLayerAction; pub use add_layer::AddLayerAction;
pub use add_shape::AddShapeAction; pub use add_shape::AddShapeAction;
pub use move_clip_instances::MoveClipInstancesAction; pub use move_clip_instances::MoveClipInstancesAction;
pub use move_objects::MoveShapeInstancesAction; pub use move_objects::MoveShapeInstancesAction;
pub use paint_bucket::PaintBucketAction; pub use paint_bucket::PaintBucketAction;
pub use remove_effect::RemoveEffectAction;
pub use set_document_properties::SetDocumentPropertiesAction; pub use set_document_properties::SetDocumentPropertiesAction;
pub use set_instance_properties::{InstancePropertyChange, SetInstancePropertiesAction}; pub use set_instance_properties::{InstancePropertyChange, SetInstancePropertiesAction};
pub use set_layer_properties::{LayerProperty, SetLayerPropertiesAction}; pub use set_layer_properties::{LayerProperty, SetLayerPropertiesAction};

View File

@ -3,6 +3,7 @@
//! Handles moving one or more clip instances along the timeline. //! Handles moving one or more clip instances along the timeline.
use crate::action::Action; use crate::action::Action;
use crate::clip::ClipInstance;
use crate::document::Document; use crate::document::Document;
use crate::layer::AnyLayer; use crate::layer::AnyLayer;
use std::collections::HashMap; use std::collections::HashMap;
@ -50,10 +51,11 @@ impl Action for MoveClipInstancesAction {
// Find member's current position // Find member's current position
if let Some(layer) = document.get_layer(member_layer_id) { if let Some(layer) = document.get_layer(member_layer_id) {
let clip_instances = match layer { let clip_instances: &[ClipInstance] = match layer {
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -88,10 +90,11 @@ impl Action for MoveClipInstancesAction {
for (instance_id, old_start, new_start) in moves { for (instance_id, old_start, new_start) in moves {
// Get the instance to calculate its duration // Get the instance to calculate its duration
let clip_instances = match layer { let clip_instances: &[ClipInstance] = match layer {
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
let instance = clip_instances.iter() let instance = clip_instances.iter()
@ -138,6 +141,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
// Update timeline_start for each clip instance // Update timeline_start for each clip instance
@ -162,6 +166,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
// Restore original timeline_start for each clip instance // Restore original timeline_start for each clip instance

View File

@ -0,0 +1,223 @@
//! Remove effect action
//!
//! Handles removing an effect instance (ClipInstance) from an effect layer.
use crate::action::Action;
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Action that removes an effect instance from an effect layer.
pub struct RemoveEffectAction {
    /// ID of the layer containing the effect
    layer_id: Uuid,
    /// ID of the effect instance to remove
    instance_id: Uuid,
    /// The removed instance, stored after execute so rollback can restore it
    removed_instance: Option<ClipInstance>,
    /// Index where the instance sat before removal (so undo reinserts in place)
    removed_index: Option<usize>,
}
impl RemoveEffectAction {
    /// Create a remove-effect action targeting one instance on one layer.
    ///
    /// # Arguments
    ///
    /// * `layer_id` - ID of the effect layer containing the effect
    /// * `instance_id` - ID of the clip instance to remove
    pub fn new(layer_id: Uuid, instance_id: Uuid) -> Self {
        Self {
            removed_instance: None,
            removed_index: None,
            layer_id,
            instance_id,
        }
    }

    /// ID of the layer the effect is removed from.
    pub fn layer_id(&self) -> Uuid {
        self.layer_id
    }

    /// ID of the instance that was (or will be) removed.
    pub fn instance_id(&self) -> Uuid {
        self.instance_id
    }
}
impl Action for RemoveEffectAction {
    /// Remove the target effect instance, remembering both the instance and
    /// its list position so rollback can restore it exactly where it was.
    fn execute(&mut self, document: &mut Document) -> Result<(), String> {
        // Locate the target layer.
        let layer = document.get_layer_mut(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;

        // Only effect layers hold effect instances.
        let effect_layer = if let AnyLayer::Effect(el) = layer {
            el
        } else {
            return Err("Layer is not an effect layer".to_string());
        };

        // Record the position before removing, then remove by id.
        let index = effect_layer.clip_instance_index(&self.instance_id)
            .ok_or_else(|| format!("Effect instance {} not found", self.instance_id))?;
        let removed = effect_layer.remove_clip_instance(&self.instance_id)
            .ok_or_else(|| format!("Effect instance {} not found", self.instance_id))?;

        // Keep both pieces for undo.
        self.removed_instance = Some(removed);
        self.removed_index = Some(index);
        Ok(())
    }

    /// Reinsert the removed effect instance at its original index.
    fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
        let instance = self.removed_instance.take()
            .ok_or_else(|| "No instance to restore (not executed yet)".to_string())?;
        let index = self.removed_index
            .ok_or_else(|| "No index stored (not executed yet)".to_string())?;

        // Locate the target layer.
        let layer = document.get_layer_mut(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not found", self.layer_id))?;

        // Only effect layers hold effect instances.
        let effect_layer = if let AnyLayer::Effect(el) = layer {
            el
        } else {
            return Err("Layer is not an effect layer".to_string());
        };

        // Put the instance back where it came from.
        effect_layer.insert_clip_instance(index, instance);
        self.removed_index = None;
        Ok(())
    }

    fn description(&self) -> String {
        "Remove effect".to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::effect::{EffectCategory, EffectDefinition, EffectParameterDef};
    use crate::effect_layer::EffectLayer;
    use crate::layer::AnyLayer;

    /// Build a document containing one empty effect layer and a sample effect
    /// definition. Returns (document, effect-layer id, definition).
    fn create_test_setup() -> (Document, Uuid, EffectDefinition) {
        let mut document = Document::new("Test");
        // Create effect layer
        let effect_layer = EffectLayer::new("Effects");
        let layer_id = effect_layer.layer.id;
        document.root_mut().add_child(AnyLayer::Effect(effect_layer));
        // Create effect definition
        let def = EffectDefinition::new(
            "Test Effect",
            EffectCategory::Color,
            "// shader code",
            vec![EffectParameterDef::float_range("intensity", "Intensity", 1.0, 0.0, 2.0)],
        );
        (document, layer_id, def)
    }

    /// Executing the action removes the instance from the layer.
    ///
    /// NOTE: the original tests put every assertion inside a bare
    /// `if let Some(AnyLayer::Effect(el)) = …` with no `else` arm, so a failed
    /// layer lookup silently skipped all assertions and the test passed
    /// vacuously. Each `if let` now panics when the pattern doesn't match.
    #[test]
    fn test_remove_effect() {
        let (mut document, layer_id, def) = create_test_setup();
        // Add an effect first
        let instance = def.create_instance(0.0, 10.0);
        let instance_id = instance.id;
        if let Some(AnyLayer::Effect(el)) = document.get_layer_mut(&layer_id) {
            el.add_clip_instance(instance);
        } else {
            panic!("Expected effect layer");
        }
        // Verify effect exists
        if let Some(AnyLayer::Effect(el)) = document.get_layer(&layer_id) {
            assert_eq!(el.clip_instances.len(), 1);
        } else {
            panic!("Expected effect layer");
        }
        // Remove the effect
        let mut action = RemoveEffectAction::new(layer_id, instance_id);
        action.execute(&mut document).unwrap();
        // Verify effect was removed
        if let Some(AnyLayer::Effect(el)) = document.get_layer(&layer_id) {
            assert_eq!(el.clip_instances.len(), 0);
        } else {
            panic!("Expected effect layer");
        }
    }

    /// Rollback restores the removed instance.
    #[test]
    fn test_remove_effect_rollback() {
        let (mut document, layer_id, def) = create_test_setup();
        // Add an effect first
        let instance = def.create_instance(0.0, 10.0);
        let instance_id = instance.id;
        if let Some(AnyLayer::Effect(el)) = document.get_layer_mut(&layer_id) {
            el.add_clip_instance(instance);
        } else {
            panic!("Expected effect layer");
        }
        // Remove and rollback
        let mut action = RemoveEffectAction::new(layer_id, instance_id);
        action.execute(&mut document).unwrap();
        action.rollback(&mut document).unwrap();
        // Verify effect was restored
        if let Some(AnyLayer::Effect(el)) = document.get_layer(&layer_id) {
            assert_eq!(el.clip_instances.len(), 1);
            assert_eq!(el.clip_instances[0].id, instance_id);
        } else {
            panic!("Expected effect layer");
        }
    }

    /// Removing a middle element and rolling back restores the original order.
    #[test]
    fn test_remove_preserves_order() {
        let (mut document, layer_id, def) = create_test_setup();
        // Add three effects
        let instance1 = def.create_instance(0.0, 10.0);
        let id1 = instance1.id;
        let instance2 = def.create_instance(0.0, 10.0);
        let id2 = instance2.id;
        let instance3 = def.create_instance(0.0, 10.0);
        let id3 = instance3.id;
        if let Some(AnyLayer::Effect(el)) = document.get_layer_mut(&layer_id) {
            el.add_clip_instance(instance1);
            el.add_clip_instance(instance2);
            el.add_clip_instance(instance3);
        } else {
            panic!("Expected effect layer");
        }
        // Remove middle effect
        let mut action = RemoveEffectAction::new(layer_id, id2);
        action.execute(&mut document).unwrap();
        // Verify order: [id1, id3]
        if let Some(AnyLayer::Effect(el)) = document.get_layer(&layer_id) {
            assert_eq!(el.clip_instances.len(), 2);
            assert_eq!(el.clip_instances[0].id, id1);
            assert_eq!(el.clip_instances[1].id, id3);
        } else {
            panic!("Expected effect layer");
        }
        // Rollback - effect should be restored at index 1
        action.rollback(&mut document).unwrap();
        // Verify order: [id1, id2, id3]
        if let Some(AnyLayer::Effect(el)) = document.get_layer(&layer_id) {
            assert_eq!(el.clip_instances.len(), 3);
            assert_eq!(el.clip_instances[0].id, id1);
            assert_eq!(el.clip_instances[1].id, id2);
            assert_eq!(el.clip_instances[2].id, id3);
        } else {
            panic!("Expected effect layer");
        }
    }
}

View File

@ -40,6 +40,7 @@ impl Action for TransformClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => return Ok(()), // Effect layers don't have clip instances
}; };
// Apply new transforms // Apply new transforms
@ -62,6 +63,7 @@ impl Action for TransformClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => return Ok(()), // Effect layers don't have clip instances
}; };
// Restore old transforms // Restore old transforms

View File

@ -3,6 +3,7 @@
//! Handles trimming one or more clip instances by adjusting trim_start and/or trim_end. //! Handles trimming one or more clip instances by adjusting trim_start and/or trim_end.
use crate::action::Action; use crate::action::Action;
use crate::clip::ClipInstance;
use crate::document::Document; use crate::document::Document;
use crate::layer::AnyLayer; use crate::layer::AnyLayer;
use std::collections::HashMap; use std::collections::HashMap;
@ -93,10 +94,11 @@ impl Action for TrimClipInstancesAction {
// Find member's current values // Find member's current values
if let Some(layer) = document.get_layer(member_layer_id) { if let Some(layer) = document.get_layer(member_layer_id) {
let clip_instances = match layer { let clip_instances: &[ClipInstance] = match layer {
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(_) => continue,
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -127,10 +129,11 @@ impl Action for TrimClipInstancesAction {
// Find member's current trim_end // Find member's current trim_end
if let Some(layer) = document.get_layer(member_layer_id) { if let Some(layer) = document.get_layer(member_layer_id) {
let clip_instances = match layer { let clip_instances: &[ClipInstance] = match layer {
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(_) => continue,
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -168,10 +171,11 @@ impl Action for TrimClipInstancesAction {
let mut clamped_layer_trims = Vec::new(); let mut clamped_layer_trims = Vec::new();
for (instance_id, trim_type, old, new) in trims { for (instance_id, trim_type, old, new) in trims {
let clip_instances = match layer { let clip_instances: &[ClipInstance] = match layer {
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
let instance = clip_instances.iter() let instance = clip_instances.iter()
@ -262,6 +266,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
// Apply trims // Apply trims
@ -299,6 +304,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Vector(vl) => &mut vl.clip_instances, AnyLayer::Vector(vl) => &mut vl.clip_instances,
AnyLayer::Audio(al) => &mut al.clip_instances, AnyLayer::Audio(al) => &mut al.clip_instances,
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(_) => continue, // Effect layers don't have clip instances
}; };
// Restore original trim values // Restore original trim values

View File

@ -600,6 +600,15 @@ impl ClipInstance {
self self
} }
/// Set explicit timeline duration by setting trim_end
///
/// For effect instances, this effectively sets the duration since
/// effects have infinite internal duration (trim_start defaults to 0).
pub fn with_timeline_duration(mut self, duration: f64) -> Self {
self.trim_end = Some(self.trim_start + duration);
self
}
/// Get the effective duration of this instance (accounting for trimming and looping) /// Get the effective duration of this instance (accounting for trimming and looping)
/// If timeline_duration is set, returns that (enabling content looping) /// If timeline_duration is set, returns that (enabling content looping)
/// Otherwise returns the trimmed content duration /// Otherwise returns the trimmed content duration

View File

@ -4,6 +4,7 @@
//! and a root graphics object containing the scene graph. //! and a root graphics object containing the scene graph.
use crate::clip::{AudioClip, ClipInstance, ImageAsset, VideoClip, VectorClip}; use crate::clip::{AudioClip, ClipInstance, ImageAsset, VideoClip, VectorClip};
use crate::effect::EffectDefinition;
use crate::layer::AnyLayer; use crate::layer::AnyLayer;
use crate::layout::LayoutNode; use crate::layout::LayoutNode;
use crate::shape::ShapeColor; use crate::shape::ShapeColor;
@ -110,6 +111,10 @@ pub struct Document {
/// Instance groups for linked clip instances /// Instance groups for linked clip instances
pub instance_groups: HashMap<Uuid, crate::instance_group::InstanceGroup>, pub instance_groups: HashMap<Uuid, crate::instance_group::InstanceGroup>,
/// Effect definitions (all effects are embedded in the document)
#[serde(default)]
pub effect_definitions: HashMap<Uuid, EffectDefinition>,
/// Current UI layout state (serialized for save/load) /// Current UI layout state (serialized for save/load)
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub ui_layout: Option<LayoutNode>, pub ui_layout: Option<LayoutNode>,
@ -139,6 +144,7 @@ impl Default for Document {
audio_clips: HashMap::new(), audio_clips: HashMap::new(),
image_assets: HashMap::new(), image_assets: HashMap::new(),
instance_groups: HashMap::new(), instance_groups: HashMap::new(),
effect_definitions: HashMap::new(),
ui_layout: None, ui_layout: None,
ui_layout_base: None, ui_layout_base: None,
current_time: 0.0, current_time: 0.0,
@ -236,6 +242,14 @@ impl Document {
} }
} }
} }
crate::layer::AnyLayer::Effect(effect_layer) => {
for instance in &effect_layer.clip_instances {
if let Some(clip_duration) = self.get_clip_duration(&instance.clip_id) {
let end_time = calculate_instance_end(instance, clip_duration);
max_end_time = max_end_time.max(end_time);
}
}
}
} }
} }
@ -393,11 +407,42 @@ impl Document {
self.image_assets.remove(id) self.image_assets.remove(id)
} }
    // === EFFECT DEFINITION METHODS ===

    /// Add an effect definition to the document, keyed by its own id.
    ///
    /// Returns the definition's id for convenience. Inserting a definition
    /// with an id already present replaces the previous entry.
    pub fn add_effect_definition(&mut self, definition: EffectDefinition) -> Uuid {
        let id = definition.id;
        self.effect_definitions.insert(id, definition);
        id
    }

    /// Get an effect definition by ID, or `None` if not present.
    pub fn get_effect_definition(&self, id: &Uuid) -> Option<&EffectDefinition> {
        self.effect_definitions.get(id)
    }

    /// Get a mutable effect definition by ID, or `None` if not present.
    pub fn get_effect_definition_mut(&mut self, id: &Uuid) -> Option<&mut EffectDefinition> {
        self.effect_definitions.get_mut(id)
    }

    /// Remove an effect definition from the document, returning it if it existed.
    ///
    /// NOTE(review): this does not check whether any effect layer still holds
    /// a ClipInstance referencing this definition — presumably callers must
    /// handle dangling references; confirm against the compositor.
    pub fn remove_effect_definition(&mut self, id: &Uuid) -> Option<EffectDefinition> {
        self.effect_definitions.remove(id)
    }

    /// Iterate over all effect definitions (unordered — HashMap iteration).
    pub fn effect_definitions(&self) -> impl Iterator<Item = &EffectDefinition> {
        self.effect_definitions.values()
    }
// === CLIP OVERLAP DETECTION METHODS === // === CLIP OVERLAP DETECTION METHODS ===
/// Get the duration of any clip type by ID /// Get the duration of any clip type by ID
/// ///
/// Searches through all clip libraries to find the clip and return its duration /// Searches through all clip libraries to find the clip and return its duration.
/// For effect definitions, returns `EFFECT_DURATION` (f64::MAX) since effects
/// have infinite internal duration.
pub fn get_clip_duration(&self, clip_id: &Uuid) -> Option<f64> { pub fn get_clip_duration(&self, clip_id: &Uuid) -> Option<f64> {
if let Some(clip) = self.vector_clips.get(clip_id) { if let Some(clip) = self.vector_clips.get(clip_id) {
Some(clip.duration) Some(clip.duration)
@ -405,6 +450,10 @@ impl Document {
Some(clip.duration) Some(clip.duration)
} else if let Some(clip) = self.audio_clips.get(clip_id) { } else if let Some(clip) = self.audio_clips.get(clip_id) {
Some(clip.duration) Some(clip.duration)
} else if self.effect_definitions.contains_key(clip_id) {
// Effects have infinite internal duration - their timeline length
// is controlled by ClipInstance.trim_end
Some(crate::effect::EFFECT_DURATION)
} else { } else {
None None
} }
@ -415,10 +464,11 @@ impl Document {
let layer = self.get_layer(layer_id)?; let layer = self.get_layer(layer_id)?;
// Find the clip instance // Find the clip instance
let instances = match layer { let instances: &[ClipInstance] = match layer {
AnyLayer::Audio(audio) => &audio.clip_instances, AnyLayer::Audio(audio) => &audio.clip_instances,
AnyLayer::Video(video) => &video.clip_instances, AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
}; };
let instance = instances.iter().find(|inst| &inst.id == instance_id)?; let instance = instances.iter().find(|inst| &inst.id == instance_id)?;
@ -435,7 +485,7 @@ impl Document {
/// ///
/// Returns (overlaps, conflicting_instance_id) /// Returns (overlaps, conflicting_instance_id)
/// ///
/// Only checks audio and video layers - vector/MIDI layers return false /// Only checks audio, video, and effect layers - vector/MIDI layers return false
pub fn check_overlap_on_layer( pub fn check_overlap_on_layer(
&self, &self,
layer_id: &Uuid, layer_id: &Uuid,
@ -447,15 +497,16 @@ impl Document {
return (false, None); return (false, None);
}; };
// Only check audio and video layers // Check audio, video, and effect layers (effects cannot overlap on same layer)
if !matches!(layer, AnyLayer::Audio(_) | AnyLayer::Video(_)) { if !matches!(layer, AnyLayer::Audio(_) | AnyLayer::Video(_) | AnyLayer::Effect(_)) {
return (false, None); return (false, None);
} }
let instances = match layer { let instances: &[ClipInstance] = match layer {
AnyLayer::Audio(audio) => &audio.clip_instances, AnyLayer::Audio(audio) => &audio.clip_instances,
AnyLayer::Video(video) => &video.clip_instances, AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
}; };
for instance in instances { for instance in instances {
@ -502,8 +553,8 @@ impl Document {
// Clamp to timeline start (can't go before 0) // Clamp to timeline start (can't go before 0)
let desired_start = desired_start.max(0.0); let desired_start = desired_start.max(0.0);
// Vector/MIDI layers don't need overlap adjustment, but still respect timeline start // Vector layers don't need overlap adjustment, but still respect timeline start
if !matches!(layer, AnyLayer::Audio(_) | AnyLayer::Video(_)) { if matches!(layer, AnyLayer::Vector(_)) {
return Some(desired_start); return Some(desired_start);
} }
@ -515,10 +566,11 @@ impl Document {
} }
// Collect all existing clip time ranges on this layer // Collect all existing clip time ranges on this layer
let instances = match layer { let instances: &[ClipInstance] = match layer {
AnyLayer::Audio(audio) => &audio.clip_instances, AnyLayer::Audio(audio) => &audio.clip_instances,
AnyLayer::Video(video) => &video.clip_instances, AnyLayer::Video(video) => &video.clip_instances,
_ => return Some(desired_start), // Shouldn't reach here AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(_) => return Some(desired_start), // Shouldn't reach here
}; };
let mut occupied_ranges: Vec<(f64, f64, Uuid)> = Vec::new(); let mut occupied_ranges: Vec<(f64, f64, Uuid)> = Vec::new();
@ -599,17 +651,18 @@ impl Document {
return current_timeline_start; // No limit if layer not found return current_timeline_start; // No limit if layer not found
}; };
// Only check audio and video layers // Only check audio, video, and effect layers
if !matches!(layer, AnyLayer::Audio(_) | AnyLayer::Video(_)) { if matches!(layer, AnyLayer::Vector(_)) {
return current_timeline_start; // No limit for vector layers return current_timeline_start; // No limit for vector layers
}; };
// Find the nearest clip to the left // Find the nearest clip to the left
let mut nearest_end = 0.0; // Can extend to timeline start by default let mut nearest_end = 0.0; // Can extend to timeline start by default
let instances = match layer { let instances: &[ClipInstance] = match layer {
AnyLayer::Audio(audio) => &audio.clip_instances, AnyLayer::Audio(audio) => &audio.clip_instances,
AnyLayer::Video(video) => &video.clip_instances, AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
}; };
@ -648,14 +701,15 @@ impl Document {
return f64::MAX; // No limit if layer not found return f64::MAX; // No limit if layer not found
}; };
// Only check audio and video layers // Only check audio, video, and effect layers
if !matches!(layer, AnyLayer::Audio(_) | AnyLayer::Video(_)) { if matches!(layer, AnyLayer::Vector(_)) {
return f64::MAX; // No limit for vector layers return f64::MAX; // No limit for vector layers
} }
let instances = match layer { let instances: &[ClipInstance] = match layer {
AnyLayer::Audio(audio) => &audio.clip_instances, AnyLayer::Audio(audio) => &audio.clip_instances,
AnyLayer::Video(video) => &video.clip_instances, AnyLayer::Video(video) => &video.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
}; };

View File

@ -0,0 +1,570 @@
//! Effect system for Lightningbeam
//!
//! Provides GPU-accelerated visual effects with animatable parameters.
//! Effects are defined by WGSL shaders embedded directly in the document.
//!
//! Effect instances are represented as `ClipInstance` objects where `clip_id`
//! references an `EffectDefinition`. Effects are treated as having infinite
//! internal duration (`EFFECT_DURATION`), with timeline duration controlled
//! solely by `timeline_start` and `timeline_duration`.
use crate::animation::AnimationCurve;
use crate::clip::ClipInstance;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
/// Constant representing "infinite" effect duration for clip lookups.
/// Effects don't have an inherent duration like video/audio clips.
/// Their timeline duration is controlled by `ClipInstance.timeline_duration`.
pub const EFFECT_DURATION: f64 = f64::MAX;
/// Category of effect for UI organization
///
/// Used to group effect definitions in browser/menu UIs. Serialized by
/// variant name via serde, so renaming a variant changes the document format.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum EffectCategory {
    /// Color adjustments (brightness, contrast, hue, saturation)
    Color,
    /// Blur effects (gaussian, motion, radial)
    Blur,
    /// Distortion effects (warp, ripple, twirl)
    Distort,
    /// Stylize effects (glow, sharpen, posterize)
    Stylize,
    /// Generate effects (noise, gradients, patterns)
    Generate,
    /// Keying effects (chroma key, luma key)
    Keying,
    /// Transition effects (wipe, dissolve, etc.)
    Transition,
    /// Time-based effects (echo, frame hold)
    Time,
    /// Custom user-defined effect
    Custom,
}
/// Type of effect parameter
///
/// Determines how a parameter is edited in the UI and how many floats it
/// occupies when packed into shader uniforms (see `EffectInstance::get_uniform_params`:
/// Color packs 4 floats, Point2D packs 2, all others pack 1).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ParameterType {
    /// Floating point value
    Float,
    /// Integer value
    Int,
    /// Boolean toggle
    Bool,
    /// RGBA color
    Color,
    /// 2D point/vector
    Point2D,
    /// Angle in degrees
    Angle,
    /// Enum with named options
    Enum,
}
/// Value of an effect parameter
///
/// Each variant corresponds to a `ParameterType`. Values are stored as f64
/// for precision and converted to f32 only when packed for the GPU.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ParameterValue {
    /// Floating point value
    Float(f64),
    /// Integer value
    Int(i64),
    /// Boolean toggle
    Bool(bool),
    /// RGBA color, each channel nominally in [0, 1]
    Color { r: f64, g: f64, b: f64, a: f64 },
    /// 2D point/vector
    Point2D { x: f64, y: f64 },
    /// Angle in degrees
    Angle(f64),
    /// Index into an `EffectParameterDef::enum_options` list
    Enum(u32),
}
impl ParameterValue {
/// Get as f64 for shader uniform packing (returns 0.0 for non-float types)
pub fn as_f32(&self) -> f32 {
match self {
ParameterValue::Float(v) => *v as f32,
ParameterValue::Int(v) => *v as f32,
ParameterValue::Bool(v) => if *v { 1.0 } else { 0.0 },
ParameterValue::Angle(v) => *v as f32,
ParameterValue::Enum(v) => *v as f32,
ParameterValue::Color { r, .. } => *r as f32,
ParameterValue::Point2D { x, .. } => *x as f32,
}
}
/// Pack color value into 4 f32s [r, g, b, a]
pub fn as_color_f32(&self) -> [f32; 4] {
match self {
ParameterValue::Color { r, g, b, a } => [*r as f32, *g as f32, *b as f32, *a as f32],
_ => [0.0, 0.0, 0.0, 1.0],
}
}
/// Pack point value into 2 f32s [x, y]
pub fn as_point_f32(&self) -> [f32; 2] {
match self {
ParameterValue::Point2D { x, y } => [*x as f32, *y as f32],
_ => [0.0, 0.0],
}
}
}
impl Default for ParameterValue {
fn default() -> Self {
ParameterValue::Float(0.0)
}
}
/// Definition of a single effect parameter
///
/// NOTE(review): `min_value`/`max_value` are presumably expected to hold the
/// same variant as `default_value` — nothing here enforces that; confirm at
/// the call sites that read them.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EffectParameterDef {
    /// Internal parameter name (used in shader)
    pub name: String,
    /// Display label for UI
    pub label: String,
    /// Parameter data type
    pub param_type: ParameterType,
    /// Default value
    pub default_value: ParameterValue,
    /// Minimum allowed value (for numeric types)
    pub min_value: Option<ParameterValue>,
    /// Maximum allowed value (for numeric types)
    pub max_value: Option<ParameterValue>,
    /// Whether this parameter can be animated
    pub animatable: bool,
    /// Enum option names (for ParameterType::Enum)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub enum_options: Vec<String>,
}
impl EffectParameterDef {
/// Create a new float parameter definition
pub fn float(name: impl Into<String>, label: impl Into<String>, default: f64) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Float,
default_value: ParameterValue::Float(default),
min_value: None,
max_value: None,
animatable: true,
enum_options: Vec::new(),
}
}
/// Create a float parameter with range constraints
pub fn float_range(
name: impl Into<String>,
label: impl Into<String>,
default: f64,
min: f64,
max: f64,
) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Float,
default_value: ParameterValue::Float(default),
min_value: Some(ParameterValue::Float(min)),
max_value: Some(ParameterValue::Float(max)),
animatable: true,
enum_options: Vec::new(),
}
}
/// Create a boolean parameter definition
pub fn boolean(name: impl Into<String>, label: impl Into<String>, default: bool) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Bool,
default_value: ParameterValue::Bool(default),
min_value: None,
max_value: None,
animatable: false,
enum_options: Vec::new(),
}
}
/// Create a color parameter definition
pub fn color(name: impl Into<String>, label: impl Into<String>, r: f64, g: f64, b: f64, a: f64) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Color,
default_value: ParameterValue::Color { r, g, b, a },
min_value: None,
max_value: None,
animatable: true,
enum_options: Vec::new(),
}
}
/// Create an angle parameter definition (in degrees)
pub fn angle(name: impl Into<String>, label: impl Into<String>, default: f64) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Angle,
default_value: ParameterValue::Angle(default),
min_value: Some(ParameterValue::Angle(0.0)),
max_value: Some(ParameterValue::Angle(360.0)),
animatable: true,
enum_options: Vec::new(),
}
}
/// Create a point parameter definition
pub fn point(name: impl Into<String>, label: impl Into<String>, x: f64, y: f64) -> Self {
Self {
name: name.into(),
label: label.into(),
param_type: ParameterType::Point2D,
default_value: ParameterValue::Point2D { x, y },
min_value: None,
max_value: None,
animatable: true,
enum_options: Vec::new(),
}
}
}
/// Type of input an effect can accept
///
/// Mirrors the variants of `InputConnection`, which records the actual
/// connection on an instance.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum EffectInputType {
    /// Input from a specific layer
    Layer,
    /// Input from the composition (all layers below, already composited)
    Composition,
    /// Input from another effect in the chain
    Effect,
}
/// Definition of an effect input slot
///
/// An `EffectDefinition` declares one slot per texture the shader samples;
/// instances connect each slot via `EffectInstance::input_connections`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EffectInput {
    /// Name of this input
    pub name: String,
    /// Type of input expected
    pub input_type: EffectInputType,
    /// Whether this input is required
    pub required: bool,
}
impl EffectInput {
/// Create a required composition input (most common case)
pub fn composition(name: impl Into<String>) -> Self {
Self {
name: name.into(),
input_type: EffectInputType::Composition,
required: true,
}
}
/// Create an optional layer input
pub fn layer(name: impl Into<String>, required: bool) -> Self {
Self {
name: name.into(),
input_type: EffectInputType::Layer,
required,
}
}
}
/// Complete definition of an effect (embedded shader + metadata)
///
/// The WGSL source is embedded directly in the document, so saved projects
/// carry no runtime dependency on a registry or external shader files.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EffectDefinition {
    /// Unique identifier for this effect definition
    pub id: Uuid,
    /// Display name
    pub name: String,
    /// Optional description
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Effect category for UI organization
    pub category: EffectCategory,
    /// WGSL shader source code (embedded directly)
    pub shader_code: String,
    /// Input slots for this effect
    pub inputs: Vec<EffectInput>,
    /// Parameter definitions
    pub parameters: Vec<EffectParameterDef>,
}
impl EffectDefinition {
    /// Create a new effect definition with a freshly generated ID and a
    /// single required composition input named "source".
    pub fn new(
        name: impl Into<String>,
        category: EffectCategory,
        shader_code: impl Into<String>,
        parameters: Vec<EffectParameterDef>,
    ) -> Self {
        // Delegate to with_id so the two constructors cannot drift apart.
        Self::with_id(Uuid::new_v4(), name, category, shader_code, parameters)
    }

    /// Create with a specific ID (for built-in effects with stable IDs)
    pub fn with_id(
        id: Uuid,
        name: impl Into<String>,
        category: EffectCategory,
        shader_code: impl Into<String>,
        parameters: Vec<EffectParameterDef>,
    ) -> Self {
        Self {
            id,
            name: name.into(),
            description: None,
            category,
            shader_code: shader_code.into(),
            inputs: vec![EffectInput::composition("source")],
            parameters,
        }
    }

    /// Add a description (builder style)
    pub fn with_description(mut self, description: impl Into<String>) -> Self {
        self.description = Some(description.into());
        self
    }

    /// Replace the default input slots with custom ones (builder style)
    pub fn with_inputs(mut self, inputs: Vec<EffectInput>) -> Self {
        self.inputs = inputs;
        self
    }

    /// Get a parameter definition by its internal name
    pub fn get_parameter(&self, name: &str) -> Option<&EffectParameterDef> {
        self.parameters.iter().find(|p| p.name == name)
    }

    /// Create a ClipInstance for this effect definition
    ///
    /// The returned ClipInstance references this effect definition via `clip_id`.
    /// Effects use `timeline_duration` to control their length since they have
    /// infinite internal duration.
    ///
    /// # Arguments
    ///
    /// * `timeline_start` - When the effect starts on the timeline (seconds)
    /// * `duration` - How long the effect appears on the timeline (seconds)
    pub fn create_instance(&self, timeline_start: f64, duration: f64) -> ClipInstance {
        ClipInstance::new(self.id)
            .with_timeline_start(timeline_start)
            .with_timeline_duration(duration)
    }
}
/// Connection to an input source for an effect
///
/// Stored per input slot in `EffectInstance::input_connections`
/// (parallel to `EffectDefinition::inputs`).
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum InputConnection {
    /// Connect to a specific layer (by ID)
    Layer(Uuid),
    /// Connect to the composited result of all layers below
    Composition,
    /// Connect to the output of another effect instance
    Effect(Uuid),
}
/// Animated parameter value for an effect instance
///
/// Holds the static base value plus an optional scalar animation curve;
/// see `value_at` for how the curve output is mapped onto each variant.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AnimatedParameter {
    /// Parameter name (matches EffectParameterDef.name)
    pub name: String,
    /// Current/base value
    pub value: ParameterValue,
    /// Optional animation curve (for animatable parameters)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub animation: Option<AnimationCurve>,
}
impl AnimatedParameter {
/// Create a new non-animated parameter
pub fn new(name: impl Into<String>, value: ParameterValue) -> Self {
Self {
name: name.into(),
value,
animation: None,
}
}
/// Create with animation
pub fn with_animation(name: impl Into<String>, value: ParameterValue, curve: AnimationCurve) -> Self {
Self {
name: name.into(),
value,
animation: Some(curve),
}
}
/// Get the value at a specific time
pub fn value_at(&self, time: f64) -> ParameterValue {
if let Some(ref curve) = self.animation {
// Apply animation curve to get animated value
let animated_value = curve.eval(time);
// Convert based on parameter type
match &self.value {
ParameterValue::Float(_) => ParameterValue::Float(animated_value),
ParameterValue::Int(_) => ParameterValue::Int(animated_value.round() as i64),
ParameterValue::Bool(_) => ParameterValue::Bool(animated_value > 0.5),
ParameterValue::Angle(_) => ParameterValue::Angle(animated_value),
ParameterValue::Enum(_) => ParameterValue::Enum(animated_value.round() as u32),
// Color and Point2D would need multiple curves, so just use base value
ParameterValue::Color { .. } => self.value.clone(),
ParameterValue::Point2D { .. } => self.value.clone(),
}
} else {
self.value.clone()
}
}
}
/// Instance of an effect applied to a layer
///
/// NOTE(review): this type overlaps with representing effects as
/// `ClipInstance`s on an `EffectLayer` (see module docs) — confirm which
/// representation the compositor actually consumes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EffectInstance {
    /// Unique identifier for this instance
    pub id: Uuid,
    /// ID of the effect definition this is an instance of
    pub effect_id: Uuid,
    /// Start time on the timeline (when effect becomes active)
    pub timeline_start: f64,
    /// End time on the timeline (when effect stops)
    pub timeline_end: f64,
    /// Input connections (parallel to EffectDefinition.inputs)
    pub input_connections: Vec<Option<InputConnection>>,
    /// Parameter values (name -> animated value)
    pub parameters: HashMap<String, AnimatedParameter>,
    /// Whether the effect is enabled
    pub enabled: bool,
    /// Mix/blend amount (0.0 = original, 1.0 = full effect)
    pub mix: f64,
}
impl EffectInstance {
/// Create a new effect instance from a definition
pub fn new(definition: &EffectDefinition, timeline_start: f64, timeline_end: f64) -> Self {
// Initialize parameters from definition defaults
let mut parameters = HashMap::new();
for param_def in &definition.parameters {
parameters.insert(
param_def.name.clone(),
AnimatedParameter::new(param_def.name.clone(), param_def.default_value.clone()),
);
}
// Initialize input connections (Composition for required, None for optional)
let input_connections = definition.inputs.iter()
.map(|input| {
if input.required && input.input_type == EffectInputType::Composition {
Some(InputConnection::Composition)
} else {
None
}
})
.collect();
Self {
id: Uuid::new_v4(),
effect_id: definition.id,
timeline_start,
timeline_end,
input_connections,
parameters,
enabled: true,
mix: 1.0,
}
}
/// Check if the effect is active at a given time
pub fn is_active_at(&self, time: f64) -> bool {
self.enabled && time >= self.timeline_start && time < self.timeline_end
}
/// Get a parameter value at a specific time
pub fn get_parameter_at(&self, name: &str, time: f64) -> Option<ParameterValue> {
self.parameters.get(name).map(|p| p.value_at(time))
}
/// Set a parameter value (non-animated)
pub fn set_parameter(&mut self, name: &str, value: ParameterValue) {
if let Some(param) = self.parameters.get_mut(name) {
param.value = value;
param.animation = None;
}
}
/// Get all parameter values at a specific time as f32 array for shader uniform
pub fn get_uniform_params(&self, time: f64, definitions: &[EffectParameterDef]) -> Vec<f32> {
let mut params = Vec::with_capacity(16);
for def in definitions {
if let Some(param) = self.parameters.get(&def.name) {
let value = param.value_at(time);
match def.param_type {
ParameterType::Float | ParameterType::Int | ParameterType::Bool |
ParameterType::Angle | ParameterType::Enum => {
params.push(value.as_f32());
}
ParameterType::Color => {
let color = value.as_color_f32();
params.extend_from_slice(&color);
}
ParameterType::Point2D => {
let point = value.as_point_f32();
params.extend_from_slice(&point);
}
}
}
}
// Pad to 16 floats for uniform alignment
while params.len() < 16 {
params.push(0.0);
}
params
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_effect_definition_creation() {
        let def = EffectDefinition::new(
            "Test Effect",
            EffectCategory::Color,
            "// shader code",
            vec![EffectParameterDef::float_range("intensity", "Intensity", 1.0, 0.0, 2.0)],
        );
        assert_eq!(def.name, "Test Effect");
        assert_eq!(def.category, EffectCategory::Color);
        assert_eq!(def.parameters.len(), 1);
        // `new` attaches the single default "source" composition input.
        assert_eq!(def.inputs.len(), 1);
    }

    #[test]
    fn test_effect_instance_creation() {
        let def = EffectDefinition::new(
            "Blur",
            EffectCategory::Blur,
            "// blur shader",
            vec![
                EffectParameterDef::float_range("radius", "Radius", 10.0, 0.0, 100.0),
                EffectParameterDef::float_range("quality", "Quality", 1.0, 0.0, 1.0),
            ],
        );
        // Instance spans [0, 10); active at 5, inactive at 15.
        let instance = EffectInstance::new(&def, 0.0, 10.0);
        assert_eq!(instance.effect_id, def.id);
        assert!(instance.is_active_at(5.0));
        assert!(!instance.is_active_at(15.0));
        // Parameters are seeded from the definition's defaults.
        assert_eq!(instance.parameters.len(), 2);
    }

    #[test]
    fn test_parameter_value_as_f32() {
        assert_eq!(ParameterValue::Float(1.5).as_f32(), 1.5);
        assert_eq!(ParameterValue::Int(42).as_f32(), 42.0);
        assert_eq!(ParameterValue::Bool(true).as_f32(), 1.0);
        assert_eq!(ParameterValue::Bool(false).as_f32(), 0.0);
        assert_eq!(ParameterValue::Angle(90.0).as_f32(), 90.0);
    }
}

View File

@ -0,0 +1,289 @@
//! Effect layer type for Lightningbeam
//!
//! An EffectLayer applies visual effects to the composition below it.
//! Effect instances are stored as `ClipInstance` objects where `clip_id`
//! references an `EffectDefinition`.
use crate::clip::ClipInstance;
use crate::layer::{Layer, LayerTrait, LayerType};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Layer type that applies visual effects to the composition
///
/// Effect instances are represented as `ClipInstance` objects.
/// The `clip_id` field references an `EffectDefinition` rather than a clip.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EffectLayer {
    /// Base layer properties (id, name, visibility, opacity, etc.)
    pub layer: Layer,
    /// Effect instances (as ClipInstances referencing EffectDefinitions),
    /// kept in stacking/application order
    pub clip_instances: Vec<ClipInstance>,
}
// Every accessor/mutator simply forwards to the embedded base `Layer`;
// the effect layer adds no behavior of its own here.
impl LayerTrait for EffectLayer {
    fn id(&self) -> Uuid {
        self.layer.id
    }
    fn name(&self) -> &str {
        &self.layer.name
    }
    fn set_name(&mut self, name: String) {
        self.layer.name = name;
    }
    fn has_custom_name(&self) -> bool {
        self.layer.has_custom_name
    }
    fn set_has_custom_name(&mut self, custom: bool) {
        self.layer.has_custom_name = custom;
    }
    fn visible(&self) -> bool {
        self.layer.visible
    }
    fn set_visible(&mut self, visible: bool) {
        self.layer.visible = visible;
    }
    fn opacity(&self) -> f64 {
        self.layer.opacity
    }
    fn set_opacity(&mut self, opacity: f64) {
        self.layer.opacity = opacity;
    }
    fn volume(&self) -> f64 {
        self.layer.volume
    }
    fn set_volume(&mut self, volume: f64) {
        self.layer.volume = volume;
    }
    fn muted(&self) -> bool {
        self.layer.muted
    }
    fn set_muted(&mut self, muted: bool) {
        self.layer.muted = muted;
    }
    fn soloed(&self) -> bool {
        self.layer.soloed
    }
    fn set_soloed(&mut self, soloed: bool) {
        self.layer.soloed = soloed;
    }
    fn locked(&self) -> bool {
        self.layer.locked
    }
    fn set_locked(&mut self, locked: bool) {
        self.layer.locked = locked;
    }
}
impl EffectLayer {
    /// Create a new effect layer with the given display name
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            layer: Layer::new(LayerType::Effect, name),
            clip_instances: Vec::new(),
        }
    }

    /// Create with a specific ID (e.g. when restoring a deleted layer)
    pub fn with_id(id: Uuid, name: impl Into<String>) -> Self {
        Self {
            layer: Layer::with_id(id, LayerType::Effect, name),
            clip_instances: Vec::new(),
        }
    }

    /// Add a clip instance (effect) to the end of this layer; returns its ID
    pub fn add_clip_instance(&mut self, instance: ClipInstance) -> Uuid {
        let id = instance.id;
        self.clip_instances.push(instance);
        id
    }

    /// Insert a clip instance at `index` (clamped to the list length);
    /// returns its ID
    pub fn insert_clip_instance(&mut self, index: usize, instance: ClipInstance) -> Uuid {
        let id = instance.id;
        let index = index.min(self.clip_instances.len());
        self.clip_instances.insert(index, instance);
        id
    }

    /// Remove a clip instance by ID, returning it if it was present
    pub fn remove_clip_instance(&mut self, id: &Uuid) -> Option<ClipInstance> {
        // position().map() expresses find-then-remove without the if/else dance.
        self.clip_instances
            .iter()
            .position(|e| &e.id == id)
            .map(|index| self.clip_instances.remove(index))
    }

    /// Get a clip instance by ID
    pub fn get_clip_instance(&self, id: &Uuid) -> Option<&ClipInstance> {
        self.clip_instances.iter().find(|e| &e.id == id)
    }

    /// Get a mutable clip instance by ID
    pub fn get_clip_instance_mut(&mut self, id: &Uuid) -> Option<&mut ClipInstance> {
        self.clip_instances.iter_mut().find(|e| &e.id == id)
    }

    /// Get all clip instances (effects) that are active at a given time
    ///
    /// An instance is active on `[timeline_start, timeline_start + effective_duration)`,
    /// using `EFFECT_DURATION` as the (infinite) intrinsic duration.
    pub fn active_clip_instances_at(&self, time: f64) -> Vec<&ClipInstance> {
        use crate::effect::EFFECT_DURATION;
        self.clip_instances
            .iter()
            .filter(|e| {
                let end = e.timeline_start + e.effective_duration(EFFECT_DURATION);
                time >= e.timeline_start && time < end
            })
            .collect()
    }

    /// Get the index of a clip instance within this layer
    pub fn clip_instance_index(&self, id: &Uuid) -> Option<usize> {
        self.clip_instances.iter().position(|e| &e.id == id)
    }

    /// Move a clip instance to `new_index` (clamped); returns `false` if the
    /// instance was not found
    pub fn move_clip_instance(&mut self, id: &Uuid, new_index: usize) -> bool {
        match self.clip_instance_index(id) {
            Some(current_index) => {
                let instance = self.clip_instances.remove(current_index);
                let new_index = new_index.min(self.clip_instances.len());
                self.clip_instances.insert(new_index, instance);
                true
            }
            None => false,
        }
    }

    /// Reorder clip instances by providing a list of IDs in desired order
    ///
    /// Unknown IDs are ignored; instances not listed keep their relative
    /// order and are appended after the listed ones.
    pub fn reorder_clip_instances(&mut self, order: &[Uuid]) {
        let mut new_order = Vec::with_capacity(self.clip_instances.len());
        // Pull instances out in the requested order.
        for id in order {
            if let Some(index) = self.clip_instances.iter().position(|e| &e.id == id) {
                new_order.push(self.clip_instances.remove(index));
            }
        }
        // Append any instances not mentioned in `order`.
        new_order.append(&mut self.clip_instances);
        self.clip_instances = new_order;
    }

    // === MUTATION METHODS (pub(crate) - only accessible to action module) ===

    /// Add a clip instance (internal, for actions only)
    pub(crate) fn add_clip_instance_internal(&mut self, instance: ClipInstance) -> Uuid {
        self.add_clip_instance(instance)
    }

    /// Remove a clip instance (internal, for actions only)
    pub(crate) fn remove_clip_instance_internal(&mut self, id: &Uuid) -> Option<ClipInstance> {
        self.remove_clip_instance(id)
    }

    /// Insert a clip instance at a specific index (internal, for actions only)
    pub(crate) fn insert_clip_instance_internal(&mut self, index: usize, instance: ClipInstance) -> Uuid {
        self.insert_clip_instance(index, instance)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::effect::{EffectCategory, EffectDefinition, EffectParameterDef};

    /// Minimal single-parameter effect definition shared by the tests below.
    fn create_test_effect_def() -> EffectDefinition {
        EffectDefinition::new(
            "Test Effect",
            EffectCategory::Color,
            "// shader code",
            vec![EffectParameterDef::float_range("intensity", "Intensity", 1.0, 0.0, 2.0)],
        )
    }

    #[test]
    fn test_effect_layer_creation() {
        let layer = EffectLayer::new("Effects");
        assert_eq!(layer.name(), "Effects");
        assert_eq!(layer.clip_instances.len(), 0);
    }

    #[test]
    fn test_add_effect() {
        let mut layer = EffectLayer::new("Effects");
        let def = create_test_effect_def();
        let effect = def.create_instance(0.0, 10.0);
        let effect_id = effect.id;
        let id = layer.add_clip_instance(effect);
        // add_clip_instance returns the instance's own ID.
        assert_eq!(id, effect_id);
        assert_eq!(layer.clip_instances.len(), 1);
        assert!(layer.get_clip_instance(&effect_id).is_some());
    }

    #[test]
    fn test_active_effects() {
        let mut layer = EffectLayer::new("Effects");
        let def = create_test_effect_def();
        // Effect 1: active from 0 to 5
        let effect1 = def.create_instance(0.0, 5.0);
        layer.add_clip_instance(effect1);
        // Effect 2: active from 3 to 10
        let effect2 = def.create_instance(3.0, 7.0); // 3.0 + 7.0 = 10.0 end
        layer.add_clip_instance(effect2);
        // At time 2: only effect1 active
        assert_eq!(layer.active_clip_instances_at(2.0).len(), 1);
        // At time 4: both effects active
        assert_eq!(layer.active_clip_instances_at(4.0).len(), 2);
        // At time 7: only effect2 active
        assert_eq!(layer.active_clip_instances_at(7.0).len(), 1);
    }

    #[test]
    fn test_effect_reordering() {
        let mut layer = EffectLayer::new("Effects");
        let def = create_test_effect_def();
        let effect1 = def.create_instance(0.0, 10.0);
        let id1 = effect1.id;
        layer.add_clip_instance(effect1);
        let effect2 = def.create_instance(0.0, 10.0);
        let id2 = effect2.id;
        layer.add_clip_instance(effect2);
        // Initially: [id1, id2]
        assert_eq!(layer.clip_instance_index(&id1), Some(0));
        assert_eq!(layer.clip_instance_index(&id2), Some(1));
        // Move id1 to index 1: [id2, id1]
        layer.move_clip_instance(&id1, 1);
        assert_eq!(layer.clip_instance_index(&id1), Some(1));
        assert_eq!(layer.clip_instance_index(&id2), Some(0));
    }
}

View File

@ -0,0 +1,191 @@
//! Default effect definitions registry
//!
//! Provides default effect definitions with embedded WGSL shaders.
//! These are copied into documents when used - no runtime dependency on registry.
//!
//! Built-in effects use stable UUIDs so they can be reliably looked up by ID.
use crate::effect::{EffectCategory, EffectDefinition, EffectParameterDef};
use uuid::Uuid;
// Stable UUIDs for built-in effects (randomly generated, never change)
const GRAYSCALE_ID: Uuid = Uuid::from_u128(0xac2cd8ce_4ea3_4c84_8c70_5cfc4dae22fb);
const INVERT_ID: Uuid = Uuid::from_u128(0x9ff36aef_5f40_45b2_bf42_cbe7fa52bd3a);
const BRIGHTNESS_CONTRAST_ID: Uuid = Uuid::from_u128(0x6cd772c9_ea8a_4b1e_93fb_2aa1d3306f62);
const HUE_SATURATION_ID: Uuid = Uuid::from_u128(0x3f210ac2_4eb5_436a_8337_c583d19dcbe1);
const COLOR_TINT_ID: Uuid = Uuid::from_u128(0x7b85ea51_22d6_4506_8689_85bdcd9ca6db);
const GAUSSIAN_BLUR_ID: Uuid = Uuid::from_u128(0x3e36bc88_3495_4f8b_ad07_8a5cdcc4c05b);
const VIGNETTE_ID: Uuid = Uuid::from_u128(0xf21873da_df9e_4ba2_ba5d_46a276e6485c);
const SHARPEN_ID: Uuid = Uuid::from_u128(0x217f644a_c4a1_46ed_b9b7_86b820792b29);
/// Registry of default built-in effects
///
/// Unit struct used purely as a namespace for the associated constructors
/// below; it holds no state.
pub struct EffectRegistry;
impl EffectRegistry {
    /// Get all available default effect definitions
    ///
    /// Builds fresh `EffectDefinition` values each call; definitions are
    /// copied into documents when used, so this is a factory, not a cache.
    pub fn get_all() -> Vec<EffectDefinition> {
        vec![
            Self::grayscale(),
            Self::invert(),
            Self::brightness_contrast(),
            Self::hue_saturation(),
            Self::color_tint(),
            Self::gaussian_blur(),
            Self::vignette(),
            Self::sharpen(),
        ]
    }

    /// Get a specific effect by name (case-insensitive; accepts both the
    /// display form, e.g. "Gaussian Blur", and the snake_case form)
    pub fn get_by_name(name: &str) -> Option<EffectDefinition> {
        match name.to_lowercase().as_str() {
            "grayscale" => Some(Self::grayscale()),
            "invert" => Some(Self::invert()),
            "brightness/contrast" | "brightness_contrast" => Some(Self::brightness_contrast()),
            "hue/saturation" | "hue_saturation" => Some(Self::hue_saturation()),
            "color tint" | "color_tint" => Some(Self::color_tint()),
            "gaussian blur" | "gaussian_blur" => Some(Self::gaussian_blur()),
            "vignette" => Some(Self::vignette()),
            "sharpen" => Some(Self::sharpen()),
            _ => None,
        }
    }

    /// Get a specific effect by its UUID
    ///
    /// Note: constructs all built-in definitions and scans for the match;
    /// fine for eight effects, revisit if the registry grows large.
    pub fn get_by_id(id: &Uuid) -> Option<EffectDefinition> {
        Self::get_all().into_iter().find(|def| def.id == *id)
    }

    /// Grayscale effect - converts to black and white
    pub fn grayscale() -> EffectDefinition {
        EffectDefinition::with_id(
            GRAYSCALE_ID,
            "Grayscale",
            EffectCategory::Color,
            include_str!("shaders/effect_grayscale.wgsl"),
            vec![
                EffectParameterDef::float_range("amount", "Amount", 1.0, 0.0, 1.0),
            ],
        ).with_description("Convert image to grayscale")
    }

    /// Invert effect - inverts colors
    pub fn invert() -> EffectDefinition {
        EffectDefinition::with_id(
            INVERT_ID,
            "Invert",
            EffectCategory::Color,
            include_str!("shaders/effect_invert.wgsl"),
            vec![
                EffectParameterDef::float_range("amount", "Amount", 1.0, 0.0, 1.0),
            ],
        ).with_description("Invert image colors")
    }

    /// Brightness/Contrast adjustment
    pub fn brightness_contrast() -> EffectDefinition {
        EffectDefinition::with_id(
            BRIGHTNESS_CONTRAST_ID,
            "Brightness/Contrast",
            EffectCategory::Color,
            include_str!("shaders/effect_brightness_contrast.wgsl"),
            vec![
                EffectParameterDef::float_range("brightness", "Brightness", 0.0, -1.0, 1.0),
                EffectParameterDef::float_range("contrast", "Contrast", 1.0, 0.0, 3.0),
            ],
        ).with_description("Adjust brightness and contrast")
    }

    /// Hue/Saturation adjustment
    pub fn hue_saturation() -> EffectDefinition {
        EffectDefinition::with_id(
            HUE_SATURATION_ID,
            "Hue/Saturation",
            EffectCategory::Color,
            include_str!("shaders/effect_hue_saturation.wgsl"),
            vec![
                EffectParameterDef::angle("hue", "Hue Shift", 0.0),
                EffectParameterDef::float_range("saturation", "Saturation", 1.0, 0.0, 3.0),
                EffectParameterDef::float_range("lightness", "Lightness", 0.0, -1.0, 1.0),
            ],
        ).with_description("Adjust hue, saturation, and lightness")
    }

    /// Color tint effect
    pub fn color_tint() -> EffectDefinition {
        EffectDefinition::with_id(
            COLOR_TINT_ID,
            "Color Tint",
            EffectCategory::Color,
            include_str!("shaders/effect_color_tint.wgsl"),
            vec![
                EffectParameterDef::color("tint_color", "Tint Color", 1.0, 0.5, 0.0, 1.0),
                EffectParameterDef::float_range("amount", "Amount", 0.5, 0.0, 1.0),
            ],
        ).with_description("Apply a color tint overlay")
    }

    /// Gaussian blur effect
    pub fn gaussian_blur() -> EffectDefinition {
        EffectDefinition::with_id(
            GAUSSIAN_BLUR_ID,
            "Gaussian Blur",
            EffectCategory::Blur,
            include_str!("shaders/effect_blur.wgsl"),
            vec![
                EffectParameterDef::float_range("radius", "Radius", 5.0, 0.0, 50.0),
                EffectParameterDef::float_range("quality", "Quality", 1.0, 0.0, 1.0),
            ],
        ).with_description("Gaussian blur effect")
    }

    /// Vignette effect - darkens edges
    pub fn vignette() -> EffectDefinition {
        EffectDefinition::with_id(
            VIGNETTE_ID,
            "Vignette",
            EffectCategory::Stylize,
            include_str!("shaders/effect_vignette.wgsl"),
            vec![
                EffectParameterDef::float_range("radius", "Radius", 0.5, 0.0, 1.5),
                EffectParameterDef::float_range("softness", "Softness", 0.5, 0.0, 1.0),
                EffectParameterDef::float_range("amount", "Amount", 0.5, 0.0, 1.0),
            ],
        ).with_description("Add a vignette darkening effect to edges")
    }

    /// Sharpen effect
    pub fn sharpen() -> EffectDefinition {
        EffectDefinition::with_id(
            SHARPEN_ID,
            "Sharpen",
            EffectCategory::Stylize,
            include_str!("shaders/effect_sharpen.wgsl"),
            vec![
                EffectParameterDef::float_range("amount", "Amount", 1.0, 0.0, 3.0),
                EffectParameterDef::float_range("radius", "Radius", 1.0, 0.5, 5.0),
            ],
        ).with_description("Sharpen image details")
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_all_effects() {
        let effects = EffectRegistry::get_all();
        // All eight built-ins (>= so adding effects doesn't break the test).
        assert!(effects.len() >= 8);
    }

    #[test]
    fn test_get_by_name() {
        // Lookup is case-insensitive and returns the display-cased name.
        let grayscale = EffectRegistry::get_by_name("grayscale");
        assert!(grayscale.is_some());
        assert_eq!(grayscale.unwrap().name, "Grayscale");
        let unknown = EffectRegistry::get_by_name("unknown_effect");
        assert!(unknown.is_none());
    }
}

View File

@ -0,0 +1,440 @@
//! GPU effect processor for shader-based visual effects
//!
//! Compiles effect shaders and applies them to textures in the compositing pipeline.
use crate::effect::{EffectDefinition, EffectInstance};
use std::collections::HashMap;
use uuid::Uuid;
use super::buffer_pool::{BufferHandle, BufferPool, BufferSpec, BufferFormat};
/// Uniform data for effect shaders
///
/// Parameters are packed as vec4s (4 floats each) for proper GPU alignment.
/// - params0: parameters 0-3
/// - params1: parameters 4-7
/// - params2: parameters 8-11
/// - params3: parameters 12-15
///
/// `#[repr(C)]` plus the bytemuck derives let this be byte-copied straight
/// into a uniform buffer; field order must match the WGSL uniform struct.
#[repr(C)]
#[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)]
pub struct EffectUniforms {
    /// Parameters 0-3 (packed as vec4 for 16-byte alignment)
    pub params0: [f32; 4],
    /// Parameters 4-7
    pub params1: [f32; 4],
    /// Parameters 8-11
    pub params2: [f32; 4],
    /// Parameters 12-15
    pub params3: [f32; 4],
    /// Source texture width
    pub texture_width: f32,
    /// Source texture height
    pub texture_height: f32,
    /// Current time in seconds
    pub time: f32,
    /// Mix/blend amount (0.0 = original, 1.0 = full effect)
    pub mix: f32,
}
impl Default for EffectUniforms {
fn default() -> Self {
Self {
params0: [0.0; 4],
params1: [0.0; 4],
params2: [0.0; 4],
params3: [0.0; 4],
texture_width: 1.0,
texture_height: 1.0,
time: 0.0,
mix: 1.0,
}
}
}
impl EffectUniforms {
    /// Copy up to 16 values from `params` into the packed vec4 banks.
    ///
    /// Values beyond index 15 are ignored; slots not covered by `params`
    /// keep their previous contents.
    pub fn set_params(&mut self, params: &[f32]) {
        // Distinct-field mutable borrows are fine; index bank by i / 4,
        // lane by i % 4.
        let banks: [&mut [f32; 4]; 4] = [
            &mut self.params0,
            &mut self.params1,
            &mut self.params2,
            &mut self.params3,
        ];
        for (index, &value) in params.iter().enumerate().take(16) {
            banks[index / 4][index % 4] = value;
        }
    }
}
/// A compiled effect ready for GPU execution
///
/// The bind group layout is not stored here: all effects share the single
/// layout owned by `EffectProcessor`.
struct CompiledEffect {
    /// The render pipeline for this effect
    pipeline: wgpu::RenderPipeline,
}
/// GPU processor for visual effects
///
/// Manages shader compilation and execution for effect layers.
/// Effects are applied as fullscreen passes that read from a source texture
/// and write to a destination texture.
pub struct EffectProcessor {
    /// Compiled effect pipelines keyed by effect definition ID
    compiled_effects: HashMap<Uuid, CompiledEffect>,
    /// Bind group layout for effect shaders (shared across all effects:
    /// source texture, sampler, uniform buffer)
    bind_group_layout: wgpu::BindGroupLayout,
    /// Sampler for texture sampling (clamp-to-edge, linear filtering)
    sampler: wgpu::Sampler,
    /// Output texture format all effect pipelines render into
    output_format: wgpu::TextureFormat,
}
impl EffectProcessor {
/// Create a new effect processor
pub fn new(device: &wgpu::Device, output_format: wgpu::TextureFormat) -> Self {
// Create bind group layout matching effect shader expectations
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("effect_bind_group_layout"),
entries: &[
// Source texture (binding 0)
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
// Sampler (binding 1)
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
// Uniforms (binding 2)
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});
// Create sampler for effect textures
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("effect_sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Linear,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Nearest,
..Default::default()
});
Self {
compiled_effects: HashMap::new(),
bind_group_layout,
sampler,
output_format,
}
}
/// Compile an effect definition into a GPU pipeline
///
/// Returns true if compilation was successful, false if the shader failed to compile.
pub fn compile_effect(&mut self, device: &wgpu::Device, definition: &EffectDefinition) -> bool {
// Check if already compiled
if self.compiled_effects.contains_key(&definition.id) {
return true;
}
// Create pipeline layout
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some(&format!("effect_pipeline_layout_{}", definition.name)),
bind_group_layouts: &[&self.bind_group_layout],
push_constant_ranges: &[],
});
// Create shader module from embedded WGSL
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some(&format!("effect_shader_{}", definition.name)),
source: wgpu::ShaderSource::Wgsl(definition.shader_code.as_str().into()),
});
// Create render pipeline
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some(&format!("effect_pipeline_{}", definition.name)),
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: Some("vs_main"),
buffers: &[],
compilation_options: wgpu::PipelineCompilationOptions::default(),
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState {
format: self.output_format,
// No blending - effect completely replaces the pixel
blend: None,
write_mask: wgpu::ColorWrites::ALL,
})],
compilation_options: wgpu::PipelineCompilationOptions::default(),
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleStrip,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: None,
polygon_mode: wgpu::PolygonMode::Fill,
unclipped_depth: false,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState::default(),
multiview: None,
cache: None,
});
self.compiled_effects.insert(definition.id, CompiledEffect {
pipeline,
});
true
}
/// Remove a compiled effect (e.g., when an effect definition is removed from the document)
pub fn remove_effect(&mut self, effect_id: &Uuid) {
self.compiled_effects.remove(effect_id);
}
/// Check if an effect is compiled
pub fn is_compiled(&self, effect_id: &Uuid) -> bool {
self.compiled_effects.contains_key(effect_id)
}
/// Apply an effect instance
///
/// Renders from source_view to dest_view using the effect shader.
/// Parameters are evaluated at the given time.
pub fn apply_effect(
&self,
device: &wgpu::Device,
queue: &wgpu::Queue,
encoder: &mut wgpu::CommandEncoder,
definition: &EffectDefinition,
instance: &EffectInstance,
source_view: &wgpu::TextureView,
dest_view: &wgpu::TextureView,
width: u32,
height: u32,
time: f64,
) -> bool {
// Get compiled effect
let Some(compiled) = self.compiled_effects.get(&definition.id) else {
return false;
};
// Build uniforms from instance parameters
let param_values = instance.get_uniform_params(time, &definition.parameters);
let mut uniforms = EffectUniforms {
texture_width: width as f32,
texture_height: height as f32,
time: time as f32,
mix: instance.mix as f32,
..Default::default()
};
uniforms.set_params(&param_values);
// Create uniform buffer
let uniform_buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("effect_uniforms"),
size: std::mem::size_of::<EffectUniforms>() as u64,
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
mapped_at_creation: false,
});
queue.write_buffer(&uniform_buffer, 0, bytemuck::bytes_of(&uniforms));
// Create bind group
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("effect_bind_group"),
layout: &self.bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(source_view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&self.sampler),
},
wgpu::BindGroupEntry {
binding: 2,
resource: uniform_buffer.as_entire_binding(),
},
],
});
// Render pass
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some(&format!("effect_pass_{}", definition.name)),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: dest_view,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Load,
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
render_pass.set_pipeline(&compiled.pipeline);
render_pass.set_bind_group(0, &bind_group, &[]);
render_pass.draw(0..4, 0..1);
true
}
/// Apply a chain of effects, ping-ponging between buffers
///
/// This is the main entry point for applying multiple effects to a composition.
/// Effects are applied in order, with the output of each becoming the input of the next.
pub fn apply_effect_chain(
&self,
device: &wgpu::Device,
queue: &wgpu::Queue,
encoder: &mut wgpu::CommandEncoder,
buffer_pool: &mut BufferPool,
definitions: &HashMap<Uuid, EffectDefinition>,
instances: &[&EffectInstance],
source: BufferHandle,
width: u32,
height: u32,
time: f64,
) -> Option<BufferHandle> {
if instances.is_empty() {
return Some(source);
}
// We need two buffers for ping-ponging
let spec = BufferSpec::new(width, height, BufferFormat::Rgba16Float);
let mut current_source = source;
let mut temp_buffer: Option<BufferHandle> = None;
for instance in instances.iter() {
// Skip disabled effects
if !instance.enabled {
continue;
}
// Get effect definition
let Some(definition) = definitions.get(&instance.effect_id) else {
continue;
};
// Acquire destination buffer (reuse temp buffer if available)
let dest = if let Some(buf) = temp_buffer.take() {
buf
} else {
buffer_pool.acquire(device, spec)
};
// Get views
let Some(source_view) = buffer_pool.get_view(current_source) else {
continue;
};
let Some(dest_view) = buffer_pool.get_view(dest) else {
continue;
};
// Apply effect
if self.apply_effect(
device,
queue,
encoder,
definition,
instance,
source_view,
dest_view,
width,
height,
time,
) {
// Swap buffers for next iteration
// Previous source becomes temp (can be reused)
if current_source != source {
temp_buffer = Some(current_source);
}
current_source = dest;
} else {
// Effect failed, release the dest buffer
buffer_pool.release(dest);
}
}
// Release temp buffer if we still have one
if let Some(buf) = temp_buffer {
buffer_pool.release(buf);
}
// Return final result (if we processed any effects, it's different from source)
Some(current_source)
}
/// Get the bind group layout (for external use if needed)
pub fn bind_group_layout(&self) -> &wgpu::BindGroupLayout {
&self.bind_group_layout
}
/// Get the number of compiled effects
pub fn compiled_count(&self) -> usize {
self.compiled_effects.len()
}
/// Clear all compiled effects (e.g., on device loss)
pub fn clear(&mut self) {
self.compiled_effects.clear();
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The uniform struct must stay exactly 80 bytes so it matches the
    /// `Uniforms` block declared in the effect shaders.
    #[test]
    fn test_effect_uniforms_size() {
        // 4 vec4 banks (64 bytes) + 4 scalar floats (16 bytes)
        assert_eq!(std::mem::size_of::<EffectUniforms>(), 80);
    }

    /// Defaults: zeroed parameter banks and a full effect mix.
    #[test]
    fn test_effect_uniforms_default() {
        let uniforms = EffectUniforms::default();
        for bank in [
            uniforms.params0,
            uniforms.params1,
            uniforms.params2,
            uniforms.params3,
        ] {
            assert_eq!(bank, [0.0; 4]);
        }
        assert_eq!(uniforms.mix, 1.0);
    }

    /// Values spill from params0 into params1 after the fourth entry.
    #[test]
    fn test_effect_uniforms_set_params() {
        let mut uniforms = EffectUniforms::default();
        uniforms.set_params(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]);
        assert_eq!(uniforms.params0, [1.0, 2.0, 3.0, 4.0]);
        assert_eq!(uniforms.params1, [5.0, 6.0, 0.0, 0.0]);
    }
}

View File

@ -7,10 +7,12 @@
pub mod buffer_pool; pub mod buffer_pool;
pub mod compositor; pub mod compositor;
pub mod effect_processor;
// Re-export commonly used types // Re-export commonly used types
pub use buffer_pool::{BufferHandle, BufferPool, BufferSpec, BufferFormat}; pub use buffer_pool::{BufferHandle, BufferPool, BufferSpec, BufferFormat};
pub use compositor::{Compositor, CompositorLayer, BlendMode}; pub use compositor::{Compositor, CompositorLayer, BlendMode};
pub use effect_processor::{EffectProcessor, EffectUniforms};
/// Standard HDR internal texture format (16-bit float per channel) /// Standard HDR internal texture format (16-bit float per channel)
pub const HDR_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba16Float; pub const HDR_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba16Float;

View File

@ -4,6 +4,7 @@
use crate::animation::AnimationData; use crate::animation::AnimationData;
use crate::clip::ClipInstance; use crate::clip::ClipInstance;
use crate::effect_layer::EffectLayer;
use crate::object::ShapeInstance; use crate::object::ShapeInstance;
use crate::shape::Shape; use crate::shape::Shape;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -21,6 +22,8 @@ pub enum LayerType {
Video, Video,
/// Generic automation layer /// Generic automation layer
Automation, Automation,
/// Visual effects layer
Effect,
} }
/// Common trait for all layer types /// Common trait for all layer types
@ -546,6 +549,7 @@ pub enum AnyLayer {
Vector(VectorLayer), Vector(VectorLayer),
Audio(AudioLayer), Audio(AudioLayer),
Video(VideoLayer), Video(VideoLayer),
Effect(EffectLayer),
} }
impl LayerTrait for AnyLayer { impl LayerTrait for AnyLayer {
@ -554,6 +558,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.id(), AnyLayer::Vector(l) => l.id(),
AnyLayer::Audio(l) => l.id(), AnyLayer::Audio(l) => l.id(),
AnyLayer::Video(l) => l.id(), AnyLayer::Video(l) => l.id(),
AnyLayer::Effect(l) => l.id(),
} }
} }
@ -562,6 +567,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.name(), AnyLayer::Vector(l) => l.name(),
AnyLayer::Audio(l) => l.name(), AnyLayer::Audio(l) => l.name(),
AnyLayer::Video(l) => l.name(), AnyLayer::Video(l) => l.name(),
AnyLayer::Effect(l) => l.name(),
} }
} }
@ -570,6 +576,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_name(name), AnyLayer::Vector(l) => l.set_name(name),
AnyLayer::Audio(l) => l.set_name(name), AnyLayer::Audio(l) => l.set_name(name),
AnyLayer::Video(l) => l.set_name(name), AnyLayer::Video(l) => l.set_name(name),
AnyLayer::Effect(l) => l.set_name(name),
} }
} }
@ -578,6 +585,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.has_custom_name(), AnyLayer::Vector(l) => l.has_custom_name(),
AnyLayer::Audio(l) => l.has_custom_name(), AnyLayer::Audio(l) => l.has_custom_name(),
AnyLayer::Video(l) => l.has_custom_name(), AnyLayer::Video(l) => l.has_custom_name(),
AnyLayer::Effect(l) => l.has_custom_name(),
} }
} }
@ -586,6 +594,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_has_custom_name(custom), AnyLayer::Vector(l) => l.set_has_custom_name(custom),
AnyLayer::Audio(l) => l.set_has_custom_name(custom), AnyLayer::Audio(l) => l.set_has_custom_name(custom),
AnyLayer::Video(l) => l.set_has_custom_name(custom), AnyLayer::Video(l) => l.set_has_custom_name(custom),
AnyLayer::Effect(l) => l.set_has_custom_name(custom),
} }
} }
@ -594,6 +603,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.visible(), AnyLayer::Vector(l) => l.visible(),
AnyLayer::Audio(l) => l.visible(), AnyLayer::Audio(l) => l.visible(),
AnyLayer::Video(l) => l.visible(), AnyLayer::Video(l) => l.visible(),
AnyLayer::Effect(l) => l.visible(),
} }
} }
@ -602,6 +612,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_visible(visible), AnyLayer::Vector(l) => l.set_visible(visible),
AnyLayer::Audio(l) => l.set_visible(visible), AnyLayer::Audio(l) => l.set_visible(visible),
AnyLayer::Video(l) => l.set_visible(visible), AnyLayer::Video(l) => l.set_visible(visible),
AnyLayer::Effect(l) => l.set_visible(visible),
} }
} }
@ -610,6 +621,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.opacity(), AnyLayer::Vector(l) => l.opacity(),
AnyLayer::Audio(l) => l.opacity(), AnyLayer::Audio(l) => l.opacity(),
AnyLayer::Video(l) => l.opacity(), AnyLayer::Video(l) => l.opacity(),
AnyLayer::Effect(l) => l.opacity(),
} }
} }
@ -618,6 +630,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_opacity(opacity), AnyLayer::Vector(l) => l.set_opacity(opacity),
AnyLayer::Audio(l) => l.set_opacity(opacity), AnyLayer::Audio(l) => l.set_opacity(opacity),
AnyLayer::Video(l) => l.set_opacity(opacity), AnyLayer::Video(l) => l.set_opacity(opacity),
AnyLayer::Effect(l) => l.set_opacity(opacity),
} }
} }
@ -626,6 +639,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.volume(), AnyLayer::Vector(l) => l.volume(),
AnyLayer::Audio(l) => l.volume(), AnyLayer::Audio(l) => l.volume(),
AnyLayer::Video(l) => l.volume(), AnyLayer::Video(l) => l.volume(),
AnyLayer::Effect(l) => l.volume(),
} }
} }
@ -634,6 +648,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_volume(volume), AnyLayer::Vector(l) => l.set_volume(volume),
AnyLayer::Audio(l) => l.set_volume(volume), AnyLayer::Audio(l) => l.set_volume(volume),
AnyLayer::Video(l) => l.set_volume(volume), AnyLayer::Video(l) => l.set_volume(volume),
AnyLayer::Effect(l) => l.set_volume(volume),
} }
} }
@ -642,6 +657,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.muted(), AnyLayer::Vector(l) => l.muted(),
AnyLayer::Audio(l) => l.muted(), AnyLayer::Audio(l) => l.muted(),
AnyLayer::Video(l) => l.muted(), AnyLayer::Video(l) => l.muted(),
AnyLayer::Effect(l) => l.muted(),
} }
} }
@ -650,6 +666,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_muted(muted), AnyLayer::Vector(l) => l.set_muted(muted),
AnyLayer::Audio(l) => l.set_muted(muted), AnyLayer::Audio(l) => l.set_muted(muted),
AnyLayer::Video(l) => l.set_muted(muted), AnyLayer::Video(l) => l.set_muted(muted),
AnyLayer::Effect(l) => l.set_muted(muted),
} }
} }
@ -658,6 +675,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.soloed(), AnyLayer::Vector(l) => l.soloed(),
AnyLayer::Audio(l) => l.soloed(), AnyLayer::Audio(l) => l.soloed(),
AnyLayer::Video(l) => l.soloed(), AnyLayer::Video(l) => l.soloed(),
AnyLayer::Effect(l) => l.soloed(),
} }
} }
@ -666,6 +684,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_soloed(soloed), AnyLayer::Vector(l) => l.set_soloed(soloed),
AnyLayer::Audio(l) => l.set_soloed(soloed), AnyLayer::Audio(l) => l.set_soloed(soloed),
AnyLayer::Video(l) => l.set_soloed(soloed), AnyLayer::Video(l) => l.set_soloed(soloed),
AnyLayer::Effect(l) => l.set_soloed(soloed),
} }
} }
@ -674,6 +693,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.locked(), AnyLayer::Vector(l) => l.locked(),
AnyLayer::Audio(l) => l.locked(), AnyLayer::Audio(l) => l.locked(),
AnyLayer::Video(l) => l.locked(), AnyLayer::Video(l) => l.locked(),
AnyLayer::Effect(l) => l.locked(),
} }
} }
@ -682,6 +702,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Vector(l) => l.set_locked(locked), AnyLayer::Vector(l) => l.set_locked(locked),
AnyLayer::Audio(l) => l.set_locked(locked), AnyLayer::Audio(l) => l.set_locked(locked),
AnyLayer::Video(l) => l.set_locked(locked), AnyLayer::Video(l) => l.set_locked(locked),
AnyLayer::Effect(l) => l.set_locked(locked),
} }
} }
} }
@ -693,6 +714,7 @@ impl AnyLayer {
AnyLayer::Vector(l) => &l.layer, AnyLayer::Vector(l) => &l.layer,
AnyLayer::Audio(l) => &l.layer, AnyLayer::Audio(l) => &l.layer,
AnyLayer::Video(l) => &l.layer, AnyLayer::Video(l) => &l.layer,
AnyLayer::Effect(l) => &l.layer,
} }
} }
@ -702,6 +724,7 @@ impl AnyLayer {
AnyLayer::Vector(l) => &mut l.layer, AnyLayer::Vector(l) => &mut l.layer,
AnyLayer::Audio(l) => &mut l.layer, AnyLayer::Audio(l) => &mut l.layer,
AnyLayer::Video(l) => &mut l.layer, AnyLayer::Video(l) => &mut l.layer,
AnyLayer::Effect(l) => &mut l.layer,
} }
} }

View File

@ -14,6 +14,9 @@ pub mod layer;
pub mod layer_tree; pub mod layer_tree;
pub mod clip; pub mod clip;
pub mod instance_group; pub mod instance_group;
pub mod effect;
pub mod effect_layer;
pub mod effect_registry;
pub mod document; pub mod document;
pub mod renderer; pub mod renderer;
pub mod video; pub mod video;

View File

@ -9,7 +9,7 @@
//! The compositing mode enables proper per-layer opacity, blend modes, and effects. //! The compositing mode enables proper per-layer opacity, blend modes, and effects.
use crate::animation::TransformProperty; use crate::animation::TransformProperty;
use crate::clip::ImageAsset; use crate::clip::{ClipInstance, ImageAsset};
use crate::document::Document; use crate::document::Document;
use crate::gpu::BlendMode; use crate::gpu::BlendMode;
use crate::layer::{AnyLayer, LayerTrait, VectorLayer}; use crate::layer::{AnyLayer, LayerTrait, VectorLayer};
@ -86,6 +86,18 @@ fn decode_image_asset(asset: &ImageAsset) -> Option<Image> {
// Per-Layer Rendering for HDR Compositing Pipeline // Per-Layer Rendering for HDR Compositing Pipeline
// ============================================================================ // ============================================================================
/// Type of rendered layer for compositor handling
#[derive(Clone, Debug)]
pub enum RenderedLayerType {
/// Regular content layer (vector, video) - composite its scene
Content,
/// Effect layer - apply effects to current composite state
Effect {
/// Active effect instances at the current time
effect_instances: Vec<ClipInstance>,
},
}
/// Metadata for a rendered layer, used for compositing /// Metadata for a rendered layer, used for compositing
pub struct RenderedLayer { pub struct RenderedLayer {
/// The layer's unique identifier /// The layer's unique identifier
@ -98,6 +110,8 @@ pub struct RenderedLayer {
pub blend_mode: BlendMode, pub blend_mode: BlendMode,
/// Whether this layer has any visible content /// Whether this layer has any visible content
pub has_content: bool, pub has_content: bool,
/// Type of layer for compositor (content vs effect)
pub layer_type: RenderedLayerType,
} }
impl RenderedLayer { impl RenderedLayer {
@ -109,6 +123,7 @@ impl RenderedLayer {
opacity: 1.0, opacity: 1.0,
blend_mode: BlendMode::Normal, blend_mode: BlendMode::Normal,
has_content: false, has_content: false,
layer_type: RenderedLayerType::Content,
} }
} }
@ -120,6 +135,20 @@ impl RenderedLayer {
opacity, opacity,
blend_mode, blend_mode,
has_content: false, has_content: false,
layer_type: RenderedLayerType::Content,
}
}
/// Create an effect layer with active effect instances
pub fn effect_layer(layer_id: Uuid, opacity: f32, effect_instances: Vec<ClipInstance>) -> Self {
let has_content = !effect_instances.is_empty();
Self {
layer_id,
scene: Scene::new(),
opacity,
blend_mode: BlendMode::Normal,
has_content,
layer_type: RenderedLayerType::Effect { effect_instances },
} }
} }
} }
@ -246,6 +275,16 @@ pub fn render_layer_isolated(
); );
rendered.has_content = !video_layer.clip_instances.is_empty(); rendered.has_content = !video_layer.clip_instances.is_empty();
} }
AnyLayer::Effect(effect_layer) => {
// Effect layers are processed during compositing, not rendered to scene
// Return early with a dedicated effect layer type
let active_effects: Vec<ClipInstance> = effect_layer
.active_clip_instances_at(time)
.into_iter()
.cloned()
.collect();
return RenderedLayer::effect_layer(layer_id, opacity, active_effects);
}
} }
rendered rendered
@ -395,6 +434,9 @@ fn render_layer(
let mut video_mgr = video_manager.lock().unwrap(); let mut video_mgr = video_manager.lock().unwrap();
render_video_layer(document, time, video_layer, scene, base_transform, parent_opacity, &mut video_mgr); render_video_layer(document, time, video_layer, scene, base_transform, parent_opacity, &mut video_mgr);
} }
AnyLayer::Effect(_) => {
// Effect layers are processed during GPU compositing, not rendered to scene
}
} }
} }

View File

@ -0,0 +1,73 @@
// Gaussian Blur Effect Shader
// Single-pass, gaussian-weighted neighborhood sampling. (A true Gaussian
// would use two separable passes; this trades accuracy for simplicity.)

struct Uniforms {
    // params packed as vec4s for proper 16-byte alignment
    params0: vec4<f32>,
    params1: vec4<f32>,
    params2: vec4<f32>,
    params3: vec4<f32>,
    texture_width: f32,
    texture_height: f32,
    time: f32,
    mix: f32,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

// Fullscreen pass: oversized quad drawn as a 4-vertex triangle strip; the
// visible region maps uv exactly onto [0,1] x [0,1].
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    var out: VertexOutput;
    let x = f32((vertex_index & 1u) << 1u);
    let y = f32(vertex_index & 2u);
    out.position = vec4<f32>(x * 2.0 - 1.0, 1.0 - y * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(x, y);
    return out;
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    let radius = uniforms.params0.x;  // Blur radius in pixels
    let quality = uniforms.params0.y; // Quality (0-1, affects sample count)
    // Sub-pixel radius: blurring is effectively a no-op, so pass the source
    // through. (Previously this computed mix(src.rgb, src.rgb, mix), which
    // is an identity expression.)
    if (radius < 0.5) {
        return src;
    }
    let pixel_size = vec2<f32>(1.0 / uniforms.texture_width, 1.0 / uniforms.texture_height);
    // Sample count based on quality (5-13 samples per direction)
    let samples = i32(5.0 + quality * 8.0);
    let half_samples = samples / 2;
    var color = vec3<f32>(0.0);
    var total_weight = 0.0;
    // Box sampling with gaussian-like weighting
    for (var y = -half_samples; y <= half_samples; y++) {
        for (var x = -half_samples; x <= half_samples; x++) {
            let offset = vec2<f32>(f32(x), f32(y)) * pixel_size * radius / f32(half_samples);
            let sample_pos = in.uv + offset;
            // Gaussian-like weight based on distance
            let dist = length(vec2<f32>(f32(x), f32(y))) / f32(half_samples);
            let weight = exp(-dist * dist * 2.0);
            color += textureSample(source_tex, source_sampler, sample_pos).rgb * weight;
            total_weight += weight;
        }
    }
    color /= total_weight;
    let result = mix(src.rgb, color, uniforms.mix);
    return vec4<f32>(result, src.a);
}

View File

@ -0,0 +1,51 @@
// Brightness/Contrast Effect Shader

struct Uniforms {
    // Effect parameters packed into vec4 banks for 16-byte alignment
    params0: vec4<f32>,
    params1: vec4<f32>,
    params2: vec4<f32>,
    params3: vec4<f32>,
    texture_width: f32,
    texture_height: f32,
    time: f32,
    mix: f32,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

// Fullscreen pass vertex shader: positions derive from the vertex index,
// so no vertex buffer is bound.
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    var out: VertexOutput;
    let x = f32((vertex_index & 1u) << 1u);
    let y = f32(vertex_index & 2u);
    out.position = vec4<f32>(x * 2.0 - 1.0, 1.0 - y * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(x, y);
    return out;
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    let brightness = uniforms.params0.x; // additive offset, -1 to 1
    let contrast = uniforms.params0.y;   // multiplier about mid-gray, 0 to 3
    let half_gray = vec3<f32>(0.5);
    // Brightness first (additive), then contrast (scale around 0.5),
    // then clamp back into [0, 1].
    let brightened = src.rgb + vec3<f32>(brightness);
    let contrasted = (brightened - half_gray) * contrast + half_gray;
    let graded = clamp(contrasted, vec3<f32>(0.0), vec3<f32>(1.0));
    // Honor the per-effect mix amount.
    return vec4<f32>(mix(src.rgb, graded, uniforms.mix), src.a);
}

View File

@ -0,0 +1,47 @@
// Color Tint Effect Shader

struct Uniforms {
    // Effect parameters packed into vec4 banks for 16-byte alignment
    params0: vec4<f32>,
    params1: vec4<f32>,
    params2: vec4<f32>,
    params3: vec4<f32>,
    texture_width: f32,
    texture_height: f32,
    time: f32,
    mix: f32,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

// Fullscreen pass vertex shader: positions derive from the vertex index,
// so no vertex buffer is bound.
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    var out: VertexOutput;
    let x = f32((vertex_index & 1u) << 1u);
    let y = f32(vertex_index & 2u);
    out.position = vec4<f32>(x * 2.0 - 1.0, 1.0 - y * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(x, y);
    return out;
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    // Tint color lives in params0.rgb; params1.x (params[4]) is the amount.
    let tint_color = uniforms.params0.xyz;
    let strength = uniforms.params1.x;
    // Multiplicative tint: lerp the multiplier from white toward the tint,
    // then scale the source color by it.
    let multiplier = mix(vec3<f32>(1.0), tint_color, strength);
    let tinted = src.rgb * multiplier;
    return vec4<f32>(mix(src.rgb, tinted, uniforms.mix), src.a);
}

View File

@ -0,0 +1,47 @@
// Grayscale Effect Shader
// Desaturates the image toward its BT.709 luminance.

struct Uniforms {
    // Effect parameters packed into vec4 banks for 16-byte alignment
    params0: vec4<f32>,
    params1: vec4<f32>,
    params2: vec4<f32>,
    params3: vec4<f32>,
    texture_width: f32,
    texture_height: f32,
    time: f32,
    mix: f32,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

// Fullscreen pass vertex shader: emits an oversized quad (4-vertex
// triangle strip) directly from the vertex index, no vertex buffer.
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    var out: VertexOutput;
    let x = f32((vertex_index & 1u) << 1u);
    let y = f32(vertex_index & 2u);
    out.position = vec4<f32>(x * 2.0 - 1.0, 1.0 - y * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(x, y);
    return out;
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    // params[0]: desaturation strength
    let amount = uniforms.params0.x;
    // ITU-R BT.709 luminance coefficients
    let luma = dot(src.rgb, vec3<f32>(0.2126, 0.7152, 0.0722));
    let desaturated = vec3<f32>(luma);
    // Fold the per-effect mix into the desaturation strength.
    let shaded = mix(src.rgb, desaturated, amount * uniforms.mix);
    return vec4<f32>(shaded, src.a);
}

View File

@ -0,0 +1,107 @@
// Hue/Saturation/Lightness Effect Shader
//
// Converts each pixel to HSL, applies a hue rotation, a saturation
// multiplier, and a lightness offset, then converts back to RGB.

struct Uniforms {
    // params packed as vec4s for proper 16-byte alignment
    params0: vec4<f32>,
    params1: vec4<f32>,
    params2: vec4<f32>,
    params3: vec4<f32>,
    texture_width: f32,
    texture_height: f32,
    time: f32,
    mix: f32,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}

@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

// Fullscreen pass vertex shader: emits an oversized quad (drawn as a
// 4-vertex triangle strip); the visible portion maps uv onto [0,1] x [0,1].
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    var out: VertexOutput;
    let x = f32((vertex_index & 1u) << 1u);
    let y = f32(vertex_index & 2u);
    out.position = vec4<f32>(x * 2.0 - 1.0, 1.0 - y * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(x, y);
    return out;
}

// Convert RGB to HSL (all components in [0,1]).
fn rgb_to_hsl(c: vec3<f32>) -> vec3<f32> {
    let cmax = max(max(c.r, c.g), c.b);
    let cmin = min(min(c.r, c.g), c.b);
    let delta = cmax - cmin;
    var h = 0.0;
    var s = 0.0;
    // Lightness is the midpoint of the min/max channels.
    let l = (cmax + cmin) / 2.0;
    if (delta > 0.0) {
        // Saturation formula differs above/below mid lightness.
        s = select(delta / (2.0 - cmax - cmin), delta / (cmax + cmin), l < 0.5);
        // Hue sector depends on which channel dominates.
        if (cmax == c.r) {
            h = (c.g - c.b) / delta + select(6.0, 0.0, c.g >= c.b);
        } else if (cmax == c.g) {
            h = (c.b - c.r) / delta + 2.0;
        } else {
            h = (c.r - c.g) / delta + 4.0;
        }
        // Normalize hue from sector units to [0,1].
        h /= 6.0;
    }
    return vec3<f32>(h, s, l);
}

// Helper function for HSL to RGB: evaluates one RGB channel from the
// temporaries p/q and a channel-offset hue t.
fn hue_to_rgb(p: f32, q: f32, t: f32) -> f32 {
    var tt = t;
    // Wrap hue into [0,1].
    if (tt < 0.0) { tt += 1.0; }
    if (tt > 1.0) { tt -= 1.0; }
    if (tt < 1.0/6.0) { return p + (q - p) * 6.0 * tt; }
    if (tt < 1.0/2.0) { return q; }
    if (tt < 2.0/3.0) { return p + (q - p) * (2.0/3.0 - tt) * 6.0; }
    return p;
}

// Convert HSL back to RGB.
fn hsl_to_rgb(hsl: vec3<f32>) -> vec3<f32> {
    // Zero saturation is pure gray; skip the hue math entirely.
    if (hsl.y == 0.0) {
        return vec3<f32>(hsl.z, hsl.z, hsl.z);
    }
    let q = select(hsl.z + hsl.y - hsl.z * hsl.y, hsl.z * (1.0 + hsl.y), hsl.z < 0.5);
    let p = 2.0 * hsl.z - q;
    return vec3<f32>(
        hue_to_rgb(p, q, hsl.x + 1.0/3.0),
        hue_to_rgb(p, q, hsl.x),
        hue_to_rgb(p, q, hsl.x - 1.0/3.0)
    );
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    let hue_shift = uniforms.params0.x / 360.0; // Convert degrees to 0-1 range
    let saturation = uniforms.params0.y; // Multiplier (1.0 = no change)
    let lightness = uniforms.params0.z; // Additive (-1 to 1)
    // Convert to HSL
    var hsl = rgb_to_hsl(src.rgb);
    // Apply adjustments
    hsl.x = fract(hsl.x + hue_shift); // Shift hue (wrapping)
    hsl.y = clamp(hsl.y * saturation, 0.0, 1.0); // Multiply saturation
    hsl.z = clamp(hsl.z + lightness, 0.0, 1.0); // Add lightness
    // Convert back to RGB
    let adjusted = hsl_to_rgb(hsl);
    // Blend between original and adjusted color by the effect mix amount.
    let result = mix(src.rgb, adjusted, uniforms.mix);
    return vec4<f32>(result, src.a);
}

View File

@ -0,0 +1,45 @@
// Invert Effect Shader
// Inverts color values
struct Uniforms {
// params packed as vec4s for proper 16-byte alignment
// params[0-3] in params0, params[4-7] in params1, etc.
params0: vec4<f32>,
params1: vec4<f32>,
params2: vec4<f32>,
params3: vec4<f32>,
texture_width: f32,
texture_height: f32,
time: f32,
mix: f32,
}
struct VertexOutput {
@builtin(position) position: vec4<f32>,
@location(0) uv: vec2<f32>,
}
@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;
// Fullscreen-triangle vertex shader: three vertices derived purely from the
// vertex index cover the viewport (uv spans 0..2 and is clipped to 0..1).
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    // Vertex 0 -> (0,0), 1 -> (2,0), 2 -> (0,2) in uv space.
    let uv = vec2<f32>(
        f32((vertex_index & 1u) << 1u),
        f32(vertex_index & 2u)
    );
    var out: VertexOutput;
    out.uv = uv;
    // Map uv to clip space, flipping Y so uv.y grows downward.
    out.position = vec4<f32>(uv.x * 2.0 - 1.0, 1.0 - uv.y * 2.0, 0.0, 1.0);
    return out;
}
// Color inversion fragment shader.
// params0.x = invert amount; combined with the global wet/dry mix.
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let sample = textureSample(source_tex, source_sampler, in.uv);
    let strength = uniforms.params0.x * uniforms.mix;
    // Lerp from the original toward the complement; alpha is untouched.
    let rgb = mix(sample.rgb, vec3<f32>(1.0) - sample.rgb, strength);
    return vec4<f32>(rgb, sample.a);
}

View File

@ -0,0 +1,60 @@
// Sharpen Effect Shader
// Unsharp mask style sharpening
struct Uniforms {
// params packed as vec4s for proper 16-byte alignment
params0: vec4<f32>,
params1: vec4<f32>,
params2: vec4<f32>,
params3: vec4<f32>,
// Source texture dimensions in pixels; used to convert the sample radius
// into uv-space offsets in the fragment shader.
texture_width: f32,
texture_height: f32,
// Animation time (unused here) — presumably seconds; confirm against the
// effect processor that writes this struct.
time: f32,
// Global wet/dry blend: 0 = passthrough, 1 = full effect.
mix: f32,
}
// Data interpolated from the vertex stage to the fragment stage.
struct VertexOutput {
@builtin(position) position: vec4<f32>,
// Texture coordinate, (0,0) at top-left of the source image.
@location(0) uv: vec2<f32>,
}
// Bind group 0: input image, its sampler, and the per-effect uniform block.
@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;
// Fullscreen-triangle vertex shader: three index-derived vertices cover the
// whole viewport without any vertex buffer.
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    // Indices 0,1,2 map to uv (0,0), (2,0), (0,2).
    let u = f32((vertex_index & 1u) << 1u);
    let v = f32(vertex_index & 2u);
    var out: VertexOutput;
    // Clip-space position with Y flipped so uv.y increases downward.
    out.position = vec4<f32>(u * 2.0 - 1.0, 1.0 - v * 2.0, 0.0, 1.0);
    out.uv = vec2<f32>(u, v);
    return out;
}
// Unsharp-mask sharpening fragment shader.
// params0.x = sharpen amount (0-3), params0.y = sample radius (0.5-5).
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    let amount = uniforms.params0.x;
    let radius = uniforms.params0.y;
    // Neighbor offset in uv space, scaled by the radius parameter.
    let pixel_size = vec2<f32>(1.0 / uniforms.texture_width, 1.0 / uniforms.texture_height) * radius;
    // Cross-pattern neighborhood average acts as a cheap blur estimate.
    let blur = (
        textureSample(source_tex, source_sampler, in.uv - vec2<f32>(pixel_size.x, 0.0)).rgb +
        textureSample(source_tex, source_sampler, in.uv + vec2<f32>(pixel_size.x, 0.0)).rgb +
        textureSample(source_tex, source_sampler, in.uv - vec2<f32>(0.0, pixel_size.y)).rgb +
        textureSample(source_tex, source_sampler, in.uv + vec2<f32>(0.0, pixel_size.y)).rgb
    ) * 0.25;
    // Unsharp mask: amplify the difference between the pixel and its blur,
    // clamped back into the displayable range.
    let sharpened = clamp(src.rgb + (src.rgb - blur) * amount, vec3<f32>(0.0), vec3<f32>(1.0));
    return vec4<f32>(mix(src.rgb, sharpened, uniforms.mix), src.a);
}

View File

@ -0,0 +1,55 @@
// Vignette Effect Shader
// Darkens edges of the image
struct Uniforms {
// params packed as vec4s for proper 16-byte alignment
params0: vec4<f32>,
params1: vec4<f32>,
params2: vec4<f32>,
params3: vec4<f32>,
// Source texture dimensions in pixels. Unused by this shader but part of
// the uniform layout shared by all effect shaders.
texture_width: f32,
texture_height: f32,
// Animation time (unused here) — presumably seconds; confirm against the
// effect processor that writes this struct.
time: f32,
// Global wet/dry blend: 0 = passthrough, 1 = full effect.
mix: f32,
}
// Data interpolated from the vertex stage to the fragment stage.
struct VertexOutput {
@builtin(position) position: vec4<f32>,
// Texture coordinate, (0,0) at top-left of the source image.
@location(0) uv: vec2<f32>,
}
// Bind group 0: input image, its sampler, and the per-effect uniform block.
@group(0) @binding(0) var source_tex: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;
// Bufferless fullscreen-triangle vertex shader; positions are synthesized
// from the vertex index alone.
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    // Indices 0,1,2 yield uv (0,0), (2,0), (0,2), covering the viewport.
    let uv = vec2<f32>(
        f32((vertex_index & 1u) << 1u),
        f32(vertex_index & 2u)
    );
    var out: VertexOutput;
    out.uv = uv;
    // uv -> clip space, with Y inverted so uv.y points down.
    out.position = vec4<f32>(uv.x * 2.0 - 1.0, 1.0 - uv.y * 2.0, 0.0, 1.0);
    return out;
}
// Vignette fragment shader: darkens pixels by distance from the center.
// params0.x = radius, params0.y = edge softness, params0.z = darkness amount.
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    let src = textureSample(source_tex, source_sampler, in.uv);
    let radius = uniforms.params0.x;
    let softness = uniforms.params0.y;
    let amount = uniforms.params0.z;
    // Falloff ramps smoothly from 0 at `radius` to 1 at `radius + softness`,
    // measured as uv-space distance from the image center.
    let falloff = smoothstep(radius, radius + softness, distance(in.uv, vec2<f32>(0.5, 0.5)));
    // Darken edges by up to `amount`, then apply the global wet/dry mix.
    let darkened = src.rgb * (1.0 - falloff * amount);
    return vec4<f32>(mix(src.rgb, darkened, uniforms.mix), src.a);
}

View File

@ -2055,6 +2055,7 @@ impl EditorApp {
panes::DragClipType::AudioSampled => "Audio", panes::DragClipType::AudioSampled => "Audio",
panes::DragClipType::AudioMidi => "MIDI", panes::DragClipType::AudioMidi => "MIDI",
panes::DragClipType::Image => "Image", panes::DragClipType::Image => "Image",
panes::DragClipType::Effect => "Effect",
}); });
let new_layer = panes::create_layer_for_clip_type(asset_info.clip_type, &layer_name); let new_layer = panes::create_layer_for_clip_type(asset_info.clip_type, &layer_name);

View File

@ -514,6 +514,82 @@ fn shape_color_to_tiny_skia(color: &ShapeColor) -> tiny_skia::Color {
tiny_skia::Color::from_rgba8(color.r, color.g, color.b, color.a) tiny_skia::Color::from_rgba8(color.r, color.g, color.b, color.a)
} }
/// Generate a simple effect thumbnail: a pink vertical gradient with a white
/// "FX" glyph drawn in the center. Returns RGBA8 pixel data of
/// `THUMBNAIL_SIZE` x `THUMBNAIL_SIZE`.
fn generate_effect_thumbnail() -> Vec<u8> {
    let size = THUMBNAIL_SIZE as usize;
    let mut rgba = vec![0u8; size * size * 4];

    // Background: pink gradient that darkens toward the bottom edge.
    for y in 0..size {
        // Brightness is constant across a row; compute it once per row.
        let brightness = 1.0 - (y as f32 / size as f32) * 0.3;
        for x in 0..size {
            let px = (y * size + x) * 4;
            rgba[px] = (220.0 * brightness) as u8; // R
            rgba[px + 1] = (80.0 * brightness) as u8; // G
            rgba[px + 2] = (160.0 * brightness) as u8; // B
            rgba[px + 3] = 200; // A
        }
    }

    // Helper: stamp a fully opaque white pixel (used for the glyph strokes).
    let mut white = |x: usize, y: usize| {
        let px = (y * size + x) * 4;
        rgba[px] = 255;
        rgba[px + 1] = 255;
        rgba[px + 2] = 255;
        rgba[px + 3] = 255;
    };

    let center = size / 2;
    let letter = size / 4;

    // "F": vertical bar on the left.
    for y in (center - letter)..(center + letter) {
        white(center - letter, y);
    }
    // "F": top horizontal stroke.
    for x in (center - letter)..(center - 2) {
        white(x, center - letter);
    }
    // "F": middle horizontal stroke (slightly shorter than the top one).
    for x in (center - letter)..(center - 4) {
        white(x, center);
    }
    // "X": two dashed diagonals on the right half (y advances by 2 per step,
    // so the strokes are dotted). Both strokes share the same y per step.
    for i in 0..letter {
        let y = center - letter + i * 2;
        if y < size {
            // Top-left to bottom-right stroke.
            let x_fwd = center + 2 + i;
            if x_fwd < size {
                white(x_fwd, y);
            }
            // Top-right to bottom-left stroke.
            let x_back = center + letter - i;
            if x_back < size {
                white(x_back, y);
            }
        }
    }

    rgba
}
/// Ellipsize a string to fit within a maximum character count /// Ellipsize a string to fit within a maximum character count
fn ellipsize(s: &str, max_chars: usize) -> String { fn ellipsize(s: &str, max_chars: usize) -> String {
if s.chars().count() <= max_chars { if s.chars().count() <= max_chars {
@ -532,6 +608,7 @@ pub enum AssetCategory {
Video, Video,
Audio, Audio,
Images, Images,
Effects,
} }
impl AssetCategory { impl AssetCategory {
@ -542,6 +619,7 @@ impl AssetCategory {
AssetCategory::Video => "Video", AssetCategory::Video => "Video",
AssetCategory::Audio => "Audio", AssetCategory::Audio => "Audio",
AssetCategory::Images => "Images", AssetCategory::Images => "Images",
AssetCategory::Effects => "Effects",
} }
} }
@ -552,6 +630,7 @@ impl AssetCategory {
AssetCategory::Video, AssetCategory::Video,
AssetCategory::Audio, AssetCategory::Audio,
AssetCategory::Images, AssetCategory::Images,
AssetCategory::Effects,
] ]
} }
@ -563,6 +642,7 @@ impl AssetCategory {
AssetCategory::Video => egui::Color32::from_rgb(255, 150, 100), // Orange AssetCategory::Video => egui::Color32::from_rgb(255, 150, 100), // Orange
AssetCategory::Audio => egui::Color32::from_rgb(100, 255, 150), // Green AssetCategory::Audio => egui::Color32::from_rgb(100, 255, 150), // Green
AssetCategory::Images => egui::Color32::from_rgb(255, 200, 100), // Yellow/Gold AssetCategory::Images => egui::Color32::from_rgb(255, 200, 100), // Yellow/Gold
AssetCategory::Effects => egui::Color32::from_rgb(220, 80, 160), // Pink
} }
} }
} }
@ -578,6 +658,8 @@ pub struct AssetEntry {
pub duration: f64, pub duration: f64,
pub dimensions: Option<(f64, f64)>, pub dimensions: Option<(f64, f64)>,
pub extra_info: String, pub extra_info: String,
/// True for built-in effects from the registry (not editable/deletable)
pub is_builtin: bool,
} }
/// Pending delete confirmation state /// Pending delete confirmation state
@ -658,6 +740,7 @@ impl AssetLibraryPane {
duration: clip.duration, duration: clip.duration,
dimensions: Some((clip.width, clip.height)), dimensions: Some((clip.width, clip.height)),
extra_info: format!("{}x{}", clip.width as u32, clip.height as u32), extra_info: format!("{}x{}", clip.width as u32, clip.height as u32),
is_builtin: false,
}); });
} }
@ -671,6 +754,7 @@ impl AssetLibraryPane {
duration: clip.duration, duration: clip.duration,
dimensions: Some((clip.width, clip.height)), dimensions: Some((clip.width, clip.height)),
extra_info: format!("{:.0}fps", clip.frame_rate), extra_info: format!("{:.0}fps", clip.frame_rate),
is_builtin: false,
}); });
} }
@ -699,6 +783,7 @@ impl AssetLibraryPane {
duration: clip.duration, duration: clip.duration,
dimensions: None, dimensions: None,
extra_info, extra_info,
is_builtin: false,
}); });
} }
@ -712,9 +797,46 @@ impl AssetLibraryPane {
duration: 0.0, // Images don't have duration duration: 0.0, // Images don't have duration
dimensions: Some((asset.width as f64, asset.height as f64)), dimensions: Some((asset.width as f64, asset.height as f64)),
extra_info: format!("{}x{}", asset.width, asset.height), extra_info: format!("{}x{}", asset.width, asset.height),
is_builtin: false,
}); });
} }
// Collect built-in effects from registry
for effect_def in lightningbeam_core::effect_registry::EffectRegistry::get_all() {
assets.push(AssetEntry {
id: effect_def.id,
name: effect_def.name.clone(),
category: AssetCategory::Effects,
drag_clip_type: DragClipType::Effect,
duration: 5.0, // Default duration when dropped
dimensions: None,
extra_info: format!("{:?}", effect_def.category),
is_builtin: true, // Built-in from registry
});
}
// Collect user-edited effects from document (that aren't in registry)
let registry_ids: HashSet<Uuid> = lightningbeam_core::effect_registry::EffectRegistry::get_all()
.iter()
.map(|e| e.id)
.collect();
for effect_def in document.effect_definitions.values() {
if !registry_ids.contains(&effect_def.id) {
// User-created/modified effect
assets.push(AssetEntry {
id: effect_def.id,
name: effect_def.name.clone(),
category: AssetCategory::Effects,
drag_clip_type: DragClipType::Effect,
duration: 5.0,
dimensions: None,
extra_info: format!("{:?}", effect_def.category),
is_builtin: false, // User effect
});
}
}
// Sort alphabetically by name // Sort alphabetically by name
assets.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); assets.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
@ -729,8 +851,13 @@ impl AssetLibraryPane {
.iter() .iter()
.filter(|asset| { .filter(|asset| {
// Category filter // Category filter
let category_matches = self.selected_category == AssetCategory::All let category_matches = if self.selected_category == AssetCategory::All {
|| asset.category == self.selected_category; // "All" tab: show everything EXCEPT built-in effects
// (built-in effects only appear in the Effects tab)
!(asset.category == AssetCategory::Effects && asset.is_builtin)
} else {
asset.category == self.selected_category
};
// Search filter // Search filter
let search_matches = let search_matches =
@ -773,6 +900,15 @@ impl AssetLibraryPane {
} }
} }
} }
lightningbeam_core::layer::AnyLayer::Effect(el) => {
if category == AssetCategory::Effects {
for instance in &el.clip_instances {
if instance.clip_id == asset_id {
return true;
}
}
}
}
} }
} }
false false
@ -793,6 +929,9 @@ impl AssetLibraryPane {
AssetCategory::Images => { AssetCategory::Images => {
document.remove_image_asset(&asset_id); document.remove_image_asset(&asset_id);
} }
AssetCategory::Effects => {
document.effect_definitions.remove(&asset_id);
}
AssetCategory::All => {} // Not a real category for deletion AssetCategory::All => {} // Not a real category for deletion
} }
} }
@ -820,6 +959,11 @@ impl AssetLibraryPane {
asset.name = new_name.to_string(); asset.name = new_name.to_string();
} }
} }
AssetCategory::Effects => {
if let Some(effect) = document.effect_definitions.get_mut(&asset_id) {
effect.name = new_name.to_string();
}
}
AssetCategory::All => {} // Not a real category for renaming AssetCategory::All => {} // Not a real category for renaming
} }
} }
@ -1268,6 +1412,9 @@ impl AssetLibraryPane {
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200)) Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
} }
} }
AssetCategory::Effects => {
Some(generate_effect_thumbnail())
}
AssetCategory::All => None, AssetCategory::All => None,
} }
}); });
@ -1555,6 +1702,9 @@ impl AssetLibraryPane {
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200)) Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
} }
} }
AssetCategory::Effects => {
Some(generate_effect_thumbnail())
}
AssetCategory::All => None, AssetCategory::All => None,
} }
}); });
@ -1739,6 +1889,7 @@ impl PaneRenderer for AssetLibraryPane {
if let Some(asset) = all_assets.iter().find(|a| a.id == context_asset_id) { if let Some(asset) = all_assets.iter().find(|a| a.id == context_asset_id) {
let asset_name = asset.name.clone(); let asset_name = asset.name.clone();
let asset_category = asset.category; let asset_category = asset.category;
let asset_is_builtin = asset.is_builtin;
let in_use = Self::is_asset_in_use( let in_use = Self::is_asset_in_use(
shared.action_executor.document(), shared.action_executor.document(),
context_asset_id, context_asset_id,
@ -1754,25 +1905,32 @@ impl PaneRenderer for AssetLibraryPane {
egui::Frame::popup(ui.style()).show(ui, |ui| { egui::Frame::popup(ui.style()).show(ui, |ui| {
ui.set_min_width(120.0); ui.set_min_width(120.0);
if ui.button("Rename").clicked() { // Built-in effects cannot be renamed or deleted
// Start inline rename if asset_is_builtin {
self.rename_state = Some(RenameState { ui.label(egui::RichText::new("Built-in effect")
asset_id: context_asset_id, .color(egui::Color32::from_gray(120))
category: asset_category, .italics());
edit_text: asset_name.clone(), } else {
}); if ui.button("Rename").clicked() {
self.context_menu = None; // Start inline rename
} self.rename_state = Some(RenameState {
asset_id: context_asset_id,
category: asset_category,
edit_text: asset_name.clone(),
});
self.context_menu = None;
}
if ui.button("Delete").clicked() { if ui.button("Delete").clicked() {
// Set up pending delete confirmation // Set up pending delete confirmation
self.pending_delete = Some(PendingDelete { self.pending_delete = Some(PendingDelete {
asset_id: context_asset_id, asset_id: context_asset_id,
asset_name: asset_name.clone(), asset_name: asset_name.clone(),
category: asset_category, category: asset_category,
in_use, in_use,
}); });
self.context_menu = None; self.context_menu = None;
}
} }
}); });
}); });

View File

@ -33,6 +33,8 @@ pub enum DragClipType {
AudioMidi, AudioMidi,
/// Static image asset /// Static image asset
Image, Image,
/// Effect (shader-based visual effect)
Effect,
} }
/// Information about an asset being dragged from the Asset Library /// Information about an asset being dragged from the Asset Library
@ -91,6 +93,7 @@ pub fn layer_matches_clip_type(layer: &lightningbeam_core::layer::AnyLayer, clip
(AnyLayer::Audio(audio), DragClipType::AudioMidi) => { (AnyLayer::Audio(audio), DragClipType::AudioMidi) => {
audio.audio_layer_type == AudioLayerType::Midi audio.audio_layer_type == AudioLayerType::Midi
} }
(AnyLayer::Effect(_), DragClipType::Effect) => true,
_ => false, _ => false,
} }
} }
@ -98,6 +101,7 @@ pub fn layer_matches_clip_type(layer: &lightningbeam_core::layer::AnyLayer, clip
/// Create a new layer of the appropriate type for a clip /// Create a new layer of the appropriate type for a clip
pub fn create_layer_for_clip_type(clip_type: DragClipType, name: &str) -> lightningbeam_core::layer::AnyLayer { pub fn create_layer_for_clip_type(clip_type: DragClipType, name: &str) -> lightningbeam_core::layer::AnyLayer {
use lightningbeam_core::layer::*; use lightningbeam_core::layer::*;
use lightningbeam_core::effect_layer::EffectLayer;
match clip_type { match clip_type {
DragClipType::Vector => AnyLayer::Vector(VectorLayer::new(name)), DragClipType::Vector => AnyLayer::Vector(VectorLayer::new(name)),
DragClipType::Video => AnyLayer::Video(VideoLayer::new(name)), DragClipType::Video => AnyLayer::Video(VideoLayer::new(name)),
@ -105,6 +109,7 @@ pub fn create_layer_for_clip_type(clip_type: DragClipType, name: &str) -> lightn
DragClipType::AudioMidi => AnyLayer::Audio(AudioLayer::new_midi(name)), DragClipType::AudioMidi => AnyLayer::Audio(AudioLayer::new_midi(name)),
// Images are placed as shapes on vector layers, not their own layer type // Images are placed as shapes on vector layers, not their own layer type
DragClipType::Image => AnyLayer::Vector(VectorLayer::new(name)), DragClipType::Image => AnyLayer::Vector(VectorLayer::new(name)),
DragClipType::Effect => AnyLayer::Effect(EffectLayer::new(name)),
} }
} }

View File

@ -6,8 +6,9 @@
use eframe::egui; use eframe::egui;
use lightningbeam_core::action::Action; use lightningbeam_core::action::Action;
use lightningbeam_core::clip::ClipInstance; use lightningbeam_core::clip::ClipInstance;
use lightningbeam_core::gpu::{BufferPool, Compositor}; use lightningbeam_core::gpu::{BufferPool, BufferFormat, BufferSpec, Compositor, EffectProcessor, HDR_FORMAT};
use lightningbeam_core::layer::{AnyLayer, AudioLayer, AudioLayerType, VideoLayer, VectorLayer}; use lightningbeam_core::layer::{AnyLayer, AudioLayer, AudioLayerType, VideoLayer, VectorLayer};
use lightningbeam_core::renderer::RenderedLayerType;
use super::{DragClipType, NodePath, PaneRenderer, SharedPaneState}; use super::{DragClipType, NodePath, PaneRenderer, SharedPaneState};
use std::sync::{Arc, Mutex, OnceLock}; use std::sync::{Arc, Mutex, OnceLock};
use vello::kurbo::Shape; use vello::kurbo::Shape;
@ -32,6 +33,8 @@ struct SharedVelloResources {
buffer_pool: Mutex<BufferPool>, buffer_pool: Mutex<BufferPool>,
/// Compositor for layer blending /// Compositor for layer blending
compositor: Compositor, compositor: Compositor,
/// Effect processor for GPU shader effects
effect_processor: Mutex<EffectProcessor>,
} }
/// Per-instance Vello resources (created for each Stage pane) /// Per-instance Vello resources (created for each Stage pane)
@ -196,7 +199,10 @@ impl SharedVelloResources {
// Use HDR format for internal compositing // Use HDR format for internal compositing
let compositor = Compositor::new(device, lightningbeam_core::gpu::HDR_FORMAT); let compositor = Compositor::new(device, lightningbeam_core::gpu::HDR_FORMAT);
println!("✅ Vello shared resources initialized (renderer, shaders, and HDR compositor)"); // Initialize effect processor for GPU shader effects
let effect_processor = EffectProcessor::new(device, lightningbeam_core::gpu::HDR_FORMAT);
println!("✅ Vello shared resources initialized (renderer, shaders, HDR compositor, and effect processor)");
Ok(Self { Ok(Self {
renderer: Arc::new(Mutex::new(renderer)), renderer: Arc::new(Mutex::new(renderer)),
@ -208,6 +214,7 @@ impl SharedVelloResources {
video_manager, video_manager,
buffer_pool: Mutex::new(buffer_pool), buffer_pool: Mutex::new(buffer_pool),
compositor, compositor,
effect_processor: Mutex::new(effect_processor),
}) })
} }
} }
@ -490,47 +497,144 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
} }
buffer_pool.release(bg_handle); buffer_pool.release(bg_handle);
// HDR buffer spec for effect processing
let hdr_spec = BufferSpec::new(width, height, BufferFormat::Rgba16Float);
// Lock effect processor
let mut effect_processor = shared.effect_processor.lock().unwrap();
// Now render and composite each layer incrementally // Now render and composite each layer incrementally
for rendered_layer in &composite_result.layers { for rendered_layer in &composite_result.layers {
if !rendered_layer.has_content { if !rendered_layer.has_content {
continue; continue;
} }
// Acquire a buffer for this layer match &rendered_layer.layer_type {
let layer_handle = buffer_pool.acquire(device, layer_spec); RenderedLayerType::Content => {
// Regular content layer - render and composite as before
let layer_handle = buffer_pool.acquire(device, layer_spec);
if let (Some(layer_view), Some(hdr_view)) = (buffer_pool.get_view(layer_handle), &instance_resources.hdr_texture_view) { if let (Some(layer_view), Some(hdr_view)) = (buffer_pool.get_view(layer_handle), &instance_resources.hdr_texture_view) {
// Render layer scene to buffer // Render layer scene to buffer
if let Ok(mut renderer) = shared.renderer.lock() { if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &rendered_layer.scene, layer_view, &layer_render_params).ok(); renderer.render_to_texture(device, queue, &rendered_layer.scene, layer_view, &layer_render_params).ok();
}
// Composite this layer onto the HDR accumulator with its opacity
let compositor_layer = lightningbeam_core::gpu::CompositorLayer::new(
layer_handle,
rendered_layer.opacity,
rendered_layer.blend_mode,
);
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("layer_composite_encoder"),
});
shared.compositor.composite(
device,
queue,
&mut encoder,
&[compositor_layer],
&buffer_pool,
hdr_view,
None, // Don't clear - blend onto existing content
);
queue.submit(Some(encoder.finish()));
}
buffer_pool.release(layer_handle);
} }
RenderedLayerType::Effect { effect_instances } => {
// Effect layer - apply effects to the current HDR accumulator
let current_time = self.document.current_time;
// Composite this layer onto the HDR accumulator with its opacity for effect_instance in effect_instances {
let compositor_layer = lightningbeam_core::gpu::CompositorLayer::new( // Get effect definition from document
layer_handle, let Some(effect_def) = self.document.get_effect_definition(&effect_instance.clip_id) else {
rendered_layer.opacity, println!("Effect definition not found for clip_id: {:?}", effect_instance.clip_id);
rendered_layer.blend_mode, continue;
); };
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { // Compile effect if needed
label: Some("layer_composite_encoder"), if !effect_processor.is_compiled(&effect_def.id) {
}); let success = effect_processor.compile_effect(device, effect_def);
shared.compositor.composite( if !success {
device, eprintln!("Failed to compile effect: {}", effect_def.name);
queue, continue;
&mut encoder, }
&[compositor_layer], println!("Compiled effect: {}", effect_def.name);
&buffer_pool, }
hdr_view,
None, // Don't clear - blend onto existing content // Create EffectInstance from ClipInstance for the processor
); // For now, create a simple effect instance with default parameters
queue.submit(Some(encoder.finish())); let effect_inst = lightningbeam_core::effect::EffectInstance::new(
effect_def,
effect_instance.timeline_start,
effect_instance.timeline_start + effect_instance.effective_duration(lightningbeam_core::effect::EFFECT_DURATION),
);
// Acquire temp buffer for effect output (HDR format)
let effect_output_handle = buffer_pool.acquire(device, hdr_spec);
if let (Some(hdr_view), Some(effect_output_view)) = (
&instance_resources.hdr_texture_view,
buffer_pool.get_view(effect_output_handle),
) {
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("effect_encoder"),
});
// Apply effect: HDR accumulator → effect output buffer
let applied = effect_processor.apply_effect(
device,
queue,
&mut encoder,
effect_def,
&effect_inst,
hdr_view,
effect_output_view,
width,
height,
current_time,
);
if applied {
queue.submit(Some(encoder.finish()));
// Copy effect output back to HDR accumulator
// We need to blit the effect result back to the HDR texture
let mut copy_encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("effect_copy_encoder"),
});
// Use compositor to copy (with opacity 1.0, replacing content)
let effect_layer = lightningbeam_core::gpu::CompositorLayer::normal(
effect_output_handle,
rendered_layer.opacity, // Apply effect layer opacity
);
shared.compositor.composite(
device,
queue,
&mut copy_encoder,
&[effect_layer],
&buffer_pool,
hdr_view,
Some([0.0, 0.0, 0.0, 0.0]), // Clear with transparent (we're replacing)
);
queue.submit(Some(copy_encoder.finish()));
} else {
eprintln!("Effect {} failed to apply", effect_def.name);
}
}
buffer_pool.release(effect_output_handle);
}
}
} }
// Release buffer immediately - it can be reused for next layer
buffer_pool.release(layer_handle);
} }
drop(effect_processor);
// Advance frame counter for buffer cleanup // Advance frame counter for buffer cleanup
buffer_pool.next_frame(); buffer_pool.next_frame();
drop(buffer_pool); drop(buffer_pool);
@ -4982,6 +5086,7 @@ impl PaneRenderer for StagePane {
DragClipType::AudioSampled => "Audio", DragClipType::AudioSampled => "Audio",
DragClipType::AudioMidi => "MIDI", DragClipType::AudioMidi => "MIDI",
DragClipType::Image => "Image", DragClipType::Image => "Image",
DragClipType::Effect => "Effect",
}); });
let new_layer = super::create_layer_for_clip_type(dragging.clip_type, &layer_name); let new_layer = super::create_layer_for_clip_type(dragging.clip_type, &layer_name);
@ -5030,6 +5135,30 @@ impl PaneRenderer for StagePane {
shape_instance, shape_instance,
); );
shared.pending_actions.push(Box::new(action)); shared.pending_actions.push(Box::new(action));
} else if dragging.clip_type == DragClipType::Effect {
// Handle effect drops specially
// Get effect definition from registry or document
let effect_def = lightningbeam_core::effect_registry::EffectRegistry::get_by_id(&dragging.clip_id)
.or_else(|| shared.action_executor.document().get_effect_definition(&dragging.clip_id).cloned());
if let Some(def) = effect_def {
// Ensure effect definition is in document (copy from registry if built-in)
if shared.action_executor.document().get_effect_definition(&def.id).is_none() {
shared.action_executor.document_mut().add_effect_definition(def.clone());
}
// Create clip instance for effect with 5 second default duration
let clip_instance = ClipInstance::new(def.id)
.with_timeline_start(drop_time)
.with_timeline_duration(5.0);
// Use AddEffectAction for effect layers
let action = lightningbeam_core::actions::AddEffectAction::new(
layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
}
} else { } else {
// For clips, create a clip instance // For clips, create a clip instance
let mut clip_instance = ClipInstance::new(dragging.clip_id) let mut clip_instance = ClipInstance::new(dragging.clip_id)

View File

@ -68,6 +68,7 @@ fn can_drop_on_layer(layer: &AnyLayer, clip_type: DragClipType) -> bool {
(AnyLayer::Audio(audio), DragClipType::AudioMidi) => { (AnyLayer::Audio(audio), DragClipType::AudioMidi) => {
audio.audio_layer_type == AudioLayerType::Midi audio.audio_layer_type == AudioLayerType::Midi
} }
(AnyLayer::Effect(_), DragClipType::Effect) => true,
_ => false, _ => false,
} }
} }
@ -171,6 +172,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
}; };
// Check each clip instance // Check each clip instance
@ -185,6 +187,9 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(_) => { lightningbeam_core::layer::AnyLayer::Video(_) => {
document.get_video_clip(&clip_instance.clip_id).map(|c| c.duration) document.get_video_clip(&clip_instance.clip_id).map(|c| c.duration)
} }
lightningbeam_core::layer::AnyLayer::Effect(_) => {
Some(lightningbeam_core::effect::EFFECT_DURATION)
}
}?; }?;
let instance_duration = clip_instance.effective_duration(clip_duration); let instance_duration = clip_instance.effective_duration(clip_duration);
@ -860,6 +865,7 @@ impl TimelinePane {
} }
} }
lightningbeam_core::layer::AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), // Purple lightningbeam_core::layer::AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), // Purple
lightningbeam_core::layer::AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), // Pink
}; };
// Color indicator bar on the left edge // Color indicator bar on the left edge
@ -1154,6 +1160,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
}; };
for clip_instance in clip_instances { for clip_instance in clip_instances {
@ -1171,6 +1178,9 @@ impl TimelinePane {
document.get_video_clip(&clip_instance.clip_id) document.get_video_clip(&clip_instance.clip_id)
.map(|c| c.duration) .map(|c| c.duration)
} }
lightningbeam_core::layer::AnyLayer::Effect(_) => {
Some(lightningbeam_core::effect::EFFECT_DURATION)
}
}; };
if let Some(clip_duration) = clip_duration { if let Some(clip_duration) = clip_duration {
@ -1329,6 +1339,10 @@ impl TimelinePane {
egui::Color32::from_rgb(150, 80, 220), // Purple egui::Color32::from_rgb(150, 80, 220), // Purple
egui::Color32::from_rgb(200, 150, 255), // Bright purple egui::Color32::from_rgb(200, 150, 255), // Bright purple
), ),
lightningbeam_core::layer::AnyLayer::Effect(_) => (
egui::Color32::from_rgb(220, 80, 160), // Pink
egui::Color32::from_rgb(255, 120, 200), // Bright pink
),
}; };
let clip_rect = egui::Rect::from_min_max( let clip_rect = egui::Rect::from_min_max(
@ -1552,6 +1566,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
}; };
// Check if click is within any clip instance // Check if click is within any clip instance
@ -1570,6 +1585,9 @@ impl TimelinePane {
document.get_video_clip(&clip_instance.clip_id) document.get_video_clip(&clip_instance.clip_id)
.map(|c| c.duration) .map(|c| c.duration)
} }
lightningbeam_core::layer::AnyLayer::Effect(_) => {
Some(lightningbeam_core::effect::EFFECT_DURATION)
}
}; };
if let Some(clip_duration) = clip_duration { if let Some(clip_duration) = clip_duration {
@ -1678,6 +1696,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
}; };
// Find selected clip instances in this layer // Find selected clip instances in this layer
@ -1734,6 +1753,9 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(vl) => { lightningbeam_core::layer::AnyLayer::Video(vl) => {
&vl.clip_instances &vl.clip_instances
} }
lightningbeam_core::layer::AnyLayer::Effect(el) => {
&el.clip_instances
}
}; };
// Find selected clip instances in this layer // Find selected clip instances in this layer
@ -1756,6 +1778,9 @@ impl TimelinePane {
.get_video_clip(&clip_instance.clip_id) .get_video_clip(&clip_instance.clip_id)
.map(|c| c.duration) .map(|c| c.duration)
} }
lightningbeam_core::layer::AnyLayer::Effect(_) => {
Some(lightningbeam_core::effect::EFFECT_DURATION)
}
}; };
if let Some(clip_duration) = clip_duration { if let Some(clip_duration) = clip_duration {
@ -2113,6 +2138,7 @@ impl PaneRenderer for TimelinePane {
lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Vector(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances, lightningbeam_core::layer::AnyLayer::Audio(al) => &al.clip_instances,
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
}; };
for clip_instance in clip_instances { for clip_instance in clip_instances {
@ -2130,6 +2156,9 @@ impl PaneRenderer for TimelinePane {
document.get_video_clip(&clip_instance.clip_id) document.get_video_clip(&clip_instance.clip_id)
.map(|c| c.duration) .map(|c| c.duration)
} }
lightningbeam_core::layer::AnyLayer::Effect(_) => {
Some(lightningbeam_core::effect::EFFECT_DURATION)
}
}; };
if let Some(clip_duration) = clip_duration { if let Some(clip_duration) = clip_duration {
@ -2357,103 +2386,216 @@ impl PaneRenderer for TimelinePane {
let layer_id = layer.id(); let layer_id = layer.id();
let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x).max(0.0); let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x).max(0.0);
// Get document dimensions for centering and create clip instance // Handle effect drops specially
let (center_x, center_y, mut clip_instance) = { if dragging.clip_type == DragClipType::Effect {
let doc = shared.action_executor.document(); // Get effect definition from registry or document
let center_x = doc.width / 2.0; let effect_def = lightningbeam_core::effect_registry::EffectRegistry::get_by_id(&dragging.clip_id)
let center_y = doc.height / 2.0; .or_else(|| shared.action_executor.document().get_effect_definition(&dragging.clip_id).cloned());
let mut clip_instance = ClipInstance::new(dragging.clip_id) if let Some(def) = effect_def {
.with_timeline_start(drop_time); // Ensure effect definition is in document (copy from registry if built-in)
if shared.action_executor.document().get_effect_definition(&def.id).is_none() {
shared.action_executor.document_mut().add_effect_definition(def.clone());
}
// For video clips, scale to fill document dimensions // Create clip instance for effect with 5 second default duration
if dragging.clip_type == DragClipType::Video { let clip_instance = ClipInstance::new(def.id)
if let Some((video_width, video_height)) = dragging.dimensions { .with_timeline_start(drop_time)
// Calculate scale to fill document .with_timeline_duration(5.0);
let scale_x = doc.width / video_width;
let scale_y = doc.height / video_height;
clip_instance.transform.scale_x = scale_x; // Use AddEffectAction for effect layers
clip_instance.transform.scale_y = scale_y; let action = lightningbeam_core::actions::AddEffectAction::new(
layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
}
// Position at (0, 0) to center the scaled video // Clear drag state
// (scaled dimensions = document dimensions, so top-left at origin centers it) *shared.dragging_asset = None;
clip_instance.transform.x = 0.0; } else {
clip_instance.transform.y = 0.0; // Get document dimensions for centering and create clip instance
let (center_x, center_y, mut clip_instance) = {
let doc = shared.action_executor.document();
let center_x = doc.width / 2.0;
let center_y = doc.height / 2.0;
let mut clip_instance = ClipInstance::new(dragging.clip_id)
.with_timeline_start(drop_time);
// For video clips, scale to fill document dimensions
if dragging.clip_type == DragClipType::Video {
if let Some((video_width, video_height)) = dragging.dimensions {
// Calculate scale to fill document
let scale_x = doc.width / video_width;
let scale_y = doc.height / video_height;
clip_instance.transform.scale_x = scale_x;
clip_instance.transform.scale_y = scale_y;
// Position at (0, 0) to center the scaled video
// (scaled dimensions = document dimensions, so top-left at origin centers it)
clip_instance.transform.x = 0.0;
clip_instance.transform.y = 0.0;
} else {
// No dimensions available, use document center
clip_instance.transform.x = center_x;
clip_instance.transform.y = center_y;
}
} else { } else {
// No dimensions available, use document center // Non-video clips: center at document center
clip_instance.transform.x = center_x; clip_instance.transform.x = center_x;
clip_instance.transform.y = center_y; clip_instance.transform.y = center_y;
} }
(center_x, center_y, clip_instance)
}; // doc is dropped here
// Save instance ID for potential grouping
let video_instance_id = clip_instance.id;
// Create and queue action for video
let action = lightningbeam_core::actions::AddClipInstanceAction::new(
layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
// If video has linked audio, auto-place it and create group
if let Some(linked_audio_clip_id) = dragging.linked_audio_clip_id {
eprintln!("DEBUG: Video has linked audio clip: {}", linked_audio_clip_id);
// Find or create sampled audio track where the audio won't overlap
let audio_layer_id = {
let doc = shared.action_executor.document();
let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time);
if let Some(id) = result {
eprintln!("DEBUG: Found existing audio track without overlap: {}", id);
} else {
eprintln!("DEBUG: No suitable audio track found, will create new one");
}
result
}.unwrap_or_else(|| {
eprintln!("DEBUG: Creating new audio track");
// Create new sampled audio layer
let audio_layer = lightningbeam_core::layer::AudioLayer::new_sampled("Audio Track");
let layer_id = shared.action_executor.document_mut().root.add_child(
lightningbeam_core::layer::AnyLayer::Audio(audio_layer)
);
eprintln!("DEBUG: Created audio layer with ID: {}", layer_id);
layer_id
});
eprintln!("DEBUG: Using audio layer ID: {}", audio_layer_id);
// Create audio clip instance at same timeline position
let audio_instance = ClipInstance::new(linked_audio_clip_id)
.with_timeline_start(drop_time);
let audio_instance_id = audio_instance.id;
eprintln!("DEBUG: Created audio instance: {} for clip: {}", audio_instance_id, linked_audio_clip_id);
// Queue audio action
let audio_action = lightningbeam_core::actions::AddClipInstanceAction::new(
audio_layer_id,
audio_instance,
);
shared.pending_actions.push(Box::new(audio_action));
eprintln!("DEBUG: Queued audio action, total pending: {}", shared.pending_actions.len());
// Create instance group linking video and audio
let mut group = lightningbeam_core::instance_group::InstanceGroup::new();
group.add_member(layer_id, video_instance_id);
group.add_member(audio_layer_id, audio_instance_id);
shared.action_executor.document_mut().add_instance_group(group);
eprintln!("DEBUG: Created instance group");
} else { } else {
// Non-video clips: center at document center eprintln!("DEBUG: Video has NO linked audio clip!");
clip_instance.transform.x = center_x;
clip_instance.transform.y = center_y;
} }
(center_x, center_y, clip_instance) // Clear drag state
}; // doc is dropped here *shared.dragging_asset = None;
}
}
} else {
// No existing layer at this position - show "create new layer" indicator
// and handle drop to create a new layer
let layer_y = content_rect.min.y + hovered_layer_index as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
let highlight_rect = egui::Rect::from_min_size(
egui::pos2(content_rect.min.x, layer_y),
egui::vec2(content_rect.width(), LAYER_HEIGHT),
);
// Save instance ID for potential grouping // Blue highlight for "will create new layer"
let video_instance_id = clip_instance.id; ui.painter().rect_filled(
highlight_rect,
0.0,
egui::Color32::from_rgba_unmultiplied(100, 150, 255, 40),
);
// Create and queue action for video // Show drop time indicator
let action = lightningbeam_core::actions::AddClipInstanceAction::new( let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x).max(0.0);
layer_id, let drop_x = self.time_to_x(drop_time);
clip_instance, if drop_x >= 0.0 && drop_x <= content_rect.width() {
ui.painter().line_segment(
[
egui::pos2(content_rect.min.x + drop_x, layer_y),
egui::pos2(content_rect.min.x + drop_x, layer_y + LAYER_HEIGHT),
],
egui::Stroke::new(2.0, egui::Color32::WHITE),
); );
shared.pending_actions.push(Box::new(action)); }
// If video has linked audio, auto-place it and create group // Handle drop on mouse release - create new layer
if let Some(linked_audio_clip_id) = dragging.linked_audio_clip_id { if ui.input(|i| i.pointer.any_released()) {
eprintln!("DEBUG: Video has linked audio clip: {}", linked_audio_clip_id); let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x).max(0.0);
// Find or create sampled audio track where the audio won't overlap // Create the appropriate layer type
let audio_layer_id = { let layer_name = format!("{} Layer", match dragging.clip_type {
let doc = shared.action_executor.document(); DragClipType::Vector => "Vector",
let result = find_sampled_audio_track_for_clip(doc, linked_audio_clip_id, drop_time); DragClipType::Video => "Video",
if let Some(id) = result { DragClipType::AudioSampled => "Audio",
eprintln!("DEBUG: Found existing audio track without overlap: {}", id); DragClipType::AudioMidi => "MIDI",
} else { DragClipType::Image => "Image",
eprintln!("DEBUG: No suitable audio track found, will create new one"); DragClipType::Effect => "Effect",
});
let new_layer = super::create_layer_for_clip_type(dragging.clip_type, &layer_name);
let new_layer_id = new_layer.id();
// Add the layer
shared.action_executor.document_mut().root.add_child(new_layer);
// Now add the clip to the new layer
if dragging.clip_type == DragClipType::Effect {
// Handle effect drops
let effect_def = lightningbeam_core::effect_registry::EffectRegistry::get_by_id(&dragging.clip_id)
.or_else(|| shared.action_executor.document().get_effect_definition(&dragging.clip_id).cloned());
if let Some(def) = effect_def {
if shared.action_executor.document().get_effect_definition(&def.id).is_none() {
shared.action_executor.document_mut().add_effect_definition(def.clone());
} }
result
}.unwrap_or_else(|| { let clip_instance = ClipInstance::new(def.id)
eprintln!("DEBUG: Creating new audio track"); .with_timeline_start(drop_time)
// Create new sampled audio layer .with_timeline_duration(5.0);
let audio_layer = lightningbeam_core::layer::AudioLayer::new_sampled("Audio Track");
let layer_id = shared.action_executor.document_mut().root.add_child( let action = lightningbeam_core::actions::AddEffectAction::new(
lightningbeam_core::layer::AnyLayer::Audio(audio_layer) new_layer_id,
clip_instance,
); );
eprintln!("DEBUG: Created audio layer with ID: {}", layer_id); shared.pending_actions.push(Box::new(action));
layer_id }
});
eprintln!("DEBUG: Using audio layer ID: {}", audio_layer_id);
// Create audio clip instance at same timeline position
let audio_instance = ClipInstance::new(linked_audio_clip_id)
.with_timeline_start(drop_time);
let audio_instance_id = audio_instance.id;
eprintln!("DEBUG: Created audio instance: {} for clip: {}", audio_instance_id, linked_audio_clip_id);
// Queue audio action
let audio_action = lightningbeam_core::actions::AddClipInstanceAction::new(
audio_layer_id,
audio_instance,
);
shared.pending_actions.push(Box::new(audio_action));
eprintln!("DEBUG: Queued audio action, total pending: {}", shared.pending_actions.len());
// Create instance group linking video and audio
let mut group = lightningbeam_core::instance_group::InstanceGroup::new();
group.add_member(layer_id, video_instance_id);
group.add_member(audio_layer_id, audio_instance_id);
shared.action_executor.document_mut().add_instance_group(group);
eprintln!("DEBUG: Created instance group");
} else { } else {
eprintln!("DEBUG: Video has NO linked audio clip!"); // Handle other clip types
let clip_instance = ClipInstance::new(dragging.clip_id)
.with_timeline_start(drop_time);
let action = lightningbeam_core::actions::AddClipInstanceAction::new(
new_layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
} }
// Clear drag state // Clear drag state