Compare commits

..

5 Commits

Author SHA1 Message Date
Skyler Lehmkuhl 1c3f794958 Merge remote and fix color space 2026-03-01 15:50:53 -05:00
Skyler Lehmkuhl da02edb9f5 Move raster editing to GPU 2026-03-01 15:41:28 -05:00
Skyler Lehmkuhl e85efe7405 Fix smudge tool 2026-03-01 14:00:39 -05:00
Skyler Lehmkuhl 1c7256a12e Add raster layers 2026-03-01 13:16:49 -05:00
Skyler Lehmkuhl 4b638b882f Make tools dependent on layer type 2026-03-01 11:22:03 -05:00
39 changed files with 2927 additions and 76 deletions

View File

@ -104,6 +104,9 @@ impl Action for AddClipInstanceAction {
AnyLayer::Group(_) => { AnyLayer::Group(_) => {
return Err("Cannot add clip instances directly to group layers".to_string()); return Err("Cannot add clip instances directly to group layers".to_string());
} }
AnyLayer::Raster(_) => {
return Err("Cannot add clip instances directly to group layers".to_string());
}
} }
self.executed = true; self.executed = true;
@ -142,6 +145,9 @@ impl Action for AddClipInstanceAction {
AnyLayer::Group(_) => { AnyLayer::Group(_) => {
// Group layers don't have clip instances, nothing to rollback // Group layers don't have clip instances, nothing to rollback
} }
AnyLayer::Raster(_) => {
// Raster layers don't have clip instances, nothing to rollback
}
} }
self.executed = false; self.executed = false;

View File

@ -137,6 +137,7 @@ impl Action for AddLayerAction {
AnyLayer::Video(_) => "Add video layer", AnyLayer::Video(_) => "Add video layer",
AnyLayer::Effect(_) => "Add effect layer", AnyLayer::Effect(_) => "Add effect layer",
AnyLayer::Group(_) => "Add group layer", AnyLayer::Group(_) => "Add group layer",
AnyLayer::Raster(_) => "Add raster layer",
} }
.to_string() .to_string()
} }

View File

@ -36,6 +36,7 @@ impl Action for LoopClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
for (instance_id, _old_dur, new_dur, _old_lb, new_lb) in loops { for (instance_id, _old_dur, new_dur, _old_lb, new_lb) in loops {
@ -59,6 +60,7 @@ impl Action for LoopClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
for (instance_id, old_dur, _new_dur, old_lb, _new_lb) in loops { for (instance_id, old_dur, _new_dur, old_lb, _new_lb) in loops {

View File

@ -31,6 +31,7 @@ pub mod convert_to_movie_clip;
pub mod region_split; pub mod region_split;
pub mod toggle_group_expansion; pub mod toggle_group_expansion;
pub mod group_layers; pub mod group_layers;
pub mod raster_stroke;
pub mod move_layer; pub mod move_layer;
pub use add_clip_instance::AddClipInstanceAction; pub use add_clip_instance::AddClipInstanceAction;
@ -61,4 +62,5 @@ pub use convert_to_movie_clip::ConvertToMovieClipAction;
pub use region_split::RegionSplitAction; pub use region_split::RegionSplitAction;
pub use toggle_group_expansion::ToggleGroupExpansionAction; pub use toggle_group_expansion::ToggleGroupExpansionAction;
pub use group_layers::GroupLayersAction; pub use group_layers::GroupLayersAction;
pub use raster_stroke::RasterStrokeAction;
pub use move_layer::MoveLayerAction; pub use move_layer::MoveLayerAction;

View File

@ -57,6 +57,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -95,6 +96,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
let group: Vec<(Uuid, f64, f64)> = moves.iter().filter_map(|(id, old_start, _)| { let group: Vec<(Uuid, f64, f64)> = moves.iter().filter_map(|(id, old_start, _)| {
@ -129,6 +131,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
// Update timeline_start for each clip instance // Update timeline_start for each clip instance
@ -155,6 +158,7 @@ impl Action for MoveClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
// Restore original timeline_start for each clip instance // Restore original timeline_start for each clip instance

View File

@ -0,0 +1,81 @@
//! Raster stroke action — records and undoes a brush stroke on a RasterLayer.
//!
//! The brush engine paints directly into `RasterKeyframe::raw_pixels` during the
//! drag (via `document_mut()`). This action captures the pixel buffer state
//! *before* and *after* the stroke so it can be undone / redone without
//! re-running the brush engine.
//!
//! `execute` → swap in `buffer_after`
//! `rollback` → swap in `buffer_before`
use crate::action::Action;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Action that records a single brush stroke for undo/redo.
///
/// The stroke must already be painted into the document's `raw_pixels` before
/// this action is executed for the first time. Undo/redo then swaps full pixel
/// snapshots rather than re-running the brush engine.
pub struct RasterStrokeAction {
    /// Id of the `AnyLayer::Raster` layer the stroke was painted on.
    layer_id: Uuid,
    /// Timeline position of the keyframe that was painted.
    time: f64,
    /// Raw RGBA pixels *before* the stroke (for rollback / undo)
    buffer_before: Vec<u8>,
    /// Raw RGBA pixels *after* the stroke (for execute / redo)
    buffer_after: Vec<u8>,
    /// Canvas width in pixels (used when the keyframe must be (re)created).
    width: u32,
    /// Canvas height in pixels (used when the keyframe must be (re)created).
    height: u32,
}
impl RasterStrokeAction {
/// Create the action.
///
/// * `buffer_before` raw RGBA pixels captured just before the stroke began.
/// * `buffer_after` raw RGBA pixels captured just after the stroke finished.
pub fn new(
layer_id: Uuid,
time: f64,
buffer_before: Vec<u8>,
buffer_after: Vec<u8>,
width: u32,
height: u32,
) -> Self {
Self { layer_id, time, buffer_before, buffer_after, width, height }
}
}
impl Action for RasterStrokeAction {
    /// Redo: install the post-stroke pixel snapshot on the layer's keyframe.
    fn execute(&mut self, document: &mut Document) -> Result<(), String> {
        let kf = get_keyframe_mut(document, &self.layer_id, self.time, self.width, self.height)?;
        // `clone_from` reuses the keyframe's existing Vec allocation when its
        // capacity suffices, instead of allocating a fresh buffer on every
        // redo (buffers are width*height*4 bytes, so this matters).
        kf.raw_pixels.clone_from(&self.buffer_after);
        Ok(())
    }

    /// Undo: install the pre-stroke pixel snapshot on the layer's keyframe.
    fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
        let kf = get_keyframe_mut(document, &self.layer_id, self.time, self.width, self.height)?;
        kf.raw_pixels.clone_from(&self.buffer_before);
        Ok(())
    }

    /// Human-readable label shown in the undo history.
    fn description(&self) -> String {
        "Paint stroke".to_string()
    }
}
/// Resolve `layer_id` to a raster layer in `document` and return a mutable
/// reference to its keyframe at `time`, creating a `width` × `height` keyframe
/// if none exists there yet.
fn get_keyframe_mut<'a>(
    document: &'a mut Document,
    layer_id: &Uuid,
    time: f64,
    width: u32,
    height: u32,
) -> Result<&'a mut crate::raster_layer::RasterKeyframe, String> {
    let layer = match document.get_layer_mut(layer_id) {
        Some(l) => l,
        None => return Err(format!("Layer {} not found", layer_id)),
    };
    match layer {
        AnyLayer::Raster(rl) => Ok(rl.ensure_keyframe_at(time, width, height)),
        _ => Err("Not a raster layer".to_string()),
    }
}

View File

@ -45,6 +45,7 @@ impl Action for RemoveClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
// Find and remove the instance, saving it for rollback // Find and remove the instance, saving it for rollback
@ -70,6 +71,7 @@ impl Action for RemoveClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
clip_instances.push(instance); clip_instances.push(instance);

View File

@ -113,6 +113,7 @@ impl Action for SplitClipInstanceAction {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => return Err("Cannot split clip instances on group layers".to_string()), AnyLayer::Group(_) => return Err("Cannot split clip instances on group layers".to_string()),
AnyLayer::Raster(_) => return Err("Cannot split clip instances on group layers".to_string()),
}; };
let instance = clip_instances let instance = clip_instances
@ -232,6 +233,9 @@ impl Action for SplitClipInstanceAction {
AnyLayer::Group(_) => { AnyLayer::Group(_) => {
return Err("Cannot split clip instances on group layers".to_string()); return Err("Cannot split clip instances on group layers".to_string());
} }
AnyLayer::Raster(_) => {
return Err("Cannot split clip instances on group layers".to_string());
}
} }
self.executed = true; self.executed = true;
@ -290,6 +294,9 @@ impl Action for SplitClipInstanceAction {
AnyLayer::Group(_) => { AnyLayer::Group(_) => {
// Group layers don't have clip instances, nothing to rollback // Group layers don't have clip instances, nothing to rollback
} }
AnyLayer::Raster(_) => {
// Raster layers don't have clip instances, nothing to rollback
}
} }
self.executed = false; self.executed = false;

View File

@ -100,6 +100,7 @@ impl Action for TransformClipInstancesAction {
} }
AnyLayer::Effect(_) => {} AnyLayer::Effect(_) => {}
AnyLayer::Group(_) => {} AnyLayer::Group(_) => {}
AnyLayer::Raster(_) => {}
} }
Ok(()) Ok(())
} }
@ -138,6 +139,7 @@ impl Action for TransformClipInstancesAction {
} }
AnyLayer::Effect(_) => {} AnyLayer::Effect(_) => {}
AnyLayer::Group(_) => {} AnyLayer::Group(_) => {}
AnyLayer::Raster(_) => {}
} }
Ok(()) Ok(())
} }

View File

@ -100,6 +100,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -136,6 +137,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) { if let Some(instance) = clip_instances.iter().find(|ci| ci.id == *member_instance_id) {
@ -179,6 +181,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Vector(vl) => &vl.clip_instances, AnyLayer::Vector(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
let instance = clip_instances.iter() let instance = clip_instances.iter()
@ -271,6 +274,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
// Apply trims // Apply trims
@ -310,6 +314,7 @@ impl Action for TrimClipInstancesAction {
AnyLayer::Video(vl) => &mut vl.clip_instances, AnyLayer::Video(vl) => &mut vl.clip_instances,
AnyLayer::Effect(el) => &mut el.clip_instances, AnyLayer::Effect(el) => &mut el.clip_instances,
AnyLayer::Group(_) => continue, AnyLayer::Group(_) => continue,
AnyLayer::Raster(_) => continue,
}; };
// Restore original trim values // Restore original trim values

View File

@ -0,0 +1,595 @@
//! Raster brush engine — pure-Rust MyPaint-style Gaussian dab renderer
//!
//! ## Algorithm
//!
//! Based on the libmypaint brush engine (ISC license, Martin Renold et al.).
//!
//! ### Dab shape
//! For each pixel at normalised squared distance `rr = (dist / radius)²` from the
//! dab centre, the opacity weight is calculated using two linear segments:
//!
//! ```text
//! opa
//! ^
//! * .
//! | *
//! | .
//! +-----------*> rr
//! 0 hardness 1
//! ```
//!
//! - segment 1 (rr ≤ hardness): `opa = 1 + rr * (-(1/hardness - 1))`
//! - segment 2 (hardness < rr ≤ 1): `opa = hardness/(1-hardness) - rr * hardness/(1-hardness)`
//! - rr > 1: opa = 0
//!
//! ### Dab placement
//! Dabs are placed along the stroke polyline at intervals of
//! `spacing = radius * dabs_per_radius`. Fractional remainder is tracked across
//! consecutive `apply_stroke` calls via `StrokeState`.
//!
//! ### Blending
//! Normal mode uses the standard "over" operator on premultiplied RGBA:
//! ```text
//! result_a = opa_a + (1 - opa_a) * bottom_a
//! result_rgb = opa_a * top_rgb + (1 - opa_a) * bottom_rgb
//! ```
//! Erase mode: subtract `opa_a` from the destination alpha and premultiply.
use image::RgbaImage;
use crate::raster_layer::{RasterBlendMode, StrokeRecord};
/// A single brush dab ready for GPU dispatch.
///
/// Padded to 64 bytes (4 × 16 bytes) for WGSL struct alignment in a storage buffer.
/// All fields are plain `f32`/`u32` so the struct derives `bytemuck::Pod` and can
/// be uploaded by byte-copy.
#[repr(C)]
#[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)]
pub struct GpuDab {
    /// Dab centre X (canvas pixels)
    pub x: f32,
    /// Dab centre Y (canvas pixels)
    pub y: f32,
    /// Dab radius (pixels)
    pub radius: f32,
    /// Hardness 0.0–1.0 (controls the falloff curve shape)
    pub hardness: f32,
    /// Composite opacity for this dab
    pub opacity: f32,
    /// Brush color R (linear, premultiplied)
    pub color_r: f32,
    /// Brush color G
    pub color_g: f32,
    /// Brush color B
    pub color_b: f32,
    /// Brush color A
    pub color_a: f32,
    /// Normalized stroke direction X (smudge only; 0 otherwise)
    pub ndx: f32,
    /// Normalized stroke direction Y (smudge only; 0 otherwise)
    pub ndy: f32,
    /// Distance to sample behind stroke for smudge (smudge only; 0 otherwise)
    pub smudge_dist: f32,
    /// Blend mode: 0 = Normal, 1 = Erase, 2 = Smudge
    /// (must stay in sync with the `RasterBlendMode` mapping in `compute_dabs`)
    pub blend_mode: u32,
    // Explicit padding up to the 64-byte WGSL struct stride.
    pub _pad0: u32,
    pub _pad1: u32,
    pub _pad2: u32,
}
/// Transient brush stroke state (tracks partial dab position between segments)
///
/// Carried across consecutive `apply_stroke_with_state` / `compute_dabs` calls
/// so dab spacing stays consistent when a stroke arrives incrementally.
pub struct StrokeState {
    /// Distance along the path already "consumed" toward the next dab (in pixels)
    pub distance_since_last_dab: f32,
}
impl StrokeState {
    /// Fresh state: no distance consumed yet toward the next dab.
    pub fn new() -> Self {
        Self { distance_since_last_dab: 0.0 }
    }
}
impl Default for StrokeState {
    /// Same as [`StrokeState::new`].
    fn default() -> Self { Self::new() }
}
/// Pure-Rust MyPaint-style Gaussian dab brush engine
///
/// Stateless marker type: all methods are associated functions; per-stroke
/// state lives in [`StrokeState`].
pub struct BrushEngine;
impl BrushEngine {
    /// Compute the list of GPU dabs for a stroke segment.
    ///
    /// Uses the same dab-spacing logic as [`apply_stroke_with_state`] but produces
    /// [`GpuDab`] structs for upload to the GPU compute pipeline instead of painting
    /// into a pixel buffer.
    ///
    /// Also returns the union bounding box of all dabs as `(x0, y0, x1, y1)` in
    /// integer canvas pixel coordinates. Coordinates are NOT clamped to the canvas
    /// and may be negative; the box stays at the empty sentinel
    /// `(i32::MAX, i32::MAX, i32::MIN, i32::MIN)` when the returned Vec is empty.
    pub fn compute_dabs(
        stroke: &StrokeRecord,
        state: &mut StrokeState,
    ) -> (Vec<GpuDab>, (i32, i32, i32, i32)) {
        let mut dabs: Vec<GpuDab> = Vec::new();
        // Empty sentinel: min components start at MAX and max components at MIN
        // so the first pushed dab initialises the box.
        let mut bbox = (i32::MAX, i32::MAX, i32::MIN, i32::MIN);
        // Numeric encoding consumed by the compute shader (see GpuDab::blend_mode).
        let blend_mode_u = match stroke.blend_mode {
            RasterBlendMode::Normal => 0u32,
            RasterBlendMode::Erase => 1u32,
            RasterBlendMode::Smudge => 2u32,
        };
        // Shared helper: append a dab and grow the bounding box to cover it.
        let mut push_dab = |dabs: &mut Vec<GpuDab>,
                            bbox: &mut (i32, i32, i32, i32),
                            x: f32, y: f32,
                            radius: f32, opacity: f32,
                            ndx: f32, ndy: f32, smudge_dist: f32| {
            // One pixel of fringe so soft anti-aliased edges land inside the box.
            let r_fringe = radius + 1.0;
            bbox.0 = bbox.0.min((x - r_fringe).floor() as i32);
            bbox.1 = bbox.1.min((y - r_fringe).floor() as i32);
            bbox.2 = bbox.2.max((x + r_fringe).ceil() as i32);
            bbox.3 = bbox.3.max((y + r_fringe).ceil() as i32);
            dabs.push(GpuDab {
                x, y, radius,
                hardness: stroke.brush_settings.hardness,
                opacity,
                color_r: stroke.color[0],
                color_g: stroke.color[1],
                color_b: stroke.color[2],
                color_a: stroke.color[3],
                ndx, ndy, smudge_dist,
                blend_mode: blend_mode_u,
                _pad0: 0, _pad1: 0, _pad2: 0,
            });
        };
        if stroke.points.len() < 2 {
            // Single-point "tap": one dab at that point's pressure.
            if let Some(pt) = stroke.points.first() {
                let r = stroke.brush_settings.radius_at_pressure(pt.pressure);
                let o = stroke.brush_settings.opacity_at_pressure(pt.pressure);
                // Single-tap smudge has no direction — skip (same as CPU engine)
                if !matches!(stroke.blend_mode, RasterBlendMode::Smudge) {
                    push_dab(&mut dabs, &mut bbox, pt.x, pt.y, r, o, 0.0, 0.0, 0.0);
                }
                state.distance_since_last_dab = 0.0;
            }
            return (dabs, bbox);
        }
        // Walk each polyline segment, placing dabs at pressure-dependent spacing.
        for window in stroke.points.windows(2) {
            let p0 = &window[0];
            let p1 = &window[1];
            let dx = p1.x - p0.x;
            let dy = p1.y - p0.y;
            let seg_len = (dx * dx + dy * dy).sqrt();
            // Degenerate (coincident) points contribute no distance.
            if seg_len < 1e-4 { continue; }
            // t is the normalised position [0,1] along this segment.
            let mut t = 0.0f32;
            while t < 1.0 {
                let pressure = p0.pressure + t * (p1.pressure - p0.pressure);
                let radius = stroke.brush_settings.radius_at_pressure(pressure);
                let spacing = (radius * stroke.brush_settings.dabs_per_radius).max(0.5);
                let dist_to_next = spacing - state.distance_since_last_dab;
                let seg_t_to_next = (dist_to_next / seg_len).max(0.0);
                if seg_t_to_next > 1.0 - t {
                    // Next dab falls past this segment: bank the leftover distance.
                    state.distance_since_last_dab += seg_len * (1.0 - t);
                    break;
                }
                t += seg_t_to_next;
                let x2 = p0.x + t * dx;
                let y2 = p0.y + t * dy;
                // Re-sample pressure at the actual dab position.
                let pressure2 = p0.pressure + t * (p1.pressure - p0.pressure);
                let radius2 = stroke.brush_settings.radius_at_pressure(pressure2);
                let opacity2 = stroke.brush_settings.opacity_at_pressure(pressure2);
                if matches!(stroke.blend_mode, RasterBlendMode::Smudge) {
                    // Smudge dabs carry the stroke direction and pull distance.
                    let ndx = dx / seg_len;
                    let ndy = dy / seg_len;
                    let smudge_dist =
                        (radius2 * stroke.brush_settings.dabs_per_radius).max(1.0);
                    push_dab(&mut dabs, &mut bbox,
                             x2, y2, radius2, opacity2, ndx, ndy, smudge_dist);
                } else {
                    push_dab(&mut dabs, &mut bbox,
                             x2, y2, radius2, opacity2, 0.0, 0.0, 0.0);
                }
                state.distance_since_last_dab = 0.0;
            }
        }
        (dabs, bbox)
    }

    /// Apply a complete stroke to a pixel buffer.
    ///
    /// A fresh [`StrokeState`] is created for each stroke (starts with full dab
    /// placement spacing so the first dab lands at the very first point).
    pub fn apply_stroke(buffer: &mut RgbaImage, stroke: &StrokeRecord) {
        let mut state = StrokeState::new();
        // Ensure the very first point always gets a dab: with MAX already
        // "consumed", the spacing test fires immediately at t = 0.
        state.distance_since_last_dab = f32::MAX;
        Self::apply_stroke_with_state(buffer, stroke, &mut state);
    }

    /// Apply a stroke segment to a buffer while preserving dab-placement state.
    ///
    /// Use this when building up a stroke incrementally (e.g. live drawing) so
    /// that dab spacing is consistent across motion events.
    pub fn apply_stroke_with_state(
        buffer: &mut RgbaImage,
        stroke: &StrokeRecord,
        state: &mut StrokeState,
    ) {
        if stroke.points.len() < 2 {
            // Single-point "tap": draw one dab at the given pressure
            if let Some(pt) = stroke.points.first() {
                let r = stroke.brush_settings.radius_at_pressure(pt.pressure);
                let o = stroke.brush_settings.opacity_at_pressure(pt.pressure);
                // Smudge has no drag direction on a single tap — skip painting
                if !matches!(stroke.blend_mode, RasterBlendMode::Smudge) {
                    Self::render_dab(buffer, pt.x, pt.y, r, stroke.brush_settings.hardness,
                                     o, stroke.color, stroke.blend_mode);
                }
                state.distance_since_last_dab = 0.0;
            }
            return;
        }
        // Same dab-placement walk as compute_dabs, but painting on the CPU.
        for window in stroke.points.windows(2) {
            let p0 = &window[0];
            let p1 = &window[1];
            let dx = p1.x - p0.x;
            let dy = p1.y - p0.y;
            let seg_len = (dx * dx + dy * dy).sqrt();
            if seg_len < 1e-4 {
                continue;
            }
            // Interpolate across this segment
            let mut t = 0.0f32;
            while t < 1.0 {
                let pressure = p0.pressure + t * (p1.pressure - p0.pressure);
                let radius = stroke.brush_settings.radius_at_pressure(pressure);
                let spacing = radius * stroke.brush_settings.dabs_per_radius;
                let spacing = spacing.max(0.5); // at least half a pixel
                let dist_to_next = spacing - state.distance_since_last_dab;
                let seg_t_to_next = (dist_to_next / seg_len).max(0.0);
                if seg_t_to_next > 1.0 - t {
                    // Not enough distance left in this segment for another dab
                    state.distance_since_last_dab += seg_len * (1.0 - t);
                    break;
                }
                t += seg_t_to_next;
                let x2 = p0.x + t * dx;
                let y2 = p0.y + t * dy;
                let pressure2 = p0.pressure + t * (p1.pressure - p0.pressure);
                let radius2 = stroke.brush_settings.radius_at_pressure(pressure2);
                let opacity2 = stroke.brush_settings.opacity_at_pressure(pressure2);
                if matches!(stroke.blend_mode, RasterBlendMode::Smudge) {
                    // Directional warp smudge: each pixel in the dab footprint
                    // samples from a position offset backwards along the stroke,
                    // preserving lateral color structure.
                    let ndx = dx / seg_len;
                    let ndy = dy / seg_len;
                    let smudge_dist = (radius2 * stroke.brush_settings.dabs_per_radius).max(1.0);
                    Self::render_smudge_dab(buffer, x2, y2, radius2,
                                            stroke.brush_settings.hardness,
                                            opacity2, ndx, ndy, smudge_dist);
                } else {
                    Self::render_dab(buffer, x2, y2, radius2,
                                     stroke.brush_settings.hardness,
                                     opacity2, stroke.color, stroke.blend_mode);
                }
                state.distance_since_last_dab = 0.0;
            }
        }
    }

    /// Render a single Gaussian dab at pixel position (x, y).
    ///
    /// Uses the two-segment linear falloff from MyPaint/libmypaint for the
    /// opacity mask, then blends using the requested `blend_mode`.
    pub fn render_dab(
        buffer: &mut RgbaImage,
        x: f32,
        y: f32,
        radius: f32,
        hardness: f32,
        opacity: f32,
        color: [f32; 4],
        blend_mode: RasterBlendMode,
    ) {
        // Sub-half-pixel or fully transparent dabs contribute nothing.
        if radius < 0.5 || opacity <= 0.0 {
            return;
        }
        // Clamp away 0 so the 1/hardness below cannot divide by zero. At
        // hardness == 1.0 the seg2 coefficients are inf but that branch is
        // unreachable (rr <= hardness always holds for rr <= 1).
        let hardness = hardness.clamp(1e-3, 1.0);
        // Pre-compute the two linear-segment coefficients (from libmypaint render_dab_mask)
        let seg1_offset = 1.0f32;
        let seg1_slope = -(1.0 / hardness - 1.0);
        let seg2_offset = hardness / (1.0 - hardness);
        let seg2_slope = -hardness / (1.0 - hardness);
        // Iterate the dab's bounding box, clamped to the canvas.
        let r_fringe = radius + 1.0;
        let x0 = ((x - r_fringe).floor() as i32).max(0) as u32;
        let y0 = ((y - r_fringe).floor() as i32).max(0) as u32;
        let x1 = ((x + r_fringe).ceil() as i32).min(buffer.width() as i32 - 1).max(0) as u32;
        let y1 = ((y + r_fringe).ceil() as i32).min(buffer.height() as i32 - 1).max(0) as u32;
        let one_over_r2 = 1.0 / (radius * radius);
        for py in y0..=y1 {
            for px in x0..=x1 {
                // rr = normalised squared distance from the dab centre,
                // measured at the pixel centre (+0.5).
                let dx = px as f32 + 0.5 - x;
                let dy = py as f32 + 0.5 - y;
                let rr = (dx * dx + dy * dy) * one_over_r2;
                if rr > 1.0 {
                    continue;
                }
                // Two-segment opacity (identical to libmypaint calculate_opa)
                let opa_weight = if rr <= hardness {
                    seg1_offset + rr * seg1_slope
                } else {
                    seg2_offset + rr * seg2_slope
                }
                .clamp(0.0, 1.0);
                let dab_alpha = opa_weight * opacity * color[3];
                if dab_alpha <= 0.0 {
                    continue;
                }
                let pixel = buffer.get_pixel_mut(px, py);
                // Destination pixel, normalised to 0.0–1.0 per channel.
                let dst = [
                    pixel[0] as f32 / 255.0,
                    pixel[1] as f32 / 255.0,
                    pixel[2] as f32 / 255.0,
                    pixel[3] as f32 / 255.0,
                ];
                let (out_r, out_g, out_b, out_a) = match blend_mode {
                    RasterBlendMode::Normal | RasterBlendMode::Smudge => {
                        // Standard "over" operator (smudge pre-computes its color upstream)
                        let oa = dab_alpha;
                        let ba = 1.0 - oa;
                        let out_a = oa + ba * dst[3];
                        let out_r = oa * color[0] + ba * dst[0];
                        let out_g = oa * color[1] + ba * dst[1];
                        let out_b = oa * color[2] + ba * dst[2];
                        (out_r, out_g, out_b, out_a)
                    }
                    RasterBlendMode::Erase => {
                        // Multiplicative erase: each dab removes dab_alpha *fraction* of remaining
                        // alpha. This prevents dense overlapping dabs from summing past 1.0 and
                        // fully erasing at low opacity — opacity now controls the per-dab fraction
                        // removed rather than an absolute amount.
                        let new_a = dst[3] * (1.0 - dab_alpha);
                        let scale = if dst[3] > 1e-6 { new_a / dst[3] } else { 0.0 };
                        (dst[0] * scale, dst[1] * scale, dst[2] * scale, new_a)
                    }
                };
                pixel[0] = (out_r.clamp(0.0, 1.0) * 255.0) as u8;
                pixel[1] = (out_g.clamp(0.0, 1.0) * 255.0) as u8;
                pixel[2] = (out_b.clamp(0.0, 1.0) * 255.0) as u8;
                pixel[3] = (out_a.clamp(0.0, 1.0) * 255.0) as u8;
            }
        }
    }

    /// Render a smudge dab using directional per-pixel warp.
    ///
    /// Each pixel in the dab footprint samples from the canvas at a position offset
    /// backwards along `(ndx, ndy)` by `smudge_dist` pixels, then blends that
    /// sampled color over the current pixel weighted by the dab opacity.
    ///
    /// Because each pixel samples its own source position, lateral color structure
    /// is preserved: dragging over a 1-pixel dot with a 20-pixel brush produces a
    /// narrow streak rather than a uniform smear.
    ///
    /// Updates are collected before any writes to avoid read/write aliasing.
    fn render_smudge_dab(
        buffer: &mut RgbaImage,
        x: f32,
        y: f32,
        radius: f32,
        hardness: f32,
        opacity: f32,
        ndx: f32, // normalized stroke direction x
        ndy: f32, // normalized stroke direction y
        smudge_dist: f32,
    ) {
        if radius < 0.5 || opacity <= 0.0 {
            return;
        }
        // Same two-segment falloff coefficients as render_dab.
        let hardness = hardness.clamp(1e-3, 1.0);
        let seg1_offset = 1.0f32;
        let seg1_slope = -(1.0 / hardness - 1.0);
        let seg2_offset = hardness / (1.0 - hardness);
        let seg2_slope = -hardness / (1.0 - hardness);
        let r_fringe = radius + 1.0;
        let x0 = ((x - r_fringe).floor() as i32).max(0) as u32;
        let y0 = ((y - r_fringe).floor() as i32).max(0) as u32;
        let x1 = ((x + r_fringe).ceil() as i32).min(buffer.width() as i32 - 1).max(0) as u32;
        let y1 = ((y + r_fringe).ceil() as i32).min(buffer.height() as i32 - 1).max(0) as u32;
        let one_over_r2 = 1.0 / (radius * radius);
        // Collect updates before writing to avoid aliasing between source and dest reads
        let mut updates: Vec<(u32, u32, [u8; 4])> = Vec::new();
        for py in y0..=y1 {
            for px in x0..=x1 {
                let fdx = px as f32 + 0.5 - x;
                let fdy = py as f32 + 0.5 - y;
                let rr = (fdx * fdx + fdy * fdy) * one_over_r2;
                if rr > 1.0 {
                    continue;
                }
                let opa_weight = if rr <= hardness {
                    seg1_offset + rr * seg1_slope
                } else {
                    seg2_offset + rr * seg2_slope
                }
                .clamp(0.0, 1.0);
                let alpha = opa_weight * opacity;
                if alpha <= 0.0 {
                    continue;
                }
                // Sample from one dab-spacing behind the current position along stroke
                let src_x = px as f32 + 0.5 - ndx * smudge_dist;
                let src_y = py as f32 + 0.5 - ndy * smudge_dist;
                let src = Self::sample_bilinear(buffer, src_x, src_y);
                let dst = buffer.get_pixel(px, py);
                let da = 1.0 - alpha;
                // Lerp sampled color over the destination by the dab alpha.
                let out = [
                    ((alpha * src[0] + da * dst[0] as f32 / 255.0).clamp(0.0, 1.0) * 255.0) as u8,
                    ((alpha * src[1] + da * dst[1] as f32 / 255.0).clamp(0.0, 1.0) * 255.0) as u8,
                    ((alpha * src[2] + da * dst[2] as f32 / 255.0).clamp(0.0, 1.0) * 255.0) as u8,
                    ((alpha * src[3] + da * dst[3] as f32 / 255.0).clamp(0.0, 1.0) * 255.0) as u8,
                ];
                updates.push((px, py, out));
            }
        }
        // Deferred write pass: applies all updates after sampling is complete.
        for (px, py, rgba) in updates {
            let p = buffer.get_pixel_mut(px, py);
            p[0] = rgba[0];
            p[1] = rgba[1];
            p[2] = rgba[2];
            p[3] = rgba[3];
        }
    }

    /// Bilinearly sample a floating-point position from the buffer, clamped to bounds.
    ///
    /// Returns the four channels normalised to 0.0–1.0.
    fn sample_bilinear(buffer: &RgbaImage, x: f32, y: f32) -> [f32; 4] {
        let w = buffer.width() as i32;
        let h = buffer.height() as i32;
        // Clamp the 2×2 sample footprint to the canvas (edge pixels repeat).
        let x0 = (x.floor() as i32).clamp(0, w - 1);
        let y0 = (y.floor() as i32).clamp(0, h - 1);
        let x1 = (x0 + 1).min(w - 1);
        let y1 = (y0 + 1).min(h - 1);
        let fx = (x - x0 as f32).clamp(0.0, 1.0);
        let fy = (y - y0 as f32).clamp(0.0, 1.0);
        let p00 = buffer.get_pixel(x0 as u32, y0 as u32);
        let p10 = buffer.get_pixel(x1 as u32, y0 as u32);
        let p01 = buffer.get_pixel(x0 as u32, y1 as u32);
        let p11 = buffer.get_pixel(x1 as u32, y1 as u32);
        let mut out = [0.0f32; 4];
        for i in 0..4 {
            let top = p00[i] as f32 * (1.0 - fx) + p10[i] as f32 * fx;
            let bot = p01[i] as f32 * (1.0 - fx) + p11[i] as f32 * fx;
            out[i] = (top * (1.0 - fy) + bot * fy) / 255.0;
        }
        out
    }
}
/// Create an `RgbaImage` from a raw RGBA pixel buffer.
///
/// An empty `raw` yields a blank (fully transparent) image of the requested
/// dimensions. Panics when `raw` is non-empty and its length does not equal
/// `width * height * 4`.
pub fn image_from_raw(raw: Vec<u8>, width: u32, height: u32) -> RgbaImage {
    match raw.len() {
        0 => RgbaImage::new(width, height),
        _ => RgbaImage::from_raw(width, height, raw)
            .expect("raw_pixels length mismatch"),
    }
}
/// Encode an `RgbaImage` as a PNG byte vector
pub fn encode_png(img: &RgbaImage) -> Result<Vec<u8>, String> {
    // write_to needs Write + Seek, so wrap the output Vec in a Cursor.
    let mut cursor = std::io::Cursor::new(Vec::new());
    match img.write_to(&mut cursor, image::ImageFormat::Png) {
        Ok(()) => Ok(cursor.into_inner()),
        Err(e) => Err(format!("PNG encode error: {e}")),
    }
}
/// Decode PNG bytes into an `RgbaImage`
pub fn decode_png(data: &[u8]) -> Result<RgbaImage, String> {
    match image::load_from_memory(data) {
        Ok(decoded) => Ok(decoded.to_rgba8()),
        Err(e) => Err(format!("PNG decode error: {e}")),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::raster_layer::{StrokePoint, StrokeRecord, RasterBlendMode};
    use crate::brush_settings::BrushSettings;

    /// Build a simple two-point horizontal stroke (y = 10, x from 10 to 50)
    /// with the default hard round brush in Normal blend mode.
    fn make_stroke(color: [f32; 4]) -> StrokeRecord {
        StrokeRecord {
            brush_settings: BrushSettings::default_round_hard(),
            color,
            blend_mode: RasterBlendMode::Normal,
            points: vec![
                StrokePoint { x: 10.0, y: 10.0, pressure: 0.8, tilt_x: 0.0, tilt_y: 0.0, timestamp: 0.0 },
                StrokePoint { x: 50.0, y: 10.0, pressure: 0.8, tilt_x: 0.0, tilt_y: 0.0, timestamp: 0.1 },
            ],
        }
    }

    /// A red stroke across a blank canvas must leave paint along its path.
    #[test]
    fn test_stroke_modifies_buffer() {
        let mut img = RgbaImage::new(100, 100);
        let stroke = make_stroke([1.0, 0.0, 0.0, 1.0]); // red
        BrushEngine::apply_stroke(&mut img, &stroke);
        // The center pixel should have some red
        let px = img.get_pixel(30, 10);
        assert!(px[0] > 0, "expected red paint");
    }

    /// A single-tap erase stroke on an opaque canvas must lower alpha there.
    #[test]
    fn test_erase_reduces_alpha() {
        let mut img = RgbaImage::from_pixel(100, 100, image::Rgba([200, 100, 50, 255]));
        let stroke = StrokeRecord {
            brush_settings: BrushSettings::default_round_hard(),
            color: [0.0, 0.0, 0.0, 1.0],
            blend_mode: RasterBlendMode::Erase,
            points: vec![
                StrokePoint { x: 50.0, y: 50.0, pressure: 1.0, tilt_x: 0.0, tilt_y: 0.0, timestamp: 0.0 },
            ],
        };
        BrushEngine::apply_stroke(&mut img, &stroke);
        let px = img.get_pixel(50, 50);
        assert!(px[3] < 255, "alpha should be reduced by erase");
    }

    /// PNG encode → decode must reproduce pixel data exactly (PNG is lossless).
    #[test]
    fn test_png_roundtrip() {
        let mut img = RgbaImage::new(64, 64);
        let px = img.get_pixel_mut(10, 10);
        *px = image::Rgba([255, 128, 0, 255]);
        let png = encode_png(&img).unwrap();
        let decoded = decode_png(&png).unwrap();
        assert_eq!(decoded.get_pixel(10, 10), img.get_pixel(10, 10));
    }
}

View File

@ -0,0 +1,148 @@
//! Brush settings for the raster paint engine
//!
//! Settings that describe the appearance and behavior of a paint brush.
//! Compatible with MyPaint .myb brush file format (subset).
use serde::{Deserialize, Serialize};
/// Settings for a paint brush
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BrushSettings {
    /// log(radius) base value; actual radius = exp(radius_log)
    pub radius_log: f32,
    /// Edge hardness 0.0 (fully soft/gaussian) to 1.0 (hard edge)
    pub hardness: f32,
    /// Base opacity 0.0–1.0
    pub opaque: f32,
    /// Dab spacing as fraction of radius (smaller = denser strokes)
    pub dabs_per_radius: f32,
    /// HSV hue (0.0–1.0); usually overridden by stroke color
    pub color_h: f32,
    /// HSV saturation (0.0–1.0)
    pub color_s: f32,
    /// HSV value (0.0–1.0)
    pub color_v: f32,
    /// How much pressure increases/decreases radius.
    /// Final radius = exp(radius_log + pressure_radius_gain * (pressure - 0.5)),
    /// so pressure 0.5 is neutral (see `radius_at_pressure`).
    pub pressure_radius_gain: f32,
    /// How much pressure increases/decreases opacity
    /// Final opacity = opaque * (1 + pressure_opacity_gain * (pressure - 0.5))
    pub pressure_opacity_gain: f32,
}
impl BrushSettings {
    /// Default soft round brush (smooth Gaussian falloff).
    pub fn default_round_soft() -> Self {
        Self {
            radius_log: 2.0, // radius ≈ 7.4 px
            hardness: 0.1,
            opaque: 0.8,
            dabs_per_radius: 0.25,
            color_h: 0.0,
            color_s: 0.0,
            color_v: 0.0,
            pressure_radius_gain: 0.5,
            pressure_opacity_gain: 1.0,
        }
    }

    /// Default hard round brush (sharp edge).
    pub fn default_round_hard() -> Self {
        Self {
            radius_log: 2.0,
            hardness: 0.9,
            opaque: 1.0,
            dabs_per_radius: 0.2,
            color_h: 0.0,
            color_s: 0.0,
            color_v: 0.0,
            pressure_radius_gain: 0.3,
            pressure_opacity_gain: 0.8,
        }
    }

    /// Compute actual radius at a given pressure level.
    ///
    /// radius = exp(radius_log + pressure_radius_gain * (pressure - 0.5)),
    /// clamped to [0.5, 500] px.
    pub fn radius_at_pressure(&self, pressure: f32) -> f32 {
        let log_radius = self.radius_log + self.pressure_radius_gain * (pressure - 0.5);
        log_radius.exp().clamp(0.5, 500.0)
    }

    /// Compute actual opacity at a given pressure level, clamped to [0, 1].
    pub fn opacity_at_pressure(&self, pressure: f32) -> f32 {
        let opacity = self.opaque * (1.0 + self.pressure_opacity_gain * (pressure - 0.5));
        opacity.clamp(0.0, 1.0)
    }

    /// Parse a MyPaint `.myb` JSON brush file (subset).
    ///
    /// Reads `radius_logarithmic`, `hardness`, `opaque`, `dabs_per_basic_radius`,
    /// `color_h`, `color_s`, `color_v` from the `settings` key's `base_value`
    /// fields, plus approximate pressure dynamics from the `inputs.pressure`
    /// curves of `radius_logarithmic` and `opaque`.
    pub fn from_myb(json: &str) -> Result<Self, String> {
        let doc: serde_json::Value =
            serde_json::from_str(json).map_err(|e| format!("JSON parse error: {e}"))?;
        let settings = doc.get("settings").ok_or("Missing 'settings' key")?;

        // Read a setting's base_value, falling back to `default` when absent.
        let base_value = |name: &str, default: f32| -> f32 {
            settings
                .get(name)
                .and_then(|setting| setting.get("base_value"))
                .and_then(|v| v.as_f64())
                .map_or(default, |v| v as f32)
        };

        // Approximate a pressure curve [[x0,y0],...,[xn,yn]] as a linear gain
        // (y_last - y_first) * scale; fall back to `default` when the curve
        // is missing or has fewer than two points.
        let pressure_gain = |name: &str, scale: f32, default: f32| -> f32 {
            settings
                .get(name)
                .and_then(|setting| setting.get("inputs"))
                .and_then(|inputs| inputs.get("pressure"))
                .and_then(|curve| curve.as_array())
                .and_then(|points| {
                    if points.len() < 2 {
                        return None;
                    }
                    let first = points.first()?.get(1)?.as_f64()? as f32;
                    let last = points.last()?.get(1)?.as_f64()? as f32;
                    Some((last - first) * scale)
                })
                .unwrap_or(default)
        };

        Ok(Self {
            radius_log: base_value("radius_logarithmic", 2.0),
            hardness: base_value("hardness", 0.5).clamp(0.0, 1.0),
            opaque: base_value("opaque", 1.0).clamp(0.0, 1.0),
            dabs_per_radius: base_value("dabs_per_basic_radius", 0.25).clamp(0.01, 10.0),
            color_h: base_value("color_h", 0.0),
            color_s: base_value("color_s", 0.0),
            color_v: base_value("color_v", 0.0),
            pressure_radius_gain: pressure_gain("radius_logarithmic", 0.5, 0.5),
            pressure_opacity_gain: pressure_gain("opaque", 1.0, 1.0),
        })
    }
}
impl Default for BrushSettings {
fn default() -> Self {
Self::default_round_soft()
}
}

View File

@ -116,6 +116,7 @@ impl VectorClip {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
for ci in clip_instances { for ci in clip_instances {
let end = if let Some(td) = ci.timeline_duration { let end = if let Some(td) = ci.timeline_duration {

View File

@ -32,6 +32,7 @@ impl ClipboardLayerType {
}, },
AnyLayer::Effect(_) => ClipboardLayerType::Effect, AnyLayer::Effect(_) => ClipboardLayerType::Effect,
AnyLayer::Group(_) => ClipboardLayerType::Vector, // Groups don't have a direct clipboard type; treat as vector AnyLayer::Group(_) => ClipboardLayerType::Vector, // Groups don't have a direct clipboard type; treat as vector
AnyLayer::Raster(_) => ClipboardLayerType::Vector, // Raster layers treated as vector for clipboard purposes
} }
} }

View File

@ -419,6 +419,9 @@ impl Document {
} }
} }
} }
crate::layer::AnyLayer::Raster(_) => {
// Raster layers don't have clip instances
}
crate::layer::AnyLayer::Group(group) => { crate::layer::AnyLayer::Group(group) => {
// Recurse into group children to find their clip instance endpoints // Recurse into group children to find their clip instance endpoints
fn process_group_children( fn process_group_children(
@ -457,6 +460,9 @@ impl Document {
} }
} }
} }
crate::layer::AnyLayer::Raster(_) => {
// Raster layers don't have clip instances
}
crate::layer::AnyLayer::Group(g) => { crate::layer::AnyLayer::Group(g) => {
process_group_children(&g.children, doc, max_end, calc_end); process_group_children(&g.children, doc, max_end, calc_end);
} }
@ -822,6 +828,7 @@ impl Document {
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
let instance = instances.iter().find(|inst| &inst.id == instance_id)?; let instance = instances.iter().find(|inst| &inst.id == instance_id)?;
@ -861,6 +868,7 @@ impl Document {
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
for instance in instances { for instance in instances {
@ -922,6 +930,7 @@ impl Document {
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(_) => return Some(desired_start), // Shouldn't reach here AnyLayer::Vector(_) => return Some(desired_start), // Shouldn't reach here
AnyLayer::Group(_) => return Some(desired_start), // Groups don't have own clips AnyLayer::Group(_) => return Some(desired_start), // Groups don't have own clips
AnyLayer::Raster(_) => return Some(desired_start), // Raster layers don't have own clips
}; };
let mut occupied_ranges: Vec<(f64, f64, Uuid)> = Vec::new(); let mut occupied_ranges: Vec<(f64, f64, Uuid)> = Vec::new();
@ -1016,6 +1025,7 @@ impl Document {
AnyLayer::Effect(e) => &e.clip_instances, AnyLayer::Effect(e) => &e.clip_instances,
AnyLayer::Vector(v) => &v.clip_instances, AnyLayer::Vector(v) => &v.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
// Collect non-group clip ranges // Collect non-group clip ranges
@ -1086,6 +1096,7 @@ impl Document {
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
for other in instances { for other in instances {
@ -1133,6 +1144,7 @@ impl Document {
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
let mut nearest_start = f64::MAX; let mut nearest_start = f64::MAX;
@ -1179,6 +1191,7 @@ impl Document {
AnyLayer::Effect(effect) => &effect.clip_instances, AnyLayer::Effect(effect) => &effect.clip_instances,
AnyLayer::Vector(vector) => &vector.clip_instances, AnyLayer::Vector(vector) => &vector.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
let mut nearest_end = 0.0; let mut nearest_end = 0.0;

View File

@ -370,6 +370,37 @@ pub fn save_beam(
eprintln!("📊 [SAVE_BEAM] - ZIP writing: {:.2}ms", zip_write_time); eprintln!("📊 [SAVE_BEAM] - ZIP writing: {:.2}ms", zip_write_time);
} }
// 4b. Write raster layer PNG buffers to ZIP (media/raster/<keyframe-uuid>.png)
let step4b_start = std::time::Instant::now();
let raster_file_options = FileOptions::default()
.compression_method(CompressionMethod::Stored); // PNG is already compressed
let mut raster_count = 0usize;
for layer in &document.root.children {
if let crate::layer::AnyLayer::Raster(rl) = layer {
for kf in &rl.keyframes {
if !kf.raw_pixels.is_empty() {
// Encode raw RGBA to PNG for storage
let img = crate::brush_engine::image_from_raw(
kf.raw_pixels.clone(), kf.width, kf.height,
);
match crate::brush_engine::encode_png(&img) {
Ok(png_bytes) => {
let zip_path = kf.media_path.clone();
zip.start_file(&zip_path, raster_file_options)
.map_err(|e| format!("Failed to create {} in ZIP: {}", zip_path, e))?;
zip.write_all(&png_bytes)
.map_err(|e| format!("Failed to write {}: {}", zip_path, e))?;
raster_count += 1;
}
Err(e) => eprintln!("⚠️ [SAVE_BEAM] Failed to encode raster PNG {}: {}", kf.media_path, e),
}
}
}
}
}
eprintln!("📊 [SAVE_BEAM] Step 4b: Write {} raster PNG buffers took {:.2}ms",
raster_count, step4b_start.elapsed().as_secs_f64() * 1000.0);
// 5. Build BeamProject structure with modified entries // 5. Build BeamProject structure with modified entries
let step5_start = std::time::Instant::now(); let step5_start = std::time::Instant::now();
let now = chrono::Utc::now().to_rfc3339(); let now = chrono::Utc::now().to_rfc3339();
@ -467,7 +498,7 @@ pub fn load_beam(path: &Path) -> Result<LoadedProject, String> {
// 5. Extract document and audio backend state // 5. Extract document and audio backend state
let step5_start = std::time::Instant::now(); let step5_start = std::time::Instant::now();
let document = beam_project.ui_state; let mut document = beam_project.ui_state;
let mut audio_project = beam_project.audio_backend.project; let mut audio_project = beam_project.audio_backend.project;
let audio_pool_entries = beam_project.audio_backend.audio_pool_entries; let audio_pool_entries = beam_project.audio_backend.audio_pool_entries;
let layer_to_track_map = beam_project.audio_backend.layer_to_track_map; let layer_to_track_map = beam_project.audio_backend.layer_to_track_map;
@ -584,6 +615,37 @@ pub fn load_beam(path: &Path) -> Result<LoadedProject, String> {
eprintln!("📊 [LOAD_BEAM] - FLAC decoding: {:.2}ms", flac_decode_time); eprintln!("📊 [LOAD_BEAM] - FLAC decoding: {:.2}ms", flac_decode_time);
} }
// 7b. Load raster layer PNG buffers from ZIP
let step7b_start = std::time::Instant::now();
let mut raster_load_count = 0usize;
for layer in document.root.children.iter_mut() {
if let crate::layer::AnyLayer::Raster(rl) = layer {
for kf in &mut rl.keyframes {
if !kf.media_path.is_empty() {
match zip.by_name(&kf.media_path) {
Ok(mut png_file) => {
let mut png_bytes = Vec::new();
let _ = png_file.read_to_end(&mut png_bytes);
// Decode PNG into raw RGBA pixels for fast in-memory access
match crate::brush_engine::decode_png(&png_bytes) {
Ok(rgba) => {
kf.raw_pixels = rgba.into_raw();
raster_load_count += 1;
}
Err(e) => eprintln!("⚠️ [LOAD_BEAM] Failed to decode raster PNG {}: {}", kf.media_path, e),
}
}
Err(_) => {
// Keyframe PNG not in ZIP yet (new keyframe); leave raw_pixels empty
}
}
}
}
}
}
eprintln!("📊 [LOAD_BEAM] Step 7b: Load {} raster PNG buffers took {:.2}ms",
raster_load_count, step7b_start.elapsed().as_secs_f64() * 1000.0);
// 8. Check for missing external files // 8. Check for missing external files
// An entry is missing if it has a relative_path (external reference) // An entry is missing if it has a relative_path (external reference)
// but no embedded_data and the file doesn't exist // but no embedded_data and the file doesn't exist

View File

@ -7,6 +7,7 @@ use crate::clip::ClipInstance;
use crate::dcel::Dcel; use crate::dcel::Dcel;
use crate::effect_layer::EffectLayer; use crate::effect_layer::EffectLayer;
use crate::object::ShapeInstance; use crate::object::ShapeInstance;
use crate::raster_layer::RasterLayer;
use crate::shape::Shape; use crate::shape::Shape;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
@ -27,6 +28,8 @@ pub enum LayerType {
Effect, Effect,
/// Group layer containing child layers (e.g. video + audio) /// Group layer containing child layers (e.g. video + audio)
Group, Group,
/// Raster pixel-buffer painting layer
Raster,
} }
/// Common trait for all layer types /// Common trait for all layer types
@ -769,6 +772,7 @@ impl GroupLayer {
AnyLayer::Vector(l) => &l.clip_instances, AnyLayer::Vector(l) => &l.clip_instances,
AnyLayer::Effect(l) => &l.clip_instances, AnyLayer::Effect(l) => &l.clip_instances,
AnyLayer::Group(_) => &[], // no nested groups AnyLayer::Group(_) => &[], // no nested groups
AnyLayer::Raster(_) => &[], // raster layers have no clip instances
}; };
for ci in instances { for ci in instances {
result.push((child_id, ci)); result.push((child_id, ci));
@ -786,6 +790,7 @@ pub enum AnyLayer {
Video(VideoLayer), Video(VideoLayer),
Effect(EffectLayer), Effect(EffectLayer),
Group(GroupLayer), Group(GroupLayer),
Raster(RasterLayer),
} }
impl LayerTrait for AnyLayer { impl LayerTrait for AnyLayer {
@ -796,6 +801,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.id(), AnyLayer::Video(l) => l.id(),
AnyLayer::Effect(l) => l.id(), AnyLayer::Effect(l) => l.id(),
AnyLayer::Group(l) => l.id(), AnyLayer::Group(l) => l.id(),
AnyLayer::Raster(l) => l.id(),
} }
} }
@ -806,6 +812,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.name(), AnyLayer::Video(l) => l.name(),
AnyLayer::Effect(l) => l.name(), AnyLayer::Effect(l) => l.name(),
AnyLayer::Group(l) => l.name(), AnyLayer::Group(l) => l.name(),
AnyLayer::Raster(l) => l.name(),
} }
} }
@ -816,6 +823,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_name(name), AnyLayer::Video(l) => l.set_name(name),
AnyLayer::Effect(l) => l.set_name(name), AnyLayer::Effect(l) => l.set_name(name),
AnyLayer::Group(l) => l.set_name(name), AnyLayer::Group(l) => l.set_name(name),
AnyLayer::Raster(l) => l.set_name(name),
} }
} }
@ -826,6 +834,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.has_custom_name(), AnyLayer::Video(l) => l.has_custom_name(),
AnyLayer::Effect(l) => l.has_custom_name(), AnyLayer::Effect(l) => l.has_custom_name(),
AnyLayer::Group(l) => l.has_custom_name(), AnyLayer::Group(l) => l.has_custom_name(),
AnyLayer::Raster(l) => l.has_custom_name(),
} }
} }
@ -836,6 +845,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_has_custom_name(custom), AnyLayer::Video(l) => l.set_has_custom_name(custom),
AnyLayer::Effect(l) => l.set_has_custom_name(custom), AnyLayer::Effect(l) => l.set_has_custom_name(custom),
AnyLayer::Group(l) => l.set_has_custom_name(custom), AnyLayer::Group(l) => l.set_has_custom_name(custom),
AnyLayer::Raster(l) => l.set_has_custom_name(custom),
} }
} }
@ -846,6 +856,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.visible(), AnyLayer::Video(l) => l.visible(),
AnyLayer::Effect(l) => l.visible(), AnyLayer::Effect(l) => l.visible(),
AnyLayer::Group(l) => l.visible(), AnyLayer::Group(l) => l.visible(),
AnyLayer::Raster(l) => l.visible(),
} }
} }
@ -856,6 +867,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_visible(visible), AnyLayer::Video(l) => l.set_visible(visible),
AnyLayer::Effect(l) => l.set_visible(visible), AnyLayer::Effect(l) => l.set_visible(visible),
AnyLayer::Group(l) => l.set_visible(visible), AnyLayer::Group(l) => l.set_visible(visible),
AnyLayer::Raster(l) => l.set_visible(visible),
} }
} }
@ -866,6 +878,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.opacity(), AnyLayer::Video(l) => l.opacity(),
AnyLayer::Effect(l) => l.opacity(), AnyLayer::Effect(l) => l.opacity(),
AnyLayer::Group(l) => l.opacity(), AnyLayer::Group(l) => l.opacity(),
AnyLayer::Raster(l) => l.opacity(),
} }
} }
@ -876,6 +889,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_opacity(opacity), AnyLayer::Video(l) => l.set_opacity(opacity),
AnyLayer::Effect(l) => l.set_opacity(opacity), AnyLayer::Effect(l) => l.set_opacity(opacity),
AnyLayer::Group(l) => l.set_opacity(opacity), AnyLayer::Group(l) => l.set_opacity(opacity),
AnyLayer::Raster(l) => l.set_opacity(opacity),
} }
} }
@ -886,6 +900,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.volume(), AnyLayer::Video(l) => l.volume(),
AnyLayer::Effect(l) => l.volume(), AnyLayer::Effect(l) => l.volume(),
AnyLayer::Group(l) => l.volume(), AnyLayer::Group(l) => l.volume(),
AnyLayer::Raster(l) => l.volume(),
} }
} }
@ -896,6 +911,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_volume(volume), AnyLayer::Video(l) => l.set_volume(volume),
AnyLayer::Effect(l) => l.set_volume(volume), AnyLayer::Effect(l) => l.set_volume(volume),
AnyLayer::Group(l) => l.set_volume(volume), AnyLayer::Group(l) => l.set_volume(volume),
AnyLayer::Raster(l) => l.set_volume(volume),
} }
} }
@ -906,6 +922,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.muted(), AnyLayer::Video(l) => l.muted(),
AnyLayer::Effect(l) => l.muted(), AnyLayer::Effect(l) => l.muted(),
AnyLayer::Group(l) => l.muted(), AnyLayer::Group(l) => l.muted(),
AnyLayer::Raster(l) => l.muted(),
} }
} }
@ -916,6 +933,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_muted(muted), AnyLayer::Video(l) => l.set_muted(muted),
AnyLayer::Effect(l) => l.set_muted(muted), AnyLayer::Effect(l) => l.set_muted(muted),
AnyLayer::Group(l) => l.set_muted(muted), AnyLayer::Group(l) => l.set_muted(muted),
AnyLayer::Raster(l) => l.set_muted(muted),
} }
} }
@ -926,6 +944,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.soloed(), AnyLayer::Video(l) => l.soloed(),
AnyLayer::Effect(l) => l.soloed(), AnyLayer::Effect(l) => l.soloed(),
AnyLayer::Group(l) => l.soloed(), AnyLayer::Group(l) => l.soloed(),
AnyLayer::Raster(l) => l.soloed(),
} }
} }
@ -936,6 +955,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_soloed(soloed), AnyLayer::Video(l) => l.set_soloed(soloed),
AnyLayer::Effect(l) => l.set_soloed(soloed), AnyLayer::Effect(l) => l.set_soloed(soloed),
AnyLayer::Group(l) => l.set_soloed(soloed), AnyLayer::Group(l) => l.set_soloed(soloed),
AnyLayer::Raster(l) => l.set_soloed(soloed),
} }
} }
@ -946,6 +966,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.locked(), AnyLayer::Video(l) => l.locked(),
AnyLayer::Effect(l) => l.locked(), AnyLayer::Effect(l) => l.locked(),
AnyLayer::Group(l) => l.locked(), AnyLayer::Group(l) => l.locked(),
AnyLayer::Raster(l) => l.locked(),
} }
} }
@ -956,6 +977,7 @@ impl LayerTrait for AnyLayer {
AnyLayer::Video(l) => l.set_locked(locked), AnyLayer::Video(l) => l.set_locked(locked),
AnyLayer::Effect(l) => l.set_locked(locked), AnyLayer::Effect(l) => l.set_locked(locked),
AnyLayer::Group(l) => l.set_locked(locked), AnyLayer::Group(l) => l.set_locked(locked),
AnyLayer::Raster(l) => l.set_locked(locked),
} }
} }
} }
@ -969,6 +991,7 @@ impl AnyLayer {
AnyLayer::Video(l) => &l.layer, AnyLayer::Video(l) => &l.layer,
AnyLayer::Effect(l) => &l.layer, AnyLayer::Effect(l) => &l.layer,
AnyLayer::Group(l) => &l.layer, AnyLayer::Group(l) => &l.layer,
AnyLayer::Raster(l) => &l.layer,
} }
} }
@ -980,6 +1003,7 @@ impl AnyLayer {
AnyLayer::Video(l) => &mut l.layer, AnyLayer::Video(l) => &mut l.layer,
AnyLayer::Effect(l) => &mut l.layer, AnyLayer::Effect(l) => &mut l.layer,
AnyLayer::Group(l) => &mut l.layer, AnyLayer::Group(l) => &mut l.layer,
AnyLayer::Raster(l) => &mut l.layer,
} }
} }

View File

@ -48,6 +48,9 @@ pub mod dcel2;
pub use dcel2 as dcel; pub use dcel2 as dcel;
pub mod snap; pub mod snap;
pub mod webcam; pub mod webcam;
pub mod raster_layer;
pub mod brush_settings;
pub mod brush_engine;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
pub mod test_mode; pub mod test_mode;

View File

@ -0,0 +1,197 @@
//! Raster (pixel-buffer) layer for Lightningbeam
//!
//! Each keyframe holds a PNG-compressed pixel buffer stored in the .beam ZIP
//! under `media/raster/<uuid>.png`. A brush engine renders dabs along strokes
//! and the resulting RGBA image is composited into the Vello scene.
use crate::brush_settings::BrushSettings;
use crate::layer::{Layer, LayerTrait, LayerType};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// How a raster stroke blends onto the layer buffer.
///
/// `Normal` is the default (via `#[derive(Default)]` + `#[default]`,
/// replacing the manual `impl Default`).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum RasterBlendMode {
    /// Normal alpha-compositing (paint over)
    #[default]
    Normal,
    /// Erase pixels (reduce alpha)
    Erase,
    /// Smudge / blend surrounding pixels
    Smudge,
}
/// A single point along a stroke, in layer pixel coordinates.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct StrokePoint {
    /// X position in pixels
    pub x: f32,
    /// Y position in pixels
    pub y: f32,
    /// Pen/tablet pressure 0.0–1.0 (mouse uses 0.5)
    pub pressure: f32,
    /// Pen tilt X in radians
    pub tilt_x: f32,
    /// Pen tilt Y in radians
    pub tilt_y: f32,
    /// Seconds elapsed since start of this stroke
    pub timestamp: f64,
}
/// Record of a single brush stroke applied to a keyframe.
///
/// Kept in `RasterKeyframe::stroke_log` for potential replay / future
/// non-destructive editing.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct StrokeRecord {
    /// Brush appearance/behavior used for this stroke
    pub brush_settings: BrushSettings,
    /// RGBA linear color [r, g, b, a]
    pub color: [f32; 4],
    /// How the stroke blends onto the layer buffer (paint / erase / smudge)
    pub blend_mode: RasterBlendMode,
    /// Input samples in drawing order
    pub points: Vec<StrokePoint>,
}
/// Specifies how the raster content transitions to the next keyframe.
///
/// `Hold` is the default (via `#[derive(Default)]` + `#[default]`,
/// replacing the manual `impl Default`).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum TweenType {
    /// Hold the pixel buffer until the next keyframe
    #[default]
    Hold,
}
/// A single keyframe of a raster layer.
///
/// Persisted pixel data lives as a PNG inside the .beam ZIP at `media_path`;
/// the working in-memory representation is `raw_pixels`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct RasterKeyframe {
    /// Unique ID for this keyframe (used as pixel-cache key)
    pub id: Uuid,
    /// Time position in seconds
    pub time: f64,
    /// Canvas width in pixels
    pub width: u32,
    /// Canvas height in pixels
    pub height: u32,
    /// ZIP-relative path: `"media/raster/<uuid>.png"`
    pub media_path: String,
    /// Stroke history (for potential replay / future non-destructive editing)
    pub stroke_log: Vec<StrokeRecord>,
    /// How this keyframe's content transitions to the next keyframe
    pub tween_after: TweenType,
    /// Raw RGBA pixel buffer (width × height × 4 bytes).
    ///
    /// This is the working in-memory representation used by the brush engine and renderer.
    /// NOT serialized to the document JSON — populated from the ZIP's PNG on load,
    /// and encoded back to PNG on save. An empty Vec means the canvas is blank (transparent).
    #[serde(skip)]
    pub raw_pixels: Vec<u8>,
}
impl RasterKeyframe {
    /// Returns true when the pixel buffer has been initialised (non-blank).
    pub fn has_pixels(&self) -> bool {
        !self.raw_pixels.is_empty()
    }

    /// Create a blank keyframe at `time` with the given canvas dimensions.
    ///
    /// A fresh UUID is generated and used both as the keyframe id and as the
    /// ZIP-relative PNG filename under `media/raster/`.
    pub fn new(time: f64, width: u32, height: u32) -> Self {
        let id = Uuid::new_v4();
        Self {
            media_path: format!("media/raster/{}.png", id),
            id,
            time,
            width,
            height,
            stroke_log: Vec::new(),
            tween_after: TweenType::Hold,
            raw_pixels: Vec::new(),
        }
    }
}
/// A pixel-buffer painting layer.
///
/// Holds a list of time-ordered keyframes, each owning its own pixel buffer.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct RasterLayer {
    /// Base layer properties (id, name, opacity, visibility, …)
    pub layer: Layer,
    /// Keyframes sorted by time
    pub keyframes: Vec<RasterKeyframe>,
}
impl RasterLayer {
    /// Create a new, empty raster layer with the given name.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            keyframes: Vec::new(),
            layer: Layer::new(LayerType::Raster, name),
        }
    }

    // === Keyframe accessors ===

    /// Get the active keyframe at-or-before `time`.
    pub fn keyframe_at(&self, time: f64) -> Option<&RasterKeyframe> {
        // `keyframes` is kept sorted by time, so the active keyframe is the
        // last one whose time is <= `time`.
        let split = self.keyframes.partition_point(|kf| kf.time <= time);
        split.checked_sub(1).map(|i| &self.keyframes[i])
    }

    /// Get a mutable reference to the active keyframe at-or-before `time`.
    pub fn keyframe_at_mut(&mut self, time: f64) -> Option<&mut RasterKeyframe> {
        let split = self.keyframes.partition_point(|kf| kf.time <= time);
        match split {
            0 => None,
            i => Some(&mut self.keyframes[i - 1]),
        }
    }

    /// Find the index of a keyframe at exactly `time` (within tolerance).
    fn keyframe_index_at_exact(&self, time: f64, tolerance: f64) -> Option<usize> {
        self.keyframes
            .iter()
            .enumerate()
            .find_map(|(i, kf)| ((kf.time - time).abs() < tolerance).then_some(i))
    }

    /// Ensure a keyframe exists at `time`; create one (with given dimensions) if not.
    ///
    /// If `width`/`height` are 0 the new keyframe inherits dimensions from the
    /// previous active keyframe, falling back to 1920×1080.
    pub fn ensure_keyframe_at(&mut self, time: f64, width: u32, height: u32) -> &mut RasterKeyframe {
        const TOLERANCE: f64 = 0.001;
        if let Some(existing) = self.keyframe_index_at_exact(time, TOLERANCE) {
            return &mut self.keyframes[existing];
        }
        let (w, h) = if width == 0 || height == 0 {
            match self.keyframe_at(time) {
                Some(prev) => (prev.width, prev.height),
                None => (1920, 1080),
            }
        } else {
            (width, height)
        };
        // Insert while keeping the keyframe list sorted by time.
        let pos = self.keyframes.partition_point(|kf| kf.time < time);
        self.keyframes.insert(pos, RasterKeyframe::new(time, w, h));
        &mut self.keyframes[pos]
    }

    /// Return the ZIP-relative PNG path for the active keyframe at `time`, or `None`.
    pub fn buffer_path_at_time(&self, time: f64) -> Option<&str> {
        self.keyframe_at(time).map(|kf| kf.media_path.as_str())
    }
}
// Delegate all LayerTrait methods to the embedded base `Layer`.
// Pure field forwarding — no raster-specific behavior lives here.
impl LayerTrait for RasterLayer {
    fn id(&self) -> Uuid { self.layer.id }
    fn name(&self) -> &str { &self.layer.name }
    fn set_name(&mut self, name: String) { self.layer.name = name; }
    fn has_custom_name(&self) -> bool { self.layer.has_custom_name }
    fn set_has_custom_name(&mut self, custom: bool) { self.layer.has_custom_name = custom; }
    fn visible(&self) -> bool { self.layer.visible }
    fn set_visible(&mut self, visible: bool) { self.layer.visible = visible; }
    fn opacity(&self) -> f64 { self.layer.opacity }
    fn set_opacity(&mut self, opacity: f64) { self.layer.opacity = opacity; }
    fn volume(&self) -> f64 { self.layer.volume }
    fn set_volume(&mut self, volume: f64) { self.layer.volume = volume; }
    fn muted(&self) -> bool { self.layer.muted }
    fn set_muted(&mut self, muted: bool) { self.layer.muted = muted; }
    fn soloed(&self) -> bool { self.layer.soloed }
    fn set_soloed(&mut self, soloed: bool) { self.layer.soloed = soloed; }
    fn locked(&self) -> bool { self.layer.locked }
    fn set_locked(&mut self, locked: bool) { self.layer.locked = locked; }
}

View File

@ -306,6 +306,11 @@ pub fn render_layer_isolated(
} }
rendered.has_content = !group_layer.children.is_empty(); rendered.has_content = !group_layer.children.is_empty();
} }
AnyLayer::Raster(raster_layer) => {
render_raster_layer_to_scene(raster_layer, time, &mut rendered.scene, base_transform);
rendered.has_content = raster_layer.keyframe_at(time)
.map_or(false, |kf| kf.has_pixels());
}
} }
rendered rendered
@ -334,6 +339,35 @@ fn render_vector_layer_to_scene(
); );
} }
/// Render a raster layer's active keyframe to a Vello scene using an ImageBrush.
///
/// Uses `raw_pixels` directly — no PNG decode needed.
fn render_raster_layer_to_scene(
layer: &crate::raster_layer::RasterLayer,
time: f64,
scene: &mut Scene,
base_transform: Affine,
) {
let Some(kf) = layer.keyframe_at(time) else { return };
if kf.raw_pixels.is_empty() {
return;
}
let image_data = ImageData {
data: Blob::from(kf.raw_pixels.clone()),
format: ImageFormat::Rgba8,
width: kf.width,
height: kf.height,
// raw_pixels stores sRGB-encoded premultiplied RGBA (channels are
// gamma-encoded, alpha is linear). Premultiplied tells Vello to
// decode the sRGB channels without premultiplying again.
alpha_type: ImageAlphaType::AlphaPremultiplied,
};
let brush = ImageBrush::new(image_data);
let canvas_rect = Rect::new(0.0, 0.0, kf.width as f64, kf.height as f64);
scene.fill(Fill::NonZero, base_transform, &brush, None, &canvas_rect);
}
/// Render a video layer to an isolated scene (for compositing pipeline) /// Render a video layer to an isolated scene (for compositing pipeline)
fn render_video_layer_to_scene( fn render_video_layer_to_scene(
document: &Document, document: &Document,
@ -451,6 +485,9 @@ fn render_layer(
render_layer(document, time, child, scene, base_transform, parent_opacity, image_cache, video_manager, camera_frame); render_layer(document, time, child, scene, base_transform, parent_opacity, image_cache, video_manager, camera_frame);
} }
} }
AnyLayer::Raster(raster_layer) => {
render_raster_layer_to_scene(raster_layer, time, scene, base_transform);
}
} }
} }

View File

@ -35,6 +35,12 @@ pub enum Tool {
Text, Text,
/// Region select tool - select sub-regions of shapes by clipping /// Region select tool - select sub-regions of shapes by clipping
RegionSelect, RegionSelect,
/// Split tool - split audio/video clips at a point
Split,
/// Erase tool - erase raster pixels
Erase,
/// Smudge tool - smudge/blend raster pixels
Smudge,
} }
/// Region select mode /// Region select mode
@ -64,6 +70,11 @@ pub enum ToolState {
simplify_mode: SimplifyMode, simplify_mode: SimplifyMode,
}, },
/// Drawing a raster paint stroke
DrawingRasterStroke {
points: Vec<crate::raster_layer::StrokePoint>,
},
/// Dragging selected objects /// Dragging selected objects
DraggingSelection { DraggingSelection {
start_pos: Point, start_pos: Point,
@ -210,6 +221,9 @@ impl Tool {
Tool::BezierEdit => "Bezier Edit", Tool::BezierEdit => "Bezier Edit",
Tool::Text => "Text", Tool::Text => "Text",
Tool::RegionSelect => "Region Select", Tool::RegionSelect => "Region Select",
Tool::Split => "Split",
Tool::Erase => "Erase",
Tool::Smudge => "Smudge",
} }
} }
@ -228,10 +242,13 @@ impl Tool {
Tool::BezierEdit => "bezier_edit.svg", Tool::BezierEdit => "bezier_edit.svg",
Tool::Text => "text.svg", Tool::Text => "text.svg",
Tool::RegionSelect => "region_select.svg", Tool::RegionSelect => "region_select.svg",
Tool::Split => "split.svg",
Tool::Erase => "erase.svg",
Tool::Smudge => "smudge.svg",
} }
} }
/// Get all available tools /// Get all vector-layer tools (the full drawing toolset)
pub fn all() -> &'static [Tool] { pub fn all() -> &'static [Tool] {
&[ &[
Tool::Select, Tool::Select,
@ -249,6 +266,17 @@ impl Tool {
] ]
} }
/// Get the tools available for a given layer type
pub fn for_layer_type(layer_type: Option<crate::layer::LayerType>) -> &'static [Tool] {
use crate::layer::LayerType;
match layer_type {
None | Some(LayerType::Vector) => Tool::all(),
Some(LayerType::Audio) | Some(LayerType::Video) => &[Tool::Select, Tool::Split],
Some(LayerType::Raster) => &[Tool::Select, Tool::Draw, Tool::Erase, Tool::Smudge, Tool::Eyedropper],
_ => &[Tool::Select],
}
}
/// Get keyboard shortcut hint /// Get keyboard shortcut hint
pub fn shortcut_hint(self) -> &'static str { pub fn shortcut_hint(self) -> &'static str {
match self { match self {
@ -264,6 +292,9 @@ impl Tool {
Tool::BezierEdit => "A", Tool::BezierEdit => "A",
Tool::Text => "T", Tool::Text => "T",
Tool::RegionSelect => "S", Tool::RegionSelect => "S",
Tool::Split => "C",
Tool::Erase => "X",
Tool::Smudge => "U",
} }
} }
} }

View File

@ -119,25 +119,18 @@
"name": "Drawing/Painting", "name": "Drawing/Painting",
"description": "Minimal UI - just canvas and drawing tools", "description": "Minimal UI - just canvas and drawing tools",
"layout": { "layout": {
"type": "vertical-grid", "type": "horizontal-grid",
"percent": 8, "percent": 15,
"children": [ "children": [
{ "type": "pane", "name": "toolbar" },
{ {
"type": "horizontal-grid", "type": "vertical-grid",
"percent": 85, "percent": 30,
"children": [ "children": [
{ "type": "pane", "name": "stage" }, { "type": "pane", "name": "toolbar" },
{ { "type": "pane", "name": "infopanel" }
"type": "vertical-grid",
"percent": 70,
"children": [
{ "type": "pane", "name": "infopanel" },
{ "type": "pane", "name": "timelineV2" }
]
}
] ]
} },
{ "type": "pane", "name": "stage" }
] ]
} }
} }

View File

@ -44,6 +44,9 @@ impl CustomCursor {
Tool::BezierEdit => CustomCursor::BezierEdit, Tool::BezierEdit => CustomCursor::BezierEdit,
Tool::Text => CustomCursor::Text, Tool::Text => CustomCursor::Text,
Tool::RegionSelect => CustomCursor::Select, // Reuse select cursor for now Tool::RegionSelect => CustomCursor::Select, // Reuse select cursor for now
Tool::Split => CustomCursor::Select, // Reuse select cursor for now
Tool::Erase => CustomCursor::Draw, // Reuse draw cursor for raster erase
Tool::Smudge => CustomCursor::Draw, // Reuse draw cursor for raster smudge
} }
} }

View File

@ -0,0 +1,691 @@
//! GPU-accelerated raster brush engine.
//!
//! [`GpuBrushEngine`] wraps the `brush_dab.wgsl` compute pipeline and manages
//! per-keyframe canvas texture pairs (ping-pong) used as the live canvas during
//! raster painting.
//!
//! ## Lifecycle
//!
//! 1. **Stroke start** — caller supplies the initial pixel data; the engine uploads
//! it to both canvas textures so either can serve as source/dest.
//! 2. **Each drag event** — [`GpuBrushEngine::render_dabs`] copies src→dst,
//! dispatches the compute shader, then swaps src/dst.
//! 3. **Stroke end** — [`GpuBrushEngine::readback_canvas`] copies the current
//! source texture into a staging buffer and returns the raw RGBA bytes
//! (blocking — uses `device.poll(Maintain::Wait)`).
//! 4. **Idle** — canvas textures are kept alive for the next stroke (no re-upload
//! needed if the layer has not changed).
use std::collections::HashMap;
use uuid::Uuid;
use lightningbeam_core::brush_engine::GpuDab;
// ---------------------------------------------------------------------------
// Colour-space helpers
// ---------------------------------------------------------------------------
/// Decode one sRGB-encoded channel value in `[0, 1]` to linear light in `[0, 1]`
/// (IEC 61966-2-1 EOTF).
///
/// The input is a normalised float, not a byte — callers divide by 255 first.
fn srgb_to_linear(c: f32) -> f32 {
    if c <= 0.04045 {
        // Linear toe segment near black.
        c / 12.92
    } else {
        // Power-curve segment.
        ((c + 0.055) / 1.055).powf(2.4)
    }
}
/// Re-encode one *linear-light byte* (0–255, as stored in the canvas texture)
/// to an sRGB-encoded byte (IEC 61966-2-1 OETF), rounding to nearest.
fn linear_to_srgb_byte(c: u8) -> u8 {
    // Normalise the linear byte to [0, 1].
    let f = c as f32 / 255.0;
    let encoded = if f <= 0.0031308 {
        // Linear toe segment near black.
        f * 12.92
    } else {
        // Power-curve segment.
        1.055 * f.powf(1.0 / 2.4) - 0.055
    };
    // `encoded` is in [0, 1]; +0.5 rounds to nearest when truncating.
    (encoded * 255.0 + 0.5) as u8
}
// ---------------------------------------------------------------------------
// Per-keyframe canvas texture pair (ping-pong)
// ---------------------------------------------------------------------------
/// A pair of textures used for double-buffered (ping-pong) canvas rendering.
///
/// One texture is sampled as the source while the other is written as the
/// destination; after each compute dispatch the roles swap (see `swap`).
/// `current` indexes the texture that holds the up-to-date canvas state.
pub struct CanvasPair {
    /// The two ping-pong textures (Rgba8Unorm; sampled + storage + copy usage).
    pub textures: [wgpu::Texture; 2],
    /// Full-texture views, index-parallel to `textures`.
    pub views: [wgpu::TextureView; 2],
    /// Index (0 or 1) of the texture that is the current "source" (authoritative).
    pub current: usize,
    /// Canvas width in pixels.
    pub width: u32,
    /// Canvas height in pixels.
    pub height: u32,
}
impl CanvasPair {
    /// Create a new texture pair of the given size (contents uninitialised
    /// until [`CanvasPair::upload`] is called).
    pub fn new(device: &wgpu::Device, width: u32, height: u32) -> Self {
        let desc = wgpu::TextureDescriptor {
            label: Some("raster_canvas"),
            size: wgpu::Extent3d { width, height, depth_or_array_layers: 1 },
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            // Non-sRGB format: the canvas stores linear premultiplied RGBA so
            // the compute shader can blend without per-texel decode/encode.
            format: wgpu::TextureFormat::Rgba8Unorm,
            usage: wgpu::TextureUsages::TEXTURE_BINDING // sampled as src
                | wgpu::TextureUsages::STORAGE_BINDING  // written as dst
                | wgpu::TextureUsages::COPY_SRC         // readback / pre-fill
                | wgpu::TextureUsages::COPY_DST,        // upload / pre-fill
            view_formats: &[],
        };
        let t0 = device.create_texture(&desc);
        let t1 = device.create_texture(&desc);
        let v0 = t0.create_view(&wgpu::TextureViewDescriptor::default());
        let v1 = t1.create_view(&wgpu::TextureViewDescriptor::default());
        Self {
            textures: [t0, t1],
            views: [v0, v1],
            current: 0,
            width,
            height,
        }
    }

    /// Upload raw RGBA bytes to both textures (call once at stroke start).
    ///
    /// `pixels` is expected to be **sRGB-encoded premultiplied** (the format stored
    /// in `raw_pixels` / PNG files), tightly packed at `width * height * 4` bytes.
    /// The values are decoded to linear premultiplied before being written to the
    /// canvas, which operates entirely in linear space.
    pub fn upload(&self, queue: &wgpu::Queue, pixels: &[u8]) {
        debug_assert_eq!(
            pixels.len(),
            self.width as usize * self.height as usize * 4,
            "pixel buffer size must match canvas dimensions"
        );
        // Build a 256-entry sRGB→linear decode LUT once, instead of evaluating
        // `powf` for every channel of every pixel (3 * w * h calls otherwise).
        let mut lut = [0u8; 256];
        for (i, v) in lut.iter_mut().enumerate() {
            *v = (srgb_to_linear(i as f32 / 255.0) * 255.0 + 0.5) as u8;
        }
        // Decode sRGB-premultiplied → linear premultiplied; alpha is copied as-is.
        let linear: Vec<u8> = pixels
            .chunks_exact(4)
            .flat_map(|p| [lut[p[0] as usize], lut[p[1] as usize], lut[p[2] as usize], p[3]])
            .collect();
        let layout = wgpu::TexelCopyBufferLayout {
            offset: 0,
            bytes_per_row: Some(self.width * 4),
            rows_per_image: Some(self.height),
        };
        let extent = wgpu::Extent3d {
            width: self.width,
            height: self.height,
            depth_or_array_layers: 1,
        };
        // Write the same data to both textures so either one can serve as the
        // ping-pong source on the first dispatch.
        for tex in &self.textures {
            queue.write_texture(
                wgpu::TexelCopyTextureInfo {
                    texture: tex,
                    mip_level: 0,
                    origin: wgpu::Origin3d::ZERO,
                    aspect: wgpu::TextureAspect::All,
                },
                &linear,
                layout,
                extent,
            );
        }
    }

    /// Source (current, authoritative) texture.
    pub fn src(&self) -> &wgpu::Texture { &self.textures[self.current] }

    /// Source texture view.
    pub fn src_view(&self) -> &wgpu::TextureView { &self.views[self.current] }

    /// Destination (write target) texture.
    pub fn dst(&self) -> &wgpu::Texture { &self.textures[1 - self.current] }

    /// Destination texture view.
    pub fn dst_view(&self) -> &wgpu::TextureView { &self.views[1 - self.current] }

    /// Commit the just-completed dispatch: make dst the new source.
    pub fn swap(&mut self) { self.current = 1 - self.current; }
}
// ---------------------------------------------------------------------------
// GpuBrushEngine
// ---------------------------------------------------------------------------
/// GPU brush engine — holds the compute pipeline and per-keyframe canvas pairs.
pub struct GpuBrushEngine {
    /// Compute pipeline built from `brush_dab.wgsl` (entry point `main`).
    compute_pipeline: wgpu::ComputePipeline,
    /// Layout for the dab buffer, params uniform, source texture and
    /// destination storage texture (bindings 0–3).
    compute_bg_layout: wgpu::BindGroupLayout,
    /// Canvas texture pairs keyed by keyframe UUID.
    pub canvases: HashMap<Uuid, CanvasPair>,
}
/// CPU-side parameters uniform for the compute shader.
///
/// NOTE(review): `#[repr(C)]` field order is assumed to match the params
/// uniform declared in `brush_dab.wgsl` — confirm against the shader when
/// editing either side.
#[repr(C)]
#[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)]
struct DabParams {
    // Dispatch-region origin X, in canvas pixels (clamped to >= 0 by caller).
    bbox_x0: i32,
    // Dispatch-region origin Y, in canvas pixels (clamped to >= 0 by caller).
    bbox_y0: i32,
    // Dispatch-region width, in pixels.
    bbox_w: u32,
    // Dispatch-region height, in pixels.
    bbox_h: u32,
    // Number of valid entries in the dab storage buffer.
    num_dabs: u32,
    // Full canvas width, in pixels.
    canvas_w: u32,
    // Full canvas height, in pixels.
    canvas_h: u32,
    // Trailing padding — presumably keeps the uniform size 16-byte aligned;
    // always written as 0.
    _pad: u32,
}
impl GpuBrushEngine {
    /// Create the brush compute pipeline and an empty canvas map.
    ///
    /// Pipeline/shader creation here does not return a `Result`; wgpu surfaces
    /// validation failures through the device's error handling rather than this
    /// constructor. (The previous doc claimed an `Err` return that the signature
    /// never had.)
    pub fn new(device: &wgpu::Device) -> Self {
        let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
            label: Some("brush_dab_shader"),
            source: wgpu::ShaderSource::Wgsl(
                include_str!("panes/shaders/brush_dab.wgsl").into(),
            ),
        });
        let compute_bg_layout = device.create_bind_group_layout(
            &wgpu::BindGroupLayoutDescriptor {
                label: Some("brush_dab_bgl"),
                entries: &[
                    // 0: dab storage buffer (read-only)
                    wgpu::BindGroupLayoutEntry {
                        binding: 0,
                        visibility: wgpu::ShaderStages::COMPUTE,
                        ty: wgpu::BindingType::Buffer {
                            ty: wgpu::BufferBindingType::Storage { read_only: true },
                            has_dynamic_offset: false,
                            min_binding_size: None,
                        },
                        count: None,
                    },
                    // 1: params uniform
                    wgpu::BindGroupLayoutEntry {
                        binding: 1,
                        visibility: wgpu::ShaderStages::COMPUTE,
                        ty: wgpu::BindingType::Buffer {
                            ty: wgpu::BufferBindingType::Uniform,
                            has_dynamic_offset: false,
                            min_binding_size: None,
                        },
                        count: None,
                    },
                    // 2: canvas source (sampled)
                    wgpu::BindGroupLayoutEntry {
                        binding: 2,
                        visibility: wgpu::ShaderStages::COMPUTE,
                        ty: wgpu::BindingType::Texture {
                            sample_type: wgpu::TextureSampleType::Float { filterable: true },
                            view_dimension: wgpu::TextureViewDimension::D2,
                            multisampled: false,
                        },
                        count: None,
                    },
                    // 3: canvas destination (write-only storage)
                    wgpu::BindGroupLayoutEntry {
                        binding: 3,
                        visibility: wgpu::ShaderStages::COMPUTE,
                        ty: wgpu::BindingType::StorageTexture {
                            access: wgpu::StorageTextureAccess::WriteOnly,
                            format: wgpu::TextureFormat::Rgba8Unorm,
                            view_dimension: wgpu::TextureViewDimension::D2,
                        },
                        count: None,
                    },
                ],
            },
        );
        let pipeline_layout = device.create_pipeline_layout(
            &wgpu::PipelineLayoutDescriptor {
                label: Some("brush_dab_pl"),
                bind_group_layouts: &[&compute_bg_layout],
                push_constant_ranges: &[],
            },
        );
        let compute_pipeline = device.create_compute_pipeline(
            &wgpu::ComputePipelineDescriptor {
                label: Some("brush_dab_pipeline"),
                layout: Some(&pipeline_layout),
                module: &shader,
                entry_point: Some("main"),
                compilation_options: Default::default(),
                cache: None,
            },
        );
        Self {
            compute_pipeline,
            compute_bg_layout,
            canvases: HashMap::new(),
        }
    }

    /// Ensure a canvas pair exists for `keyframe_id` at the given dimensions.
    ///
    /// If the canvas exists but has different dimensions it is replaced
    /// (its previous contents are discarded).
    pub fn ensure_canvas(
        &mut self,
        device: &wgpu::Device,
        keyframe_id: Uuid,
        width: u32,
        height: u32,
    ) -> &mut CanvasPair {
        let needs_new = self.canvases.get(&keyframe_id)
            .map_or(true, |c| c.width != width || c.height != height);
        if needs_new {
            self.canvases.insert(keyframe_id, CanvasPair::new(device, width, height));
        }
        // Safe: the entry was just inserted if it was missing or mismatched.
        self.canvases.get_mut(&keyframe_id).unwrap()
    }

    /// Dispatch the brush compute shader for `dabs` onto the canvas of `keyframe_id`.
    ///
    /// * Pre-fills `dst` from `src` so untouched pixels are preserved.
    /// * Dispatches the compute shader over the dab bounding box.
    /// * Swaps src/dst so the just-written texture becomes the new source.
    ///
    /// `bbox` is `(x0, y0, x1, y1)` — the union bounding box of all dabs, in
    /// canvas pixels, inclusive. If `dabs` is empty, the bbox is the untouched
    /// `i32::MAX` sentinel, or no canvas exists for `keyframe_id`, does nothing.
    pub fn render_dabs(
        &mut self,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
        keyframe_id: Uuid,
        dabs: &[GpuDab],
        bbox: (i32, i32, i32, i32),
        canvas_w: u32,
        canvas_h: u32,
    ) {
        if dabs.is_empty() || bbox.0 == i32::MAX { return; }
        let canvas = match self.canvases.get_mut(&keyframe_id) {
            Some(c) => c,
            None => return,
        };

        // Clamp bbox to canvas bounds (inclusive coordinates).
        let x0 = bbox.0.max(0) as u32;
        let y0 = bbox.1.max(0) as u32;
        let x1 = (bbox.2.min(canvas_w as i32 - 1)).max(0) as u32;
        let y1 = (bbox.3.min(canvas_h as i32 - 1)).max(0) as u32;
        if x1 < x0 || y1 < y0 { return; }
        let bbox_w = x1 - x0 + 1;
        let bbox_h = y1 - y0 + 1;

        // --- Pre-fill dst from src: copy the ENTIRE canvas so every pixel outside
        // the dab bounding box is preserved across the ping-pong swap.
        // Copying only the bbox would leave dst with data from two frames ago
        // in all other regions, causing missing dabs on alternating frames. ---
        let mut copy_encoder = device.create_command_encoder(
            &wgpu::CommandEncoderDescriptor { label: Some("canvas_copy_encoder") },
        );
        let full_extent = wgpu::Extent3d {
            width: canvas.width,
            height: canvas.height,
            depth_or_array_layers: 1,
        };
        copy_encoder.copy_texture_to_texture(
            wgpu::TexelCopyTextureInfo {
                texture: canvas.src(),
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::TexelCopyTextureInfo {
                texture: canvas.dst(),
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            full_extent,
        );
        queue.submit(Some(copy_encoder.finish()));

        // --- Upload dab data and params ---
        let dab_bytes = bytemuck::cast_slice(dabs);
        let dab_buf = device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("dab_storage_buf"),
            size: dab_bytes.len() as u64,
            usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST,
            mapped_at_creation: false,
        });
        queue.write_buffer(&dab_buf, 0, dab_bytes);
        let params = DabParams {
            bbox_x0: x0 as i32,
            bbox_y0: y0 as i32,
            bbox_w,
            bbox_h,
            num_dabs: dabs.len() as u32,
            canvas_w,
            canvas_h,
            _pad: 0,
        };
        let params_buf = device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("dab_params_buf"),
            size: std::mem::size_of::<DabParams>() as u64,
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
            mapped_at_creation: false,
        });
        queue.write_buffer(&params_buf, 0, bytemuck::bytes_of(&params));
        let bg = device.create_bind_group(&wgpu::BindGroupDescriptor {
            label: Some("brush_dab_bg"),
            layout: &self.compute_bg_layout,
            entries: &[
                wgpu::BindGroupEntry {
                    binding: 0,
                    resource: dab_buf.as_entire_binding(),
                },
                wgpu::BindGroupEntry {
                    binding: 1,
                    resource: params_buf.as_entire_binding(),
                },
                wgpu::BindGroupEntry {
                    binding: 2,
                    resource: wgpu::BindingResource::TextureView(canvas.src_view()),
                },
                wgpu::BindGroupEntry {
                    binding: 3,
                    resource: wgpu::BindingResource::TextureView(canvas.dst_view()),
                },
            ],
        });

        // --- Dispatch ---
        let mut compute_encoder = device.create_command_encoder(
            &wgpu::CommandEncoderDescriptor { label: Some("brush_dab_encoder") },
        );
        {
            let mut pass = compute_encoder.begin_compute_pass(
                &wgpu::ComputePassDescriptor {
                    label: Some("brush_dab_pass"),
                    timestamp_writes: None,
                },
            );
            pass.set_pipeline(&self.compute_pipeline);
            pass.set_bind_group(0, &bg, &[]);
            // NOTE(review): the 8×8 workgroup size here must match
            // @workgroup_size in brush_dab.wgsl — confirm when editing either.
            let wg_x = bbox_w.div_ceil(8);
            let wg_y = bbox_h.div_ceil(8);
            pass.dispatch_workgroups(wg_x, wg_y, 1);
        }
        queue.submit(Some(compute_encoder.finish()));

        // Swap: dst is now the authoritative source
        canvas.swap();
    }

    /// Read the current canvas back to a CPU `Vec<u8>` (raw RGBA, row-major).
    ///
    /// **Blocks** until the GPU work is complete (`Maintain::Wait`).
    /// Should only be called at stroke end, not every frame.
    ///
    /// Returns `None` if no canvas exists for `keyframe_id` or the buffer
    /// mapping fails.
    pub fn readback_canvas(
        &self,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
        keyframe_id: Uuid,
    ) -> Option<Vec<u8>> {
        let canvas = self.canvases.get(&keyframe_id)?;
        let width = canvas.width;
        let height = canvas.height;

        // wgpu requires bytes_per_row to be a multiple of 256
        // (COPY_BYTES_PER_ROW_ALIGNMENT); round up with div_ceil for
        // consistency with the dispatch math above.
        let bytes_per_row_aligned = (width * 4).div_ceil(256) * 256;
        let total_bytes = (bytes_per_row_aligned * height) as u64;
        let staging = device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("canvas_readback_buf"),
            size: total_bytes,
            usage: wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST,
            mapped_at_creation: false,
        });
        let mut encoder = device.create_command_encoder(
            &wgpu::CommandEncoderDescriptor { label: Some("canvas_readback_encoder") },
        );
        encoder.copy_texture_to_buffer(
            wgpu::TexelCopyTextureInfo {
                texture: canvas.src(),
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::TexelCopyBufferInfo {
                buffer: &staging,
                layout: wgpu::TexelCopyBufferLayout {
                    offset: 0,
                    bytes_per_row: Some(bytes_per_row_aligned),
                    rows_per_image: Some(height),
                },
            },
            wgpu::Extent3d { width, height, depth_or_array_layers: 1 },
        );
        queue.submit(Some(encoder.finish()));

        // Block until complete
        let slice = staging.slice(..);
        let (tx, rx) = std::sync::mpsc::channel();
        slice.map_async(wgpu::MapMode::Read, move |r| { let _ = tx.send(r); });
        let _ = device.poll(wgpu::PollType::wait_indefinitely());
        if rx.recv().ok()?.is_err() { return None; }
        let mapped = slice.get_mapped_range();

        // De-stride: copy only `width * 4` bytes per row (drop alignment padding)
        let bytes_per_row_tight = (width * 4) as usize;
        let bytes_per_row_src = bytes_per_row_aligned as usize;
        let mut pixels = vec![0u8; (width * height * 4) as usize];
        for row in 0..height as usize {
            let src = &mapped[row * bytes_per_row_src .. row * bytes_per_row_src + bytes_per_row_tight];
            let dst = &mut pixels[row * bytes_per_row_tight .. (row + 1) * bytes_per_row_tight];
            dst.copy_from_slice(src);
        }
        drop(mapped);
        staging.unmap();

        // Encode linear premultiplied → sRGB-encoded premultiplied so the returned
        // bytes match what Vello expects (ImageAlphaType::Premultiplied with sRGB
        // channels). Alpha is left unchanged.
        for pixel in pixels.chunks_exact_mut(4) {
            pixel[0] = linear_to_srgb_byte(pixel[0]);
            pixel[1] = linear_to_srgb_byte(pixel[1]);
            pixel[2] = linear_to_srgb_byte(pixel[2]);
        }
        Some(pixels)
    }

    /// Remove the canvas pair for a keyframe (e.g. when the layer is deleted).
    pub fn remove_canvas(&mut self, keyframe_id: &Uuid) {
        self.canvases.remove(keyframe_id);
    }
}
// ---------------------------------------------------------------------------
// Canvas blit pipeline (renders canvas texture to layer sRGB buffer)
// ---------------------------------------------------------------------------
/// Bind group layout + pipeline for blitting a canvas texture (at document
/// resolution) into a layer render buffer (at viewport resolution), applying
/// the camera transform.
pub struct CanvasBlitPipeline {
    /// Fullscreen triangle-strip render pipeline built from `canvas_blit.wgsl`.
    pub pipeline: wgpu::RenderPipeline,
    /// Layout for the canvas texture, sampler and camera uniform (bindings 0–2).
    pub bg_layout: wgpu::BindGroupLayout,
    /// Bilinear clamp-to-edge sampler used to sample the canvas.
    pub sampler: wgpu::Sampler,
}
/// Camera parameters uniform for canvas_blit.wgsl.
///
/// NOTE(review): `#[repr(C)]` field order is assumed to match the uniform
/// declared in `canvas_blit.wgsl` — confirm against the shader when editing
/// either side.
#[repr(C)]
#[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)]
pub struct CameraParams {
    // Camera pan X (units as interpreted by the shader — presumably pixels).
    pub pan_x: f32,
    // Camera pan Y (units as interpreted by the shader — presumably pixels).
    pub pan_y: f32,
    // Camera zoom factor.
    pub zoom: f32,
    // Document/canvas width in pixels.
    pub canvas_w: f32,
    // Document/canvas height in pixels.
    pub canvas_h: f32,
    // Viewport (render target) width in pixels.
    pub viewport_w: f32,
    // Viewport (render target) height in pixels.
    pub viewport_h: f32,
    // Trailing padding — presumably keeps the uniform size 16-byte aligned.
    pub _pad: f32,
}
impl CanvasBlitPipeline {
    /// Build the blit render pipeline, its bind group layout and the sampler.
    pub fn new(device: &wgpu::Device) -> Self {
        let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
            label: Some("canvas_blit_shader"),
            source: wgpu::ShaderSource::Wgsl(
                include_str!("panes/shaders/canvas_blit.wgsl").into(),
            ),
        });
        let bg_layout = device.create_bind_group_layout(
            &wgpu::BindGroupLayoutDescriptor {
                label: Some("canvas_blit_bgl"),
                entries: &[
                    // 0: canvas texture (sampled in the fragment stage)
                    wgpu::BindGroupLayoutEntry {
                        binding: 0,
                        visibility: wgpu::ShaderStages::FRAGMENT,
                        ty: wgpu::BindingType::Texture {
                            sample_type: wgpu::TextureSampleType::Float { filterable: true },
                            view_dimension: wgpu::TextureViewDimension::D2,
                            multisampled: false,
                        },
                        count: None,
                    },
                    // 1: bilinear sampler
                    wgpu::BindGroupLayoutEntry {
                        binding: 1,
                        visibility: wgpu::ShaderStages::FRAGMENT,
                        ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                        count: None,
                    },
                    // 2: camera parameters uniform (CameraParams)
                    wgpu::BindGroupLayoutEntry {
                        binding: 2,
                        visibility: wgpu::ShaderStages::FRAGMENT,
                        ty: wgpu::BindingType::Buffer {
                            ty: wgpu::BufferBindingType::Uniform,
                            has_dynamic_offset: false,
                            min_binding_size: None,
                        },
                        count: None,
                    },
                ],
            },
        );
        let pipeline_layout = device.create_pipeline_layout(
            &wgpu::PipelineLayoutDescriptor {
                label: Some("canvas_blit_pl"),
                bind_group_layouts: &[&bg_layout],
                push_constant_ranges: &[],
            },
        );
        let pipeline = device.create_render_pipeline(
            &wgpu::RenderPipelineDescriptor {
                label: Some("canvas_blit_pipeline"),
                layout: Some(&pipeline_layout),
                vertex: wgpu::VertexState {
                    module: &shader,
                    entry_point: Some("vs_main"),
                    // No vertex buffers: vs_main generates the quad from the
                    // vertex index (4-vertex triangle strip, see `blit`).
                    buffers: &[],
                    compilation_options: Default::default(),
                },
                fragment: Some(wgpu::FragmentState {
                    module: &shader,
                    entry_point: Some("fs_main"),
                    targets: &[Some(wgpu::ColorTargetState {
                        format: wgpu::TextureFormat::Rgba8Unorm,
                        blend: None, // canvas already stores premultiplied alpha
                        write_mask: wgpu::ColorWrites::ALL,
                    })],
                    compilation_options: Default::default(),
                }),
                primitive: wgpu::PrimitiveState {
                    topology: wgpu::PrimitiveTopology::TriangleStrip,
                    ..Default::default()
                },
                depth_stencil: None,
                multisample: wgpu::MultisampleState::default(),
                multiview: None,
                cache: None,
            },
        );
        let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
            label: Some("canvas_blit_sampler"),
            address_mode_u: wgpu::AddressMode::ClampToEdge,
            address_mode_v: wgpu::AddressMode::ClampToEdge,
            address_mode_w: wgpu::AddressMode::ClampToEdge,
            mag_filter: wgpu::FilterMode::Linear,
            min_filter: wgpu::FilterMode::Linear,
            mipmap_filter: wgpu::FilterMode::Nearest,
            ..Default::default()
        });
        Self { pipeline, bg_layout, sampler }
    }

    /// Render the canvas texture into `target_view` (Rgba8Unorm) with the given camera.
    ///
    /// `target_view` is cleared to transparent before writing.
    /// Creates a fresh camera uniform buffer and bind group per call.
    pub fn blit(
        &self,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
        canvas_view: &wgpu::TextureView,
        target_view: &wgpu::TextureView,
        camera: &CameraParams,
    ) {
        // Upload camera params
        let cam_buf = device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("canvas_blit_cam_buf"),
            size: std::mem::size_of::<CameraParams>() as u64,
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
            mapped_at_creation: false,
        });
        queue.write_buffer(&cam_buf, 0, bytemuck::bytes_of(camera));
        let bg = device.create_bind_group(&wgpu::BindGroupDescriptor {
            label: Some("canvas_blit_bg"),
            layout: &self.bg_layout,
            entries: &[
                wgpu::BindGroupEntry {
                    binding: 0,
                    resource: wgpu::BindingResource::TextureView(canvas_view),
                },
                wgpu::BindGroupEntry {
                    binding: 1,
                    resource: wgpu::BindingResource::Sampler(&self.sampler),
                },
                wgpu::BindGroupEntry {
                    binding: 2,
                    resource: cam_buf.as_entire_binding(),
                },
            ],
        });
        let mut encoder = device.create_command_encoder(
            &wgpu::CommandEncoderDescriptor { label: Some("canvas_blit_encoder") },
        );
        {
            let mut rp = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("canvas_blit_pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: target_view,
                    resolve_target: None,
                    depth_slice: None,
                    ops: wgpu::Operations {
                        // Clear to transparent so uncovered viewport area stays empty.
                        load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: None,
                occlusion_query_set: None,
                timestamp_writes: None,
            });
            rp.set_pipeline(&self.pipeline);
            rp.set_bind_group(0, &bg, &[]);
            // 4 vertices: one fullscreen quad as a triangle strip.
            rp.draw(0..4, 0..1);
        }
        queue.submit(Some(encoder.finish()));
    }
}

View File

@ -50,6 +50,7 @@ pub enum AppAction {
AddVideoLayer, AddVideoLayer,
AddAudioTrack, AddAudioTrack,
AddMidiTrack, AddMidiTrack,
AddRasterLayer,
AddTestClip, AddTestClip,
DeleteLayer, DeleteLayer,
ToggleLayerVisibility, ToggleLayerVisibility,
@ -124,7 +125,7 @@ impl AppAction {
Self::BringToFront | Self::SplitClip | Self::DuplicateClip => "Modify", Self::BringToFront | Self::SplitClip | Self::DuplicateClip => "Modify",
Self::AddLayer | Self::AddVideoLayer | Self::AddAudioTrack | Self::AddLayer | Self::AddVideoLayer | Self::AddAudioTrack |
Self::AddMidiTrack | Self::AddTestClip | Self::DeleteLayer | Self::AddMidiTrack | Self::AddRasterLayer | Self::AddTestClip | Self::DeleteLayer |
Self::ToggleLayerVisibility => "Layer", Self::ToggleLayerVisibility => "Layer",
Self::NewKeyframe | Self::NewBlankKeyframe | Self::DeleteFrame | Self::NewKeyframe | Self::NewBlankKeyframe | Self::DeleteFrame |
@ -199,6 +200,7 @@ impl AppAction {
Self::AddVideoLayer => "Add Video Layer", Self::AddVideoLayer => "Add Video Layer",
Self::AddAudioTrack => "Add Audio Track", Self::AddAudioTrack => "Add Audio Track",
Self::AddMidiTrack => "Add MIDI Track", Self::AddMidiTrack => "Add MIDI Track",
Self::AddRasterLayer => "Add Raster Layer",
Self::AddTestClip => "Add Test Clip", Self::AddTestClip => "Add Test Clip",
Self::DeleteLayer => "Delete Layer", Self::DeleteLayer => "Delete Layer",
Self::ToggleLayerVisibility => "Toggle Layer Visibility", Self::ToggleLayerVisibility => "Toggle Layer Visibility",
@ -314,6 +316,7 @@ impl From<MenuAction> for AppAction {
MenuAction::AddVideoLayer => Self::AddVideoLayer, MenuAction::AddVideoLayer => Self::AddVideoLayer,
MenuAction::AddAudioTrack => Self::AddAudioTrack, MenuAction::AddAudioTrack => Self::AddAudioTrack,
MenuAction::AddMidiTrack => Self::AddMidiTrack, MenuAction::AddMidiTrack => Self::AddMidiTrack,
MenuAction::AddRasterLayer => Self::AddRasterLayer,
MenuAction::AddTestClip => Self::AddTestClip, MenuAction::AddTestClip => Self::AddTestClip,
MenuAction::DeleteLayer => Self::DeleteLayer, MenuAction::DeleteLayer => Self::DeleteLayer,
MenuAction::ToggleLayerVisibility => Self::ToggleLayerVisibility, MenuAction::ToggleLayerVisibility => Self::ToggleLayerVisibility,
@ -373,6 +376,7 @@ impl TryFrom<AppAction> for MenuAction {
AppAction::AddVideoLayer => MenuAction::AddVideoLayer, AppAction::AddVideoLayer => MenuAction::AddVideoLayer,
AppAction::AddAudioTrack => MenuAction::AddAudioTrack, AppAction::AddAudioTrack => MenuAction::AddAudioTrack,
AppAction::AddMidiTrack => MenuAction::AddMidiTrack, AppAction::AddMidiTrack => MenuAction::AddMidiTrack,
AppAction::AddRasterLayer => MenuAction::AddRasterLayer,
AppAction::AddTestClip => MenuAction::AddTestClip, AppAction::AddTestClip => MenuAction::AddTestClip,
AppAction::DeleteLayer => MenuAction::DeleteLayer, AppAction::DeleteLayer => MenuAction::DeleteLayer,
AppAction::ToggleLayerVisibility => MenuAction::ToggleLayerVisibility, AppAction::ToggleLayerVisibility => MenuAction::ToggleLayerVisibility,

View File

@ -23,6 +23,7 @@ use theme::{Theme, ThemeMode};
mod waveform_gpu; mod waveform_gpu;
mod cqt_gpu; mod cqt_gpu;
mod gpu_brush;
mod config; mod config;
use config::AppConfig; use config::AppConfig;
@ -320,6 +321,9 @@ mod tool_icons {
pub static POLYGON: &[u8] = include_bytes!("../../../src/assets/polygon.svg"); pub static POLYGON: &[u8] = include_bytes!("../../../src/assets/polygon.svg");
pub static BEZIER_EDIT: &[u8] = include_bytes!("../../../src/assets/bezier_edit.svg"); pub static BEZIER_EDIT: &[u8] = include_bytes!("../../../src/assets/bezier_edit.svg");
pub static TEXT: &[u8] = include_bytes!("../../../src/assets/text.svg"); pub static TEXT: &[u8] = include_bytes!("../../../src/assets/text.svg");
pub static SPLIT: &[u8] = include_bytes!("../../../src/assets/split.svg");
pub static ERASE: &[u8] = include_bytes!("../../../src/assets/erase.svg");
pub static SMUDGE: &[u8] = include_bytes!("../../../src/assets/smudge.svg");
} }
/// Embedded focus icon SVGs /// Embedded focus icon SVGs
@ -327,6 +331,7 @@ mod focus_icons {
pub static ANIMATION: &[u8] = include_bytes!("../../../src/assets/focus-animation.svg"); pub static ANIMATION: &[u8] = include_bytes!("../../../src/assets/focus-animation.svg");
pub static MUSIC: &[u8] = include_bytes!("../../../src/assets/focus-music.svg"); pub static MUSIC: &[u8] = include_bytes!("../../../src/assets/focus-music.svg");
pub static VIDEO: &[u8] = include_bytes!("../../../src/assets/focus-video.svg"); pub static VIDEO: &[u8] = include_bytes!("../../../src/assets/focus-video.svg");
pub static PAINTING: &[u8] = include_bytes!("../../../src/assets/focus-painting.svg");
} }
/// Icon cache for pane type icons /// Icon cache for pane type icons
@ -387,6 +392,9 @@ impl ToolIconCache {
Tool::BezierEdit => tool_icons::BEZIER_EDIT, Tool::BezierEdit => tool_icons::BEZIER_EDIT,
Tool::Text => tool_icons::TEXT, Tool::Text => tool_icons::TEXT,
Tool::RegionSelect => tool_icons::SELECT, // Reuse select icon for now Tool::RegionSelect => tool_icons::SELECT, // Reuse select icon for now
Tool::Split => tool_icons::SPLIT,
Tool::Erase => tool_icons::ERASE,
Tool::Smudge => tool_icons::SMUDGE,
}; };
if let Some(texture) = rasterize_svg(svg_data, tool.icon_file(), 180, ctx) { if let Some(texture) = rasterize_svg(svg_data, tool.icon_file(), 180, ctx) {
self.icons.insert(tool, texture); self.icons.insert(tool, texture);
@ -414,6 +422,7 @@ impl FocusIconCache {
FocusIcon::Animation => (focus_icons::ANIMATION, "focus-animation.svg"), FocusIcon::Animation => (focus_icons::ANIMATION, "focus-animation.svg"),
FocusIcon::Music => (focus_icons::MUSIC, "focus-music.svg"), FocusIcon::Music => (focus_icons::MUSIC, "focus-music.svg"),
FocusIcon::Video => (focus_icons::VIDEO, "focus-video.svg"), FocusIcon::Video => (focus_icons::VIDEO, "focus-video.svg"),
FocusIcon::Painting => (focus_icons::PAINTING, "focus-painting.svg"),
}; };
// Replace currentColor with the actual color // Replace currentColor with the actual color
@ -659,6 +668,7 @@ enum FocusIcon {
Animation, Animation,
Music, Music,
Video, Video,
Painting,
} }
/// Recording arm mode - determines how tracks are armed for recording /// Recording arm mode - determines how tracks are armed for recording
@ -746,6 +756,11 @@ struct EditorApp {
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode, // Current simplification mode for draw tool draw_simplify_mode: lightningbeam_core::tool::SimplifyMode, // Current simplification mode for draw tool
rdp_tolerance: f64, // RDP simplification tolerance (default: 10.0) rdp_tolerance: f64, // RDP simplification tolerance (default: 10.0)
schneider_max_error: f64, // Schneider curve fitting max error (default: 30.0) schneider_max_error: f64, // Schneider curve fitting max error (default: 30.0)
// Raster brush settings
brush_radius: f32, // brush radius in pixels
brush_opacity: f32, // brush opacity 0.0 to 1.0 brush_opacity: f32, // brush opacity 0.0 to 1.0
brush_hardness: f32, // brush hardness 0.0 to 1.0 brush_hardness: f32, // brush hardness 0.0 to 1.0
brush_spacing: f32, // dabs_per_radius (fraction of radius per dab)
// Audio engine integration // Audio engine integration
#[allow(dead_code)] // Must be kept alive to maintain audio output #[allow(dead_code)] // Must be kept alive to maintain audio output
audio_stream: Option<cpal::Stream>, audio_stream: Option<cpal::Stream>,
@ -1020,6 +1035,10 @@ impl EditorApp {
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves
rdp_tolerance: 10.0, // Default RDP tolerance rdp_tolerance: 10.0, // Default RDP tolerance
schneider_max_error: 30.0, // Default Schneider max error schneider_max_error: 30.0, // Default Schneider max error
brush_radius: 10.0,
brush_opacity: 1.0,
brush_hardness: 0.5,
brush_spacing: 0.1,
audio_stream, audio_stream,
audio_controller, audio_controller,
audio_event_rx, audio_event_rx,
@ -1221,6 +1240,18 @@ impl EditorApp {
if response.clicked() { if response.clicked() {
self.create_new_project_with_focus(1); self.create_new_project_with_focus(1);
} }
ui.add_space(card_spacing);
// Painting
let (rect, response) = ui.allocate_exact_size(
egui::vec2(card_size, card_size + 40.0),
egui::Sense::click(),
);
self.render_focus_card_with_icon(ui, rect, response.hovered(), "Painting", FocusIcon::Painting);
if response.clicked() {
self.create_new_project_with_focus(5);
}
}); });
}); });
}); });
@ -1358,7 +1389,7 @@ impl EditorApp {
.with_framerate(self.config.framerate as f64); .with_framerate(self.config.framerate as f64);
// Add default layer based on focus type // Add default layer based on focus type
// Layout indices: 0 = Animation, 1 = Video editing, 2 = Music // Layout indices: 0 = Animation, 1 = Video editing, 2 = Music, 5 = Drawing/Painting
let layer_id = match layout_index { let layer_id = match layout_index {
0 => { 0 => {
// Animation focus -> VectorLayer // Animation focus -> VectorLayer
@ -1376,6 +1407,12 @@ impl EditorApp {
let layer = AudioLayer::new_midi("MIDI 1"); let layer = AudioLayer::new_midi("MIDI 1");
document.root.add_child(AnyLayer::Audio(layer)) document.root.add_child(AnyLayer::Audio(layer))
} }
5 => {
// Painting focus -> RasterLayer
use lightningbeam_core::raster_layer::RasterLayer;
let layer = RasterLayer::new("Raster 1");
document.root.add_child(AnyLayer::Raster(layer))
}
_ => { _ => {
// Fallback to VectorLayer // Fallback to VectorLayer
let layer = VectorLayer::new("Layer 1"); let layer = VectorLayer::new("Layer 1");
@ -1665,7 +1702,7 @@ impl EditorApp {
AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document), AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document),
AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document), AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document),
AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document), AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document),
AnyLayer::Group(_) => vec![], AnyLayer::Group(_) | AnyLayer::Raster(_) => vec![],
}; };
for instance_id in active_layer_clips { for instance_id in active_layer_clips {
@ -1683,7 +1720,7 @@ impl EditorApp {
AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document), AnyLayer::Audio(al) => find_splittable_clips(&al.clip_instances, split_time, document),
AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document), AnyLayer::Video(vl) => find_splittable_clips(&vl.clip_instances, split_time, document),
AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document), AnyLayer::Effect(el) => find_splittable_clips(&el.clip_instances, split_time, document),
AnyLayer::Group(_) => vec![], AnyLayer::Group(_) | AnyLayer::Raster(_) => vec![],
}; };
if member_splittable.contains(member_instance_id) { if member_splittable.contains(member_instance_id) {
clips_to_split.push((*member_layer_id, *member_instance_id)); clips_to_split.push((*member_layer_id, *member_instance_id));
@ -1794,7 +1831,7 @@ impl EditorApp {
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) | AnyLayer::Raster(_) => &[],
}; };
let instances: Vec<_> = clip_slice let instances: Vec<_> = clip_slice
.iter() .iter()
@ -2092,7 +2129,7 @@ impl EditorApp {
AnyLayer::Audio(al) => &al.clip_instances, AnyLayer::Audio(al) => &al.clip_instances,
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) | AnyLayer::Raster(_) => &[],
}; };
instances.iter() instances.iter()
.filter(|ci| selection.contains_clip_instance(&ci.id)) .filter(|ci| selection.contains_clip_instance(&ci.id))
@ -2791,6 +2828,24 @@ impl EditorApp {
} }
} }
} }
MenuAction::AddRasterLayer => {
use lightningbeam_core::raster_layer::RasterLayer;
let editing_clip_id = self.editing_context.current_clip_id();
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
let layer_number = context_layers.len() + 1;
let layer_name = format!("Raster {}", layer_number);
let layer = RasterLayer::new(layer_name);
let action = lightningbeam_core::actions::AddLayerAction::new(AnyLayer::Raster(layer))
.with_target_clip(editing_clip_id);
let _ = self.action_executor.execute(Box::new(action));
// Set newly created layer as active
let context_layers = self.action_executor.document().context_layers(editing_clip_id.as_ref());
if let Some(last_layer) = context_layers.last() {
self.active_layer_id = Some(last_layer.id());
}
}
MenuAction::AddTestClip => { MenuAction::AddTestClip => {
// Create a test vector clip and add it to the library (not to timeline) // Create a test vector clip and add it to the library (not to timeline)
use lightningbeam_core::clip::VectorClip; use lightningbeam_core::clip::VectorClip;
@ -5099,6 +5154,10 @@ impl eframe::App for EditorApp {
draw_simplify_mode: &mut self.draw_simplify_mode, draw_simplify_mode: &mut self.draw_simplify_mode,
rdp_tolerance: &mut self.rdp_tolerance, rdp_tolerance: &mut self.rdp_tolerance,
schneider_max_error: &mut self.schneider_max_error, schneider_max_error: &mut self.schneider_max_error,
brush_radius: &mut self.brush_radius,
brush_opacity: &mut self.brush_opacity,
brush_hardness: &mut self.brush_hardness,
brush_spacing: &mut self.brush_spacing,
audio_controller: self.audio_controller.as_ref(), audio_controller: self.audio_controller.as_ref(),
video_manager: &self.video_manager, video_manager: &self.video_manager,
playback_time: &mut self.playback_time, playback_time: &mut self.playback_time,

View File

@ -320,6 +320,7 @@ pub enum MenuAction {
AddVideoLayer, AddVideoLayer,
AddAudioTrack, AddAudioTrack,
AddMidiTrack, AddMidiTrack,
AddRasterLayer,
AddTestClip, // For testing: adds a test clip to the asset library AddTestClip, // For testing: adds a test clip to the asset library
DeleteLayer, DeleteLayer,
ToggleLayerVisibility, ToggleLayerVisibility,
@ -417,6 +418,7 @@ impl MenuItemDef {
const ADD_VIDEO_LAYER: Self = Self { label: "Add Video Layer", action: MenuAction::AddVideoLayer, shortcut: None }; const ADD_VIDEO_LAYER: Self = Self { label: "Add Video Layer", action: MenuAction::AddVideoLayer, shortcut: None };
const ADD_AUDIO_TRACK: Self = Self { label: "Add Audio Track", action: MenuAction::AddAudioTrack, shortcut: None }; const ADD_AUDIO_TRACK: Self = Self { label: "Add Audio Track", action: MenuAction::AddAudioTrack, shortcut: None };
const ADD_MIDI_TRACK: Self = Self { label: "Add MIDI Track", action: MenuAction::AddMidiTrack, shortcut: None }; const ADD_MIDI_TRACK: Self = Self { label: "Add MIDI Track", action: MenuAction::AddMidiTrack, shortcut: None };
const ADD_RASTER_LAYER: Self = Self { label: "Add Raster Layer", action: MenuAction::AddRasterLayer, shortcut: None };
const ADD_TEST_CLIP: Self = Self { label: "Add Test Clip to Library", action: MenuAction::AddTestClip, shortcut: None }; const ADD_TEST_CLIP: Self = Self { label: "Add Test Clip to Library", action: MenuAction::AddTestClip, shortcut: None };
const DELETE_LAYER: Self = Self { label: "Delete Layer", action: MenuAction::DeleteLayer, shortcut: None }; const DELETE_LAYER: Self = Self { label: "Delete Layer", action: MenuAction::DeleteLayer, shortcut: None };
const TOGGLE_LAYER_VISIBILITY: Self = Self { label: "Hide/Show Layer", action: MenuAction::ToggleLayerVisibility, shortcut: None }; const TOGGLE_LAYER_VISIBILITY: Self = Self { label: "Hide/Show Layer", action: MenuAction::ToggleLayerVisibility, shortcut: None };
@ -534,6 +536,7 @@ impl MenuItemDef {
MenuDef::Item(&Self::ADD_VIDEO_LAYER), MenuDef::Item(&Self::ADD_VIDEO_LAYER),
MenuDef::Item(&Self::ADD_AUDIO_TRACK), MenuDef::Item(&Self::ADD_AUDIO_TRACK),
MenuDef::Item(&Self::ADD_MIDI_TRACK), MenuDef::Item(&Self::ADD_MIDI_TRACK),
MenuDef::Item(&Self::ADD_RASTER_LAYER),
MenuDef::Separator, MenuDef::Separator,
MenuDef::Item(&Self::ADD_TEST_CLIP), MenuDef::Item(&Self::ADD_TEST_CLIP),
MenuDef::Separator, MenuDef::Separator,

View File

@ -1265,6 +1265,9 @@ impl AssetLibraryPane {
lightningbeam_core::layer::AnyLayer::Group(_) => { lightningbeam_core::layer::AnyLayer::Group(_) => {
// Group layers don't have their own clip instances // Group layers don't have their own clip instances
} }
lightningbeam_core::layer::AnyLayer::Raster(_) => {
// Raster layers don't have their own clip instances
}
} }
} }
false false

View File

@ -147,13 +147,19 @@ impl InfopanelPane {
fn render_tool_section(&mut self, ui: &mut Ui, path: &NodePath, shared: &mut SharedPaneState) { fn render_tool_section(&mut self, ui: &mut Ui, path: &NodePath, shared: &mut SharedPaneState) {
let tool = *shared.selected_tool; let tool = *shared.selected_tool;
let active_is_raster = shared.active_layer_id
.and_then(|id| shared.action_executor.document().get_layer(&id))
.map_or(false, |l| matches!(l, AnyLayer::Raster(_)));
let is_raster_paint_tool = active_is_raster && matches!(tool, Tool::Draw | Tool::Erase | Tool::Smudge);
// Only show tool options for tools that have options // Only show tool options for tools that have options
let is_vector_tool = matches!( let is_vector_tool = !active_is_raster && matches!(
tool, tool,
Tool::Select | Tool::BezierEdit | Tool::Draw | Tool::Rectangle Tool::Select | Tool::BezierEdit | Tool::Draw | Tool::Rectangle
| Tool::Ellipse | Tool::Line | Tool::Polygon | Tool::Ellipse | Tool::Line | Tool::Polygon
); );
let has_options = is_vector_tool || matches!( let has_options = is_vector_tool || is_raster_paint_tool || matches!(
tool, tool,
Tool::PaintBucket | Tool::RegionSelect Tool::PaintBucket | Tool::RegionSelect
); );
@ -162,7 +168,17 @@ impl InfopanelPane {
return; return;
} }
egui::CollapsingHeader::new("Tool Options") let header_label = if is_raster_paint_tool {
match tool {
Tool::Erase => "Eraser",
Tool::Smudge => "Smudge",
_ => "Brush",
}
} else {
"Tool Options"
};
egui::CollapsingHeader::new(header_label)
.id_salt(("tool_options", path)) .id_salt(("tool_options", path))
.default_open(self.tool_section_open) .default_open(self.tool_section_open)
.show(ui, |ui| { .show(ui, |ui| {
@ -175,7 +191,7 @@ impl InfopanelPane {
} }
match tool { match tool {
Tool::Draw => { Tool::Draw if !is_raster_paint_tool => {
// Stroke width // Stroke width
ui.horizontal(|ui| { ui.horizontal(|ui| {
ui.label("Stroke Width:"); ui.label("Stroke Width:");
@ -284,6 +300,42 @@ impl InfopanelPane {
}); });
} }
// Raster paint tools
Tool::Draw | Tool::Erase | Tool::Smudge if is_raster_paint_tool => {
ui.horizontal(|ui| {
ui.label("Size:");
ui.add(
egui::Slider::new(shared.brush_radius, 1.0_f32..=200.0)
.logarithmic(true)
.suffix(" px"),
);
});
if !matches!(tool, Tool::Smudge) {
ui.horizontal(|ui| {
ui.label("Opacity:");
ui.add(
egui::Slider::new(shared.brush_opacity, 0.0_f32..=1.0)
.custom_formatter(|v, _| format!("{:.0}%", v * 100.0)),
);
});
}
ui.horizontal(|ui| {
ui.label("Hardness:");
ui.add(
egui::Slider::new(shared.brush_hardness, 0.0_f32..=1.0)
.custom_formatter(|v, _| format!("{:.0}%", v * 100.0)),
);
});
ui.horizontal(|ui| {
ui.label("Spacing:");
ui.add(
egui::Slider::new(shared.brush_spacing, 0.01_f32..=1.0)
.logarithmic(true)
.custom_formatter(|v, _| format!("{:.0}%", v * 100.0)),
);
});
}
_ => {} _ => {}
} }
@ -535,6 +587,7 @@ impl InfopanelPane {
AnyLayer::Video(_) => "Video", AnyLayer::Video(_) => "Video",
AnyLayer::Effect(_) => "Effect", AnyLayer::Effect(_) => "Effect",
AnyLayer::Group(_) => "Group", AnyLayer::Group(_) => "Group",
AnyLayer::Raster(_) => "Raster",
}; };
ui.horizontal(|ui| { ui.horizontal(|ui| {
ui.label("Type:"); ui.label("Type:");
@ -590,6 +643,7 @@ impl InfopanelPane {
AnyLayer::Video(l) => &l.clip_instances, AnyLayer::Video(l) => &l.clip_instances,
AnyLayer::Effect(l) => &l.clip_instances, AnyLayer::Effect(l) => &l.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
if let Some(ci) = instances.iter().find(|c| c.id == ci_id) { if let Some(ci) = instances.iter().find(|c| c.id == ci_id) {
found = true; found = true;

View File

@ -187,6 +187,11 @@ pub struct SharedPaneState<'a> {
pub draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode, pub draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode,
pub rdp_tolerance: &'a mut f64, pub rdp_tolerance: &'a mut f64,
pub schneider_max_error: &'a mut f64, pub schneider_max_error: &'a mut f64,
/// Raster brush settings
pub brush_radius: &'a mut f32,
pub brush_opacity: &'a mut f32,
pub brush_hardness: &'a mut f32,
pub brush_spacing: &'a mut f32,
/// Audio engine controller for playback control (wrapped in Arc<Mutex<>> for thread safety) /// Audio engine controller for playback control (wrapped in Arc<Mutex<>> for thread safety)
pub audio_controller: Option<&'a std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>, pub audio_controller: Option<&'a std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
/// Video manager for video decoding and frame caching /// Video manager for video decoding and frame caching

View File

@ -0,0 +1,152 @@
// GPU brush dab compute shader.
//
// Renders all dabs for one stroke segment into the raster canvas.
// Uses a ping-pong pair: reads from `canvas_src` (texture_2d) via textureLoad,
// writes to `canvas_dst` (storage, write-only).
//
// `textureSample` is forbidden in compute shaders; bilinear filtering for the
// smudge tool is implemented manually using four textureLoad calls.
//
// Before this dispatch the caller copies `canvas_src` -> `canvas_dst` so that pixels
// outside the union dab bounding box (not touched by the shader) remain unchanged.
//
// Dispatch: ceil(bbox_w / 8) × ceil(bbox_h / 8) × 1
// Each thread covers one pixel in the bounding-box-clamped canvas region.
// ---------------------------------------------------------------------------
// Data layout must match GpuDab in brush_engine.rs (64 bytes, 16-byte aligned).
// ---------------------------------------------------------------------------
// One brush dab, as produced by BrushEngine::compute_dabs() on the CPU.
// Field layout must match the Rust GpuDab struct in brush_engine.rs exactly:
// 64 bytes total, 16-byte aligned (four 16-byte rows).
struct GpuDab {
    x: f32, y: f32, radius: f32, hardness: f32,             // bytes 0-15: centre + radius (canvas px), edge falloff
    opacity: f32, color_r: f32, color_g: f32, color_b: f32, // bytes 16-31: dab opacity + RGB colour
    color_a: f32, ndx: f32, ndy: f32, smudge_dist: f32,     // bytes 32-47: colour alpha, stroke direction, smudge pull distance
    blend_mode: u32, _pad0: u32, _pad1: u32, _pad2: u32,    // bytes 48-63: 0 = normal, 1 = erase, other = smudge (see apply_dab)
}
// Per-dispatch parameters. Layout must match the Rust-side uniform buffer.
struct Params {
    bbox_x0: i32,  // union dab bounding-box origin, canvas pixel coords (may be negative)
    bbox_y0: i32,
    bbox_w: u32,   // bounding-box size; dispatch is ceil(bbox_w/8) x ceil(bbox_h/8) workgroups
    bbox_h: u32,
    num_dabs: u32, // number of valid entries in the `dabs` storage buffer
    canvas_w: u32, // full canvas texture dimensions (pixels)
    canvas_h: u32,
    _pad: u32,     // pad struct to a 16-byte multiple for uniform layout
}
// Bind group 0: dab list, dispatch params, and the ping-pong canvas pair.
// `canvas_src` is read with textureLoad; `canvas_dst` is write-only storage
// (textureSample is unavailable in compute shaders).
@group(0) @binding(0) var<storage, read> dabs: array<GpuDab>;
@group(0) @binding(1) var<uniform> params: Params;
@group(0) @binding(2) var canvas_src: texture_2d<f32>;
@group(0) @binding(3) var canvas_dst: texture_storage_2d<rgba8unorm, write>;
// ---------------------------------------------------------------------------
// Manually bilinearly-filtered read of canvas_src at sub-pixel (px, py).
// Compute shaders cannot use textureSample, so the four neighbouring texels
// are fetched with textureLoad and blended. Coordinates outside the canvas
// clamp to the edge texel (same result as a ClampToEdge sampler).
// ---------------------------------------------------------------------------
fn bilinear_sample(px: f32, py: f32) -> vec4<f32> {
    // Shift by half a texel so integer coordinates address texel centres.
    let sx = px - 0.5;
    let sy = py - 0.5;
    let base_x = i32(floor(sx));
    let base_y = i32(floor(sy));
    // Blend weights within the 2x2 neighbourhood.
    let wx = fract(sx);
    let wy = fract(sy);
    // Clamp each corner into [0, dim-1].
    let max_x = i32(params.canvas_w) - 1;
    let max_y = i32(params.canvas_h) - 1;
    let xa = clamp(base_x, 0, max_x);
    let xb = clamp(base_x + 1, 0, max_x);
    let ya = clamp(base_y, 0, max_y);
    let yb = clamp(base_y + 1, 0, max_y);
    // Blend horizontally on each row, then vertically.
    let top = mix(
        textureLoad(canvas_src, vec2<i32>(xa, ya), 0),
        textureLoad(canvas_src, vec2<i32>(xb, ya), 0),
        wx,
    );
    let bottom = mix(
        textureLoad(canvas_src, vec2<i32>(xa, yb), 0),
        textureLoad(canvas_src, vec2<i32>(xb, yb), 0),
        wx,
    );
    return mix(top, bottom, wy);
}
// ---------------------------------------------------------------------------
// Apply a single dab to `current` and return the updated colour.
//
// `current` is this pixel's premultiplied RGBA; (px, py) is its integer canvas
// coordinate. Pixels beyond the dab radius are returned unchanged.
// ---------------------------------------------------------------------------
fn apply_dab(current: vec4<f32>, dab: GpuDab, px: i32, py: i32) -> vec4<f32> {
    // Squared distance from the dab centre, normalised so rr == 1.0 at the radius.
    let dx = f32(px) + 0.5 - dab.x;
    let dy = f32(py) + 0.5 - dab.y;
    let rr = (dx * dx + dy * dy) / (dab.radius * dab.radius);
    if rr > 1.0 { return current; }
    // Two-segment linear falloff (identical to libmypaint calculate_opa):
    // weight runs 1 -> h on [0, h], then h -> 0 on [h, 1]. Hardness is clamped
    // away from 0 so the first segment's 1/h stays finite; at h == 1.0 the
    // rr > 1.0 early-out above guarantees the first branch is taken, so the
    // (1 - h) denominators below never divide by zero.
    let h = clamp(dab.hardness, 0.001, 1.0);
    var opa_weight: f32;
    if rr <= h {
        opa_weight = 1.0 + rr * (-(1.0 / h - 1.0));
    } else {
        opa_weight = h / (1.0 - h) + rr * (-h / (1.0 - h));
    }
    opa_weight = clamp(opa_weight, 0.0, 1.0);
    if dab.blend_mode == 0u {
        // Normal: premultiplied "over" operator with the dab as source.
        let dab_a = opa_weight * dab.opacity * dab.color_a;
        if dab_a <= 0.0 { return current; }
        let ba = 1.0 - dab_a;
        return vec4<f32>(
            dab_a * dab.color_r + ba * current.r,
            dab_a * dab.color_g + ba * current.g,
            dab_a * dab.color_b + ba * current.b,
            dab_a + ba * current.a,
        );
    } else if dab.blend_mode == 1u {
        // Erase: multiplicative alpha reduction; RGB is rescaled by the same
        // factor to keep the pixel premultiplied (guarding near-zero alpha).
        let dab_a = opa_weight * dab.opacity * dab.color_a;
        if dab_a <= 0.0 { return current; }
        let new_a = current.a * (1.0 - dab_a);
        let scale = select(0.0, new_a / current.a, current.a > 1e-6);
        return vec4<f32>(current.r * scale, current.g * scale, current.b * scale, new_a);
    } else {
        // Smudge: blend in a bilinear sample taken from the position behind the
        // stroke direction (ndx, ndy), dragging colour along the stroke.
        let alpha = opa_weight * dab.opacity;
        if alpha <= 0.0 { return current; }
        let src_x = f32(px) + 0.5 - dab.ndx * dab.smudge_dist;
        let src_y = f32(py) + 0.5 - dab.ndy * dab.smudge_dist;
        let src = bilinear_sample(src_x, src_y);
        let da = 1.0 - alpha;
        return vec4<f32>(
            alpha * src.r + da * current.r,
            alpha * src.g + da * current.g,
            alpha * src.b + da * current.b,
            alpha * src.a + da * current.a,
        );
    }
}
// ---------------------------------------------------------------------------
// Compute entry point: one thread per pixel of the dab bounding box.
// Each thread owns a unique output pixel, so applying the dab list
// sequentially in-thread is race-free.
// ---------------------------------------------------------------------------
@compute @workgroup_size(8, 8)
fn main(@builtin(global_invocation_id) gid: vec3<u32>) {
    // Discard threads that fall outside the bounding box.
    if gid.x >= params.bbox_w || gid.y >= params.bbox_h { return; }
    let px = i32(gid.x) + params.bbox_x0;
    let py = i32(gid.y) + params.bbox_y0;
    // The bounding box may hang off the canvas edges; discard those pixels too.
    if px < 0 || py < 0 || u32(px) >= params.canvas_w || u32(py) >= params.canvas_h {
        return;
    }
    // Start from the pre-stroke pixel in canvas_src (canvas_dst was pre-filled
    // from canvas_src by the caller; reading src keeps the pass consistent).
    var pixel = textureLoad(canvas_src, vec2<i32>(px, py), 0);
    // Fold in every dab for this segment, in order.
    for (var i = 0u; i < params.num_dabs; i = i + 1u) {
        pixel = apply_dab(pixel, dabs[i], px, py);
    }
    textureStore(canvas_dst, vec2<i32>(px, py), pixel);
}

View File

@ -0,0 +1,87 @@
// Canvas blit shader.
//
// Renders a GPU raster canvas (at document resolution) into the layer's sRGB
// render buffer (at viewport resolution), applying the camera transform
// (pan + zoom) to map document-space pixels to viewport-space pixels.
//
// Any viewport pixel whose corresponding document coordinate falls outside
// [0, canvas_w) × [0, canvas_h) outputs transparent black.
// Camera / geometry uniforms for the blit. Layout must match the Rust-side
// CameraParams in gpu_brush.rs.
struct CameraParams {
    pan_x: f32,      // viewport-space pan offset (pixels)
    pan_y: f32,
    zoom: f32,       // document-pixel -> viewport-pixel scale factor
    canvas_w: f32,   // canvas (document) dimensions in pixels
    canvas_h: f32,
    viewport_w: f32, // render-target dimensions in pixels
    viewport_h: f32,
    _pad: f32,       // pad struct to a 16-byte multiple for uniform layout
}
// Bind group 0: the canvas texture, its sampler, and the camera uniforms.
@group(0) @binding(0) var canvas_tex: texture_2d<f32>;
@group(0) @binding(1) var canvas_sampler: sampler;
@group(0) @binding(2) var<uniform> camera: CameraParams;

// Vertex -> fragment interface: clip-space position plus viewport UV in [0,1].
struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) uv: vec2<f32>,
}
// Fullscreen pass: derives both UV and clip position from the vertex index
// alone, so no vertex buffer is needed (same pattern as blit.wgsl).
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
    // Bit tricks: uv_x cycles 0,2,0,2 and uv_y cycles 0,0,2,2 over the indices.
    let uv_x = f32((vertex_index << 1u) & 2u);
    let uv_y = f32(vertex_index & 2u);
    var out: VertexOutput;
    out.uv = vec2<f32>(uv_x, uv_y);
    // Map UV to clip space, flipping Y (clip space is Y-up, UV is Y-down).
    out.position = vec4<f32>(uv_x * 2.0 - 1.0, 1.0 - uv_y * 2.0, 0.0, 1.0);
    return out;
}
// Encode one linear channel to sRGB (IEC 61966-2-1 transfer function).
// Applied to premultiplied linear values so the downstream srgb_to_linear
// pass round-trips correctly without darkening semi-transparent edges.
fn linear_to_srgb(c: f32) -> f32 {
    // Linear toe below the breakpoint, gamma curve above it.
    if c <= 0.0031308 {
        return c * 12.92;
    }
    return 1.055 * pow(max(c, 0.0), 1.0 / 2.4) - 0.055;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
    // Map viewport UV [0,1] -> viewport pixel coordinates.
    let vp = in.uv * vec2<f32>(camera.viewport_w, camera.viewport_h);
    // Map viewport pixel -> document pixel (inverse camera transform).
    let doc = (vp - vec2<f32>(camera.pan_x, camera.pan_y)) / camera.zoom;
    // Map document pixel -> canvas UV [0,1].
    let canvas_uv = doc / vec2<f32>(camera.canvas_w, camera.canvas_h);

    // Sample unconditionally. `textureSample` relies on implicit derivatives
    // and the WGSL spec requires such calls to execute in uniform control
    // flow; the previous version sampled after an early return taken per
    // fragment (non-uniform), which naga's uniformity analysis can reject.
    // Sampling first and discarding the result below is behaviourally
    // identical for out-of-bounds pixels.
    //
    // The canvas stores premultiplied linear RGBA. The downstream pipeline
    // (srgb_to_linear compositor) expects this sRGB buffer to contain
    // straight-alpha sRGB, i.e. the same format Vello outputs:
    //   sRGB buffer:    srgb(r_straight), srgb(g_straight), srgb(b_straight), a
    //   srgb_to_linear: r_straight, g_straight, b_straight, a (linear straight)
    //   compositor:     r_straight * a * opacity (premultiplied, correct)
    // Without unpremultiplying, the compositor would double-premultiply
    // (premul_r * a = r * a^2), producing a dark halo over transparent areas.
    let c = textureSample(canvas_tex, canvas_sampler, canvas_uv);

    // Any viewport pixel whose document coordinate lands outside the canvas
    // is transparent black (the sampled value is simply discarded).
    if canvas_uv.x < 0.0 || canvas_uv.x > 1.0
        || canvas_uv.y < 0.0 || canvas_uv.y > 1.0 {
        return vec4<f32>(0.0, 0.0, 0.0, 0.0);
    }

    // Unpremultiply (guarding near-zero alpha) and sRGB-encode each channel.
    let inv_a = select(0.0, 1.0 / c.a, c.a > 1e-6);
    return vec4<f32>(
        linear_to_srgb(c.r * inv_a),
        linear_to_srgb(c.g * inv_a),
        linear_to_srgb(c.b * inv_a),
        c.a,
    );
}

View File

@ -36,6 +36,10 @@ struct SharedVelloResources {
effect_processor: Mutex<EffectProcessor>, effect_processor: Mutex<EffectProcessor>,
/// sRGB to linear color converter (for Vello output) /// sRGB to linear color converter (for Vello output)
srgb_to_linear: SrgbToLinearConverter, srgb_to_linear: SrgbToLinearConverter,
/// GPU raster brush engine (compute pipeline + canvas texture cache)
gpu_brush: Mutex<crate::gpu_brush::GpuBrushEngine>,
/// Canvas blit pipeline (renders GPU canvas to layer sRGB buffer)
canvas_blit: crate::gpu_brush::CanvasBlitPipeline,
} }
/// Per-instance Vello resources (created for each Stage pane) /// Per-instance Vello resources (created for each Stage pane)
@ -206,7 +210,11 @@ impl SharedVelloResources {
// Initialize sRGB to linear converter for Vello output // Initialize sRGB to linear converter for Vello output
let srgb_to_linear = SrgbToLinearConverter::new(device); let srgb_to_linear = SrgbToLinearConverter::new(device);
println!("✅ Vello shared resources initialized (renderer, shaders, HDR compositor, effect processor, and color converter)"); // Initialize GPU raster brush engine
let gpu_brush = crate::gpu_brush::GpuBrushEngine::new(device);
let canvas_blit = crate::gpu_brush::CanvasBlitPipeline::new(device);
println!("✅ Vello shared resources initialized (renderer, shaders, HDR compositor, effect processor, color converter, and GPU brush engine)");
Ok(Self { Ok(Self {
renderer: Arc::new(Mutex::new(renderer)), renderer: Arc::new(Mutex::new(renderer)),
@ -220,6 +228,8 @@ impl SharedVelloResources {
compositor, compositor,
effect_processor: Mutex::new(effect_processor), effect_processor: Mutex::new(effect_processor),
srgb_to_linear, srgb_to_linear,
gpu_brush: Mutex::new(gpu_brush),
canvas_blit,
}) })
} }
} }
@ -390,6 +400,15 @@ struct VelloRenderContext {
mouse_world_pos: Option<vello::kurbo::Point>, mouse_world_pos: Option<vello::kurbo::Point>,
/// Latest webcam frame for live preview (if any camera is active) /// Latest webcam frame for live preview (if any camera is active)
webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>, webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>,
/// GPU brush dabs to dispatch in this frame's prepare() call.
pending_raster_dabs: Option<PendingRasterDabs>,
/// Instance ID (for storing readback results in the global map).
instance_id_for_readback: u64,
/// The (layer_id, keyframe_id) of the raster layer with a live GPU canvas.
/// Present for the entire stroke duration, not just frames with new dabs.
painting_canvas: Option<(uuid::Uuid, uuid::Uuid)>,
/// GPU canvas keyframe to remove at the top of this prepare() call.
pending_canvas_removal: Option<uuid::Uuid>,
} }
/// Callback for Vello rendering within egui /// Callback for Vello rendering within egui
@ -470,6 +489,77 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// This means we only need 1 layer buffer at a time (plus the HDR accumulator) // This means we only need 1 layer buffer at a time (plus the HDR accumulator)
instance_resources.ensure_hdr_texture(device, &shared, width, height); instance_resources.ensure_hdr_texture(device, &shared, width, height);
// --- Deferred GPU canvas removal ---
// The previous frame's render_content consumed a readback result and updated
// raw_pixels. Now that the Vello scene is current we can safely drop the
// GPU canvas; painting_canvas was already cleared so the compositor will use
// the Vello scene from here on.
if let Some(kf_id) = self.ctx.pending_canvas_removal {
if let Ok(mut gpu_brush) = shared.gpu_brush.lock() {
gpu_brush.remove_canvas(&kf_id);
}
}
// --- GPU brush dispatch ---
// Dispatch the compute shader for any pending raster dabs from this frame's
// input event. Must happen before compositing so the updated canvas texture
// is sampled correctly when the layer is blitted.
if let Some(ref pending) = self.ctx.pending_raster_dabs {
if let Ok(mut gpu_brush) = shared.gpu_brush.lock() {
// Ensure the canvas pair exists (creates it if missing or wrong size)
gpu_brush.ensure_canvas(
device,
pending.keyframe_id,
pending.canvas_width,
pending.canvas_height,
);
// On stroke start, upload the pre-stroke pixel data to both textures
if let Some(ref pixels) = pending.initial_pixels {
if let Some(canvas) = gpu_brush.canvases.get(&pending.keyframe_id) {
canvas.upload(queue, pixels);
}
}
// Dispatch the compute shader for this frame's dabs
if !pending.dabs.is_empty() {
gpu_brush.render_dabs(
device,
queue,
pending.keyframe_id,
&pending.dabs,
pending.dab_bbox,
pending.canvas_width,
pending.canvas_height,
);
}
// On stroke end, read back the finished canvas and store it so
// the next ui() call can create the undo action.
if pending.wants_final_readback {
if let Some(pixels) = gpu_brush.readback_canvas(
device,
queue,
pending.keyframe_id,
) {
let results = RASTER_READBACK_RESULTS.get_or_init(|| {
Arc::new(Mutex::new(std::collections::HashMap::new()))
});
if let Ok(mut map) = results.lock() {
map.insert(self.ctx.instance_id_for_readback, RasterReadbackResult {
layer_id: pending.layer_id,
time: pending.time,
canvas_width: pending.canvas_width,
canvas_height: pending.canvas_height,
pixels,
});
}
// Canvas is kept alive: the compositor will still blit it
// this frame (painting_canvas is still Some). render_content
// will clear painting_canvas and set pending_canvas_removal,
// so the texture is freed at the top of the next prepare().
}
}
}
}
let mut image_cache = shared.image_cache.lock().unwrap(); let mut image_cache = shared.image_cache.lock().unwrap();
let composite_result = lightningbeam_core::renderer::render_document_for_compositing( let composite_result = lightningbeam_core::renderer::render_document_for_compositing(
@ -558,7 +648,14 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Now render and composite each layer incrementally // Now render and composite each layer incrementally
for rendered_layer in &composite_result.layers { for rendered_layer in &composite_result.layers {
if !rendered_layer.has_content { // Check if this raster layer has a live GPU canvas that should be
// blitted every frame, even when no new dabs arrived this frame.
// `painting_canvas` persists for the entire stroke duration.
let gpu_canvas_kf: Option<uuid::Uuid> = self.ctx.painting_canvas
.filter(|(layer_id, _)| *layer_id == rendered_layer.layer_id)
.map(|(_, kf_id)| kf_id);
if !rendered_layer.has_content && gpu_canvas_kf.is_none() {
continue; continue;
} }
@ -573,9 +670,42 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
buffer_pool.get_view(hdr_layer_handle), buffer_pool.get_view(hdr_layer_handle),
&instance_resources.hdr_texture_view, &instance_resources.hdr_texture_view,
) { ) {
// Render layer scene to sRGB buffer // GPU canvas blit path: if a live GPU canvas exists for this
if let Ok(mut renderer) = shared.renderer.lock() { // raster layer, sample it directly instead of rendering the Vello
renderer.render_to_texture(device, queue, &rendered_layer.scene, srgb_view, &layer_render_params).ok(); // scene (which lags until raw_pixels is updated after readback).
let used_gpu_canvas = if let Some(kf_id) = gpu_canvas_kf {
let mut used = false;
if let Ok(gpu_brush) = shared.gpu_brush.lock() {
if let Some(canvas) = gpu_brush.canvases.get(&kf_id) {
let camera = crate::gpu_brush::CameraParams {
pan_x: self.ctx.pan_offset.x,
pan_y: self.ctx.pan_offset.y,
zoom: self.ctx.zoom,
canvas_w: canvas.width as f32,
canvas_h: canvas.height as f32,
viewport_w: width as f32,
viewport_h: height as f32,
_pad: 0.0,
};
shared.canvas_blit.blit(
device, queue,
canvas.src_view(),
srgb_view,
&camera,
);
used = true;
}
}
used
} else {
false
};
if !used_gpu_canvas {
// Render layer scene to sRGB buffer
if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &rendered_layer.scene, srgb_view, &layer_render_params).ok();
}
} }
// Convert sRGB to linear HDR // Convert sRGB to linear HDR
@ -2136,6 +2266,24 @@ pub struct StagePane {
dcel_editing_cache: Option<DcelEditingCache>, dcel_editing_cache: Option<DcelEditingCache>,
// Current snap result (for visual feedback rendering) // Current snap result (for visual feedback rendering)
current_snap: Option<lightningbeam_core::snap::SnapResult>, current_snap: Option<lightningbeam_core::snap::SnapResult>,
// Raster stroke in progress: (layer_id, time, brush_state, buffer_before)
raster_stroke_state: Option<(uuid::Uuid, f64, lightningbeam_core::brush_engine::StrokeState, Vec<u8>)>,
// Last raster stroke point (for incremental segment painting)
raster_last_point: Option<lightningbeam_core::raster_layer::StrokePoint>,
/// GPU dabs computed during this frame's drag event — consumed by prepare().
pending_raster_dabs: Option<PendingRasterDabs>,
/// Undo snapshot info captured at mouse-down; claimed when readback completes.
/// (layer_id, time, canvas_w, canvas_h, buffer_before)
pending_undo_before: Option<(uuid::Uuid, f64, u32, u32, Vec<u8>)>,
/// The (layer_id, keyframe_id) of the raster layer whose GPU canvas is live.
/// Set on mouse-down, cleared when the readback result is consumed.
/// Used every frame to blit the GPU canvas instead of the stale Vello scene.
painting_canvas: Option<(uuid::Uuid, uuid::Uuid)>,
/// Keyframe UUID whose GPU canvas should be removed at the start of the next
/// prepare() call. Set by render_content after consuming the readback result
/// and updating raw_pixels, so the canvas lives one extra composite frame to
/// avoid a flash of the stale Vello scene.
pending_canvas_removal: Option<uuid::Uuid>,
/// Synthetic drag/click override for test mode replay (debug builds only) /// Synthetic drag/click override for test mode replay (debug builds only)
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
replay_override: Option<ReplayDragState>, replay_override: Option<ReplayDragState>,
@ -2167,6 +2315,46 @@ static INSTANCE_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::Atomi
// Global storage for eyedropper results (instance_id -> (color, color_mode)) // Global storage for eyedropper results (instance_id -> (color, color_mode))
static EYEDROPPER_RESULTS: OnceLock<Arc<Mutex<std::collections::HashMap<u64, (egui::Color32, super::ColorMode)>>>> = OnceLock::new(); static EYEDROPPER_RESULTS: OnceLock<Arc<Mutex<std::collections::HashMap<u64, (egui::Color32, super::ColorMode)>>>> = OnceLock::new();
/// Pending GPU dabs for a single drag event.
///
/// Created by the event handler (`handle_raster_stroke_tool`) and consumed once
/// by `VelloCallback::prepare()`, which runs on the render pass where the wgpu
/// device and queue are available and dispatches the brush compute shader.
struct PendingRasterDabs {
    /// Keyframe UUID — indexes the canvas texture pair in `GpuBrushEngine`.
    keyframe_id: uuid::Uuid,
    /// Layer UUID — used for the undo readback result.
    layer_id: uuid::Uuid,
    /// Playback time of the keyframe.
    time: f64,
    /// Canvas dimensions (pixels).
    canvas_width: u32,
    canvas_height: u32,
    /// Raw RGBA pixel data to upload to the canvas texture on the very first dab of
    /// a stroke (i.e., when the stroke starts). `None` on subsequent drag events.
    initial_pixels: Option<Vec<u8>>,
    /// Dab list computed by `BrushEngine::compute_dabs()`.
    dabs: Vec<lightningbeam_core::brush_engine::GpuDab>,
    /// Union bounding box of `dabs` (x0, y0, x1, y1) in canvas pixel coords.
    dab_bbox: (i32, i32, i32, i32),
    /// When `true`, perform a full canvas readback after dispatching and store
    /// the result in `RASTER_READBACK_RESULTS` so the next frame can create
    /// the undo action.
    wants_final_readback: bool,
}
/// Result stored by `prepare()` after a stroke-end readback.
///
/// Holds the finished canvas pixels so the UI side can build the undo action
/// for the completed stroke on a later frame.
struct RasterReadbackResult {
    /// Layer the stroke was painted on.
    layer_id: uuid::Uuid,
    /// Playback time of the keyframe that was painted.
    time: f64,
    /// Canvas dimensions (pixels).
    canvas_width: u32,
    canvas_height: u32,
    /// Raw RGBA pixels from the completed stroke.
    pixels: Vec<u8>,
}

// Global storage for raster readback results (instance_id -> result).
// Keyed by the StagePane instance id so multiple stage panes don't collide.
static RASTER_READBACK_RESULTS: OnceLock<Arc<Mutex<std::collections::HashMap<u64, RasterReadbackResult>>>> = OnceLock::new();
/// Cached 2x2 stipple image brush for selection overlay. /// Cached 2x2 stipple image brush for selection overlay.
/// Pattern: [[black, transparent], [transparent, white]] /// Pattern: [[black, transparent], [transparent, white]]
/// Tiled with nearest-neighbor sampling so each pixel stays crisp. /// Tiled with nearest-neighbor sampling so each pixel stays crisp.
@ -2211,6 +2399,12 @@ impl StagePane {
last_viewport_rect: None, last_viewport_rect: None,
dcel_editing_cache: None, dcel_editing_cache: None,
current_snap: None, current_snap: None,
raster_stroke_state: None,
raster_last_point: None,
pending_raster_dabs: None,
pending_undo_before: None,
painting_canvas: None,
pending_canvas_removal: None,
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
replay_override: None, replay_override: None,
} }
@ -4175,6 +4369,246 @@ impl StagePane {
} }
} }
/// Handle raster stroke tool input (Draw/Erase/Smudge on a raster layer).
///
/// Computes GPU dab lists for each drag event and stores them in
/// `self.pending_raster_dabs` for dispatch by `VelloCallback::prepare()`.
///
/// The actual pixel rendering happens on the GPU (compute shader). The CPU
/// only does dab placement arithmetic (cheap). On stroke end a readback is
/// requested so the undo system can capture the final pixel state.
///
/// State touched across the three phases (down / drag / up):
/// - `self.painting_canvas`      — (layer_id, keyframe_id) being painted on the GPU
/// - `self.pending_undo_before`  — pre-stroke pixel snapshot for the undo action
/// - `self.pending_raster_dabs`  — per-frame dab batch consumed by `prepare()`
/// - `self.raster_stroke_state`  — brush-engine spacing state for the live stroke
/// - `self.raster_last_point`    — previous sample, anchor for the next segment
///
/// `world_pos` is the pointer position in document/world coordinates.
/// `blend_mode` selects Normal (Draw), Erase, or Smudge compositing.
fn handle_raster_stroke_tool(
    &mut self,
    ui: &mut egui::Ui,
    response: &egui::Response,
    world_pos: egui::Vec2,
    blend_mode: lightningbeam_core::raster_layer::RasterBlendMode,
    shared: &mut SharedPaneState,
) {
    use lightningbeam_core::tool::ToolState;
    use lightningbeam_core::layer::AnyLayer;
    use lightningbeam_core::raster_layer::StrokePoint;
    use lightningbeam_core::brush_engine::{BrushEngine, StrokeState};
    use lightningbeam_core::raster_layer::StrokeRecord;

    // No active layer -> nothing to paint on.
    let active_layer_id = match *shared.active_layer_id {
        Some(id) => id,
        None => return,
    };
    // Only operate on raster layers; vector/audio/etc. layers are handled by
    // their own tool paths (the Draw tool dispatches before calling us, but
    // Erase/Smudge call unconditionally, so the guard is required here).
    let is_raster = shared.action_executor.document()
        .get_layer(&active_layer_id)
        .map_or(false, |l| matches!(l, AnyLayer::Raster(_)));
    if !is_raster { return; }

    // Build the brush from the shared UI sliders. Radius is stored log-scale
    // by the brush engine. Color components are zeroed here because the dab
    // color is passed separately below. Pressure gains are fixed constants
    // (no per-brush UI for them yet — TODO confirm intended defaults).
    let brush = {
        use lightningbeam_core::brush_settings::BrushSettings;
        BrushSettings {
            radius_log: shared.brush_radius.ln(),
            hardness: *shared.brush_hardness,
            opaque: *shared.brush_opacity,
            dabs_per_radius: *shared.brush_spacing,
            color_h: 0.0,
            color_s: 0.0,
            color_v: 0.0,
            pressure_radius_gain: 0.3,
            pressure_opacity_gain: 0.8,
        }
    };
    // Erase ignores the picked color and uses opaque white; presumably only
    // the dab's coverage/alpha matters to the erase shader — verify against
    // the compute shader. Otherwise convert the 8-bit stroke color to
    // normalized f32 RGBA.
    let color = if matches!(blend_mode, lightningbeam_core::raster_layer::RasterBlendMode::Erase) {
        [1.0f32, 1.0, 1.0, 1.0]
    } else {
        let c = *shared.stroke_color;
        [c.r() as f32 / 255.0, c.g() as f32 / 255.0, c.b() as f32 / 255.0, c.a() as f32 / 255.0]
    };

    // ----------------------------------------------------------------
    // Mouse down: capture buffer_before, start stroke, compute first dab
    // ----------------------------------------------------------------
    if self.rsp_drag_started(response) || self.rsp_clicked(response) {
        let (doc_width, doc_height) = {
            let doc = shared.action_executor.document();
            (doc.width as u32, doc.height as u32)
        };
        // Ensure the keyframe exists BEFORE reading its ID, so we always get
        // the real UUID. Previously we read the ID first and fell back to a
        // randomly-generated UUID when no keyframe existed; that fake UUID was
        // stored in painting_canvas but subsequent drag frames used the real UUID
        // from keyframe_at(), causing the GPU canvas to be a different object from
        // the one being composited.
        {
            let doc = shared.action_executor.document_mut();
            if let Some(AnyLayer::Raster(rl)) = doc.get_layer_mut(&active_layer_id) {
                rl.ensure_keyframe_at(*shared.playback_time, doc_width, doc_height);
            }
        }
        // Now read the guaranteed-to-exist keyframe to get the real UUID.
        // `buffer_before` (possibly empty for a never-painted keyframe) feeds
        // the undo snapshot; `initial_pixels` seeds the GPU canvas texture and
        // is zero-filled when the keyframe has no pixel data yet.
        let (keyframe_id, canvas_width, canvas_height, buffer_before, initial_pixels) = {
            let doc = shared.action_executor.document();
            if let Some(AnyLayer::Raster(rl)) = doc.get_layer(&active_layer_id) {
                if let Some(kf) = rl.keyframe_at(*shared.playback_time) {
                    let raw = kf.raw_pixels.clone();
                    let init = if raw.is_empty() {
                        // RGBA8: 4 bytes per pixel, fully transparent.
                        vec![0u8; (kf.width * kf.height * 4) as usize]
                    } else {
                        raw.clone()
                    };
                    (kf.id, kf.width, kf.height, raw, init)
                } else {
                    return; // shouldn't happen after ensure_keyframe_at
                }
            } else {
                return;
            }
        };
        // Compute the first dab (single-point tap). Setting
        // distance_since_last_dab to MAX forces the engine to emit a dab
        // immediately at the first point instead of waiting for spacing.
        let mut stroke_state = StrokeState::new();
        stroke_state.distance_since_last_dab = f32::MAX;
        let first_pt = StrokePoint {
            x: world_pos.x, y: world_pos.y,
            pressure: 1.0, tilt_x: 0.0, tilt_y: 0.0, timestamp: 0.0,
        };
        let single = StrokeRecord {
            brush_settings: brush.clone(),
            color,
            blend_mode,
            points: vec![first_pt.clone()],
        };
        let (dabs, dab_bbox) = BrushEngine::compute_dabs(&single, &mut stroke_state);
        self.painting_canvas = Some((active_layer_id, keyframe_id));
        self.pending_undo_before = Some((
            active_layer_id,
            *shared.playback_time,
            canvas_width,
            canvas_height,
            buffer_before,
        ));
        // First batch carries `initial_pixels` so prepare() can create and
        // seed the GPU canvas texture; later batches pass None.
        self.pending_raster_dabs = Some(PendingRasterDabs {
            keyframe_id,
            layer_id: active_layer_id,
            time: *shared.playback_time,
            canvas_width,
            canvas_height,
            initial_pixels: Some(initial_pixels),
            dabs,
            dab_bbox,
            wants_final_readback: false,
        });
        self.raster_stroke_state = Some((
            active_layer_id,
            *shared.playback_time,
            stroke_state,
            Vec::new(), // buffer_before now lives in pending_undo_before
        ));
        self.raster_last_point = Some(first_pt);
        *shared.tool_state = ToolState::DrawingRasterStroke { points: vec![] };
    }

    // ----------------------------------------------------------------
    // Mouse drag: compute dabs for this segment
    // ----------------------------------------------------------------
    if self.rsp_dragged(response) {
        if let Some((layer_id, time, ref mut stroke_state, _)) = self.raster_stroke_state {
            // take() the anchor point; it is re-stored on every exit path
            // below so the next frame always has a segment start.
            if let Some(prev_pt) = self.raster_last_point.take() {
                let curr_pt = StrokePoint {
                    x: world_pos.x, y: world_pos.y,
                    pressure: 1.0, tilt_x: 0.0, tilt_y: 0.0, timestamp: 0.0,
                };
                // Ignore sub-1.5px jitter: the anchor only advances once the
                // pointer has moved far enough, so tiny movements accumulate
                // instead of emitting degenerate segments.
                const MIN_DIST_SQ: f32 = 1.5 * 1.5;
                let dx = curr_pt.x - prev_pt.x;
                let dy = curr_pt.y - prev_pt.y;
                let moved_pt = if dx * dx + dy * dy >= MIN_DIST_SQ {
                    curr_pt.clone()
                } else {
                    prev_pt.clone()
                };
                if dx * dx + dy * dy >= MIN_DIST_SQ {
                    // Get keyframe info (needed for canvas dimensions)
                    let (kf_id, kw, kh) = {
                        let doc = shared.action_executor.document();
                        if let Some(AnyLayer::Raster(rl)) = doc.get_layer(&layer_id) {
                            if let Some(kf) = rl.keyframe_at(time) {
                                (kf.id, kf.width, kf.height)
                            } else { self.raster_last_point = Some(moved_pt); return; }
                        } else { self.raster_last_point = Some(moved_pt); return; }
                    };
                    // Two-point record: the engine interpolates dabs along
                    // prev->curr using the spacing state carried in
                    // `stroke_state` from previous segments.
                    let seg = StrokeRecord {
                        brush_settings: brush.clone(),
                        color,
                        blend_mode,
                        points: vec![prev_pt, curr_pt],
                    };
                    let (dabs, dab_bbox) = BrushEngine::compute_dabs(&seg, stroke_state);
                    self.pending_raster_dabs = Some(PendingRasterDabs {
                        keyframe_id: kf_id,
                        layer_id,
                        time,
                        canvas_width: kw,
                        canvas_height: kh,
                        initial_pixels: None,
                        dabs,
                        dab_bbox,
                        wants_final_readback: false,
                    });
                }
                self.raster_last_point = Some(moved_pt);
            }
        }
    }

    // ----------------------------------------------------------------
    // Mouse up: request a full-canvas readback for the undo snapshot
    // ----------------------------------------------------------------
    // The second clause catches releases that egui doesn't report as
    // drag_stopped on this response (e.g. pointer released elsewhere) while
    // a raster stroke is still in progress.
    if self.rsp_drag_stopped(response)
        || (self.rsp_any_released(ui) && matches!(*shared.tool_state, ToolState::DrawingRasterStroke { .. }))
    {
        self.raster_stroke_state = None;
        self.raster_last_point = None;
        *shared.tool_state = ToolState::Idle;
        // Mark the pending dabs (if any this frame) for final readback.
        // If there are no pending dabs this frame, create a "readback only" entry.
        if let Some(ref mut pending) = self.pending_raster_dabs {
            pending.wants_final_readback = true;
        } else if let Some((ub_layer, ub_time, ub_cw, ub_ch, _)) =
            self.pending_undo_before.as_ref()
        {
            let (ub_layer, ub_time, ub_cw, ub_ch) = (*ub_layer, *ub_time, *ub_cw, *ub_ch);
            // Get keyframe_id for the canvas texture lookup
            let kf_id = shared.action_executor.document()
                .get_layer(&ub_layer)
                .and_then(|l| if let AnyLayer::Raster(rl) = l {
                    rl.keyframe_at(ub_time).map(|kf| kf.id)
                } else { None });
            if let Some(kf_id) = kf_id {
                // Empty dab list + inverted bbox = no painting work; prepare()
                // only schedules the pixel readback for the undo action.
                self.pending_raster_dabs = Some(PendingRasterDabs {
                    keyframe_id: kf_id,
                    layer_id: ub_layer,
                    time: ub_time,
                    canvas_width: ub_cw,
                    canvas_height: ub_ch,
                    initial_pixels: None,
                    dabs: Vec::new(),
                    dab_bbox: (i32::MAX, i32::MAX, i32::MIN, i32::MIN),
                    wants_final_readback: true,
                });
            }
        }
    }
}
fn handle_paint_bucket_tool( fn handle_paint_bucket_tool(
&mut self, &mut self,
response: &egui::Response, response: &egui::Response,
@ -6187,7 +6621,21 @@ impl StagePane {
self.handle_ellipse_tool(ui, &response, world_pos, shift_held, ctrl_held, shared); self.handle_ellipse_tool(ui, &response, world_pos, shift_held, ctrl_held, shared);
} }
Tool::Draw => { Tool::Draw => {
self.handle_draw_tool(ui, &response, world_pos, shared); // Dispatch to raster or vector draw handler based on active layer type
let is_raster = shared.active_layer_id.and_then(|id| {
shared.action_executor.document().get_layer(&id)
}).map_or(false, |l| matches!(l, lightningbeam_core::layer::AnyLayer::Raster(_)));
if is_raster {
self.handle_raster_stroke_tool(ui, &response, world_pos, lightningbeam_core::raster_layer::RasterBlendMode::Normal, shared);
} else {
self.handle_draw_tool(ui, &response, world_pos, shared);
}
}
Tool::Erase => {
self.handle_raster_stroke_tool(ui, &response, world_pos, lightningbeam_core::raster_layer::RasterBlendMode::Erase, shared);
}
Tool::Smudge => {
self.handle_raster_stroke_tool(ui, &response, world_pos, lightningbeam_core::raster_layer::RasterBlendMode::Smudge, shared);
} }
Tool::Transform => { Tool::Transform => {
self.handle_transform_tool(ui, &response, world_pos, shared); self.handle_transform_tool(ui, &response, world_pos, shared);
@ -6648,6 +7096,35 @@ impl PaneRenderer for StagePane {
self.pan_offset = viewport_center - canvas_center; self.pan_offset = viewport_center - canvas_center;
} }
// Check for completed raster stroke readbacks and create undo actions
if let Ok(mut results) = RASTER_READBACK_RESULTS
.get_or_init(|| Arc::new(Mutex::new(std::collections::HashMap::new())))
.lock() {
if let Some(readback) = results.remove(&self.instance_id) {
if let Some((layer_id, time, w, h, buffer_before)) = self.pending_undo_before.take() {
use lightningbeam_core::actions::RasterStrokeAction;
let action = RasterStrokeAction::new(
layer_id,
time,
buffer_before,
readback.pixels.clone(),
w,
h,
);
// execute() sets raw_pixels = buffer_after so future Vello renders
// and file saves see the completed stroke.
let _ = shared.action_executor.execute(Box::new(action));
}
// raw_pixels is now up to date; switch compositing back to the Vello
// scene. Schedule the GPU canvas for removal at the start of the next
// prepare() — keeping it alive for this frame's composite avoids a
// one-frame flash of the stale Vello scene.
if let Some((_, kf_id)) = self.painting_canvas.take() {
self.pending_canvas_removal = Some(kf_id);
}
}
}
// Check for completed eyedropper samples from GPU readback and apply them // Check for completed eyedropper samples from GPU readback and apply them
if let Ok(mut results) = EYEDROPPER_RESULTS if let Ok(mut results) = EYEDROPPER_RESULTS
.get_or_init(|| Arc::new(Mutex::new(std::collections::HashMap::new()))) .get_or_init(|| Arc::new(Mutex::new(std::collections::HashMap::new())))
@ -6994,6 +7471,10 @@ impl PaneRenderer for StagePane {
region_selection: shared.region_selection.clone(), region_selection: shared.region_selection.clone(),
mouse_world_pos, mouse_world_pos,
webcam_frame: shared.webcam_frame.clone(), webcam_frame: shared.webcam_frame.clone(),
pending_raster_dabs: self.pending_raster_dabs.take(),
instance_id_for_readback: self.instance_id,
painting_canvas: self.painting_canvas,
pending_canvas_removal: self.pending_canvas_removal.take(),
}}; }};
let cb = egui_wgpu::Callback::new_paint_callback( let cb = egui_wgpu::Callback::new_paint_callback(

View File

@ -118,6 +118,7 @@ fn effective_clip_duration(
AnyLayer::Video(_) => document.get_video_clip(&clip_instance.clip_id).map(|c| c.duration), AnyLayer::Video(_) => document.get_video_clip(&clip_instance.clip_id).map(|c| c.duration),
AnyLayer::Effect(_) => Some(lightningbeam_core::effect::EFFECT_DURATION), AnyLayer::Effect(_) => Some(lightningbeam_core::effect::EFFECT_DURATION),
AnyLayer::Group(_) => None, AnyLayer::Group(_) => None,
AnyLayer::Raster(_) => None,
} }
} }
@ -422,6 +423,7 @@ fn collect_clip_instances<'a>(layer: &'a AnyLayer, result: &mut Vec<(&'a AnyLaye
collect_clip_instances(child, result); collect_clip_instances(child, result);
} }
} }
AnyLayer::Raster(_) => {}
} }
} }
@ -806,6 +808,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances, lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[], lightningbeam_core::layer::AnyLayer::Group(_) => &[],
lightningbeam_core::layer::AnyLayer::Raster(_) => &[],
}; };
// Check each clip instance // Check each clip instance
@ -1337,6 +1340,7 @@ impl TimelinePane {
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)), AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
AnyLayer::Raster(_) => ("Raster", egui::Color32::from_rgb(160, 100, 200)),
}; };
(layer.id(), data.name.clone(), lt, tc) (layer.id(), data.name.clone(), lt, tc)
} }
@ -1354,6 +1358,7 @@ impl TimelinePane {
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)), AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
AnyLayer::Raster(_) => ("Raster", egui::Color32::from_rgb(160, 100, 200)),
}; };
(child.id(), data.name.clone(), lt, tc) (child.id(), data.name.clone(), lt, tc)
} }
@ -1821,6 +1826,7 @@ impl TimelinePane {
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)), AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
AnyLayer::Raster(_) => ("Raster", egui::Color32::from_rgb(100, 200, 255)),
}; };
(layer.layer().name.clone(), lt, tc) (layer.layer().name.clone(), lt, tc)
} }
@ -1837,6 +1843,7 @@ impl TimelinePane {
AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)), AnyLayer::Video(_) => ("Video", egui::Color32::from_rgb(180, 100, 255)),
AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)), AnyLayer::Effect(_) => ("Effect", egui::Color32::from_rgb(255, 100, 180)),
AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)), AnyLayer::Group(_) => ("Group", egui::Color32::from_rgb(0, 180, 180)),
AnyLayer::Raster(_) => ("Raster", egui::Color32::from_rgb(100, 200, 255)),
}; };
(child.layer().name.clone(), lt, tc) (child.layer().name.clone(), lt, tc)
} }
@ -2356,6 +2363,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances, lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[], lightningbeam_core::layer::AnyLayer::Group(_) => &[],
lightningbeam_core::layer::AnyLayer::Raster(_) => &[],
}; };
// For moves, precompute the clamped offset so all selected clips move uniformly // For moves, precompute the clamped offset so all selected clips move uniformly
@ -2662,6 +2670,10 @@ impl TimelinePane {
egui::Color32::from_rgb(0, 150, 150), // Teal egui::Color32::from_rgb(0, 150, 150), // Teal
egui::Color32::from_rgb(100, 220, 220), // Bright teal egui::Color32::from_rgb(100, 220, 220), // Bright teal
), ),
lightningbeam_core::layer::AnyLayer::Raster(_) => (
egui::Color32::from_rgb(160, 100, 200), // Purple/violet
egui::Color32::from_rgb(200, 160, 240), // Bright purple/violet
),
}; };
let (row, total_rows) = clip_stacking[clip_instance_index]; let (row, total_rows) = clip_stacking[clip_instance_index];
@ -3251,6 +3263,7 @@ impl TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances, lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[], lightningbeam_core::layer::AnyLayer::Group(_) => &[],
lightningbeam_core::layer::AnyLayer::Raster(_) => &[],
}; };
// Check if click is within any clip instance // Check if click is within any clip instance
@ -4347,6 +4360,7 @@ impl PaneRenderer for TimelinePane {
lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances, lightningbeam_core::layer::AnyLayer::Video(vl) => &vl.clip_instances,
lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances, lightningbeam_core::layer::AnyLayer::Effect(el) => &el.clip_instances,
lightningbeam_core::layer::AnyLayer::Group(_) => &[], lightningbeam_core::layer::AnyLayer::Group(_) => &[],
lightningbeam_core::layer::AnyLayer::Raster(_) => &[],
}; };
for clip_instance in clip_instances { for clip_instance in clip_instances {
@ -4485,6 +4499,7 @@ impl PaneRenderer for TimelinePane {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
for inst in instances { for inst in instances {
if !shared.selection.contains_clip_instance(&inst.id) { continue; } if !shared.selection.contains_clip_instance(&inst.id) { continue; }
@ -4515,6 +4530,7 @@ impl PaneRenderer for TimelinePane {
AnyLayer::Video(vl) => &vl.clip_instances, AnyLayer::Video(vl) => &vl.clip_instances,
AnyLayer::Effect(el) => &el.clip_instances, AnyLayer::Effect(el) => &el.clip_instances,
AnyLayer::Group(_) => &[], AnyLayer::Group(_) => &[],
AnyLayer::Raster(_) => &[],
}; };
// Check each selected clip // Check each selected clip
enabled = instances.iter() enabled = instances.iter()

View File

@ -4,6 +4,7 @@
/// Users can click to select tools, which updates the global selected_tool state. /// Users can click to select tools, which updates the global selected_tool state.
use eframe::egui; use eframe::egui;
use lightningbeam_core::layer::{AnyLayer, LayerType};
use lightningbeam_core::tool::{Tool, RegionSelectMode}; use lightningbeam_core::tool::{Tool, RegionSelectMode};
use super::{NodePath, PaneRenderer, SharedPaneState}; use super::{NodePath, PaneRenderer, SharedPaneState};
@ -30,14 +31,29 @@ impl PaneRenderer for ToolbarPane {
let button_padding = 8.0; let button_padding = 8.0;
let button_spacing = 4.0; let button_spacing = 4.0;
// Determine which tools to show based on the active layer type
let active_layer_type: Option<LayerType> = shared.active_layer_id
.and_then(|id| shared.action_executor.document().get_layer(&id))
.map(|layer| match layer {
AnyLayer::Vector(_) => LayerType::Vector,
AnyLayer::Audio(_) => LayerType::Audio,
AnyLayer::Video(_) => LayerType::Video,
AnyLayer::Effect(_) => LayerType::Effect,
AnyLayer::Group(_) => LayerType::Group,
AnyLayer::Raster(_) => LayerType::Raster,
});
// Auto-switch to Select if the current tool isn't available for this layer type
let tools = Tool::for_layer_type(active_layer_type);
if !tools.contains(shared.selected_tool) {
*shared.selected_tool = Tool::Select;
}
// Calculate how many columns we can fit // Calculate how many columns we can fit
let available_width = rect.width() - (button_padding * 2.0); let available_width = rect.width() - (button_padding * 2.0);
let columns = let columns =
((available_width + button_spacing) / (button_size + button_spacing)).floor() as usize; ((available_width + button_spacing) / (button_size + button_spacing)).floor() as usize;
let columns = columns.max(1); // At least 1 column let columns = columns.max(1); // At least 1 column
// Calculate total number of tools and rows
let tools = Tool::all();
let total_tools = tools.len(); let total_tools = tools.len();
let total_rows = (total_tools + columns - 1) / columns; let total_rows = (total_tools + columns - 1) / columns;
@ -177,52 +193,55 @@ impl PaneRenderer for ToolbarPane {
y += button_size + button_spacing; y += button_size + button_spacing;
} }
let is_raster = matches!(active_layer_type, Some(LayerType::Raster));
let show_colors = matches!(active_layer_type, None | Some(LayerType::Vector) | Some(LayerType::Raster));
// Add color pickers below the tool buttons // Add color pickers below the tool buttons
if show_colors {
y += button_spacing * 2.0; // Extra spacing y += button_spacing * 2.0; // Extra spacing
// Fill Color
let fill_label_width = 40.0; let fill_label_width = 40.0;
let color_button_size = 50.0; let color_button_size = 50.0;
let color_row_width = fill_label_width + color_button_size + button_spacing; let color_row_width = fill_label_width + color_button_size + button_spacing;
let color_x = rect.left() + (rect.width() - color_row_width) / 2.0; let color_x = rect.left() + (rect.width() - color_row_width) / 2.0;
// Fill color label // For raster layers show a single "Color" swatch (brush paint color = stroke_color).
// For vector layers show Fill + Stroke.
if !is_raster {
// Fill color label
ui.painter().text(
egui::pos2(color_x + fill_label_width / 2.0, y + color_button_size / 2.0),
egui::Align2::CENTER_CENTER,
"Fill",
egui::FontId::proportional(14.0),
egui::Color32::from_gray(200),
);
// Fill color button
let fill_button_rect = egui::Rect::from_min_size(
egui::pos2(color_x + fill_label_width + button_spacing, y),
egui::vec2(color_button_size, color_button_size),
);
let fill_button_id = ui.id().with(("fill_color_button", path));
let fill_response = ui.interact(fill_button_rect, fill_button_id, egui::Sense::click());
draw_color_button(ui, fill_button_rect, *shared.fill_color);
egui::containers::Popup::from_toggle_button_response(&fill_response)
.show(|ui| {
let changed = egui::color_picker::color_picker_color32(ui, shared.fill_color, egui::color_picker::Alpha::OnlyBlend);
if changed {
*shared.active_color_mode = super::ColorMode::Fill;
}
});
y += color_button_size + button_spacing;
}
// Stroke/brush color label
let stroke_label = if is_raster { "Color" } else { "Stroke" };
ui.painter().text( ui.painter().text(
egui::pos2(color_x + fill_label_width / 2.0, y + color_button_size / 2.0), egui::pos2(color_x + fill_label_width / 2.0, y + color_button_size / 2.0),
egui::Align2::CENTER_CENTER, egui::Align2::CENTER_CENTER,
"Fill", stroke_label,
egui::FontId::proportional(14.0),
egui::Color32::from_gray(200),
);
// Fill color button
let fill_button_rect = egui::Rect::from_min_size(
egui::pos2(color_x + fill_label_width + button_spacing, y),
egui::vec2(color_button_size, color_button_size),
);
let fill_button_id = ui.id().with(("fill_color_button", path));
let fill_response = ui.interact(fill_button_rect, fill_button_id, egui::Sense::click());
// Draw fill color button with checkerboard for alpha
draw_color_button(ui, fill_button_rect, *shared.fill_color);
// Show fill color picker popup using new Popup API
egui::containers::Popup::from_toggle_button_response(&fill_response)
.show(|ui| {
let changed = egui::color_picker::color_picker_color32(ui, shared.fill_color, egui::color_picker::Alpha::OnlyBlend);
// Track that the user interacted with the fill color
if changed {
*shared.active_color_mode = super::ColorMode::Fill;
}
});
y += color_button_size + button_spacing;
// Stroke color label
ui.painter().text(
egui::pos2(color_x + fill_label_width / 2.0, y + color_button_size / 2.0),
egui::Align2::CENTER_CENTER,
"Stroke",
egui::FontId::proportional(14.0), egui::FontId::proportional(14.0),
egui::Color32::from_gray(200), egui::Color32::from_gray(200),
); );
@ -234,19 +253,15 @@ impl PaneRenderer for ToolbarPane {
); );
let stroke_button_id = ui.id().with(("stroke_color_button", path)); let stroke_button_id = ui.id().with(("stroke_color_button", path));
let stroke_response = ui.interact(stroke_button_rect, stroke_button_id, egui::Sense::click()); let stroke_response = ui.interact(stroke_button_rect, stroke_button_id, egui::Sense::click());
// Draw stroke color button with checkerboard for alpha
draw_color_button(ui, stroke_button_rect, *shared.stroke_color); draw_color_button(ui, stroke_button_rect, *shared.stroke_color);
// Show stroke color picker popup using new Popup API
egui::containers::Popup::from_toggle_button_response(&stroke_response) egui::containers::Popup::from_toggle_button_response(&stroke_response)
.show(|ui| { .show(|ui| {
let changed = egui::color_picker::color_picker_color32(ui, shared.stroke_color, egui::color_picker::Alpha::OnlyBlend); let changed = egui::color_picker::color_picker_color32(ui, shared.stroke_color, egui::color_picker::Alpha::OnlyBlend);
// Track that the user interacted with the stroke color
if changed { if changed {
*shared.active_color_mode = super::ColorMode::Stroke; *shared.active_color_mode = super::ColorMode::Stroke;
} }
}); });
} // end color pickers
} }
fn name(&self) -> &str { fn name(&self) -> &str {

12
src/assets/erase.svg Normal file
View File

@ -0,0 +1,12 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
<!-- Eraser body (angled rectangle) -->
<rect x="3" y="11" width="14" height="7" rx="1.5"
transform="rotate(-30 10 14.5)"
fill="#bebebe" fill-opacity="1"/>
<!-- Pink eraser tip highlight -->
<rect x="3" y="11" width="5" height="7" rx="1.5"
transform="rotate(-30 10 14.5)"
fill="#e8a0a0" fill-opacity="0.8"/>
<!-- Baseline -->
<line x1="2" y1="22" x2="22" y2="22" stroke="#bebebe" stroke-width="1.5" stroke-linecap="round"/>
</svg>

After

Width:  |  Height:  |  Size: 559 B

View File

@ -0,0 +1,13 @@
<svg xmlns="http://www.w3.org/2000/svg" width="80" height="80" viewBox="0 0 100 100">
<!-- Brush handle -->
<rect x="48" y="10" width="8" height="40" rx="3" fill="currentColor" opacity="0.7"/>
<!-- Brush ferrule -->
<rect x="46" y="46" width="12" height="6" rx="1" fill="currentColor"/>
<!-- Brush tip -->
<ellipse cx="52" cy="58" rx="6" ry="10" fill="currentColor"/>
<!-- Paint strokes -->
<path d="M15 72 Q25 62 38 68 Q50 74 60 65" stroke="currentColor" stroke-width="5"
stroke-linecap="round" fill="none" opacity="0.9"/>
<path d="M12 85 Q30 78 50 83 Q65 87 80 80" stroke="currentColor" stroke-width="4"
stroke-linecap="round" fill="none" opacity="0.6"/>
</svg>

After

Width:  |  Height:  |  Size: 698 B

12
src/assets/smudge.svg Normal file
View File

@ -0,0 +1,12 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
<!-- Finger tip -->
<ellipse cx="14" cy="5" rx="3" ry="4" fill="#bebebe" fill-opacity="1"/>
<!-- Finger body -->
<rect x="11" y="5" width="6" height="10" rx="3" fill="#bebebe" fill-opacity="1"/>
<!-- Smudge streak 1 -->
<path d="M 6 14 Q 10 16 15 15" stroke="#bebebe" stroke-width="2.5" stroke-linecap="round" fill="none" opacity="0.7"/>
<!-- Smudge streak 2 -->
<path d="M 4 17 Q 9 19 14 18" stroke="#bebebe" stroke-width="2" stroke-linecap="round" fill="none" opacity="0.5"/>
<!-- Smudge streak 3 -->
<path d="M 5 20 Q 10 21.5 15 21" stroke="#bebebe" stroke-width="1.5" stroke-linecap="round" fill="none" opacity="0.3"/>
</svg>

After

Width:  |  Height:  |  Size: 734 B

24
src/assets/split.svg Normal file
View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="24"
height="24"
viewBox="0 0 24 24"
version="1.1"
xmlns="http://www.w3.org/2000/svg">
<g>
<rect
style="fill:none;stroke:none"
width="24"
height="24"
x="0"
y="0" />
<!-- Vertical split line -->
<line
style="fill:none;stroke:#bfbfbf;stroke-width:2;stroke-linecap:round;stroke-dasharray:2,2"
x1="12" y1="2" x2="12" y2="22" />
<!-- Blade pointing right -->
<polygon
style="fill:#bfbfbf;stroke:none"
points="8,10 14,12 8,14" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 604 B