Handle preview rendering during shape editing

This commit is contained in:
Skyler Lehmkuhl 2025-12-23 09:36:54 -05:00
parent f1df85baa2
commit 1fcad0355d
4 changed files with 180 additions and 31 deletions

View File

@ -178,6 +178,7 @@ pub fn render_document_for_compositing(
base_transform: Affine, base_transform: Affine,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) -> CompositeRenderResult { ) -> CompositeRenderResult {
let time = document.current_time; let time = document.current_time;
@ -211,6 +212,7 @@ pub fn render_document_for_compositing(
base_transform, base_transform,
image_cache, image_cache,
video_manager, video_manager,
skip_instance_id,
); );
rendered_layers.push(rendered); rendered_layers.push(rendered);
} }
@ -235,6 +237,7 @@ pub fn render_layer_isolated(
base_transform: Affine, base_transform: Affine,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) -> RenderedLayer { ) -> RenderedLayer {
let layer_id = layer.id(); let layer_id = layer.id();
let opacity = layer.opacity() as f32; let opacity = layer.opacity() as f32;
@ -256,6 +259,7 @@ pub fn render_layer_isolated(
1.0, // Full opacity - layer opacity handled in compositing 1.0, // Full opacity - layer opacity handled in compositing
image_cache, image_cache,
video_manager, video_manager,
skip_instance_id,
); );
rendered.has_content = !vector_layer.shape_instances.is_empty() rendered.has_content = !vector_layer.shape_instances.is_empty()
|| !vector_layer.clip_instances.is_empty(); || !vector_layer.clip_instances.is_empty();
@ -302,6 +306,7 @@ fn render_vector_layer_to_scene(
parent_opacity: f64, parent_opacity: f64,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) { ) {
// Render using the existing function but to this isolated scene // Render using the existing function but to this isolated scene
render_vector_layer( render_vector_layer(
@ -313,6 +318,7 @@ fn render_vector_layer_to_scene(
parent_opacity, parent_opacity,
image_cache, image_cache,
video_manager, video_manager,
skip_instance_id,
); );
} }
@ -349,7 +355,7 @@ pub fn render_document(
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
) { ) {
render_document_with_transform(document, scene, Affine::IDENTITY, image_cache, video_manager); render_document_with_transform(document, scene, Affine::IDENTITY, image_cache, video_manager, None);
} }
/// Render a document to a Vello scene with a base transform /// Render a document to a Vello scene with a base transform
@ -360,13 +366,14 @@ pub fn render_document_with_transform(
base_transform: Affine, base_transform: Affine,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) { ) {
// 1. Draw background // 1. Draw background
render_background(document, scene, base_transform); render_background(document, scene, base_transform);
// 2. Recursively render the root graphics object at current time // 2. Recursively render the root graphics object at current time
let time = document.current_time; let time = document.current_time;
render_graphics_object(document, time, scene, base_transform, image_cache, video_manager); render_graphics_object(document, time, scene, base_transform, image_cache, video_manager, skip_instance_id);
} }
/// Draw the document background /// Draw the document background
@ -393,6 +400,7 @@ fn render_graphics_object(
base_transform: Affine, base_transform: Affine,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) { ) {
// Check if any layers are soloed // Check if any layers are soloed
let any_soloed = document.visible_layers().any(|layer| layer.soloed()); let any_soloed = document.visible_layers().any(|layer| layer.soloed());
@ -405,11 +413,11 @@ fn render_graphics_object(
if any_soloed { if any_soloed {
// Only render soloed layers when solo is active // Only render soloed layers when solo is active
if layer.soloed() { if layer.soloed() {
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager); render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
} }
} else { } else {
// Render all visible layers when no solo is active // Render all visible layers when no solo is active
render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager); render_layer(document, time, layer, scene, base_transform, 1.0, image_cache, video_manager, skip_instance_id);
} }
} }
} }
@ -424,10 +432,11 @@ fn render_layer(
parent_opacity: f64, parent_opacity: f64,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) { ) {
match layer { match layer {
AnyLayer::Vector(vector_layer) => { AnyLayer::Vector(vector_layer) => {
render_vector_layer(document, time, vector_layer, scene, base_transform, parent_opacity, image_cache, video_manager) render_vector_layer(document, time, vector_layer, scene, base_transform, parent_opacity, image_cache, video_manager, skip_instance_id)
} }
AnyLayer::Audio(_) => { AnyLayer::Audio(_) => {
// Audio layers don't render visually // Audio layers don't render visually
@ -580,7 +589,7 @@ fn render_clip_instance(
if !layer_node.data.visible() { if !layer_node.data.visible() {
continue; continue;
} }
render_layer(document, clip_time, &layer_node.data, scene, instance_transform, clip_opacity, image_cache, video_manager); render_layer(document, clip_time, &layer_node.data, scene, instance_transform, clip_opacity, image_cache, video_manager, None);
} }
} }
@ -761,6 +770,7 @@ fn render_vector_layer(
parent_opacity: f64, parent_opacity: f64,
image_cache: &mut ImageCache, image_cache: &mut ImageCache,
video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>, video_manager: &std::sync::Arc<std::sync::Mutex<crate::video::VideoManager>>,
skip_instance_id: Option<uuid::Uuid>,
) { ) {
// Cascade opacity: parent_opacity × layer.opacity // Cascade opacity: parent_opacity × layer.opacity
let layer_opacity = parent_opacity * layer.layer.opacity; let layer_opacity = parent_opacity * layer.layer.opacity;
@ -772,6 +782,11 @@ fn render_vector_layer(
// Render each shape instance in the layer // Render each shape instance in the layer
for shape_instance in &layer.shape_instances { for shape_instance in &layer.shape_instances {
// Skip this instance if it's being edited
if Some(shape_instance.id) == skip_instance_id {
continue;
}
// Get the shape for this instance // Get the shape for this instance
let Some(shape) = layer.get_shape(&shape_instance.shape_id) else { let Some(shape) = layer.get_shape(&shape_instance.shape_id) else {
continue; continue;

View File

@ -54,9 +54,10 @@ fn test_render_empty_document() {
let document = Document::new("Empty"); let document = Document::new("Empty");
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
// Should not panic // Should not panic
render_document(&document, &mut scene, &mut image_cache); render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -64,9 +65,10 @@ fn test_render_document_with_shapes() {
let (document, _ids) = setup_rendering_document(); let (document, _ids) = setup_rendering_document();
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
// Should render all 3 layers without error // Should render all 3 layers without error
render_document(&document, &mut scene, &mut image_cache); render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -74,10 +76,11 @@ fn test_render_with_transform() {
let (document, _ids) = setup_rendering_document(); let (document, _ids) = setup_rendering_document();
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
// Render with zoom and pan // Render with zoom and pan
let transform = Affine::translate((100.0, 50.0)) * Affine::scale(2.0); let transform = Affine::translate((100.0, 50.0)) * Affine::scale(2.0);
render_document_with_transform(&document, &mut scene, transform, &mut image_cache); render_document_with_transform(&document, &mut scene, transform, &mut image_cache, &video_manager, None);
} }
#[test] #[test]
@ -102,7 +105,8 @@ fn test_render_solo_single_layer() {
// Render should work // Render should work
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -126,7 +130,8 @@ fn test_render_solo_multiple_layers() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -143,7 +148,8 @@ fn test_render_hidden_layer_not_rendered() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -168,7 +174,8 @@ fn test_render_with_layer_opacity() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -206,7 +213,8 @@ fn test_render_with_clip_instances() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -232,7 +240,8 @@ fn test_render_clip_instance_outside_time_range() {
// Clip shouldn't render (it hasn't started yet) // Clip shouldn't render (it hasn't started yet)
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -252,7 +261,8 @@ fn test_render_all_layers_hidden() {
// Should still render (just background) // Should still render (just background)
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -280,7 +290,8 @@ fn test_render_solo_hidden_layer_interaction() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
@ -290,18 +301,20 @@ fn test_render_background_color() {
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
render_document(&document, &mut scene, &mut image_cache); let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
#[test] #[test]
fn test_render_at_different_times() { fn test_render_at_different_times() {
let (mut document, _ids) = setup_rendering_document(); let (mut document, _ids) = setup_rendering_document();
let mut image_cache = ImageCache::new(); let mut image_cache = ImageCache::new();
let video_manager = std::sync::Arc::new(std::sync::Mutex::new(lightningbeam_core::video::VideoManager::new()));
// Render at different times // Render at different times
for time in [0.0, 0.5, 1.0, 2.5, 5.0, 10.0] { for time in [0.0, 0.5, 1.0, 2.5, 5.0, 10.0] {
document.set_time(time); document.set_time(time);
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&document, &mut scene, &mut image_cache); render_document(&document, &mut scene, &mut image_cache, &video_manager);
} }
} }

View File

@ -746,6 +746,7 @@ pub fn render_frame_to_rgba_hdr(
base_transform, base_transform,
image_cache, image_cache,
video_manager, video_manager,
None, // No skipping during export
); );
// Buffer specs for layer rendering // Buffer specs for layer rendering
@ -1131,6 +1132,7 @@ pub fn render_frame_to_gpu_rgba(
base_transform, base_transform,
image_cache, image_cache,
video_manager, video_manager,
None, // No skipping during export
); );
// Buffer specs for layer rendering // Buffer specs for layer rendering

View File

@ -358,6 +358,7 @@ struct VelloCallback {
eyedropper_request: Option<(egui::Pos2, super::ColorMode)>, // Pending eyedropper sample eyedropper_request: Option<(egui::Pos2, super::ColorMode)>, // Pending eyedropper sample
playback_time: f64, // Current playback time for animation evaluation playback_time: f64, // Current playback time for animation evaluation
video_manager: std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>, video_manager: std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>,
shape_editing_cache: Option<ShapeEditingCache>, // Cache for vector editing preview
} }
impl VelloCallback { impl VelloCallback {
@ -378,8 +379,9 @@ impl VelloCallback {
eyedropper_request: Option<(egui::Pos2, super::ColorMode)>, eyedropper_request: Option<(egui::Pos2, super::ColorMode)>,
playback_time: f64, playback_time: f64,
video_manager: std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>, video_manager: std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>,
shape_editing_cache: Option<ShapeEditingCache>,
) -> Self { ) -> Self {
Self { rect, pan_offset, zoom, instance_id, document, tool_state, active_layer_id, drag_delta, selection, fill_color, stroke_color, stroke_width, selected_tool, eyedropper_request, playback_time, video_manager } Self { rect, pan_offset, zoom, instance_id, document, tool_state, active_layer_id, drag_delta, selection, fill_color, stroke_color, stroke_width, selected_tool, eyedropper_request, playback_time, video_manager, shape_editing_cache }
} }
} }
@ -440,11 +442,16 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
instance_resources.ensure_hdr_texture(device, &shared, width, height); instance_resources.ensure_hdr_texture(device, &shared, width, height);
let mut image_cache = shared.image_cache.lock().unwrap(); let mut image_cache = shared.image_cache.lock().unwrap();
// Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
let composite_result = lightningbeam_core::renderer::render_document_for_compositing( let composite_result = lightningbeam_core::renderer::render_document_for_compositing(
&self.document, &self.document,
camera_transform, camera_transform,
&mut image_cache, &mut image_cache,
&shared.video_manager, &shared.video_manager,
skip_instance_id,
); );
drop(image_cache); drop(image_cache);
@ -679,12 +686,17 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Legacy single-scene rendering // Legacy single-scene rendering
let mut scene = vello::Scene::new(); let mut scene = vello::Scene::new();
let mut image_cache = shared.image_cache.lock().unwrap(); let mut image_cache = shared.image_cache.lock().unwrap();
// Skip rendering the shape instance being edited (for vector editing preview)
let skip_instance_id = self.shape_editing_cache.as_ref().map(|cache| cache.instance_id);
lightningbeam_core::renderer::render_document_with_transform( lightningbeam_core::renderer::render_document_with_transform(
&self.document, &self.document,
&mut scene, &mut scene,
camera_transform, camera_transform,
&mut image_cache, &mut image_cache,
&shared.video_manager, &shared.video_manager,
skip_instance_id,
); );
drop(image_cache); drop(image_cache);
scene scene
@ -1199,6 +1211,59 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
} }
} }
// 8. Draw vector editing preview
if let Some(cache) = &self.shape_editing_cache {
use lightningbeam_core::bezpath_editing::rebuild_bezpath;
// Rebuild the path from the modified editable curves
let preview_path = rebuild_bezpath(&cache.editable_data);
// Get the layer first, then the shape from the layer
if let Some(layer) = (*self.document).root.get_child(&cache.layer_id) {
if let lightningbeam_core::layer::AnyLayer::Vector(vector_layer) = layer {
if let Some(shape) = vector_layer.get_shape(&cache.shape_id) {
let transform = camera_transform * cache.local_to_world;
// Render fill with FULL OPACITY (same as original)
if let Some(fill_color) = &shape.fill_color {
scene.fill(
shape.fill_rule.into(),
transform,
fill_color.to_peniko(),
None,
&preview_path,
);
}
// Render stroke with FULL OPACITY (same as original)
if let Some(stroke_color) = &shape.stroke_color {
if let Some(stroke_style) = &shape.stroke_style {
scene.stroke(
&stroke_style.to_stroke(),
transform,
stroke_color.to_peniko(),
None,
&preview_path,
);
}
}
// If shape has neither fill nor stroke, render with default stroke
if shape.fill_color.is_none() && shape.stroke_color.is_none() {
let default_stroke = vello::kurbo::Stroke::new(2.0);
scene.stroke(
&default_stroke,
transform,
vello::peniko::Color::from_rgba8(100, 150, 255, 255),
None,
&preview_path,
);
}
}
}
}
}
// 6. Draw transform tool handles (when Transform tool is active) // 6. Draw transform tool handles (when Transform tool is active)
use lightningbeam_core::tool::Tool; use lightningbeam_core::tool::Tool;
let should_draw_transform_handles = matches!(self.selected_tool, Tool::Transform) && !self.selection.is_empty(); let should_draw_transform_handles = matches!(self.selected_tool, Tool::Transform) && !self.selection.is_empty();
@ -1683,15 +1748,24 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// The overlay scene was built above with all the UI elements // The overlay scene was built above with all the UI elements
if let Some(hdr_view) = &instance_resources.hdr_texture_view { if let Some(hdr_view) = &instance_resources.hdr_texture_view {
let mut buffer_pool = shared.buffer_pool.lock().unwrap(); let mut buffer_pool = shared.buffer_pool.lock().unwrap();
let overlay_spec = lightningbeam_core::gpu::BufferSpec::new( let overlay_srgb_spec = lightningbeam_core::gpu::BufferSpec::new(
width, width,
height, height,
lightningbeam_core::gpu::BufferFormat::Rgba8Srgb, lightningbeam_core::gpu::BufferFormat::Rgba8Srgb,
); );
let overlay_handle = buffer_pool.acquire(device, overlay_spec); let overlay_hdr_spec = lightningbeam_core::gpu::BufferSpec::new(
width,
height,
lightningbeam_core::gpu::BufferFormat::Rgba16Float,
);
let overlay_srgb_handle = buffer_pool.acquire(device, overlay_srgb_spec);
let overlay_hdr_handle = buffer_pool.acquire(device, overlay_hdr_spec);
if let Some(overlay_view) = buffer_pool.get_view(overlay_handle) { if let (Some(overlay_srgb_view), Some(overlay_hdr_view)) = (
// Render overlay scene to temp buffer buffer_pool.get_view(overlay_srgb_handle),
buffer_pool.get_view(overlay_hdr_handle),
) {
// Render overlay scene to sRGB buffer
let overlay_params = vello::RenderParams { let overlay_params = vello::RenderParams {
base_color: vello::peniko::Color::TRANSPARENT, base_color: vello::peniko::Color::TRANSPARENT,
width, width,
@ -1700,11 +1774,18 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
}; };
if let Ok(mut renderer) = shared.renderer.lock() { if let Ok(mut renderer) = shared.renderer.lock() {
renderer.render_to_texture(device, queue, &scene, overlay_view, &overlay_params).ok(); renderer.render_to_texture(device, queue, &scene, overlay_srgb_view, &overlay_params).ok();
} }
// Composite overlay onto HDR texture (sRGB→linear conversion happens in compositor) // Convert sRGB to linear HDR (same as main document layers)
let overlay_layer = lightningbeam_core::gpu::CompositorLayer::normal(overlay_handle, 1.0); let mut convert_encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("overlay_srgb_to_linear_encoder"),
});
shared.srgb_to_linear.convert(device, &mut convert_encoder, overlay_srgb_view, overlay_hdr_view);
queue.submit(Some(convert_encoder.finish()));
// Composite overlay onto HDR texture
let overlay_layer = lightningbeam_core::gpu::CompositorLayer::normal(overlay_hdr_handle, 1.0);
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("overlay_composite_encoder"), label: Some("overlay_composite_encoder"),
}); });
@ -1720,7 +1801,8 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
queue.submit(Some(encoder.finish())); queue.submit(Some(encoder.finish()));
} }
buffer_pool.release(overlay_handle); buffer_pool.release(overlay_srgb_handle);
buffer_pool.release(overlay_hdr_handle);
drop(buffer_pool); drop(buffer_pool);
} }
@ -1916,7 +1998,10 @@ pub struct StagePane {
} }
/// Cached data for editing a shape /// Cached data for editing a shape
#[derive(Clone)]
struct ShapeEditingCache { struct ShapeEditingCache {
/// The layer ID containing the shape being edited
layer_id: uuid::Uuid,
/// The shape ID being edited /// The shape ID being edited
shape_id: uuid::Uuid, shape_id: uuid::Uuid,
/// The shape instance ID being edited /// The shape instance ID being edited
@ -2428,6 +2513,7 @@ impl StagePane {
// Store editing cache // Store editing cache
self.shape_editing_cache = Some(ShapeEditingCache { self.shape_editing_cache = Some(ShapeEditingCache {
layer_id: active_layer_id,
shape_id: shape_instance.shape_id, shape_id: shape_instance.shape_id,
instance_id: shape_instance_id, instance_id: shape_instance_id,
editable_data: editable_data.clone(), editable_data: editable_data.clone(),
@ -2505,6 +2591,7 @@ impl StagePane {
// Store editing cache // Store editing cache
self.shape_editing_cache = Some(ShapeEditingCache { self.shape_editing_cache = Some(ShapeEditingCache {
layer_id: active_layer_id,
shape_id: shape_instance.shape_id, shape_id: shape_instance.shape_id,
instance_id: shape_instance_id, instance_id: shape_instance_id,
editable_data, editable_data,
@ -2865,6 +2952,7 @@ impl StagePane {
// Store editing cache // Store editing cache
self.shape_editing_cache = Some(ShapeEditingCache { self.shape_editing_cache = Some(ShapeEditingCache {
layer_id: active_layer_id,
shape_id: shape_instance.shape_id, shape_id: shape_instance.shape_id,
instance_id: shape_instance_id, instance_id: shape_instance_id,
editable_data, editable_data,
@ -5680,7 +5768,17 @@ impl StagePane {
}; };
let local_to_world = instance.to_affine(); let local_to_world = instance.to_affine();
let editable = extract_editable_curves(shape.path());
// Use modified curves from cache if this instance is being edited
let editable = if let Some(cache) = &self.shape_editing_cache {
if cache.instance_id == instance.id {
cache.editable_data.clone()
} else {
extract_editable_curves(shape.path())
}
} else {
extract_editable_curves(shape.path())
};
// Determine active element from tool state (being dragged) // Determine active element from tool state (being dragged)
let (active_vertex, active_control_point) = match &*shared.tool_state { let (active_vertex, active_control_point) = match &*shared.tool_state {
@ -5779,7 +5877,17 @@ impl StagePane {
if let Some(instance) = layer.shape_instances.iter().find(|i| i.id == shape_instance_id) { if let Some(instance) = layer.shape_instances.iter().find(|i| i.id == shape_instance_id) {
if let Some(shape) = layer.get_shape(&instance.shape_id) { if let Some(shape) = layer.get_shape(&instance.shape_id) {
let local_to_world = instance.to_affine(); let local_to_world = instance.to_affine();
let editable = extract_editable_curves(shape.path());
// Use modified curves from cache if this instance is being edited
let editable = if let Some(cache) = &self.shape_editing_cache {
if cache.instance_id == instance.id {
cache.editable_data.clone()
} else {
extract_editable_curves(shape.path())
}
} else {
extract_editable_curves(shape.path())
};
if vertex_index < editable.vertices.len() { if vertex_index < editable.vertices.len() {
let vertex = &editable.vertices[vertex_index]; let vertex = &editable.vertices[vertex_index];
@ -5808,7 +5916,17 @@ impl StagePane {
if let Some(instance) = layer.shape_instances.iter().find(|i| i.id == shape_instance_id) { if let Some(instance) = layer.shape_instances.iter().find(|i| i.id == shape_instance_id) {
if let Some(shape) = layer.get_shape(&instance.shape_id) { if let Some(shape) = layer.get_shape(&instance.shape_id) {
let local_to_world = instance.to_affine(); let local_to_world = instance.to_affine();
let editable = extract_editable_curves(shape.path());
// Use modified curves from cache if this instance is being edited
let editable = if let Some(cache) = &self.shape_editing_cache {
if cache.instance_id == instance.id {
cache.editable_data.clone()
} else {
extract_editable_curves(shape.path())
}
} else {
extract_editable_curves(shape.path())
};
if curve_index < editable.curves.len() { if curve_index < editable.curves.len() {
let curve = &editable.curves[curve_index]; let curve = &editable.curves[curve_index];
@ -6226,6 +6344,7 @@ impl PaneRenderer for StagePane {
self.pending_eyedropper_sample, self.pending_eyedropper_sample,
*shared.playback_time, *shared.playback_time,
shared.video_manager.clone(), shared.video_manager.clone(),
self.shape_editing_cache.clone(),
); );
let cb = egui_wgpu::Callback::new_paint_callback( let cb = egui_wgpu::Callback::new_paint_callback(