Export images

This commit is contained in:
Skyler Lehmkuhl 2026-03-09 11:22:51 -04:00
parent 09856ab52c
commit a18a335c60
6 changed files with 543 additions and 42 deletions

View File

@ -390,6 +390,59 @@ impl VideoExportSettings {
}
}
// ── Image export ─────────────────────────────────────────────────────────────

/// Image export formats (single-frame still image)
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ImageFormat {
    /// Lossless; preserves the alpha channel. No quality setting.
    Png,
    /// Lossy; no alpha channel — pixels are flattened before encoding.
    Jpeg,
    /// Supports an alpha channel; quality setting applies.
    WebP,
}
impl ImageFormat {
    /// Human-readable name for UI display.
    pub fn name(self) -> &'static str {
        match self {
            Self::Png => "PNG",
            Self::Jpeg => "JPEG",
            Self::WebP => "WebP",
        }
    }

    /// Default file extension (lowercase, no leading dot).
    pub fn extension(self) -> &'static str {
        match self {
            Self::Png => "png",
            Self::Jpeg => "jpg",
            Self::WebP => "webp",
        }
    }

    /// Whether the quality setting (1-100) applies to this format.
    /// PNG is lossless; JPEG and WebP take a quality value.
    pub fn has_quality(self) -> bool {
        !matches!(self, Self::Png)
    }
}
/// Settings for exporting a single frame as a still image.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageExportSettings {
    /// Output image format (PNG / JPEG / WebP).
    pub format: ImageFormat,
    /// Document time (seconds) of the frame to render.
    pub time: f64,
    /// Override width; None = use document canvas width.
    pub width: Option<u32>,
    /// Override height; None = use document canvas height.
    pub height: Option<u32>,
    /// Encode quality 1-100 (JPEG / WebP only).
    pub quality: u8,
    /// Preserve the alpha channel in the output (respect document background alpha).
    /// When false, the image is composited onto an opaque background before encoding.
    /// Only meaningful for formats that support alpha (PNG, WebP).
    pub allow_transparency: bool,
}
impl Default for ImageExportSettings {
    /// PNG at time 0, document-sized output, quality 90, opaque background.
    fn default() -> Self {
        Self {
            format: ImageFormat::Png,
            time: 0.0,
            width: None,
            height: None,
            quality: 90,
            allow_transparency: false,
        }
    }
}
impl ImageExportSettings {
    /// Validate the settings before starting an export.
    ///
    /// # Errors
    /// Returns a human-readable message when a dimension override is zero, or
    /// when `quality` is outside 1-100 for a format that uses it (JPEG / WebP).
    pub fn validate(&self) -> Result<(), String> {
        if self.width == Some(0) {
            return Err("Width must be > 0".into());
        }
        if self.height == Some(0) {
            return Err("Height must be > 0".into());
        }
        // quality is a u8, so 0 and 101-255 are representable but invalid;
        // reject them here rather than letting the encoder misbehave.
        if self.format.has_quality() && !(1..=100).contains(&self.quality) {
            return Err("Quality must be between 1 and 100".into());
        }
        Ok(())
    }
}
/// Progress updates during export
#[derive(Debug, Clone)]
pub enum ExportProgress {

View File

@ -3,13 +3,29 @@
//! Provides a user interface for configuring and starting audio/video exports.
use eframe::egui;
use lightningbeam_core::export::{AudioExportSettings, AudioFormat, VideoExportSettings, VideoCodec, VideoQuality};
use lightningbeam_core::export::{
AudioExportSettings, AudioFormat,
ImageExportSettings, ImageFormat,
VideoExportSettings, VideoCodec, VideoQuality,
};
use std::path::PathBuf;
/// Hint about document content, used to pick a smart default export type.
pub struct DocumentHint {
    /// Document contains at least one video layer.
    pub has_video: bool,
    /// Document contains at least one audio layer.
    pub has_audio: bool,
    /// Document contains at least one raster (bitmap) layer.
    pub has_raster: bool,
    /// Document contains at least one vector or effect layer.
    pub has_vector: bool,
    /// Playhead time (seconds) when the dialog opened; used as the default
    /// frame time for single-image export.
    pub current_time: f64,
    /// Document canvas width in pixels.
    pub doc_width: u32,
    /// Document canvas height in pixels.
    pub doc_height: u32,
}
/// Export type selection
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExportType {
    /// Audio-only export.
    Audio,
    /// Single-frame still-image export.
    Image,
    /// Video export (optionally with audio).
    Video,
}
@ -17,6 +33,7 @@ pub enum ExportType {
/// Outcome of a confirmed export dialog: the chosen settings plus output path.
#[derive(Debug, Clone)]
pub enum ExportResult {
    /// Export audio only.
    AudioOnly(AudioExportSettings, PathBuf),
    /// Export a single still frame.
    Image(ImageExportSettings, PathBuf),
    /// Export video without an audio track.
    VideoOnly(VideoExportSettings, PathBuf),
    /// Export video muxed with audio.
    VideoWithAudio(VideoExportSettings, AudioExportSettings, PathBuf),
}
@ -32,6 +49,9 @@ pub struct ExportDialog {
/// Audio export settings
pub audio_settings: AudioExportSettings,
/// Image export settings
pub image_settings: ImageExportSettings,
/// Video export settings
pub video_settings: VideoExportSettings,
@ -55,6 +75,12 @@ pub struct ExportDialog {
/// Output directory
pub output_dir: PathBuf,
/// Project name from the last `open()` call — used to detect file switches.
current_project: String,
/// Export type used the last time the user actually clicked Export for `current_project`.
last_export_type: Option<ExportType>,
}
impl Default for ExportDialog {
@ -71,6 +97,7 @@ impl Default for ExportDialog {
open: false,
export_type: ExportType::Audio,
audio_settings: AudioExportSettings::standard_mp3(),
image_settings: ImageExportSettings::default(),
video_settings: VideoExportSettings::default(),
include_audio: true,
output_path: None,
@ -78,23 +105,52 @@ impl Default for ExportDialog {
show_advanced: false,
selected_video_preset: 0,
output_filename: String::new(),
current_project: String::new(),
last_export_type: None,
output_dir: music_dir,
}
}
}
impl ExportDialog {
/// Open the dialog with default settings
pub fn open(&mut self, timeline_duration: f64, project_name: &str) {
/// Open the dialog with default settings, using `hint` to pick a smart default tab.
pub fn open(&mut self, timeline_duration: f64, project_name: &str, hint: &DocumentHint) {
self.open = true;
self.audio_settings.end_time = timeline_duration;
self.video_settings.end_time = timeline_duration;
self.image_settings.time = hint.current_time;
// Propagate document dimensions as defaults (None means "use doc size").
self.image_settings.width = None;
self.image_settings.height = None;
self.error_message = None;
// Pre-populate filename from project name if not already set
// Determine export type: prefer the type used last time for this file,
// then fall back to document-content hints.
let same_project = self.current_project == project_name;
self.export_type = if same_project && self.last_export_type.is_some() {
self.last_export_type.unwrap()
} else {
let only_audio = hint.has_audio && !hint.has_video && !hint.has_raster && !hint.has_vector;
let only_raster = hint.has_raster && !hint.has_video && !hint.has_audio && !hint.has_vector;
if hint.has_video { ExportType::Video }
else if only_audio { ExportType::Audio }
else if only_raster { ExportType::Image }
else { self.export_type } // keep current as fallback
};
self.current_project = project_name.to_owned();
// Pre-populate filename from project name if not already set.
if self.output_filename.is_empty() || !self.output_filename.contains(project_name) {
let ext = self.audio_settings.format.extension();
self.output_filename = format!("{}.{}", project_name, ext);
self.output_filename = format!("{}.{}", project_name, self.current_extension());
}
}
/// Extension for the currently selected export type.
fn current_extension(&self) -> &'static str {
match self.export_type {
ExportType::Audio => self.audio_settings.format.extension(),
ExportType::Image => self.image_settings.format.extension(),
ExportType::Video => self.video_settings.codec.container_format(),
}
}
@ -106,10 +162,7 @@ impl ExportDialog {
/// Update the filename extension to match the current format
fn update_filename_extension(&mut self) {
let ext = match self.export_type {
ExportType::Audio => self.audio_settings.format.extension(),
ExportType::Video => self.video_settings.codec.container_format(),
};
let ext = self.current_extension();
// Replace extension in filename
if let Some(dot_pos) = self.output_filename.rfind('.') {
self.output_filename.truncate(dot_pos + 1);
@ -138,6 +191,7 @@ impl ExportDialog {
let window_title = match self.export_type {
ExportType::Audio => "Export Audio",
ExportType::Image => "Export Image",
ExportType::Video => "Export Video",
};
@ -156,11 +210,14 @@ impl ExportDialog {
// Export type selection (tabs)
ui.horizontal(|ui| {
if ui.selectable_value(&mut self.export_type, ExportType::Audio, "Audio").clicked() {
self.update_filename_extension();
}
if ui.selectable_value(&mut self.export_type, ExportType::Video, "Video").clicked() {
self.update_filename_extension();
for (variant, label) in [
(ExportType::Audio, "Audio"),
(ExportType::Image, "Image"),
(ExportType::Video, "Video"),
] {
if ui.selectable_value(&mut self.export_type, variant, label).clicked() {
self.update_filename_extension();
}
}
});
@ -171,6 +228,7 @@ impl ExportDialog {
// Basic settings
match self.export_type {
ExportType::Audio => self.render_audio_basic(ui),
ExportType::Image => self.render_image_settings(ui),
ExportType::Video => self.render_video_basic(ui),
}
@ -188,6 +246,7 @@ impl ExportDialog {
ui.add_space(8.0);
match self.export_type {
ExportType::Audio => self.render_audio_advanced(ui),
ExportType::Image => self.render_image_advanced(ui),
ExportType::Video => self.render_video_advanced(ui),
}
}
@ -260,6 +319,62 @@ impl ExportDialog {
});
}
/// Render basic image export settings (format, quality, transparency).
fn render_image_settings(&mut self, ui: &mut egui::Ui) {
    // Format picker; keep the filename extension in sync when it changes.
    ui.horizontal(|ui| {
        ui.label("Format:");
        let before = self.image_settings.format;
        egui::ComboBox::from_id_salt("image_format")
            .selected_text(before.name())
            .show_ui(ui, |ui| {
                for fmt in [ImageFormat::Png, ImageFormat::Jpeg, ImageFormat::WebP] {
                    ui.selectable_value(&mut self.image_settings.format, fmt, fmt.name());
                }
            });
        if before != self.image_settings.format {
            self.update_filename_extension();
        }
    });

    // Quality slider only makes sense for lossy formats (JPEG / WebP).
    if self.image_settings.format.has_quality() {
        ui.horizontal(|ui| {
            ui.label("Quality:");
            ui.add(egui::Slider::new(&mut self.image_settings.quality, 1..=100));
        });
    }

    // JPEG has no alpha channel, so hide the transparency toggle for it.
    if !matches!(self.image_settings.format, ImageFormat::Jpeg) {
        ui.checkbox(&mut self.image_settings.allow_transparency, "Allow transparency");
    }
}
/// Render advanced image export settings (time, resolution override).
fn render_image_advanced(&mut self, ui: &mut egui::Ui) {
    // Which document time (seconds) to snapshot.
    ui.horizontal(|ui| {
        ui.label("Time:");
        let time_drag = egui::DragValue::new(&mut self.image_settings.time)
            .speed(0.01)
            .range(0.0..=f64::MAX)
            .suffix(" s");
        ui.add(time_drag);
    });

    // Resolution override: a widget value of 0 maps back to None ("use doc size").
    ui.horizontal(|ui| {
        ui.label("Size:");
        let mut width_px = self.image_settings.width.unwrap_or(0);
        let mut height_px = self.image_settings.height.unwrap_or(0);
        if ui.add(egui::DragValue::new(&mut width_px).range(0..=u32::MAX).prefix("W ")).changed() {
            self.image_settings.width = (width_px != 0).then_some(width_px);
        }
        if ui.add(egui::DragValue::new(&mut height_px).range(0..=u32::MAX).prefix("H ")).changed() {
            self.image_settings.height = (height_px != 0).then_some(height_px);
        }
        ui.weak("(0 = document size)");
    });
}
/// Render advanced audio settings (sample rate, channels, bit depth, bitrate, time range)
fn render_audio_advanced(&mut self, ui: &mut egui::Ui) {
ui.horizontal(|ui| {
@ -419,6 +534,7 @@ impl ExportDialog {
fn render_time_range(&mut self, ui: &mut egui::Ui) {
let (start_time, end_time) = match self.export_type {
ExportType::Audio => (&mut self.audio_settings.start_time, &mut self.audio_settings.end_time),
ExportType::Image => return, // image uses a single time field, not a range
ExportType::Video => (&mut self.video_settings.start_time, &mut self.video_settings.end_time),
};
@ -440,26 +556,35 @@ impl ExportDialog {
ui.label(format!("Duration: {:.2} seconds", duration));
}
/// Render output file selection UI
/// Render output file selection UI — single OS save-file dialog.
fn render_output_selection(&mut self, ui: &mut egui::Ui) {
ui.horizontal(|ui| {
// Show the current path (truncated if long).
let full_path = self.build_output_path();
let path_str = full_path.display().to_string();
ui.label("Save to:");
let dir_text = self.output_dir.display().to_string();
ui.label(&dir_text);
if ui.button("Change...").clicked() {
if let Some(dir) = rfd::FileDialog::new()
.set_directory(&self.output_dir)
.pick_folder()
{
self.output_dir = dir;
}
}
ui.add(egui::Label::new(
egui::RichText::new(&path_str).weak()
).truncate());
});
ui.horizontal(|ui| {
ui.label("Filename:");
ui.text_edit_singleline(&mut self.output_filename);
});
if ui.button("Choose location...").clicked() {
let ext = self.current_extension();
let mut dialog = rfd::FileDialog::new()
.set_directory(&self.output_dir)
.set_file_name(&self.output_filename)
.add_filter(ext.to_uppercase(), &[ext]);
if let Some(path) = dialog.save_file() {
if let Some(dir) = path.parent() {
self.output_dir = dir.to_path_buf();
}
if let Some(name) = path.file_name() {
self.output_filename = name.to_string_lossy().into_owned();
// Ensure the extension matches the selected format.
self.update_filename_extension();
}
}
}
}
/// Handle export button click
@ -471,7 +596,17 @@ impl ExportDialog {
let output_path = self.output_path.clone().unwrap();
// Remember this export type for next time this file is opened.
self.last_export_type = Some(self.export_type);
let result = match self.export_type {
ExportType::Image => {
if let Err(err) = self.image_settings.validate() {
self.error_message = Some(err);
return None;
}
Some(ExportResult::Image(self.image_settings.clone(), output_path))
}
ExportType::Audio => {
// Validate audio settings
if let Err(err) = self.audio_settings.validate() {

View File

@ -0,0 +1,70 @@
//! Image encoding — save raw RGBA bytes as PNG / JPEG / WebP.
use lightningbeam_core::export::ImageFormat;
use std::path::Path;
/// Encode `pixels` (raw RGBA8, top-left origin) and write to `path`.
///
/// * `quality` — 1-100; used by the JPEG encoder. NOTE(review): the `image`
///   crate's default WebP encoder is lossless, so quality appears to be
///   ignored for WebP via `save()` — confirm if lossy WebP is required.
/// * `allow_transparency` — when true the alpha channel is preserved (PNG/WebP);
///   when false each pixel is composited onto black before encoding.
///
/// # Errors
/// Returns a message when the buffer length doesn't match `width * height * 4`,
/// the file can't be created, or encoding fails.
pub fn save_rgba_image(
    pixels: &[u8],
    width: u32,
    height: u32,
    format: ImageFormat,
    quality: u8,
    allow_transparency: bool,
    path: &Path,
) -> Result<(), String> {
    use image::{ImageBuffer, Rgba};

    let img = ImageBuffer::<Rgba<u8>, _>::from_raw(width, height, pixels.to_vec())
        .ok_or_else(|| "Pixel buffer size mismatch".to_string())?;

    match format {
        ImageFormat::Png => {
            if allow_transparency {
                img.save(path).map_err(|e| format!("PNG save failed: {e}"))
            } else {
                flatten_alpha(img).save(path).map_err(|e| format!("PNG save failed: {e}"))
            }
        }
        ImageFormat::Jpeg => {
            use image::codecs::jpeg::JpegEncoder;
            use std::fs::File;
            use std::io::BufWriter;
            // JPEG has no alpha: always flatten, then encode the RGB8 buffer
            // directly — no DynamicImage round-trip needed.
            let flat = flatten_alpha(img);
            let file = File::create(path).map_err(|e| format!("Cannot create file: {e}"))?;
            let writer = BufWriter::new(file);
            let mut encoder = JpegEncoder::new_with_quality(writer, quality);
            encoder.encode_image(&flat).map_err(|e| format!("JPEG encode failed: {e}"))
        }
        ImageFormat::WebP => {
            if allow_transparency {
                img.save(path).map_err(|e| format!("WebP save failed: {e}"))
            } else {
                flatten_alpha(img).save(path).map_err(|e| format!("WebP save failed: {e}"))
            }
        }
    }
}
/// Composite RGBA pixels onto an opaque black background, returning an RGB image.
///
/// Each channel is scaled by the pixel's alpha (a black background contributes
/// nothing to the blend) and rounded to the nearest integer — plain `as u8`
/// truncation would bias every channel downward by up to one step.
fn flatten_alpha(img: image::ImageBuffer<image::Rgba<u8>, Vec<u8>>) -> image::ImageBuffer<image::Rgb<u8>, Vec<u8>> {
    use image::{ImageBuffer, Rgb};
    ImageBuffer::from_fn(img.width(), img.height(), |x, y| {
        let px = img.get_pixel(x, y);
        let alpha = px[3] as f32 / 255.0;
        // `as u8` saturates on out-of-range floats, so 255.0-ish results are safe.
        let scale = |c: u8| (c as f32 * alpha).round() as u8;
        Rgb([scale(px[0]), scale(px[1]), scale(px[2])])
    })
}

View File

@ -5,12 +5,13 @@
pub mod audio_exporter;
pub mod dialog;
pub mod image_exporter;
pub mod video_exporter;
pub mod readback_pipeline;
pub mod perf_metrics;
pub mod cpu_yuv_converter;
use lightningbeam_core::export::{AudioExportSettings, VideoExportSettings, ExportProgress};
use lightningbeam_core::export::{AudioExportSettings, ImageExportSettings, VideoExportSettings, ExportProgress};
use lightningbeam_core::document::Document;
use lightningbeam_core::renderer::ImageCache;
use lightningbeam_core::video::VideoManager;
@ -66,6 +67,25 @@ pub struct VideoExportState {
perf_metrics: Option<perf_metrics::ExportMetrics>,
}
/// State for a single-frame image export (runs on the GPU render thread, one frame per update).
pub struct ImageExportState {
    /// User-chosen format / frame time / quality / transparency options.
    pub settings: ImageExportSettings,
    /// Destination file for the encoded image.
    pub output_path: PathBuf,
    /// Resolved pixel dimensions (after applying any width/height overrides).
    pub width: u32,
    pub height: u32,
    /// True once rendering has been submitted; the next call reads back and encodes.
    pub rendered: bool,
    /// GPU resources allocated on the first render call.
    pub gpu_resources: Option<video_exporter::ExportGpuResources>,
    /// Output RGBA texture — kept separate from gpu_resources to avoid split-borrow issues.
    pub output_texture: Option<wgpu::Texture>,
    /// View for output_texture.
    pub output_texture_view: Option<wgpu::TextureView>,
    /// Staging buffer for synchronous GPU→CPU readback.
    pub staging_buffer: Option<wgpu::Buffer>,
}
/// Export orchestrator that manages the export process
pub struct ExportOrchestrator {
/// Channel for receiving progress updates (video or audio-only export)
@ -82,6 +102,9 @@ pub struct ExportOrchestrator {
/// Parallel audio+video export state
parallel_export: Option<ParallelExportState>,
/// Single-frame image export state
image_state: Option<ImageExportState>,
}
/// State for parallel audio+video export
@ -115,6 +138,7 @@ impl ExportOrchestrator {
cancel_flag: Arc::new(AtomicBool::new(false)),
video_state: None,
parallel_export: None,
image_state: None,
}
}
@ -446,12 +470,8 @@ impl ExportOrchestrator {
/// Check if an export is in progress
pub fn is_exporting(&self) -> bool {
// Check parallel export first
if self.parallel_export.is_some() {
return true;
}
// Check single export
if self.parallel_export.is_some() { return true; }
if self.image_state.is_some() { return true; }
if let Some(handle) = &self.thread_handle {
!handle.is_finished()
} else {
@ -459,6 +479,168 @@ impl ExportOrchestrator {
}
}
/// Enqueue a single-frame image export. Call `render_image_frame()` from the
/// egui update loop (where the wgpu device/queue are available) to complete it.
pub fn start_image_export(
    &mut self,
    settings: ImageExportSettings,
    output_path: PathBuf,
    doc_width: u32,
    doc_height: u32,
) {
    self.cancel_flag.store(false, Ordering::Relaxed);
    // Fall back to the document dimensions when no override is set, and
    // clamp to at least 1x1 so the GPU texture is always valid.
    let (width, height) = (
        settings.width.unwrap_or(doc_width).max(1),
        settings.height.unwrap_or(doc_height).max(1),
    );
    self.image_state = Some(ImageExportState {
        width,
        height,
        settings,
        output_path,
        rendered: false,
        gpu_resources: None,
        output_texture: None,
        output_texture_view: None,
        staging_buffer: None,
    });
}
/// Drive the single-frame image export started by `start_image_export()`.
///
/// Two-call protocol: the first call renders the frame into a GPU texture and
/// queues a texture→buffer copy; the second maps the staging buffer, strips
/// row padding, and encodes the pixels to disk.
///
/// Returns `Ok(true)` when done (success or cancelled), `Ok(false)` if another
/// call is needed next frame.
///
/// # Errors
/// Rendering, buffer-mapping, and encoding failures are returned as strings.
/// The pending state is always cleared on error so the export is not retried
/// (and `is_exporting()` does not report true) forever.
pub fn render_image_frame(
    &mut self,
    document: &mut Document,
    device: &wgpu::Device,
    queue: &wgpu::Queue,
    renderer: &mut vello::Renderer,
    image_cache: &mut ImageCache,
    video_manager: &Arc<std::sync::Mutex<VideoManager>>,
) -> Result<bool, String> {
    // wgpu requires bytes_per_row to be a multiple of COPY_BYTES_PER_ROW_ALIGNMENT
    // (256); shared by the copy on call 1 and the readback on call 2.
    fn aligned_bytes_per_row(width: u32) -> u32 {
        let align = wgpu::COPY_BYTES_PER_ROW_ALIGNMENT;
        (width * 4 + align - 1) / align * align
    }

    if self.cancel_flag.load(Ordering::Relaxed) {
        self.image_state = None;
        return Ok(true);
    }
    let state = match self.image_state.as_mut() {
        Some(s) => s,
        None => return Ok(true),
    };

    if !state.rendered {
        // ── First call: render the frame to the GPU output texture ────────
        let w = state.width;
        let h = state.height;
        if state.gpu_resources.is_none() {
            state.gpu_resources = Some(video_exporter::ExportGpuResources::new(device, w, h));
        }
        if state.output_texture.is_none() {
            let tex = device.create_texture(&wgpu::TextureDescriptor {
                label: Some("image_export_output"),
                size: wgpu::Extent3d { width: w, height: h, depth_or_array_layers: 1 },
                mip_level_count: 1,
                sample_count: 1,
                dimension: wgpu::TextureDimension::D2,
                format: wgpu::TextureFormat::Rgba8Unorm,
                usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
                view_formats: &[],
            });
            state.output_texture_view = Some(tex.create_view(&wgpu::TextureViewDescriptor::default()));
            state.output_texture = Some(tex);
        }
        // Borrow separately to avoid a split-borrow conflict (gpu mutably, view immutably).
        let gpu = state.gpu_resources.as_mut().unwrap();
        let output_view = state.output_texture_view.as_ref().unwrap();
        let encoder = match video_exporter::render_frame_to_gpu_rgba(
            document,
            state.settings.time,
            w, h,
            device, queue, renderer, image_cache, video_manager,
            gpu,
            output_view,
        ) {
            Ok(enc) => enc,
            Err(e) => {
                // Clear the pending state so the failed render is not retried
                // on every subsequent frame.
                self.image_state = None;
                return Err(e);
            }
        };
        queue.submit(Some(encoder.finish()));

        // Create a staging buffer for GPU→CPU readback of the rendered frame.
        let bytes_per_row = aligned_bytes_per_row(w);
        let staging = device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("image_export_staging"),
            // Widen before multiplying so very large images can't overflow u32.
            size: bytes_per_row as u64 * h as u64,
            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
            mapped_at_creation: false,
        });
        let mut copy_enc = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {
            label: Some("image_export_copy"),
        });
        let output_tex = state.output_texture.as_ref().unwrap();
        copy_enc.copy_texture_to_buffer(
            wgpu::TexelCopyTextureInfo {
                texture: output_tex,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::TexelCopyBufferInfo {
                buffer: &staging,
                layout: wgpu::TexelCopyBufferLayout {
                    offset: 0,
                    bytes_per_row: Some(bytes_per_row),
                    rows_per_image: Some(h),
                },
            },
            wgpu::Extent3d { width: w, height: h, depth_or_array_layers: 1 },
        );
        queue.submit(Some(copy_enc.finish()));
        state.staging_buffer = Some(staging);
        state.rendered = true;
        return Ok(false); // Come back next frame to read the result.
    }

    // ── Second call: map the staging buffer, encode, and save ─────────────
    let staging = match state.staging_buffer.as_ref() {
        Some(b) => b,
        None => { self.image_state = None; return Ok(true); }
    };
    // Map synchronously; capture the map result so a failed map surfaces as an
    // error instead of panicking inside `get_mapped_range`.
    let (map_tx, map_rx) = std::sync::mpsc::channel();
    let slice = staging.slice(..);
    slice.map_async(wgpu::MapMode::Read, move |result| {
        let _ = map_tx.send(result);
    });
    let _ = device.poll(wgpu::PollType::wait_indefinitely());
    match map_rx.recv() {
        Ok(Ok(())) => {}
        Ok(Err(e)) => {
            self.image_state = None;
            return Err(format!("Staging buffer map failed: {e}"));
        }
        Err(_) => {
            self.image_state = None;
            return Err("Staging buffer map callback never completed".to_string());
        }
    }

    let w = state.width;
    let h = state.height;
    let bytes_per_row = aligned_bytes_per_row(w);
    let pixels: Vec<u8> = {
        let mapped = slice.get_mapped_range();
        // Strip row padding: copy only w*4 bytes from each bytes_per_row-wide row.
        // Index in usize so row * bytes_per_row can't overflow u32.
        let row_bytes = (w * 4) as usize;
        let mut out = Vec::with_capacity(row_bytes * h as usize);
        for row in 0..h as usize {
            let start = row * bytes_per_row as usize;
            out.extend_from_slice(&mapped[start..start + row_bytes]);
        }
        out
    };
    staging.unmap();

    let result = image_exporter::save_rgba_image(
        &pixels, w, h,
        state.settings.format,
        state.settings.quality,
        state.settings.allow_transparency,
        &state.output_path,
    );
    self.image_state = None;
    result.map(|_| true)
}
/// Wait for the export to complete
///
/// This blocks until the export thread finishes.

View File

@ -2869,14 +2869,42 @@ impl EditorApp {
}
MenuAction::Export => {
println!("Menu: Export");
// Open export dialog with calculated timeline endpoint
let timeline_endpoint = self.action_executor.document().calculate_timeline_endpoint();
// Derive project name from the .beam file path, falling back to document name
let project_name = self.current_file_path.as_ref()
.and_then(|p| p.file_stem())
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_else(|| self.action_executor.document().name.clone());
self.export_dialog.open(timeline_endpoint, &project_name);
// Build document hint for smart export-type defaulting.
let hint = {
use lightningbeam_core::layer::AnyLayer;
use export::dialog::DocumentHint;
// Walk the layer tree, flagging each kind of content encountered.
fn scan(layers: &[AnyLayer], hint: &mut DocumentHint) {
    for layer in layers {
        let flag = match layer {
            AnyLayer::Video(_) => &mut hint.has_video,
            AnyLayer::Audio(_) => &mut hint.has_audio,
            AnyLayer::Raster(_) => &mut hint.has_raster,
            AnyLayer::Vector(_) | AnyLayer::Effect(_) => &mut hint.has_vector,
            // Groups contribute nothing themselves; recurse into children.
            AnyLayer::Group(g) => { scan(&g.children, hint); continue; }
        };
        *flag = true;
    }
}
let doc = self.action_executor.document();
let mut h = DocumentHint {
has_video: false,
has_audio: false,
has_raster: false,
has_vector: false,
current_time: doc.current_time,
doc_width: doc.width as u32,
doc_height: doc.height as u32,
};
scan(&doc.root.children, &mut h);
h
};
self.export_dialog.open(timeline_endpoint, &project_name, &hint);
}
MenuAction::Quit => {
println!("Menu: Quit");
@ -5180,6 +5208,17 @@ impl eframe::App for EditorApp {
let export_started = if let Some(orchestrator) = &mut self.export_orchestrator {
match export_result {
ExportResult::Image(settings, output_path) => {
println!("🖼 [MAIN] Starting image export: {}", output_path.display());
let doc = self.action_executor.document();
orchestrator.start_image_export(
settings,
output_path,
doc.width as u32,
doc.height as u32,
);
false // image export is silent (no progress dialog)
}
ExportResult::AudioOnly(settings, output_path) => {
println!("🎵 [MAIN] Starting audio-only export: {}", output_path.display());
@ -5290,6 +5329,7 @@ impl eframe::App for EditorApp {
let mut temp_image_cache = lightningbeam_core::renderer::ImageCache::new();
if let Some(renderer) = &mut temp_renderer {
// Drive incremental video export.
if let Ok(has_more) = orchestrator.render_next_video_frame(
self.action_executor.document_mut(),
device,
@ -5299,10 +5339,23 @@ impl eframe::App for EditorApp {
&self.video_manager,
) {
if has_more {
// More frames to render - request repaint for next frame
ctx.request_repaint();
}
}
// Drive single-frame image export (two-frame async: render then readback).
match orchestrator.render_image_frame(
self.action_executor.document_mut(),
device,
queue,
renderer,
&mut temp_image_cache,
&self.video_manager,
) {
Ok(false) => { ctx.request_repaint(); } // readback pending
Ok(true) => {} // done or cancelled
Err(e) => { eprintln!("Image export failed: {e}"); }
}
}
}
}

View File

@ -11803,9 +11803,17 @@ impl PaneRenderer for StagePane {
shared.action_executor.document().get_layer(&id)
}).map_or(false, |l| matches!(l, lightningbeam_core::layer::AnyLayer::Raster(_)));
if is_raster_paint {
// Only override the cursor when no higher-order layer (e.g. a modal dialog)
// is covering the canvas at this position.
let canvas_is_topmost = ui.ctx()
.layer_id_at(pos)
.map_or(true, |l| l == ui.layer_id());
if is_raster_paint && canvas_is_topmost {
ui.ctx().set_cursor_icon(egui::CursorIcon::None);
self.draw_brush_cursor(ui, rect, pos, shared);
} else if is_raster_paint {
// A modal is covering the canvas — let the system cursor show normally.
} else {
crate::custom_cursor::set(
ui.ctx(),