Add asset pane

This commit is contained in:
Skyler Lehmkuhl 2025-11-30 06:54:53 -05:00
parent 8f830b7799
commit 4d1e052ee7
22 changed files with 2311 additions and 179 deletions

View File

@ -7,13 +7,16 @@ members = [
[workspace.dependencies] [workspace.dependencies]
# UI Framework (using eframe for simplified integration) # UI Framework (using eframe for simplified integration)
eframe = { version = "0.29", default-features = true, features = ["wgpu"] } # Note: Upgraded from 0.29 to 0.31 to fix Linux IME/keyboard input issues
egui_extras = { version = "0.29", features = ["image", "svg"] } # See: https://github.com/emilk/egui/pull/5198
egui-wgpu = "0.29" eframe = { version = "0.31", default-features = true, features = ["wgpu"] }
egui_extras = { version = "0.31", features = ["image", "svg"] }
egui-wgpu = "0.31"
# GPU Rendering # GPU Rendering
vello = "0.3" # vello 0.5 uses wgpu 24, matching eframe 0.31
wgpu = "22" vello = "0.5"
wgpu = "24"
kurbo = { version = "0.11", features = ["serde"] } kurbo = { version = "0.11", features = ["serde"] }
peniko = "0.5" peniko = "0.5"

View File

@ -8,11 +8,14 @@ serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
# UI framework (for Color32 conversion) # UI framework (for Color32 conversion)
egui = "0.29" egui = "0.31"
# Geometry and rendering # Geometry and rendering
kurbo = { workspace = true } kurbo = { workspace = true }
vello = { workspace = true } vello = { workspace = true }
# Image decoding for image fills
image = { workspace = true }
# Unique identifiers # Unique identifiers
uuid = { version = "1.0", features = ["v4", "serde"] } uuid = { version = "1.0", features = ["v4", "serde"] }

View File

@ -0,0 +1,148 @@
//! Add clip instance action
//!
//! Handles adding a clip instance to a layer.
use crate::action::Action;
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
use uuid::Uuid;
/// Action that adds a clip instance to a layer
///
/// Implements the undoable-action pattern: `execute` pushes the stored
/// clip instance onto the target layer, `rollback` removes it again by id.
pub struct AddClipInstanceAction {
/// The target layer ID
layer_id: Uuid,
/// The clip instance to add
clip_instance: ClipInstance,
/// Whether the action has been executed (for rollback)
/// Guards `rollback` so it is a no-op if `execute` never ran.
executed: bool,
}
impl AddClipInstanceAction {
    /// Build an action that will insert `clip_instance` into the layer
    /// identified by `layer_id` when executed.
    ///
    /// The action starts in the not-yet-executed state, so `rollback`
    /// before `execute` is a no-op.
    pub fn new(layer_id: Uuid, clip_instance: ClipInstance) -> Self {
        Self {
            layer_id,
            clip_instance,
            executed: false,
        }
    }

    /// ID of the layer this action targets.
    pub fn layer_id(&self) -> Uuid {
        self.layer_id
    }

    /// ID of the clip instance that will be (or was) added.
    ///
    /// Stable across execute/rollback cycles because the instance itself
    /// is stored on the action.
    pub fn clip_instance_id(&self) -> Uuid {
        self.clip_instance.id
    }
}
impl Action for AddClipInstanceAction {
fn execute(&mut self, document: &mut Document) {
if let Some(layer) = document.get_layer_mut(&self.layer_id) {
match layer {
AnyLayer::Vector(vector_layer) => {
vector_layer.clip_instances.push(self.clip_instance.clone());
}
AnyLayer::Audio(audio_layer) => {
audio_layer.clip_instances.push(self.clip_instance.clone());
}
AnyLayer::Video(video_layer) => {
video_layer.clip_instances.push(self.clip_instance.clone());
}
}
self.executed = true;
}
}
fn rollback(&mut self, document: &mut Document) {
if !self.executed {
return;
}
let instance_id = self.clip_instance.id;
if let Some(layer) = document.get_layer_mut(&self.layer_id) {
match layer {
AnyLayer::Vector(vector_layer) => {
vector_layer
.clip_instances
.retain(|ci| ci.id != instance_id);
}
AnyLayer::Audio(audio_layer) => {
audio_layer
.clip_instances
.retain(|ci| ci.id != instance_id);
}
AnyLayer::Video(video_layer) => {
video_layer
.clip_instances
.retain(|ci| ci.id != instance_id);
}
}
self.executed = false;
}
}
fn description(&self) -> String {
"Add clip instance".to_string()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::layer::VectorLayer;

    /// Execute then rollback should add and then remove exactly one instance.
    #[test]
    fn test_add_clip_instance_to_vector_layer() {
        let mut document = Document::new("Test");

        // Set up a single vector layer to target.
        let new_layer = VectorLayer::new("Test Layer");
        let target_layer_id = new_layer.layer.id;
        document.root_mut().add_child(AnyLayer::Vector(new_layer));

        // The referenced clip doesn't need to exist for the action itself,
        // so a fresh random clip_id is enough.
        let instance = ClipInstance::new(Uuid::new_v4());
        let expected_instance_id = instance.id;

        let mut action = AddClipInstanceAction::new(target_layer_id, instance);
        action.execute(&mut document);

        // The instance must now be present on the layer.
        match document.get_layer(&target_layer_id) {
            Some(AnyLayer::Vector(vl)) => {
                assert_eq!(vl.clip_instances.len(), 1);
                assert_eq!(vl.clip_instances[0].id, expected_instance_id);
            }
            _ => panic!("Layer not found"),
        }

        action.rollback(&mut document);

        // Rollback must leave the layer empty again.
        match document.get_layer(&target_layer_id) {
            Some(AnyLayer::Vector(vl)) => assert_eq!(vl.clip_instances.len(), 0),
            _ => panic!("Layer not found"),
        }
    }

    #[test]
    fn test_add_clip_instance_description() {
        let action = AddClipInstanceAction::new(Uuid::new_v4(), ClipInstance::new(Uuid::new_v4()));
        assert_eq!(action.description(), "Add clip instance");
    }
}

View File

@ -3,6 +3,7 @@
//! This module contains all the concrete action types that can be executed //! This module contains all the concrete action types that can be executed
//! through the action system. //! through the action system.
pub mod add_clip_instance;
pub mod add_layer; pub mod add_layer;
pub mod add_shape; pub mod add_shape;
pub mod move_clip_instances; pub mod move_clip_instances;
@ -13,6 +14,7 @@ pub mod transform_clip_instances;
pub mod transform_objects; pub mod transform_objects;
pub mod trim_clip_instances; pub mod trim_clip_instances;
pub use add_clip_instance::AddClipInstanceAction;
pub use add_layer::AddLayerAction; pub use add_layer::AddLayerAction;
pub use add_shape::AddShapeAction; pub use add_shape::AddShapeAction;
pub use move_clip_instances::MoveClipInstancesAction; pub use move_clip_instances::MoveClipInstancesAction;

View File

@ -15,6 +15,7 @@ use crate::layer::AnyLayer;
use crate::layer_tree::LayerTree; use crate::layer_tree::LayerTree;
use crate::object::Transform; use crate::object::Transform;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use uuid::Uuid; use uuid::Uuid;
use vello::kurbo::{Rect, Affine, Shape as KurboShape}; use vello::kurbo::{Rect, Affine, Shape as KurboShape};
@ -158,6 +159,71 @@ impl VectorClip {
} }
} }
/// Image asset for static images
///
/// Images can be used as fill textures for shapes or (in the future)
/// added to video tracks as still frames. Unlike clips, images don't
/// have a duration or timeline properties.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImageAsset {
/// Unique identifier
pub id: Uuid,
/// Asset name (usually derived from filename)
pub name: String,
/// Original file path
pub path: PathBuf,
/// Image width in pixels
/// NOTE(review): captured at import time — assumed to match the actual
/// file; not re-validated against the decoded pixels here.
pub width: u32,
/// Image height in pixels
pub height: u32,
/// Embedded image data (for project portability)
/// If None, the image will be loaded from path when needed
/// Omitted from serialized output entirely when None.
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Vec<u8>>,
}
impl ImageAsset {
    /// Create a new image asset that will be loaded from `path` on demand
    /// (no embedded data).
    ///
    /// `width`/`height` are the pixel dimensions of the source image and
    /// are expected to match the file at `path`.
    pub fn new(
        name: impl Into<String>,
        path: impl Into<PathBuf>,
        width: u32,
        height: u32,
    ) -> Self {
        Self {
            id: Uuid::new_v4(),
            name: name.into(),
            path: path.into(),
            width,
            height,
            data: None,
        }
    }

    /// Create an asset with the raw file bytes embedded, for project
    /// portability.
    ///
    /// Delegates to [`ImageAsset::new`] so the two constructors cannot
    /// drift apart, then attaches the embedded data.
    pub fn with_data(
        name: impl Into<String>,
        path: impl Into<PathBuf>,
        width: u32,
        height: u32,
        data: Vec<u8>,
    ) -> Self {
        let mut asset = Self::new(name, path, width, height);
        asset.data = Some(data);
        asset
    }
}
/// Video clip referencing an external video file /// Video clip referencing an external video file
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct VideoClip { pub struct VideoClip {

View File

@ -3,7 +3,7 @@
//! The Document represents a complete animation project with settings //! The Document represents a complete animation project with settings
//! and a root graphics object containing the scene graph. //! and a root graphics object containing the scene graph.
use crate::clip::{AudioClip, VideoClip, VectorClip}; use crate::clip::{AudioClip, ImageAsset, VideoClip, VectorClip};
use crate::layer::AnyLayer; use crate::layer::AnyLayer;
use crate::shape::ShapeColor; use crate::shape::ShapeColor;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -103,6 +103,9 @@ pub struct Document {
/// Audio clip library - sampled audio and MIDI clips /// Audio clip library - sampled audio and MIDI clips
pub audio_clips: HashMap<Uuid, AudioClip>, pub audio_clips: HashMap<Uuid, AudioClip>,
/// Image asset library - static images for fill textures
pub image_assets: HashMap<Uuid, ImageAsset>,
/// Current playback time in seconds /// Current playback time in seconds
#[serde(skip)] #[serde(skip)]
pub current_time: f64, pub current_time: f64,
@ -122,6 +125,7 @@ impl Default for Document {
vector_clips: HashMap::new(), vector_clips: HashMap::new(),
video_clips: HashMap::new(), video_clips: HashMap::new(),
audio_clips: HashMap::new(), audio_clips: HashMap::new(),
image_assets: HashMap::new(),
current_time: 0.0, current_time: 0.0,
} }
} }
@ -272,6 +276,30 @@ impl Document {
pub fn remove_audio_clip(&mut self, id: &Uuid) -> Option<AudioClip> { pub fn remove_audio_clip(&mut self, id: &Uuid) -> Option<AudioClip> {
self.audio_clips.remove(id) self.audio_clips.remove(id)
} }
// === IMAGE ASSET METHODS ===
/// Add an image asset to the library
pub fn add_image_asset(&mut self, asset: ImageAsset) -> Uuid {
let id = asset.id;
self.image_assets.insert(id, asset);
id
}
/// Get an image asset by ID
pub fn get_image_asset(&self, id: &Uuid) -> Option<&ImageAsset> {
self.image_assets.get(id)
}
/// Get a mutable image asset by ID
pub fn get_image_asset_mut(&mut self, id: &Uuid) -> Option<&mut ImageAsset> {
self.image_assets.get_mut(id)
}
/// Remove an image asset from the library
pub fn remove_image_asset(&mut self, id: &Uuid) -> Option<ImageAsset> {
self.image_assets.remove(id)
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -0,0 +1,104 @@
//! File type detection and supported extension constants
//!
//! This module provides shared file extension constants that can be used
//! across the codebase for file dialogs, import detection, etc.
/// Supported image file extensions
pub const IMAGE_EXTENSIONS: &[&str] = &["png", "gif", "avif", "jpg", "jpeg"];

/// Supported audio file extensions
pub const AUDIO_EXTENSIONS: &[&str] = &["mp3", "wav", "aiff", "ogg", "flac"];

/// Supported video file extensions
pub const VIDEO_EXTENSIONS: &[&str] = &["mp4", "mov", "avi", "mkv", "webm", "m4v"];

/// Supported MIDI file extensions
pub const MIDI_EXTENSIONS: &[&str] = &["mid", "midi"];

// Note: SVG import deferred to future task
// Note: .beam project files handled separately in file save/load feature

/// File type categories for import routing
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileType {
    Image,
    Audio,
    Video,
    Midi,
}

/// Every (extension list, category) pair, in lookup priority order.
///
/// Keeping the mapping in one table means `get_file_type` and
/// `all_supported_extensions` cannot drift out of sync with the
/// extension constants above.
const EXTENSION_CATEGORIES: &[(&[&str], FileType)] = &[
    (IMAGE_EXTENSIONS, FileType::Image),
    (AUDIO_EXTENSIONS, FileType::Audio),
    (VIDEO_EXTENSIONS, FileType::Video),
    (MIDI_EXTENSIONS, FileType::Midi),
];

/// Detect file type from extension string (case-insensitive).
///
/// Returns `None` if the extension is not recognized.
///
/// # Example
/// ```
/// use lightningbeam_core::file_types::get_file_type;
///
/// assert_eq!(get_file_type("png"), Some(lightningbeam_core::file_types::FileType::Image));
/// assert_eq!(get_file_type("MP3"), Some(lightningbeam_core::file_types::FileType::Audio));
/// assert_eq!(get_file_type("unknown"), None);
/// ```
pub fn get_file_type(extension: &str) -> Option<FileType> {
    // Lowercase once, then scan the shared category table.
    let ext = extension.to_lowercase();
    EXTENSION_CATEGORIES
        .iter()
        .find(|(exts, _)| exts.contains(&ext.as_str()))
        .map(|&(_, file_type)| file_type)
}

/// Get all supported extensions as a single flat list
///
/// Useful for "All Supported Files" filter in file dialogs.
pub fn all_supported_extensions() -> Vec<&'static str> {
    // Preallocate the exact total so the list is built in one allocation.
    let total: usize = EXTENSION_CATEGORIES.iter().map(|(exts, _)| exts.len()).sum();
    let mut all = Vec::with_capacity(total);
    for (exts, _) in EXTENSION_CATEGORIES {
        all.extend_from_slice(exts);
    }
    all
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_file_type() {
        // Case-insensitive matching per category.
        for ext in ["png", "PNG", "jpg", "jpeg"] {
            assert_eq!(get_file_type(ext), Some(FileType::Image));
        }
        for ext in ["mp3", "wav", "flac"] {
            assert_eq!(get_file_type(ext), Some(FileType::Audio));
        }
        for ext in ["mp4", "webm"] {
            assert_eq!(get_file_type(ext), Some(FileType::Video));
        }
        for ext in ["mid", "midi"] {
            assert_eq!(get_file_type(ext), Some(FileType::Midi));
        }
        // Unrecognized and deliberately unsupported (SVG deferred) inputs.
        assert_eq!(get_file_type("unknown"), None);
        assert_eq!(get_file_type("svg"), None);
    }

    #[test]
    fn test_all_supported_extensions() {
        let all = all_supported_extensions();
        // One representative extension per category.
        for ext in ["png", "mp3", "mp4", "mid"] {
            assert!(all.contains(&ext));
        }
    }
}

View File

@ -324,6 +324,21 @@ impl VectorLayer {
} }
} }
/// Audio layer subtype - distinguishes sampled audio from MIDI
///
/// Serialized alongside the layer; documents saved before this field
/// existed fall back to the `Default` impl (sampled audio).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum AudioLayerType {
/// Sampled audio (WAV, MP3, etc.)
Sampled,
/// MIDI sequence
Midi,
}
impl Default for AudioLayerType {
fn default() -> Self {
AudioLayerType::Sampled
}
}
/// Audio layer containing audio clips /// Audio layer containing audio clips
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AudioLayer { pub struct AudioLayer {
@ -333,6 +348,10 @@ pub struct AudioLayer {
/// Clip instances (references to audio clips) /// Clip instances (references to audio clips)
/// AudioLayer can contain instances of AudioClips (sampled or MIDI) /// AudioLayer can contain instances of AudioClips (sampled or MIDI)
pub clip_instances: Vec<ClipInstance>, pub clip_instances: Vec<ClipInstance>,
/// Audio layer subtype (sampled vs MIDI)
#[serde(default)]
pub audio_layer_type: AudioLayerType,
} }
impl LayerTrait for AudioLayer { impl LayerTrait for AudioLayer {
@ -406,11 +425,26 @@ impl LayerTrait for AudioLayer {
} }
impl AudioLayer { impl AudioLayer {
/// Create a new audio layer /// Create a new sampled audio layer
pub fn new(name: impl Into<String>) -> Self { pub fn new(name: impl Into<String>) -> Self {
Self { Self {
layer: Layer::new(LayerType::Audio, name), layer: Layer::new(LayerType::Audio, name),
clip_instances: Vec::new(), clip_instances: Vec::new(),
audio_layer_type: AudioLayerType::Sampled,
}
}
/// Create a new sampled audio layer (explicit)
pub fn new_sampled(name: impl Into<String>) -> Self {
Self::new(name)
}
/// Create a new MIDI layer
pub fn new_midi(name: impl Into<String>) -> Self {
Self {
layer: Layer::new(LayerType::Audio, name),
clip_instances: Vec::new(),
audio_layer_type: AudioLayerType::Midi,
} }
} }
} }

View File

@ -28,3 +28,4 @@ pub mod gap_handling;
pub mod intersection_graph; pub mod intersection_graph;
pub mod segment_builder; pub mod segment_builder;
pub mod planar_graph; pub mod planar_graph;
pub mod file_types;

View File

@ -29,6 +29,8 @@ pub enum PaneType {
NodeEditor, NodeEditor,
/// Preset/asset browser /// Preset/asset browser
PresetBrowser, PresetBrowser,
/// Asset library for browsing clips
AssetLibrary,
} }
impl PaneType { impl PaneType {
@ -43,6 +45,7 @@ impl PaneType {
PaneType::PianoRoll => "Piano Roll", PaneType::PianoRoll => "Piano Roll",
PaneType::NodeEditor => "Node Editor", PaneType::NodeEditor => "Node Editor",
PaneType::PresetBrowser => "Preset Browser", PaneType::PresetBrowser => "Preset Browser",
PaneType::AssetLibrary => "Asset Library",
} }
} }
@ -59,6 +62,7 @@ impl PaneType {
PaneType::PianoRoll => "piano-roll.svg", PaneType::PianoRoll => "piano-roll.svg",
PaneType::NodeEditor => "node-editor.svg", PaneType::NodeEditor => "node-editor.svg",
PaneType::PresetBrowser => "stage.svg", // TODO: needs own icon PaneType::PresetBrowser => "stage.svg", // TODO: needs own icon
PaneType::AssetLibrary => "stage.svg", // TODO: needs own icon
} }
} }
@ -74,6 +78,7 @@ impl PaneType {
"pianoroll" => Some(PaneType::PianoRoll), "pianoroll" => Some(PaneType::PianoRoll),
"nodeeditor" => Some(PaneType::NodeEditor), "nodeeditor" => Some(PaneType::NodeEditor),
"presetbrowser" => Some(PaneType::PresetBrowser), "presetbrowser" => Some(PaneType::PresetBrowser),
"assetlibrary" => Some(PaneType::AssetLibrary),
_ => None, _ => None,
} }
} }
@ -89,6 +94,7 @@ impl PaneType {
PaneType::NodeEditor, PaneType::NodeEditor,
PaneType::PianoRoll, PaneType::PianoRoll,
PaneType::PresetBrowser, PaneType::PresetBrowser,
PaneType::AssetLibrary,
] ]
} }
@ -103,6 +109,7 @@ impl PaneType {
PaneType::PianoRoll => "pianoRoll", PaneType::PianoRoll => "pianoRoll",
PaneType::NodeEditor => "nodeEditor", PaneType::NodeEditor => "nodeEditor",
PaneType::PresetBrowser => "presetBrowser", PaneType::PresetBrowser => "presetBrowser",
PaneType::AssetLibrary => "assetLibrary",
} }
} }
} }

View File

@ -3,28 +3,93 @@
//! Renders documents to Vello scenes for GPU-accelerated display. //! Renders documents to Vello scenes for GPU-accelerated display.
use crate::animation::TransformProperty; use crate::animation::TransformProperty;
use crate::clip::ImageAsset;
use crate::document::Document; use crate::document::Document;
use crate::layer::{AnyLayer, LayerTrait, VectorLayer}; use crate::layer::{AnyLayer, LayerTrait, VectorLayer};
use crate::object::ShapeInstance; use crate::object::ShapeInstance;
use kurbo::{Affine, Shape}; use kurbo::{Affine, Shape};
use std::collections::HashMap;
use std::sync::Arc;
use uuid::Uuid;
use vello::kurbo::Rect; use vello::kurbo::Rect;
use vello::peniko::Fill; use vello::peniko::{Blob, Fill, Image, ImageFormat};
use vello::Scene; use vello::Scene;
/// Cache for decoded image data to avoid re-decoding every frame
///
/// Keyed by the `ImageAsset` id; values are `Arc`-wrapped so lookups hand
/// out cheap reference-counted clones of the decoded image.
pub struct ImageCache {
cache: HashMap<Uuid, Arc<Image>>,
}
impl ImageCache {
    /// Create a new, empty image cache.
    pub fn new() -> Self {
        Self {
            cache: HashMap::new(),
        }
    }

    /// Return the decoded image for `asset`, decoding and storing it on
    /// first request.
    ///
    /// Returns `None` when decoding fails; in that case nothing is
    /// cached, so the decode is attempted again on the next call.
    pub fn get_or_decode(&mut self, asset: &ImageAsset) -> Option<Arc<Image>> {
        match self.cache.get(&asset.id) {
            Some(cached) => Some(Arc::clone(cached)),
            None => {
                let decoded = Arc::new(decode_image_asset(asset)?);
                self.cache.insert(asset.id, Arc::clone(&decoded));
                Some(decoded)
            }
        }
    }

    /// Drop the cached entry for an asset that was deleted or modified.
    pub fn invalidate(&mut self, id: &Uuid) {
        self.cache.remove(id);
    }

    /// Drop every cached image.
    pub fn clear(&mut self) {
        self.cache.clear();
    }
}
impl Default for ImageCache {
// Same as `ImageCache::new()`: an empty cache.
fn default() -> Self {
Self::new()
}
}
/// Decode an image asset into a peniko [`Image`].
///
/// Returns `None` if the asset has no embedded data or the bytes cannot
/// be decoded. The dimensions of the returned image come from the decoded
/// pixel buffer itself rather than the asset's stored `width`/`height`
/// metadata, so stale metadata cannot produce a buffer/dimension mismatch.
fn decode_image_asset(asset: &ImageAsset) -> Option<Image> {
    // Embedded bytes are required; loading from `asset.path` is not
    // attempted here.
    let data = asset.data.as_ref()?;

    // Decode with the `image` crate and normalize to RGBA8.
    let decoded = image::load_from_memory(data).ok()?;
    let rgba = decoded.to_rgba8();
    let (width, height) = (rgba.width(), rgba.height());

    Some(Image::new(
        Blob::from(rgba.into_raw()),
        ImageFormat::Rgba8,
        width,
        height,
    ))
}
/// Render a document to a Vello scene /// Render a document to a Vello scene
pub fn render_document(document: &Document, scene: &mut Scene) { pub fn render_document(document: &Document, scene: &mut Scene, image_cache: &mut ImageCache) {
render_document_with_transform(document, scene, Affine::IDENTITY); render_document_with_transform(document, scene, Affine::IDENTITY, image_cache);
} }
/// Render a document to a Vello scene with a base transform /// Render a document to a Vello scene with a base transform
/// The base transform is composed with all object transforms (useful for camera zoom/pan) /// The base transform is composed with all object transforms (useful for camera zoom/pan)
pub fn render_document_with_transform(document: &Document, scene: &mut Scene, base_transform: Affine) { pub fn render_document_with_transform(document: &Document, scene: &mut Scene, base_transform: Affine, image_cache: &mut ImageCache) {
// 1. Draw background // 1. Draw background
render_background(document, scene, base_transform); render_background(document, scene, base_transform);
// 2. Recursively render the root graphics object at current time // 2. Recursively render the root graphics object at current time
let time = document.current_time; let time = document.current_time;
render_graphics_object(document, time, scene, base_transform); render_graphics_object(document, time, scene, base_transform, image_cache);
} }
/// Draw the document background /// Draw the document background
@ -44,7 +109,7 @@ fn render_background(document: &Document, scene: &mut Scene, base_transform: Aff
} }
/// Recursively render the root graphics object and its children /// Recursively render the root graphics object and its children
fn render_graphics_object(document: &Document, time: f64, scene: &mut Scene, base_transform: Affine) { fn render_graphics_object(document: &Document, time: f64, scene: &mut Scene, base_transform: Affine, image_cache: &mut ImageCache) {
// Check if any layers are soloed // Check if any layers are soloed
let any_soloed = document.visible_layers().any(|layer| layer.soloed()); let any_soloed = document.visible_layers().any(|layer| layer.soloed());
@ -56,19 +121,19 @@ fn render_graphics_object(document: &Document, time: f64, scene: &mut Scene, bas
if any_soloed { if any_soloed {
// Only render soloed layers when solo is active // Only render soloed layers when solo is active
if layer.soloed() { if layer.soloed() {
render_layer(document, time, layer, scene, base_transform, 1.0); render_layer(document, time, layer, scene, base_transform, 1.0, image_cache);
} }
} else { } else {
// Render all visible layers when no solo is active // Render all visible layers when no solo is active
render_layer(document, time, layer, scene, base_transform, 1.0); render_layer(document, time, layer, scene, base_transform, 1.0, image_cache);
} }
} }
} }
/// Render a single layer /// Render a single layer
fn render_layer(document: &Document, time: f64, layer: &AnyLayer, scene: &mut Scene, base_transform: Affine, parent_opacity: f64) { fn render_layer(document: &Document, time: f64, layer: &AnyLayer, scene: &mut Scene, base_transform: Affine, parent_opacity: f64, image_cache: &mut ImageCache) {
match layer { match layer {
AnyLayer::Vector(vector_layer) => render_vector_layer(document, time, vector_layer, scene, base_transform, parent_opacity), AnyLayer::Vector(vector_layer) => render_vector_layer(document, time, vector_layer, scene, base_transform, parent_opacity, image_cache),
AnyLayer::Audio(_) => { AnyLayer::Audio(_) => {
// Audio layers don't render visually // Audio layers don't render visually
} }
@ -87,6 +152,7 @@ fn render_clip_instance(
scene: &mut Scene, scene: &mut Scene,
base_transform: Affine, base_transform: Affine,
animation_data: &crate::animation::AnimationData, animation_data: &crate::animation::AnimationData,
image_cache: &mut ImageCache,
) { ) {
// Try to find the clip in the document's clip libraries // Try to find the clip in the document's clip libraries
// For now, only handle VectorClips (VideoClip and AudioClip rendering not yet implemented) // For now, only handle VectorClips (VideoClip and AudioClip rendering not yet implemented)
@ -214,19 +280,19 @@ fn render_clip_instance(
if !layer_node.data.visible() { if !layer_node.data.visible() {
continue; continue;
} }
render_layer(document, clip_time, &layer_node.data, scene, instance_transform, clip_opacity); render_layer(document, clip_time, &layer_node.data, scene, instance_transform, clip_opacity, image_cache);
} }
} }
/// Render a vector layer with all its clip instances and shape instances /// Render a vector layer with all its clip instances and shape instances
fn render_vector_layer(document: &Document, time: f64, layer: &VectorLayer, scene: &mut Scene, base_transform: Affine, parent_opacity: f64) { fn render_vector_layer(document: &Document, time: f64, layer: &VectorLayer, scene: &mut Scene, base_transform: Affine, parent_opacity: f64, image_cache: &mut ImageCache) {
// Cascade opacity: parent_opacity × layer.opacity // Cascade opacity: parent_opacity × layer.opacity
let layer_opacity = parent_opacity * layer.layer.opacity; let layer_opacity = parent_opacity * layer.layer.opacity;
// Render clip instances first (they appear under shape instances) // Render clip instances first (they appear under shape instances)
for clip_instance in &layer.clip_instances { for clip_instance in &layer.clip_instances {
render_clip_instance(document, time, clip_instance, layer_opacity, scene, base_transform, &layer.layer.animation_data); render_clip_instance(document, time, clip_instance, layer_opacity, scene, base_transform, &layer.layer.animation_data, image_cache);
} }
// Render each shape instance in the layer // Render each shape instance in the layer
@ -384,7 +450,33 @@ fn render_vector_layer(document: &Document, time: f64, layer: &VectorLayer, scen
// layer_opacity already includes parent_opacity from render_vector_layer // layer_opacity already includes parent_opacity from render_vector_layer
let final_opacity = (layer_opacity * opacity) as f32; let final_opacity = (layer_opacity * opacity) as f32;
// Render fill if present // Determine fill rule
let fill_rule = match shape.fill_rule {
crate::shape::FillRule::NonZero => Fill::NonZero,
crate::shape::FillRule::EvenOdd => Fill::EvenOdd,
};
// Render fill - prefer image fill over color fill
let mut filled = false;
// Check for image fill first
if let Some(image_asset_id) = shape.image_fill {
if let Some(image_asset) = document.get_image_asset(&image_asset_id) {
if let Some(image) = image_cache.get_or_decode(image_asset) {
// Apply opacity to image (clone is cheap - Image uses Arc<Blob> internally)
let image_with_alpha = (*image).clone().with_alpha(final_opacity);
// The image is rendered as a fill for the shape path
// Since the shape path is a rectangle matching the image dimensions,
// the image should fill the shape perfectly
scene.fill(fill_rule, affine, &image_with_alpha, None, &path);
filled = true;
}
}
}
// Fall back to color fill if no image fill (or image failed to load)
if !filled {
if let Some(fill_color) = &shape.fill_color { if let Some(fill_color) = &shape.fill_color {
// Apply opacity to color // Apply opacity to color
let alpha = ((fill_color.a as f32 / 255.0) * final_opacity * 255.0) as u8; let alpha = ((fill_color.a as f32 / 255.0) * final_opacity * 255.0) as u8;
@ -395,11 +487,6 @@ fn render_vector_layer(document: &Document, time: f64, layer: &VectorLayer, scen
alpha, alpha,
); );
let fill_rule = match shape.fill_rule {
crate::shape::FillRule::NonZero => Fill::NonZero,
crate::shape::FillRule::EvenOdd => Fill::EvenOdd,
};
scene.fill( scene.fill(
fill_rule, fill_rule,
affine, affine,
@ -408,6 +495,7 @@ fn render_vector_layer(document: &Document, time: f64, layer: &VectorLayer, scen
&path, &path,
); );
} }
}
// Render stroke if present // Render stroke if present
if let (Some(stroke_color), Some(stroke_style)) = (&shape.stroke_color, &shape.stroke_style) if let (Some(stroke_color), Some(stroke_style)) = (&shape.stroke_color, &shape.stroke_style)
@ -445,8 +533,9 @@ mod tests {
fn test_render_empty_document() { fn test_render_empty_document() {
let doc = Document::new("Test"); let doc = Document::new("Test");
let mut scene = Scene::new(); let mut scene = Scene::new();
let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene); render_document(&doc, &mut scene, &mut image_cache);
// Should render background without errors // Should render background without errors
} }
@ -472,7 +561,8 @@ mod tests {
// Render // Render
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
// Should render without errors // Should render without errors
} }
@ -514,7 +604,8 @@ mod tests {
// Render should work without errors // Render should work without errors
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]
@ -544,7 +635,8 @@ mod tests {
// Render should work // Render should work
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]
@ -570,14 +662,15 @@ mod tests {
// Render // Render
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]
fn test_hidden_layer_not_rendered() { fn test_hidden_layer_not_rendered() {
let mut doc = Document::new("Test"); let mut doc = Document::new("Test");
let mut layer1 = VectorLayer::new("Layer 1"); let layer1 = VectorLayer::new("Layer 1");
let mut layer2 = VectorLayer::new("Layer 2"); let mut layer2 = VectorLayer::new("Layer 2");
// Hide layer 2 // Hide layer 2
@ -591,7 +684,8 @@ mod tests {
// Render // Render
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]
@ -621,7 +715,8 @@ mod tests {
// Render // Render
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]
@ -659,7 +754,8 @@ mod tests {
// Render // Render
let mut scene = Scene::new(); let mut scene = Scene::new();
render_document(&doc, &mut scene); let mut image_cache = ImageCache::new();
render_document(&doc, &mut scene, &mut image_cache);
} }
#[test] #[test]

View File

@ -170,7 +170,7 @@ impl ShapeColor {
/// Convert to peniko Color /// Convert to peniko Color
pub fn to_peniko(&self) -> Color { pub fn to_peniko(&self) -> Color {
Color::rgba8(self.r, self.g, self.b, self.a) Color::from_rgba8(self.r, self.g, self.b, self.a)
} }
/// Convert to peniko Brush /// Convert to peniko Brush
@ -197,11 +197,13 @@ impl Default for ShapeColor {
impl From<Color> for ShapeColor { impl From<Color> for ShapeColor {
fn from(color: Color) -> Self { fn from(color: Color) -> Self {
// peniko 0.4 uses components array [r, g, b, a] as floats 0.0-1.0
let components = color.components;
Self { Self {
r: color.r, r: (components[0] * 255.0) as u8,
g: color.g, g: (components[1] * 255.0) as u8,
b: color.b, b: (components[2] * 255.0) as u8,
a: color.a, a: (components[3] * 255.0) as u8,
} }
} }
} }
@ -216,9 +218,14 @@ pub struct Shape {
/// The shape animates between these by varying the shapeIndex property /// The shape animates between these by varying the shapeIndex property
pub versions: Vec<ShapeVersion>, pub versions: Vec<ShapeVersion>,
/// Fill color /// Fill color (used when image_fill is None)
pub fill_color: Option<ShapeColor>, pub fill_color: Option<ShapeColor>,
/// Image fill - references an ImageAsset by UUID
/// When set, the image is rendered as the fill instead of fill_color
#[serde(default)]
pub image_fill: Option<Uuid>,
/// Fill rule /// Fill rule
#[serde(default)] #[serde(default)]
pub fill_rule: FillRule, pub fill_rule: FillRule,
@ -237,6 +244,7 @@ impl Shape {
id: Uuid::new_v4(), id: Uuid::new_v4(),
versions: vec![ShapeVersion::new(path, 0)], versions: vec![ShapeVersion::new(path, 0)],
fill_color: Some(ShapeColor::rgb(0, 0, 0)), fill_color: Some(ShapeColor::rgb(0, 0, 0)),
image_fill: None,
fill_rule: FillRule::NonZero, fill_rule: FillRule::NonZero,
stroke_color: None, stroke_color: None,
stroke_style: None, stroke_style: None,
@ -249,12 +257,20 @@ impl Shape {
id, id,
versions: vec![ShapeVersion::new(path, 0)], versions: vec![ShapeVersion::new(path, 0)],
fill_color: Some(ShapeColor::rgb(0, 0, 0)), fill_color: Some(ShapeColor::rgb(0, 0, 0)),
image_fill: None,
fill_rule: FillRule::NonZero, fill_rule: FillRule::NonZero,
stroke_color: None, stroke_color: None,
stroke_style: None, stroke_style: None,
} }
} }
/// Set image fill (references an ImageAsset by UUID)
pub fn with_image_fill(mut self, image_asset_id: Uuid) -> Self {
self.image_fill = Some(image_asset_id);
self.fill_color = None; // Image fill takes precedence
self
}
/// Add a new version for morphing /// Add a new version for morphing
pub fn add_version(&mut self, path: BezPath) -> usize { pub fn add_version(&mut self, path: BezPath) -> usize {
let index = self.versions.len(); let index = self.versions.len();

View File

@ -38,3 +38,6 @@ pollster = { workspace = true }
lightningcss = "1.0.0-alpha.68" lightningcss = "1.0.0-alpha.68"
clap = { version = "4.5", features = ["derive"] } clap = { version = "4.5", features = ["derive"] }
uuid = { version = "1.0", features = ["v4", "serde"] } uuid = { version = "1.0", features = ["v4", "serde"] }
# Native file dialogs
rfd = "0.15"

View File

@ -9,6 +9,8 @@ use uuid::Uuid;
mod panes; mod panes;
use panes::{PaneInstance, PaneRenderer, SharedPaneState}; use panes::{PaneInstance, PaneRenderer, SharedPaneState};
mod widgets;
mod menu; mod menu;
use menu::{MenuAction, MenuSystem}; use menu::{MenuAction, MenuSystem};
@ -271,6 +273,21 @@ struct EditorApp {
// Playback state (global for all panes) // Playback state (global for all panes)
playback_time: f64, // Current playback position in seconds (persistent - save with document) playback_time: f64, // Current playback position in seconds (persistent - save with document)
is_playing: bool, // Whether playback is currently active (transient - don't save) is_playing: bool, // Whether playback is currently active (transient - don't save)
// Asset drag-and-drop state
dragging_asset: Option<panes::DraggingAsset>, // Asset being dragged from Asset Library
// Import dialog state
last_import_filter: ImportFilter, // Last used import filter (remembered across imports)
}
/// Import filter types for the file dialog
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
enum ImportFilter {
#[default]
All,
Images,
Audio,
Video,
Midi,
} }
impl EditorApp { impl EditorApp {
@ -345,6 +362,8 @@ impl EditorApp {
audio_system, audio_system,
playback_time: 0.0, // Start at beginning playback_time: 0.0, // Start at beginning
is_playing: false, // Start paused is_playing: false, // Start paused
dragging_asset: None, // No asset being dragged initially
last_import_filter: ImportFilter::default(), // Default to "All Supported"
} }
} }
@ -413,8 +432,90 @@ impl EditorApp {
// TODO: Implement revert // TODO: Implement revert
} }
MenuAction::Import => { MenuAction::Import => {
println!("Menu: Import"); use lightningbeam_core::file_types::*;
// TODO: Implement import use rfd::FileDialog;
// Build file filter from extension constants
let all_extensions: Vec<&str> = all_supported_extensions();
// Build dialog with filters in order based on last used filter
// The first filter added is the default in most file dialogs
let mut dialog = FileDialog::new().set_title("Import Asset");
// Add filters in order, with the last-used filter first
match self.last_import_filter {
ImportFilter::All => {
dialog = dialog
.add_filter("All Supported", &all_extensions)
.add_filter("Images", IMAGE_EXTENSIONS)
.add_filter("Audio", AUDIO_EXTENSIONS)
.add_filter("Video", VIDEO_EXTENSIONS)
.add_filter("MIDI", MIDI_EXTENSIONS);
}
ImportFilter::Images => {
dialog = dialog
.add_filter("Images", IMAGE_EXTENSIONS)
.add_filter("All Supported", &all_extensions)
.add_filter("Audio", AUDIO_EXTENSIONS)
.add_filter("Video", VIDEO_EXTENSIONS)
.add_filter("MIDI", MIDI_EXTENSIONS);
}
ImportFilter::Audio => {
dialog = dialog
.add_filter("Audio", AUDIO_EXTENSIONS)
.add_filter("All Supported", &all_extensions)
.add_filter("Images", IMAGE_EXTENSIONS)
.add_filter("Video", VIDEO_EXTENSIONS)
.add_filter("MIDI", MIDI_EXTENSIONS);
}
ImportFilter::Video => {
dialog = dialog
.add_filter("Video", VIDEO_EXTENSIONS)
.add_filter("All Supported", &all_extensions)
.add_filter("Images", IMAGE_EXTENSIONS)
.add_filter("Audio", AUDIO_EXTENSIONS)
.add_filter("MIDI", MIDI_EXTENSIONS);
}
ImportFilter::Midi => {
dialog = dialog
.add_filter("MIDI", MIDI_EXTENSIONS)
.add_filter("All Supported", &all_extensions)
.add_filter("Images", IMAGE_EXTENSIONS)
.add_filter("Audio", AUDIO_EXTENSIONS)
.add_filter("Video", VIDEO_EXTENSIONS);
}
}
let file = dialog.pick_file();
if let Some(path) = file {
// Get extension and detect file type
let extension = path.extension()
.and_then(|e| e.to_str())
.unwrap_or("");
match get_file_type(extension) {
Some(FileType::Image) => {
self.last_import_filter = ImportFilter::Images;
self.import_image(&path);
}
Some(FileType::Audio) => {
self.last_import_filter = ImportFilter::Audio;
self.import_audio(&path);
}
Some(FileType::Video) => {
self.last_import_filter = ImportFilter::Video;
self.import_video(&path);
}
Some(FileType::Midi) => {
self.last_import_filter = ImportFilter::Midi;
self.import_midi(&path);
}
None => {
println!("Unsupported file type: {}", extension);
}
}
}
} }
MenuAction::Export => { MenuAction::Export => {
println!("Menu: Export"); println!("Menu: Export");
@ -510,19 +611,21 @@ impl EditorApp {
// TODO: Implement add MIDI track // TODO: Implement add MIDI track
} }
MenuAction::AddTestClip => { MenuAction::AddTestClip => {
// Require an active layer // Create a test vector clip and add it to the library (not to timeline)
if let Some(layer_id) = self.active_layer_id { use lightningbeam_core::clip::VectorClip;
// Create a test vector clip (5 second duration)
use lightningbeam_core::clip::{VectorClip, ClipInstance};
use lightningbeam_core::layer::{VectorLayer, AnyLayer}; use lightningbeam_core::layer::{VectorLayer, AnyLayer};
use lightningbeam_core::shape::{Shape, ShapeColor}; use lightningbeam_core::shape::{Shape, ShapeColor};
use lightningbeam_core::object::ShapeInstance; use lightningbeam_core::object::ShapeInstance;
use kurbo::{Circle, Rect, Shape as KurboShape}; use kurbo::{Circle, Rect, Shape as KurboShape};
let mut test_clip = VectorClip::new("Test Clip", 400.0, 400.0, 5.0); // Generate unique name based on existing clip count
let clip_count = self.action_executor.document().vector_clips.len();
let clip_name = format!("Test Clip {}", clip_count + 1);
let mut test_clip = VectorClip::new(&clip_name, 400.0, 400.0, 5.0);
// Create a layer with some shapes // Create a layer with some shapes
let mut layer = VectorLayer::new("Test Layer"); let mut layer = VectorLayer::new("Shapes");
// Create a red circle shape // Create a red circle shape
let circle_path = Circle::new((100.0, 100.0), 50.0).to_path(0.1); let circle_path = Circle::new((100.0, 100.0), 50.0).to_path(0.1);
@ -545,28 +648,9 @@ impl EditorApp {
// Add the layer to the clip // Add the layer to the clip
test_clip.layers.add_root(AnyLayer::Vector(layer)); test_clip.layers.add_root(AnyLayer::Vector(layer));
// Add to document's clip library // Add to document's clip library only (user drags from Asset Library to timeline)
let clip_id = self.action_executor.document_mut().add_vector_clip(test_clip); let _clip_id = self.action_executor.document_mut().add_vector_clip(test_clip);
println!("Added '{}' to Asset Library (drag to timeline to use)", clip_name);
// Create clip instance at current time
let current_time = self.action_executor.document().current_time;
let instance = ClipInstance::new(clip_id)
.with_timeline_start(current_time)
.with_name("Test Instance");
// Add to layer (only vector layers can have clip instances)
if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) {
use lightningbeam_core::layer::AnyLayer;
if let AnyLayer::Vector(vector_layer) = layer {
vector_layer.clip_instances.push(instance);
println!("Added test clip instance with red circle and blue rectangle at time {}", current_time);
} else {
println!("Can only add clip instances to vector layers");
}
}
} else {
println!("No active layer selected");
}
} }
MenuAction::DeleteLayer => { MenuAction::DeleteLayer => {
println!("Menu: Delete Layer"); println!("Menu: Delete Layer");
@ -666,6 +750,148 @@ impl EditorApp {
} }
} }
} }
/// Import an image file as an ImageAsset
fn import_image(&mut self, path: &std::path::Path) {
use lightningbeam_core::clip::ImageAsset;
// Get filename for asset name
let name = path.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("Untitled Image")
.to_string();
// Load image to get dimensions
match image::open(path) {
Ok(img) => {
let (width, height) = (img.width(), img.height());
// Read raw file data for embedding
let data = match std::fs::read(path) {
Ok(data) => Some(data),
Err(e) => {
eprintln!("Warning: Could not embed image data: {}", e);
None
}
};
// Create image asset
let mut asset = ImageAsset::new(&name, path, width, height);
asset.data = data;
// Add to document
let asset_id = self.action_executor.document_mut().add_image_asset(asset);
println!("Imported image '{}' ({}x{}) - ID: {}", name, width, height, asset_id);
}
Err(e) => {
eprintln!("Failed to load image '{}': {}", path.display(), e);
}
}
}
/// Import an audio file via daw-backend
fn import_audio(&mut self, path: &std::path::Path) {
use daw_backend::io::audio_file::AudioFile;
use lightningbeam_core::clip::{AudioClip, AudioClipType};
let name = path.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("Untitled Audio")
.to_string();
// Load audio file via daw-backend
match AudioFile::load(path) {
Ok(audio_file) => {
let duration = audio_file.frames as f64 / audio_file.sample_rate as f64;
let channels = audio_file.channels;
let sample_rate = audio_file.sample_rate;
// Add to audio engine pool if available
if let Some(ref mut audio_system) = self.audio_system {
// Send audio data to the engine
let path_str = path.to_string_lossy().to_string();
audio_system.controller.add_audio_file(
path_str.clone(),
audio_file.data,
channels,
sample_rate,
);
// For now, use a placeholder pool index (the engine will assign the real one)
// In a full implementation, we'd wait for the AudioFileAdded event
let pool_index = self.action_executor.document().audio_clips.len();
// Create audio clip in document
let clip = AudioClip::new_sampled(&name, pool_index, duration);
let clip_id = self.action_executor.document_mut().add_audio_clip(clip);
println!("Imported audio '{}' ({:.1}s, {}ch, {}Hz) - ID: {}",
name, duration, channels, sample_rate, clip_id);
} else {
eprintln!("Cannot import audio: audio engine not initialized");
}
}
Err(e) => {
eprintln!("Failed to load audio '{}': {}", path.display(), e);
}
}
}
/// Import a MIDI file via daw-backend
fn import_midi(&mut self, path: &std::path::Path) {
use lightningbeam_core::clip::{AudioClip, AudioClipType, MidiEvent};
let name = path.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("Untitled MIDI")
.to_string();
// Load MIDI file via daw-backend
// Note: daw-backend's load_midi_file returns a MidiClip with events
match daw_backend::io::midi_file::load_midi_file(path, 0, 44100) {
Ok(midi_clip) => {
// Convert daw-backend MidiEvents to our MidiEvent type
let events: Vec<MidiEvent> = midi_clip.events.iter().map(|e| {
MidiEvent::new(e.timestamp, e.status, e.data1, e.data2)
}).collect();
let duration = midi_clip.duration;
// Create MIDI audio clip in document
let clip = AudioClip::new_midi(&name, duration, events, false);
let clip_id = self.action_executor.document_mut().add_audio_clip(clip);
println!("Imported MIDI '{}' ({:.1}s, {} events) - ID: {}",
name, duration, midi_clip.events.len(), clip_id);
}
Err(e) => {
eprintln!("Failed to load MIDI '{}': {}", path.display(), e);
}
}
}
/// Import a video file (placeholder - decoder not yet ported)
fn import_video(&mut self, path: &std::path::Path) {
use lightningbeam_core::clip::VideoClip;
let name = path.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("Untitled Video")
.to_string();
// TODO: Use video decoder to get actual dimensions/duration
// For now, create a placeholder with default values
let clip = VideoClip::new(
&name,
path.to_string_lossy().to_string(),
1920.0, // Default width (TODO: probe video)
1080.0, // Default height (TODO: probe video)
0.0, // Duration unknown (TODO: probe video)
30.0, // Default frame rate (TODO: probe video)
);
let clip_id = self.action_executor.document_mut().add_video_clip(clip);
println!("Imported video '{}' (placeholder - dimensions/duration unknown) - ID: {}", name, clip_id);
println!("Note: Video decoder not yet ported. Video preview unavailable.");
}
} }
impl eframe::App for EditorApp { impl eframe::App for EditorApp {
@ -706,43 +932,6 @@ impl eframe::App for EditorApp {
ctx.request_repaint(); ctx.request_repaint();
} }
// Check keyboard shortcuts (works on all platforms)
ctx.input(|i| {
// Check menu shortcuts
if let Some(action) = MenuSystem::check_shortcuts(i) {
self.handle_menu_action(action);
}
// Check tool shortcuts (only if no modifiers are held)
if !i.modifiers.ctrl && !i.modifiers.shift && !i.modifiers.alt && !i.modifiers.command {
use lightningbeam_core::tool::Tool;
if i.key_pressed(egui::Key::V) {
self.selected_tool = Tool::Select;
} else if i.key_pressed(egui::Key::P) {
self.selected_tool = Tool::Draw;
} else if i.key_pressed(egui::Key::Q) {
self.selected_tool = Tool::Transform;
} else if i.key_pressed(egui::Key::R) {
self.selected_tool = Tool::Rectangle;
} else if i.key_pressed(egui::Key::E) {
self.selected_tool = Tool::Ellipse;
} else if i.key_pressed(egui::Key::B) {
self.selected_tool = Tool::PaintBucket;
} else if i.key_pressed(egui::Key::I) {
self.selected_tool = Tool::Eyedropper;
} else if i.key_pressed(egui::Key::L) {
self.selected_tool = Tool::Line;
} else if i.key_pressed(egui::Key::G) {
self.selected_tool = Tool::Polygon;
} else if i.key_pressed(egui::Key::A) {
self.selected_tool = Tool::BezierEdit;
} else if i.key_pressed(egui::Key::T) {
self.selected_tool = Tool::Text;
}
}
});
// Top menu bar (egui-rendered on all platforms) // Top menu bar (egui-rendered on all platforms)
egui::TopBottomPanel::top("menu_bar").show(ctx, |ui| { egui::TopBottomPanel::top("menu_bar").show(ctx, |ui| {
if let Some(action) = MenuSystem::render_egui_menu_bar(ui) { if let Some(action) = MenuSystem::render_egui_menu_bar(ui) {
@ -791,6 +980,7 @@ impl eframe::App for EditorApp {
audio_controller: self.audio_system.as_mut().map(|sys| &mut sys.controller), audio_controller: self.audio_system.as_mut().map(|sys| &mut sys.controller),
playback_time: &mut self.playback_time, playback_time: &mut self.playback_time,
is_playing: &mut self.is_playing, is_playing: &mut self.is_playing,
dragging_asset: &mut self.dragging_asset,
}; };
render_layout_node( render_layout_node(
@ -861,6 +1051,46 @@ impl eframe::App for EditorApp {
if let Some(action) = layout_action { if let Some(action) = layout_action {
self.apply_layout_action(action); self.apply_layout_action(action);
} }
// Check keyboard shortcuts AFTER UI is rendered
// This ensures text fields have had a chance to claim focus first
let wants_keyboard = ctx.wants_keyboard_input();
ctx.input(|i| {
// Check menu shortcuts (these use modifiers, so allow even when typing)
if let Some(action) = MenuSystem::check_shortcuts(i) {
self.handle_menu_action(action);
}
// Check tool shortcuts (only if no modifiers are held AND no text input is focused)
if !wants_keyboard && !i.modifiers.ctrl && !i.modifiers.shift && !i.modifiers.alt && !i.modifiers.command {
use lightningbeam_core::tool::Tool;
if i.key_pressed(egui::Key::V) {
self.selected_tool = Tool::Select;
} else if i.key_pressed(egui::Key::P) {
self.selected_tool = Tool::Draw;
} else if i.key_pressed(egui::Key::Q) {
self.selected_tool = Tool::Transform;
} else if i.key_pressed(egui::Key::R) {
self.selected_tool = Tool::Rectangle;
} else if i.key_pressed(egui::Key::E) {
self.selected_tool = Tool::Ellipse;
} else if i.key_pressed(egui::Key::B) {
self.selected_tool = Tool::PaintBucket;
} else if i.key_pressed(egui::Key::I) {
self.selected_tool = Tool::Eyedropper;
} else if i.key_pressed(egui::Key::L) {
self.selected_tool = Tool::Line;
} else if i.key_pressed(egui::Key::G) {
self.selected_tool = Tool::Polygon;
} else if i.key_pressed(egui::Key::A) {
self.selected_tool = Tool::BezierEdit;
} else if i.key_pressed(egui::Key::T) {
self.selected_tool = Tool::Text;
}
}
});
} }
} }
@ -890,6 +1120,7 @@ struct RenderContext<'a> {
audio_controller: Option<&'a mut daw_backend::EngineController>, audio_controller: Option<&'a mut daw_backend::EngineController>,
playback_time: &'a mut f64, playback_time: &'a mut f64,
is_playing: &'a mut bool, is_playing: &'a mut bool,
dragging_asset: &'a mut Option<panes::DraggingAsset>,
} }
/// Recursively render a layout node with drag support /// Recursively render a layout node with drag support
@ -1202,6 +1433,7 @@ fn render_pane(
rect, rect,
0.0, 0.0,
egui::Stroke::new(border_width, border_color), egui::Stroke::new(border_width, border_color),
egui::StrokeKind::Middle,
); );
// Draw header separator line // Draw header separator line
@ -1249,6 +1481,7 @@ fn render_pane(
icon_button_rect, icon_button_rect,
4.0, 4.0,
egui::Stroke::new(1.0, egui::Color32::from_gray(180)), egui::Stroke::new(1.0, egui::Color32::from_gray(180)),
egui::StrokeKind::Middle,
); );
} }
@ -1352,6 +1585,7 @@ fn render_pane(
audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c), audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c),
playback_time: ctx.playback_time, playback_time: ctx.playback_time,
is_playing: ctx.is_playing, is_playing: ctx.is_playing,
dragging_asset: ctx.dragging_asset,
}; };
pane_instance.render_header(&mut header_ui, &mut shared); pane_instance.render_header(&mut header_ui, &mut shared);
} }
@ -1399,6 +1633,7 @@ fn render_pane(
audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c), audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c),
playback_time: ctx.playback_time, playback_time: ctx.playback_time,
is_playing: ctx.is_playing, is_playing: ctx.is_playing,
dragging_asset: ctx.dragging_asset,
}; };
// Render pane content (header was already rendered above) // Render pane content (header was already rendered above)
@ -1572,6 +1807,7 @@ fn render_toolbar(
button_rect, button_rect,
4.0, 4.0,
egui::Stroke::new(2.0, egui::Color32::from_gray(180)), egui::Stroke::new(2.0, egui::Color32::from_gray(180)),
egui::StrokeKind::Middle,
); );
} }
@ -1584,6 +1820,7 @@ fn render_toolbar(
button_rect, button_rect,
4.0, 4.0,
egui::Stroke::new(2.0, egui::Color32::from_rgb(100, 150, 255)), egui::Stroke::new(2.0, egui::Color32::from_rgb(100, 150, 255)),
egui::StrokeKind::Middle,
); );
} }
@ -1612,6 +1849,7 @@ fn pane_color(pane_type: PaneType) -> egui::Color32 {
PaneType::PianoRoll => egui::Color32::from_rgb(55, 35, 45), PaneType::PianoRoll => egui::Color32::from_rgb(55, 35, 45),
PaneType::NodeEditor => egui::Color32::from_rgb(30, 45, 50), PaneType::NodeEditor => egui::Color32::from_rgb(30, 45, 50),
PaneType::PresetBrowser => egui::Color32::from_rgb(50, 45, 30), PaneType::PresetBrowser => egui::Color32::from_rgb(50, 45, 30),
PaneType::AssetLibrary => egui::Color32::from_rgb(45, 50, 35),
} }
} }

View File

@ -164,7 +164,7 @@ pub enum MenuAction {
AddVideoLayer, AddVideoLayer,
AddAudioTrack, AddAudioTrack,
AddMidiTrack, AddMidiTrack,
AddTestClip, // For testing: adds a test clip instance to the current layer AddTestClip, // For testing: adds a test clip to the asset library
DeleteLayer, DeleteLayer,
ToggleLayerVisibility, ToggleLayerVisibility,
@ -255,7 +255,7 @@ impl MenuItemDef {
const ADD_VIDEO_LAYER: Self = Self { label: "Add Video Layer", action: MenuAction::AddVideoLayer, shortcut: None }; const ADD_VIDEO_LAYER: Self = Self { label: "Add Video Layer", action: MenuAction::AddVideoLayer, shortcut: None };
const ADD_AUDIO_TRACK: Self = Self { label: "Add Audio Track", action: MenuAction::AddAudioTrack, shortcut: None }; const ADD_AUDIO_TRACK: Self = Self { label: "Add Audio Track", action: MenuAction::AddAudioTrack, shortcut: None };
const ADD_MIDI_TRACK: Self = Self { label: "Add MIDI Track", action: MenuAction::AddMidiTrack, shortcut: None }; const ADD_MIDI_TRACK: Self = Self { label: "Add MIDI Track", action: MenuAction::AddMidiTrack, shortcut: None };
const ADD_TEST_CLIP: Self = Self { label: "Add Test Clip", action: MenuAction::AddTestClip, shortcut: None }; const ADD_TEST_CLIP: Self = Self { label: "Add Test Clip to Library", action: MenuAction::AddTestClip, shortcut: None };
const DELETE_LAYER: Self = Self { label: "Delete Layer", action: MenuAction::DeleteLayer, shortcut: None }; const DELETE_LAYER: Self = Self { label: "Delete Layer", action: MenuAction::DeleteLayer, shortcut: None };
const TOGGLE_LAYER_VISIBILITY: Self = Self { label: "Hide/Show Layer", action: MenuAction::ToggleLayerVisibility, shortcut: None }; const TOGGLE_LAYER_VISIBILITY: Self = Self { label: "Hide/Show Layer", action: MenuAction::ToggleLayerVisibility, shortcut: None };

View File

@ -0,0 +1,814 @@
//! Asset Library pane - browse and manage project assets
//!
//! Displays all clips in the document organized by category:
//! - Vector Clips (animations)
//! - Video Clips (imported video files)
//! - Audio Clips (sampled audio and MIDI)
use eframe::egui;
use lightningbeam_core::clip::AudioClipType;
use lightningbeam_core::document::Document;
use uuid::Uuid;
use super::{DragClipType, DraggingAsset, NodePath, PaneRenderer, SharedPaneState};
use crate::widgets::ImeTextField;
// Layout constants (logical pixels / egui points)
const SEARCH_BAR_HEIGHT: f32 = 30.0; // height of the "Search:" row at the top of the pane
const CATEGORY_TAB_HEIGHT: f32 = 28.0; // height of the category tab strip below the search bar
const ITEM_HEIGHT: f32 = 40.0; // height of one asset row in the list
const ITEM_PADDING: f32 = 4.0; // inner padding for asset rows (used by the list renderer)
/// Asset category for filtering
///
/// `All` is a filter-only pseudo-category used by the tab bar and search;
/// individual assets always carry one of the concrete variants, and the
/// delete/rename helpers treat `All` as a no-op.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AssetCategory {
    /// Show every asset regardless of type (filter-only pseudo-category).
    All,
    /// Vector animation clips (`document.vector_clips`).
    Vector,
    /// Imported video files (`document.video_clips`).
    Video,
    /// Sampled audio and MIDI clips (`document.audio_clips`).
    Audio,
    /// Still image assets (`document.image_assets`).
    Images,
}
impl AssetCategory {
    /// Human-readable label shown on this category's tab.
    pub fn display_name(&self) -> &'static str {
        match self {
            Self::All => "All",
            Self::Vector => "Vector",
            Self::Video => "Video",
            Self::Audio => "Audio",
            Self::Images => "Images",
        }
    }

    /// Every category, in the order the tab bar displays them.
    pub fn all() -> &'static [AssetCategory] {
        // Single static table; `All` comes first so it is the leftmost tab.
        static TAB_ORDER: [AssetCategory; 5] = [
            AssetCategory::All,
            AssetCategory::Vector,
            AssetCategory::Video,
            AssetCategory::Audio,
            AssetCategory::Images,
        ];
        &TAB_ORDER
    }

    /// Get the color associated with this category
    pub fn color(&self) -> egui::Color32 {
        match self {
            Self::All => egui::Color32::from_gray(150),
            Self::Vector => egui::Color32::from_rgb(100, 150, 255), // Blue
            Self::Video => egui::Color32::from_rgb(255, 150, 100),  // Orange
            Self::Audio => egui::Color32::from_rgb(100, 255, 150),  // Green
            Self::Images => egui::Color32::from_rgb(255, 200, 100), // Yellow/Gold
        }
    }
}
/// Unified asset entry for display
///
/// A flattened, display-oriented view of one asset from the document,
/// built fresh each frame by `collect_assets` from vector/video/audio
/// clips and image assets.
#[derive(Debug, Clone)]
pub struct AssetEntry {
    /// UUID of the underlying clip/asset in the document.
    pub id: Uuid,
    /// Display name (used for sorting and search filtering).
    pub name: String,
    /// Broad category used by the tab filter and color coding.
    pub category: AssetCategory,
    /// More specific clip type for drag-and-drop compatibility
    pub drag_clip_type: DragClipType,
    /// Duration in seconds (0.0 for still images, which have no duration).
    pub duration: f64,
    /// Pixel dimensions (width, height) where applicable; None for audio.
    pub dimensions: Option<(f64, f64)>,
    /// Short detail string shown in the row (e.g. "1920x1080", "30fps", "MIDI").
    pub extra_info: String,
}
/// Pending delete confirmation state
///
/// Captured when the user requests deletion of an asset; held until the
/// confirmation is resolved.
#[derive(Debug, Clone)]
struct PendingDelete {
    /// Asset awaiting deletion.
    asset_id: Uuid,
    /// Name to show in the confirmation prompt.
    asset_name: String,
    /// Category, needed to pick the matching document removal call.
    category: AssetCategory,
    /// Whether the asset currently has clip instances on layers
    /// (per `is_asset_in_use`), so the UI can warn more strongly.
    in_use: bool,
}
/// Inline rename editing state
///
/// Present while a row's name is being edited in place; the edit buffer
/// is committed back to the document via `rename_asset`.
#[derive(Debug, Clone)]
struct RenameState {
    /// Asset whose name is being edited.
    asset_id: Uuid,
    /// Category, needed to locate the asset for the rename.
    category: AssetCategory,
    /// Current contents of the inline text field.
    edit_text: String,
}
/// Context menu state with position
///
/// Present while a right-click context menu is open for an asset row.
#[derive(Debug, Clone)]
struct ContextMenuState {
    /// Asset the menu was opened on.
    asset_id: Uuid,
    /// Screen position where the menu should be drawn.
    position: egui::Pos2,
}
/// Asset Library pane - browse and manage the document's assets.
///
/// Holds only transient UI state (filters, selection, in-progress edits);
/// the assets themselves live in the `Document` and are re-collected from
/// it on each render.
pub struct AssetLibraryPane {
    /// Current search filter text
    search_filter: String,
    /// Currently selected category tab
    selected_category: AssetCategory,
    /// Currently selected asset ID (for future drag-to-timeline)
    selected_asset: Option<Uuid>,
    /// Context menu state with position
    context_menu: Option<ContextMenuState>,
    /// Pending delete confirmation
    pending_delete: Option<PendingDelete>,
    /// Active rename state
    rename_state: Option<RenameState>,
}
impl AssetLibraryPane {
pub fn new() -> Self {
Self {
search_filter: String::new(),
selected_category: AssetCategory::All,
selected_asset: None,
context_menu: None,
pending_delete: None,
rename_state: None,
}
}
/// Collect all assets from the document into a unified list
///
/// Flattens vector clips, video clips, audio clips, and image assets
/// into `AssetEntry` rows, then sorts them case-insensitively by name.
fn collect_assets(&self, document: &Document) -> Vec<AssetEntry> {
    let mut entries: Vec<AssetEntry> = Vec::new();

    // Vector animation clips.
    entries.extend(document.vector_clips.iter().map(|(id, clip)| AssetEntry {
        id: *id,
        name: clip.name.clone(),
        category: AssetCategory::Vector,
        drag_clip_type: DragClipType::Vector,
        duration: clip.duration,
        dimensions: Some((clip.width, clip.height)),
        extra_info: format!("{}x{}", clip.width as u32, clip.height as u32),
    }));

    // Imported video clips; the detail string shows frame rate.
    entries.extend(document.video_clips.iter().map(|(id, clip)| AssetEntry {
        id: *id,
        name: clip.name.clone(),
        category: AssetCategory::Video,
        drag_clip_type: DragClipType::Video,
        duration: clip.duration,
        dimensions: Some((clip.width, clip.height)),
        extra_info: format!("{:.0}fps", clip.frame_rate),
    }));

    // Audio clips: the drag type distinguishes sampled audio from MIDI.
    entries.extend(document.audio_clips.iter().map(|(id, clip)| {
        let (extra_info, drag_clip_type) = match &clip.clip_type {
            AudioClipType::Sampled { .. } => ("Sampled".to_string(), DragClipType::AudioSampled),
            AudioClipType::Midi { .. } => ("MIDI".to_string(), DragClipType::AudioMidi),
        };
        AssetEntry {
            id: *id,
            name: clip.name.clone(),
            category: AssetCategory::Audio,
            drag_clip_type,
            duration: clip.duration,
            dimensions: None,
            extra_info,
        }
    }));

    // Still images carry no duration.
    entries.extend(document.image_assets.iter().map(|(id, asset)| AssetEntry {
        id: *id,
        name: asset.name.clone(),
        category: AssetCategory::Images,
        drag_clip_type: DragClipType::Image,
        duration: 0.0,
        dimensions: Some((asset.width as f64, asset.height as f64)),
        extra_info: format!("{}x{}", asset.width, asset.height),
    }));

    // Case-insensitive alphabetical order; the sort is stable, so rows
    // with identical names keep their insertion order from above.
    entries.sort_by_key(|entry| entry.name.to_lowercase());
    entries
}
/// Filter assets based on current category and search text
///
/// Returns references into `assets` that pass both the selected category
/// tab and the (case-insensitive) search string. An empty search matches
/// everything; the `All` tab passes every category through.
fn filter_assets<'a>(&self, assets: &'a [AssetEntry]) -> Vec<&'a AssetEntry> {
    let needle = self.search_filter.to_lowercase();
    let mut visible = Vec::new();
    for asset in assets {
        // Category tab: skip mismatches unless "All" is selected.
        if self.selected_category != AssetCategory::All
            && asset.category != self.selected_category
        {
            continue;
        }
        // Search text: skip names that don't contain the lowercased needle.
        if !needle.is_empty() && !asset.name.to_lowercase().contains(&needle) {
            continue;
        }
        visible.push(asset);
    }
    visible
}
/// Check if an asset is currently in use (has clip instances on layers)
///
/// An asset counts as "in use" when a layer of the matching kind holds a
/// clip instance whose `clip_id` equals `asset_id`.
///
/// NOTE(review): only `document.root.children` is scanned — if layers can
/// nest, instances on deeper layers are not detected; confirm tree depth.
fn is_asset_in_use(document: &Document, asset_id: Uuid, category: AssetCategory) -> bool {
    use lightningbeam_core::layer::AnyLayer;

    document.root.children.iter().any(|layer| match layer {
        AnyLayer::Vector(vl) => {
            category == AssetCategory::Vector
                && vl.clip_instances.iter().any(|inst| inst.clip_id == asset_id)
        }
        AnyLayer::Video(vl) => {
            category == AssetCategory::Video
                && vl.clip_instances.iter().any(|inst| inst.clip_id == asset_id)
        }
        AnyLayer::Audio(al) => {
            category == AssetCategory::Audio
                && al.clip_instances.iter().any(|inst| inst.clip_id == asset_id)
        }
    })
}
/// Delete an asset from the document
///
/// Dispatches to the document removal call matching the asset's category.
/// `All` is a filter-only pseudo-category and is ignored here.
fn delete_asset(document: &mut Document, asset_id: Uuid, category: AssetCategory) {
    match category {
        AssetCategory::Vector => {
            let _ = document.remove_vector_clip(&asset_id);
        }
        AssetCategory::Video => {
            let _ = document.remove_video_clip(&asset_id);
        }
        AssetCategory::Audio => {
            let _ = document.remove_audio_clip(&asset_id);
        }
        AssetCategory::Images => {
            let _ = document.remove_image_asset(&asset_id);
        }
        AssetCategory::All => {} // Not a real category for deletion
    }
}
/// Rename an asset in the document
///
/// Locates the asset by id within its category and overwrites its display
/// name. Unknown ids and the `All` pseudo-category are silent no-ops.
fn rename_asset(document: &mut Document, asset_id: Uuid, category: AssetCategory, new_name: &str) {
    // Resolve a mutable reference to the name field for this category,
    // if the asset still exists in the document.
    let name_slot: Option<&mut String> = match category {
        AssetCategory::Vector => document.get_vector_clip_mut(&asset_id).map(|c| &mut c.name),
        AssetCategory::Video => document.get_video_clip_mut(&asset_id).map(|c| &mut c.name),
        AssetCategory::Audio => document.get_audio_clip_mut(&asset_id).map(|c| &mut c.name),
        AssetCategory::Images => document.get_image_asset_mut(&asset_id).map(|a| &mut a.name),
        AssetCategory::All => None, // Not a real category for renaming
    };
    if let Some(name) = name_slot {
        *name = new_name.to_string();
    }
}
/// Render the search bar at the top
///
/// Paints a themed background strip of `SEARCH_BAR_HEIGHT` at the top of
/// `rect`, draws a "Search:" label, and places an IME-safe text field
/// bound to `self.search_filter` to its right.
fn render_search_bar(&mut self, ui: &mut egui::Ui, rect: egui::Rect, shared: &SharedPaneState) {
    let search_rect =
        egui::Rect::from_min_size(rect.min, egui::vec2(rect.width(), SEARCH_BAR_HEIGHT));

    // Background (falls back to a dark gray if the theme has no rule)
    let bg_style = shared.theme.style(".panel-header", ui.ctx());
    let bg_color = bg_style
        .background_color
        .unwrap_or(egui::Color32::from_rgb(30, 30, 30));
    ui.painter().rect_filled(search_rect, 0.0, bg_color);

    // Label position: 8px from the left, vertically centered for 14pt text
    let label_pos = search_rect.min + egui::vec2(8.0, (SEARCH_BAR_HEIGHT - 14.0) / 2.0);
    ui.painter().text(
        label_pos,
        egui::Align2::LEFT_TOP,
        "Search:",
        egui::FontId::proportional(14.0),
        egui::Color32::from_gray(180),
    );

    // Text field using IME-safe widget, offset past the "Search:" label
    // (the 65/75 px offsets are hand-tuned to the label width above)
    let text_edit_rect = egui::Rect::from_min_size(
        search_rect.min + egui::vec2(65.0, 4.0),
        egui::vec2(search_rect.width() - 75.0, SEARCH_BAR_HEIGHT - 8.0),
    );
    let mut child_ui = ui.new_child(egui::UiBuilder::new().max_rect(text_edit_rect));
    ImeTextField::new(&mut self.search_filter)
        .placeholder("Filter assets...")
        .desired_width(text_edit_rect.width())
        .show(&mut child_ui);
}
/// Render category tabs
///
/// Draws one equal-width tab per `AssetCategory` across the top of `rect`
/// and updates `self.selected_category` when a tab is clicked. Visuals are
/// computed before the click is handled, so a newly clicked tab shows its
/// selected styling on the next frame.
fn render_category_tabs(
    &mut self,
    ui: &mut egui::Ui,
    rect: egui::Rect,
    shared: &SharedPaneState,
) {
    let tabs_rect =
        egui::Rect::from_min_size(rect.min, egui::vec2(rect.width(), CATEGORY_TAB_HEIGHT));

    // Background (falls back to a dark gray if the theme has no rule)
    let bg_style = shared.theme.style(".panel-content", ui.ctx());
    let bg_color = bg_style
        .background_color
        .unwrap_or(egui::Color32::from_rgb(40, 40, 40));
    ui.painter().rect_filled(tabs_rect, 0.0, bg_color);

    // Tab buttons: the strip is split evenly among all categories
    let tab_width = tabs_rect.width() / AssetCategory::all().len() as f32;

    for (i, category) in AssetCategory::all().iter().enumerate() {
        let tab_rect = egui::Rect::from_min_size(
            tabs_rect.min + egui::vec2(i as f32 * tab_width, 0.0),
            egui::vec2(tab_width, CATEGORY_TAB_HEIGHT),
        );

        let is_selected = self.selected_category == *category;

        // Tab background: only the selected tab gets a filled highlight
        let tab_bg = if is_selected {
            egui::Color32::from_rgb(60, 60, 60)
        } else {
            egui::Color32::TRANSPARENT
        };
        ui.painter().rect_filled(tab_rect, 0.0, tab_bg);

        // Handle click (selection change takes visual effect next frame)
        let response = ui.allocate_rect(tab_rect, egui::Sense::click());
        if response.clicked() {
            self.selected_category = *category;
        }

        // Category color indicator: selected tab text uses the category color
        let indicator_color = category.color();
        let text_color = if is_selected {
            indicator_color
        } else {
            egui::Color32::from_gray(150)
        };
        ui.painter().text(
            tab_rect.center(),
            egui::Align2::CENTER_CENTER,
            category.display_name(),
            egui::FontId::proportional(12.0),
            text_color,
        );

        // Underline for selected tab, inset 4px from each side
        if is_selected {
            ui.painter().line_segment(
                [
                    egui::pos2(tab_rect.min.x + 4.0, tab_rect.max.y - 2.0),
                    egui::pos2(tab_rect.max.x - 4.0, tab_rect.max.y - 2.0),
                ],
                egui::Stroke::new(2.0, indicator_color),
            );
        }
    }
}
    /// Render the scrollable asset list for the current category.
    ///
    /// Per-item interactions handled here:
    /// - left-click selects the asset,
    /// - double-click starts an inline rename,
    /// - right-click records a context-menu request (the popup itself is shown
    ///   later in `render_content`),
    /// - drag start publishes a `DraggingAsset` through `shared` so other
    ///   panes (Timeline/Stage) can accept the drop.
    ///
    /// While a drag is active, a small preview box follows the cursor on the
    /// tooltip layer.
    fn render_asset_list(
        &mut self,
        ui: &mut egui::Ui,
        rect: egui::Rect,
        shared: &mut SharedPaneState,
        assets: &[&AssetEntry],
    ) {
        // Background: themed panel color with a dark fallback.
        let bg_style = shared.theme.style(".panel-content", ui.ctx());
        let bg_color = bg_style
            .background_color
            .unwrap_or(egui::Color32::from_rgb(25, 25, 25));
        ui.painter().rect_filled(rect, 0.0, bg_color);
        // Text colors: primary comes from the theme, secondary is a fixed gray.
        let text_style = shared.theme.style(".text-primary", ui.ctx());
        let text_color = text_style
            .text_color
            .unwrap_or(egui::Color32::from_gray(200));
        let secondary_text_color = egui::Color32::from_gray(120);
        // Empty state: distinguish "filtered out" from "nothing in category".
        if assets.is_empty() {
            let message = if !self.search_filter.is_empty() {
                "No assets match your search"
            } else {
                "No assets in this category"
            };
            ui.painter().text(
                rect.center(),
                egui::Align2::CENTER_CENTER,
                message,
                egui::FontId::proportional(14.0),
                secondary_text_color,
            );
            return;
        }
        // Use egui's built-in ScrollArea for scrolling
        let scroll_area_rect = rect;
        ui.allocate_ui_at_rect(scroll_area_rect, |ui| {
            egui::ScrollArea::vertical()
                .auto_shrink([false, false])
                .show(ui, |ui| {
                    ui.set_min_width(scroll_area_rect.width() - 16.0); // Account for scrollbar
                    for asset in assets {
                        // One fixed-height row per asset; sense click and drag.
                        let (item_rect, response) = ui.allocate_exact_size(
                            egui::vec2(ui.available_width(), ITEM_HEIGHT),
                            egui::Sense::click_and_drag(),
                        );
                        let is_selected = self.selected_asset == Some(asset.id);
                        let is_being_dragged = shared
                            .dragging_asset
                            .as_ref()
                            .map(|d| d.clip_id == asset.id)
                            .unwrap_or(false);
                        // Item background priority: dragging > selected > hovered > idle.
                        let item_bg = if is_being_dragged {
                            egui::Color32::from_rgb(80, 100, 120) // Highlight when dragging
                        } else if is_selected {
                            egui::Color32::from_rgb(60, 80, 100)
                        } else if response.hovered() {
                            egui::Color32::from_rgb(45, 45, 45)
                        } else {
                            egui::Color32::from_rgb(35, 35, 35)
                        };
                        ui.painter().rect_filled(item_rect, 3.0, item_bg);
                        // Category color indicator bar along the left edge.
                        let indicator_color = asset.category.color();
                        let indicator_rect = egui::Rect::from_min_size(
                            item_rect.min,
                            egui::vec2(4.0, ITEM_HEIGHT),
                        );
                        ui.painter().rect_filled(indicator_rect, 0.0, indicator_color);
                        // Asset name (or inline edit field)
                        let is_renaming = self.rename_state.as_ref().map(|s| s.asset_id == asset.id).unwrap_or(false);
                        if is_renaming {
                            // Inline rename text field using IME-safe widget
                            let name_rect = egui::Rect::from_min_size(
                                item_rect.min + egui::vec2(10.0, 4.0),
                                egui::vec2(item_rect.width() - 20.0, 18.0),
                            );
                            if let Some(ref mut state) = self.rename_state {
                                let mut child_ui = ui.new_child(egui::UiBuilder::new().max_rect(name_rect));
                                ImeTextField::new(&mut state.edit_text)
                                    .font_size(13.0)
                                    .desired_width(name_rect.width())
                                    .request_focus()
                                    .show(&mut child_ui);
                            }
                        } else {
                            // Normal asset name display
                            ui.painter().text(
                                item_rect.min + egui::vec2(12.0, 8.0),
                                egui::Align2::LEFT_TOP,
                                &asset.name,
                                egui::FontId::proportional(13.0),
                                text_color,
                            );
                        }
                        // Metadata line (images don't have duration)
                        let metadata = if asset.category == AssetCategory::Images {
                            // For images, just show dimensions
                            asset.extra_info.clone()
                        } else if let Some((w, h)) = asset.dimensions {
                            format!(
                                "{:.1}s | {}x{} | {}",
                                asset.duration, w as u32, h as u32, asset.extra_info
                            )
                        } else {
                            // No dimensions (e.g. audio): duration + extra info only.
                            format!("{:.1}s | {}", asset.duration, asset.extra_info)
                        };
                        ui.painter().text(
                            item_rect.min + egui::vec2(12.0, 24.0),
                            egui::Align2::LEFT_TOP,
                            &metadata,
                            egui::FontId::proportional(10.0),
                            secondary_text_color,
                        );
                        // Handle click (selection)
                        if response.clicked() {
                            self.selected_asset = Some(asset.id);
                        }
                        // Handle right-click: remember the asset and cursor position;
                        // the popup itself is drawn in `render_content`.
                        if response.secondary_clicked() {
                            if let Some(pos) = ui.ctx().pointer_interact_pos() {
                                self.context_menu = Some(ContextMenuState {
                                    asset_id: asset.id,
                                    position: pos,
                                });
                            }
                        }
                        // Handle double-click (start rename)
                        if response.double_clicked() {
                            self.rename_state = Some(RenameState {
                                asset_id: asset.id,
                                category: asset.category,
                                edit_text: asset.name.clone(),
                            });
                        }
                        // Handle drag start: publish the drag so other panes can see it.
                        if response.drag_started() {
                            *shared.dragging_asset = Some(DraggingAsset {
                                clip_id: asset.id,
                                clip_type: asset.drag_clip_type,
                                name: asset.name.clone(),
                                duration: asset.duration,
                                dimensions: asset.dimensions,
                            });
                        }
                        // Add small spacing between items
                        ui.add_space(ITEM_PADDING);
                    }
                });
        });
        // Draw drag preview at cursor when dragging
        if let Some(dragging) = shared.dragging_asset.as_ref() {
            if let Some(pos) = ui.ctx().pointer_interact_pos() {
                // Draw a semi-transparent preview
                let preview_rect = egui::Rect::from_min_size(
                    pos + egui::vec2(10.0, 10.0), // Offset from cursor
                    egui::vec2(150.0, 30.0),
                );
                // Use top layer for drag preview
                let painter = ui.ctx().layer_painter(egui::LayerId::new(
                    egui::Order::Tooltip,
                    egui::Id::new("drag_preview"),
                ));
                painter.rect_filled(
                    preview_rect,
                    4.0,
                    egui::Color32::from_rgba_unmultiplied(60, 60, 60, 220),
                );
                painter.text(
                    preview_rect.center(),
                    egui::Align2::CENTER_CENTER,
                    &dragging.name,
                    egui::FontId::proportional(12.0),
                    egui::Color32::WHITE,
                );
            }
        }
        // Clear drag state when mouse is released (if not dropped on valid target)
        // Note: Valid drop targets (Timeline, Stage) will clear this themselves
        // NOTE(review): if the release happens outside both this pane and every
        // drop target, `dragging_asset` is never cleared here — confirm some
        // other code path always resets it in that case.
        if ui.input(|i| i.pointer.any_released()) {
            // Only clear if we're still within this pane (dropped back on library)
            if let Some(pos) = ui.ctx().pointer_interact_pos() {
                if rect.contains(pos) {
                    *shared.dragging_asset = None;
                }
            }
        }
    }
}
impl PaneRenderer for AssetLibraryPane {
    /// Render the whole pane: search bar on top, category tabs below it, and
    /// the asset list filling the remainder. Afterwards, service the transient
    /// UI states stored on `self`: the right-click context menu, the delete
    /// confirmation dialog, and the inline-rename keyboard shortcuts.
    fn render_content(
        &mut self,
        ui: &mut egui::Ui,
        rect: egui::Rect,
        _path: &NodePath,
        shared: &mut SharedPaneState,
    ) {
        let document = shared.action_executor.document();
        // Collect and filter assets
        let all_assets = self.collect_assets(document);
        let filtered_assets = self.filter_assets(&all_assets);
        // Layout: Search bar -> Category tabs -> Asset list (stacked vertically)
        let search_rect =
            egui::Rect::from_min_size(rect.min, egui::vec2(rect.width(), SEARCH_BAR_HEIGHT));
        let tabs_rect = egui::Rect::from_min_size(
            rect.min + egui::vec2(0.0, SEARCH_BAR_HEIGHT),
            egui::vec2(rect.width(), CATEGORY_TAB_HEIGHT),
        );
        let list_rect = egui::Rect::from_min_max(
            rect.min + egui::vec2(0.0, SEARCH_BAR_HEIGHT + CATEGORY_TAB_HEIGHT),
            rect.max,
        );
        // Render components
        self.render_search_bar(ui, search_rect, shared);
        self.render_category_tabs(ui, tabs_rect, shared);
        self.render_asset_list(ui, list_rect, shared, &filtered_assets);
        // Context menu handling (state was set by a right-click in the list).
        // Cloned so `self.context_menu` can be mutated inside the closures.
        if let Some(ref context_state) = self.context_menu.clone() {
            let context_asset_id = context_state.asset_id;
            let menu_pos = context_state.position;
            // Find the asset info
            if let Some(asset) = all_assets.iter().find(|a| a.id == context_asset_id) {
                let asset_name = asset.name.clone();
                let asset_category = asset.category;
                let in_use = Self::is_asset_in_use(
                    shared.action_executor.document(),
                    context_asset_id,
                    asset_category,
                );
                // Show context menu popup at the stored position
                let menu_id = egui::Id::new("asset_context_menu");
                let menu_response = egui::Area::new(menu_id)
                    .order(egui::Order::Foreground)
                    .fixed_pos(menu_pos)
                    .show(ui.ctx(), |ui| {
                        egui::Frame::popup(ui.style()).show(ui, |ui| {
                            ui.set_min_width(120.0);
                            if ui.button("Rename").clicked() {
                                // Start inline rename
                                self.rename_state = Some(RenameState {
                                    asset_id: context_asset_id,
                                    category: asset_category,
                                    edit_text: asset_name.clone(),
                                });
                                self.context_menu = None;
                            }
                            if ui.button("Delete").clicked() {
                                // Deletion is only staged here; it is confirmed
                                // (or cancelled) by the modal dialog below.
                                self.pending_delete = Some(PendingDelete {
                                    asset_id: context_asset_id,
                                    asset_name: asset_name.clone(),
                                    category: asset_category,
                                    in_use,
                                });
                                self.context_menu = None;
                            }
                        });
                    });
                // Close menu on click outside (using primary button release)
                let menu_rect = menu_response.response.rect;
                if ui.input(|i| i.pointer.primary_released()) {
                    if let Some(pos) = ui.ctx().pointer_interact_pos() {
                        if !menu_rect.contains(pos) {
                            self.context_menu = None;
                        }
                    }
                }
                // Also close on Escape
                if ui.input(|i| i.key_pressed(egui::Key::Escape)) {
                    self.context_menu = None;
                }
            } else {
                // The referenced asset no longer exists; drop the stale menu.
                self.context_menu = None;
            }
        }
        // Delete confirmation dialog (centered, modal-style window).
        if let Some(ref pending) = self.pending_delete.clone() {
            let window_id = egui::Id::new("delete_confirm_dialog");
            let mut should_close = false;
            let mut should_delete = false;
            egui::Window::new("Confirm Delete")
                .id(window_id)
                .collapsible(false)
                .resizable(false)
                .anchor(egui::Align2::CENTER_CENTER, egui::vec2(0.0, 0.0))
                .show(ui.ctx(), |ui| {
                    ui.set_min_width(300.0);
                    // Extra warning when the asset is still referenced somewhere.
                    if pending.in_use {
                        ui.label(egui::RichText::new("Warning: This asset is currently in use!")
                            .color(egui::Color32::from_rgb(255, 180, 100)));
                        ui.add_space(4.0);
                        ui.label("Deleting it will remove all clip instances that reference it.");
                        ui.add_space(8.0);
                    }
                    ui.label(format!("Are you sure you want to delete \"{}\"?", pending.asset_name));
                    ui.add_space(12.0);
                    ui.horizontal(|ui| {
                        if ui.button("Cancel").clicked() {
                            should_close = true;
                        }
                        ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                            let delete_text = if pending.in_use { "Delete Anyway" } else { "Delete" };
                            if ui.button(delete_text).clicked() {
                                should_delete = true;
                                should_close = true;
                            }
                        });
                    });
                });
            if should_delete {
                // Perform the delete
                Self::delete_asset(
                    shared.action_executor.document_mut(),
                    pending.asset_id,
                    pending.category,
                );
            }
            if should_close {
                self.pending_delete = None;
            }
        }
        // Handle rename state (Enter to confirm, Escape to cancel, click outside to confirm)
        // NOTE(review): only Enter/Escape are handled below; the click-outside
        // case mentioned above is not implemented here — confirm intent.
        if let Some(ref state) = self.rename_state.clone() {
            let mut should_confirm = false;
            let mut should_cancel = false;
            // Check for Enter or Escape
            // NOTE(review): raw input is polled directly; assumes the rename
            // text field does not consume these key events — confirm.
            ui.input(|i| {
                if i.key_pressed(egui::Key::Enter) {
                    should_confirm = true;
                } else if i.key_pressed(egui::Key::Escape) {
                    should_cancel = true;
                }
            });
            if should_confirm {
                // Commit only non-empty names; surrounding whitespace is trimmed.
                let new_name = state.edit_text.trim();
                if !new_name.is_empty() {
                    Self::rename_asset(
                        shared.action_executor.document_mut(),
                        state.asset_id,
                        state.category,
                        new_name,
                    );
                }
                self.rename_state = None;
            } else if should_cancel {
                self.rename_state = None;
            }
        }
    }
    /// Display name shown for this pane type.
    fn name(&self) -> &str {
        "Asset Library"
    }
}

View File

@ -5,6 +5,7 @@
use eframe::egui; use eframe::egui;
use lightningbeam_core::{pane::PaneType, tool::Tool}; use lightningbeam_core::{pane::PaneType, tool::Tool};
use uuid::Uuid;
// Type alias for node paths (matches main.rs) // Type alias for node paths (matches main.rs)
pub type NodePath = Vec<usize>; pub type NodePath = Vec<usize>;
@ -18,6 +19,37 @@ pub struct ViewActionHandler {
pub zoom_center: egui::Vec2, pub zoom_center: egui::Vec2,
} }
/// Clip type for drag-and-drop operations
/// Distinguishes between different clip/layer type combinations
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DragClipType {
/// Vector animation clip
Vector,
/// Video clip
Video,
/// Sampled audio clip (WAV, MP3, etc.)
AudioSampled,
/// MIDI clip
AudioMidi,
/// Static image asset
Image,
}
/// Information about an asset being dragged from the Asset Library
#[derive(Debug, Clone)]
pub struct DraggingAsset {
/// The clip ID being dragged
pub clip_id: Uuid,
/// Type of clip (determines compatible layer types)
pub clip_type: DragClipType,
/// Display name
pub name: String,
/// Duration in seconds
pub duration: f64,
/// Dimensions (width, height) for vector/video clips, None for audio
pub dimensions: Option<(f64, f64)>,
}
pub mod toolbar; pub mod toolbar;
pub mod stage; pub mod stage;
pub mod timeline; pub mod timeline;
@ -26,6 +58,7 @@ pub mod outliner;
pub mod piano_roll; pub mod piano_roll;
pub mod node_editor; pub mod node_editor;
pub mod preset_browser; pub mod preset_browser;
pub mod asset_library;
/// Which color mode is active for the eyedropper tool /// Which color mode is active for the eyedropper tool
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -78,6 +111,8 @@ pub struct SharedPaneState<'a> {
/// Global playback state /// Global playback state
pub playback_time: &'a mut f64, // Current playback position in seconds pub playback_time: &'a mut f64, // Current playback position in seconds
pub is_playing: &'a mut bool, // Whether playback is currently active pub is_playing: &'a mut bool, // Whether playback is currently active
/// Asset being dragged from Asset Library (for cross-pane drag-and-drop)
pub dragging_asset: &'a mut Option<DraggingAsset>,
} }
/// Trait for pane rendering /// Trait for pane rendering
@ -116,6 +151,7 @@ pub enum PaneInstance {
PianoRoll(piano_roll::PianoRollPane), PianoRoll(piano_roll::PianoRollPane),
NodeEditor(node_editor::NodeEditorPane), NodeEditor(node_editor::NodeEditorPane),
PresetBrowser(preset_browser::PresetBrowserPane), PresetBrowser(preset_browser::PresetBrowserPane),
AssetLibrary(asset_library::AssetLibraryPane),
} }
impl PaneInstance { impl PaneInstance {
@ -132,6 +168,9 @@ impl PaneInstance {
PaneType::PresetBrowser => { PaneType::PresetBrowser => {
PaneInstance::PresetBrowser(preset_browser::PresetBrowserPane::new()) PaneInstance::PresetBrowser(preset_browser::PresetBrowserPane::new())
} }
PaneType::AssetLibrary => {
PaneInstance::AssetLibrary(asset_library::AssetLibraryPane::new())
}
} }
} }
@ -146,6 +185,7 @@ impl PaneInstance {
PaneInstance::PianoRoll(_) => PaneType::PianoRoll, PaneInstance::PianoRoll(_) => PaneType::PianoRoll,
PaneInstance::NodeEditor(_) => PaneType::NodeEditor, PaneInstance::NodeEditor(_) => PaneType::NodeEditor,
PaneInstance::PresetBrowser(_) => PaneType::PresetBrowser, PaneInstance::PresetBrowser(_) => PaneType::PresetBrowser,
PaneInstance::AssetLibrary(_) => PaneType::AssetLibrary,
} }
} }
} }
@ -161,6 +201,7 @@ impl PaneRenderer for PaneInstance {
PaneInstance::PianoRoll(p) => p.render_header(ui, shared), PaneInstance::PianoRoll(p) => p.render_header(ui, shared),
PaneInstance::NodeEditor(p) => p.render_header(ui, shared), PaneInstance::NodeEditor(p) => p.render_header(ui, shared),
PaneInstance::PresetBrowser(p) => p.render_header(ui, shared), PaneInstance::PresetBrowser(p) => p.render_header(ui, shared),
PaneInstance::AssetLibrary(p) => p.render_header(ui, shared),
} }
} }
@ -180,6 +221,7 @@ impl PaneRenderer for PaneInstance {
PaneInstance::PianoRoll(p) => p.render_content(ui, rect, path, shared), PaneInstance::PianoRoll(p) => p.render_content(ui, rect, path, shared),
PaneInstance::NodeEditor(p) => p.render_content(ui, rect, path, shared), PaneInstance::NodeEditor(p) => p.render_content(ui, rect, path, shared),
PaneInstance::PresetBrowser(p) => p.render_content(ui, rect, path, shared), PaneInstance::PresetBrowser(p) => p.render_content(ui, rect, path, shared),
PaneInstance::AssetLibrary(p) => p.render_content(ui, rect, path, shared),
} }
} }
@ -193,6 +235,7 @@ impl PaneRenderer for PaneInstance {
PaneInstance::PianoRoll(p) => p.name(), PaneInstance::PianoRoll(p) => p.name(),
PaneInstance::NodeEditor(p) => p.name(), PaneInstance::NodeEditor(p) => p.name(),
PaneInstance::PresetBrowser(p) => p.name(), PaneInstance::PresetBrowser(p) => p.name(),
PaneInstance::AssetLibrary(p) => p.name(),
} }
} }
} }

View File

@ -3,16 +3,49 @@
/// Renders composited layers using Vello GPU renderer via egui callbacks. /// Renders composited layers using Vello GPU renderer via egui callbacks.
use eframe::egui; use eframe::egui;
use super::{NodePath, PaneRenderer, SharedPaneState}; use lightningbeam_core::action::Action;
use lightningbeam_core::clip::ClipInstance;
use lightningbeam_core::layer::{AnyLayer, AudioLayer, AudioLayerType, VideoLayer, VectorLayer};
use super::{DragClipType, NodePath, PaneRenderer, SharedPaneState};
use std::sync::{Arc, Mutex, OnceLock}; use std::sync::{Arc, Mutex, OnceLock};
use vello::kurbo::Shape; use vello::kurbo::Shape;
/// Check if a clip type matches a layer type
fn layer_matches_clip_type(layer: &AnyLayer, clip_type: DragClipType) -> bool {
match (layer, clip_type) {
(AnyLayer::Vector(_), DragClipType::Vector) => true,
(AnyLayer::Vector(_), DragClipType::Image) => true, // Images go on vector layers as shapes
(AnyLayer::Video(_), DragClipType::Video) => true,
(AnyLayer::Audio(audio), DragClipType::AudioSampled) => {
audio.audio_layer_type == AudioLayerType::Sampled
}
(AnyLayer::Audio(audio), DragClipType::AudioMidi) => {
audio.audio_layer_type == AudioLayerType::Midi
}
_ => false,
}
}
/// Create a new layer of the appropriate type for a clip
fn create_layer_for_clip_type(clip_type: DragClipType, name: &str) -> AnyLayer {
match clip_type {
DragClipType::Vector => AnyLayer::Vector(VectorLayer::new(name)),
DragClipType::Video => AnyLayer::Video(VideoLayer::new(name)),
DragClipType::AudioSampled => AnyLayer::Audio(AudioLayer::new_sampled(name)),
DragClipType::AudioMidi => AnyLayer::Audio(AudioLayer::new_midi(name)),
// Images are placed as shapes on vector layers, not their own layer type
DragClipType::Image => AnyLayer::Vector(VectorLayer::new(name)),
}
}
/// Shared Vello resources (created once, reused by all Stage panes) /// Shared Vello resources (created once, reused by all Stage panes)
struct SharedVelloResources { struct SharedVelloResources {
renderer: Arc<Mutex<vello::Renderer>>, renderer: Arc<Mutex<vello::Renderer>>,
blit_pipeline: wgpu::RenderPipeline, blit_pipeline: wgpu::RenderPipeline,
blit_bind_group_layout: wgpu::BindGroupLayout, blit_bind_group_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler, sampler: wgpu::Sampler,
/// Shared image cache for avoiding re-decoding images every frame
image_cache: Mutex<lightningbeam_core::renderer::ImageCache>,
} }
/// Per-instance Vello resources (created for each Stage pane) /// Per-instance Vello resources (created for each Stage pane)
@ -33,10 +66,10 @@ impl SharedVelloResources {
let renderer = vello::Renderer::new( let renderer = vello::Renderer::new(
device, device,
vello::RendererOptions { vello::RendererOptions {
surface_format: None,
use_cpu: false, use_cpu: false,
antialiasing_support: vello::AaSupport::all(), antialiasing_support: vello::AaSupport::all(),
num_init_threads: std::num::NonZeroUsize::new(1), num_init_threads: std::num::NonZeroUsize::new(1),
pipeline_cache: None,
}, },
).map_err(|e| format!("Failed to create Vello renderer: {}", e))?; ).map_err(|e| format!("Failed to create Vello renderer: {}", e))?;
@ -82,13 +115,13 @@ impl SharedVelloResources {
layout: Some(&pipeline_layout), layout: Some(&pipeline_layout),
vertex: wgpu::VertexState { vertex: wgpu::VertexState {
module: &shader, module: &shader,
entry_point: "vs_main", entry_point: Some("vs_main"),
buffers: &[], buffers: &[],
compilation_options: Default::default(), compilation_options: Default::default(),
}, },
fragment: Some(wgpu::FragmentState { fragment: Some(wgpu::FragmentState {
module: &shader, module: &shader,
entry_point: "fs_main", entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState { targets: &[Some(wgpu::ColorTargetState {
format: wgpu::TextureFormat::Rgba8Unorm, // egui's target format format: wgpu::TextureFormat::Rgba8Unorm, // egui's target format
blend: Some(wgpu::BlendState::ALPHA_BLENDING), blend: Some(wgpu::BlendState::ALPHA_BLENDING),
@ -130,6 +163,7 @@ impl SharedVelloResources {
blit_pipeline, blit_pipeline,
blit_bind_group_layout, blit_bind_group_layout,
sampler, sampler,
image_cache: Mutex::new(lightningbeam_core::renderer::ImageCache::new()),
}) })
} }
} }
@ -283,7 +317,9 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
* Affine::scale(self.zoom as f64); * Affine::scale(self.zoom as f64);
// Render the document to the scene with camera transform // Render the document to the scene with camera transform
lightningbeam_core::renderer::render_document_with_transform(&self.document, &mut scene, camera_transform); let mut image_cache = shared.image_cache.lock().unwrap();
lightningbeam_core::renderer::render_document_with_transform(&self.document, &mut scene, camera_transform, &mut image_cache);
drop(image_cache); // Explicitly release lock before other operations
// Render drag preview objects with transparency // Render drag preview objects with transparency
if let (Some(delta), Some(active_layer_id)) = (self.drag_delta, self.active_layer_id) { if let (Some(delta), Some(active_layer_id)) = (self.drag_delta, self.active_layer_id) {
@ -306,7 +342,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let combined_transform = camera_transform * object_transform; let combined_transform = camera_transform * object_transform;
// Render shape with semi-transparent fill (light blue, 40% opacity) // Render shape with semi-transparent fill (light blue, 40% opacity)
let alpha_color = Color::rgba8(100, 150, 255, 100); let alpha_color = Color::from_rgba8(100, 150, 255, 100);
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
combined_transform, combined_transform,
@ -339,7 +375,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
}; };
// Draw preview outline // Draw preview outline
let alpha_color = Color::rgba8(255, 150, 100, 150); // Orange, semi-transparent let alpha_color = Color::from_rgba8(255, 150, 100, 150); // Orange, semi-transparent
let stroke_width = 2.0 / self.zoom.max(0.5) as f64; let stroke_width = 2.0 / self.zoom.max(0.5) as f64;
scene.stroke( scene.stroke(
&Stroke::new(stroke_width), &Stroke::new(stroke_width),
@ -362,7 +398,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
use vello::peniko::{Color, Fill}; use vello::peniko::{Color, Fill};
use vello::kurbo::{Circle, Rect as KurboRect, Shape as KurboShape, Stroke}; use vello::kurbo::{Circle, Rect as KurboRect, Shape as KurboShape, Stroke};
let selection_color = Color::rgb8(0, 120, 255); // Blue let selection_color = Color::from_rgb8(0, 120, 255); // Blue
let stroke_width = 2.0 / self.zoom.max(0.5) as f64; let stroke_width = 2.0 / self.zoom.max(0.5) as f64;
// 1. Draw selection outlines around selected objects // 1. Draw selection outlines around selected objects
@ -413,7 +449,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
combined_transform, combined_transform,
Color::rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&corner_circle, &corner_circle,
); );
@ -444,7 +480,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let combined_transform = camera_transform * clip_transform; let combined_transform = camera_transform * clip_transform;
// Draw selection outline with different color for clip instances // Draw selection outline with different color for clip instances
let clip_selection_color = Color::rgb8(255, 120, 0); // Orange let clip_selection_color = Color::from_rgb8(255, 120, 0); // Orange
scene.stroke( scene.stroke(
&Stroke::new(stroke_width), &Stroke::new(stroke_width),
combined_transform, combined_transform,
@ -476,7 +512,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
combined_transform, combined_transform,
Color::rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&corner_circle, &corner_circle,
); );
@ -495,7 +531,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
); );
// Semi-transparent fill // Semi-transparent fill
let marquee_fill = Color::rgba8(0, 120, 255, 100); let marquee_fill = Color::from_rgba8(0, 120, 255, 100);
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, camera_transform,
@ -564,7 +600,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let preview_transform = camera_transform * Affine::translate((position.x, position.y)); let preview_transform = camera_transform * Affine::translate((position.x, position.y));
// Use actual fill color (same as final shape) // Use actual fill color (same as final shape)
let fill_color = Color::rgba8( let fill_color = Color::from_rgba8(
self.fill_color.r(), self.fill_color.r(),
self.fill_color.g(), self.fill_color.g(),
self.fill_color.b(), self.fill_color.b(),
@ -622,7 +658,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let preview_transform = camera_transform * Affine::translate((position.x, position.y)); let preview_transform = camera_transform * Affine::translate((position.x, position.y));
// Use actual fill color (same as final shape) // Use actual fill color (same as final shape)
let fill_color = Color::rgba8( let fill_color = Color::from_rgba8(
self.fill_color.r(), self.fill_color.r(),
self.fill_color.g(), self.fill_color.g(),
self.fill_color.b(), self.fill_color.b(),
@ -665,7 +701,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
if length > 0.0 { if length > 0.0 {
// Use actual stroke color for line preview // Use actual stroke color for line preview
let stroke_color = Color::rgba8( let stroke_color = Color::from_rgba8(
self.stroke_color.r(), self.stroke_color.r(),
self.stroke_color.g(), self.stroke_color.g(),
self.stroke_color.b(), self.stroke_color.b(),
@ -698,7 +734,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
let preview_transform = camera_transform * Affine::translate((center.x, center.y)); let preview_transform = camera_transform * Affine::translate((center.x, center.y));
// Use actual fill color (same as final shape) // Use actual fill color (same as final shape)
let fill_color = Color::rgba8( let fill_color = Color::from_rgba8(
self.fill_color.r(), self.fill_color.r(),
self.fill_color.g(), self.fill_color.g(),
self.fill_color.b(), self.fill_color.b(),
@ -749,7 +785,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Draw the preview path with stroke // Draw the preview path with stroke
let stroke_width = (2.0 / self.zoom.max(0.5) as f64).max(1.0); let stroke_width = (2.0 / self.zoom.max(0.5) as f64).max(1.0);
let stroke_color = Color::rgb8( let stroke_color = Color::from_rgb8(
self.stroke_color.r(), self.stroke_color.r(),
self.stroke_color.g(), self.stroke_color.g(),
self.stroke_color.b(), self.stroke_color.b(),
@ -783,7 +819,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
if let Some(object) = vector_layer.get_object(&object_id) { if let Some(object) = vector_layer.get_object(&object_id) {
if let Some(shape) = vector_layer.get_shape(&object.shape_id) { if let Some(shape) = vector_layer.get_shape(&object.shape_id) {
let handle_size = (8.0 / self.zoom.max(0.5) as f64).max(6.0); let handle_size = (8.0 / self.zoom.max(0.5) as f64).max(6.0);
let handle_color = Color::rgb8(0, 120, 255); // Blue let handle_color = Color::from_rgb8(0, 120, 255); // Blue
let rotation_handle_offset = 20.0 / self.zoom.max(0.5) as f64; let rotation_handle_offset = 20.0 / self.zoom.max(0.5) as f64;
// Get shape's local bounding box // Get shape's local bounding box
@ -876,7 +912,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, camera_transform,
Color::rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&handle_rect, &handle_rect,
); );
@ -906,7 +942,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, camera_transform,
Color::rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&edge_circle, &edge_circle,
); );
@ -931,7 +967,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.fill( scene.fill(
Fill::NonZero, Fill::NonZero,
camera_transform, camera_transform,
Color::rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&rotation_circle, &rotation_circle,
); );
@ -940,7 +976,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, camera_transform,
Color::rgb8(255, 255, 255), Color::from_rgb8(255, 255, 255),
None, None,
&rotation_circle, &rotation_circle,
); );
@ -956,7 +992,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
scene.stroke( scene.stroke(
&Stroke::new(1.0), &Stroke::new(1.0),
camera_transform, camera_transform,
Color::rgb8(50, 200, 50), Color::from_rgb8(50, 200, 50),
None, None,
&line_path, &line_path,
); );
@ -985,7 +1021,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
if let Some(bbox) = combined_bbox { if let Some(bbox) = combined_bbox {
let handle_size = (8.0 / self.zoom.max(0.5) as f64).max(6.0); let handle_size = (8.0 / self.zoom.max(0.5) as f64).max(6.0);
let handle_color = Color::rgb8(0, 120, 255); let handle_color = Color::from_rgb8(0, 120, 255);
let rotation_handle_offset = 20.0 / self.zoom.max(0.5) as f64; let rotation_handle_offset = 20.0 / self.zoom.max(0.5) as f64;
scene.stroke(&Stroke::new(stroke_width), camera_transform, handle_color, None, &bbox); scene.stroke(&Stroke::new(stroke_width), camera_transform, handle_color, None, &bbox);
@ -1003,7 +1039,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
corner.x + handle_size / 2.0, corner.y + handle_size / 2.0, corner.x + handle_size / 2.0, corner.y + handle_size / 2.0,
); );
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &handle_rect); scene.fill(Fill::NonZero, camera_transform, handle_color, None, &handle_rect);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::rgb8(255, 255, 255), None, &handle_rect); scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &handle_rect);
} }
let edges = [ let edges = [
@ -1016,13 +1052,13 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
for edge in &edges { for edge in &edges {
let edge_circle = Circle::new(*edge, handle_size / 2.0); let edge_circle = Circle::new(*edge, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, handle_color, None, &edge_circle); scene.fill(Fill::NonZero, camera_transform, handle_color, None, &edge_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::rgb8(255, 255, 255), None, &edge_circle); scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &edge_circle);
} }
let rotation_handle_pos = vello::kurbo::Point::new(bbox.center().x, bbox.y0 - rotation_handle_offset); let rotation_handle_pos = vello::kurbo::Point::new(bbox.center().x, bbox.y0 - rotation_handle_offset);
let rotation_circle = Circle::new(rotation_handle_pos, handle_size / 2.0); let rotation_circle = Circle::new(rotation_handle_pos, handle_size / 2.0);
scene.fill(Fill::NonZero, camera_transform, Color::rgb8(50, 200, 50), None, &rotation_circle); scene.fill(Fill::NonZero, camera_transform, Color::from_rgb8(50, 200, 50), None, &rotation_circle);
scene.stroke(&Stroke::new(1.0), camera_transform, Color::rgb8(255, 255, 255), None, &rotation_circle); scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(255, 255, 255), None, &rotation_circle);
let line_path = { let line_path = {
let mut path = vello::kurbo::BezPath::new(); let mut path = vello::kurbo::BezPath::new();
@ -1030,7 +1066,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
path.line_to(vello::kurbo::Point::new(bbox.center().x, bbox.y0)); path.line_to(vello::kurbo::Point::new(bbox.center().x, bbox.y0));
path path
}; };
scene.stroke(&Stroke::new(1.0), camera_transform, Color::rgb8(50, 200, 50), None, &line_path); scene.stroke(&Stroke::new(1.0), camera_transform, Color::from_rgb8(50, 200, 50), None, &line_path);
} }
} }
} }
@ -1041,7 +1077,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Render scene to texture using shared renderer // Render scene to texture using shared renderer
if let Some(texture_view) = &instance_resources.texture_view { if let Some(texture_view) = &instance_resources.texture_view {
let render_params = vello::RenderParams { let render_params = vello::RenderParams {
base_color: vello::peniko::Color::rgb8(45, 45, 48), // Dark background base_color: vello::peniko::Color::from_rgb8(45, 45, 48), // Dark background
width, width,
height, height,
antialiasing_method: vello::AaConfig::Msaa16, antialiasing_method: vello::AaConfig::Msaa16,
@ -4020,6 +4056,134 @@ impl PaneRenderer for StagePane {
// Handle input for pan/zoom and tool controls // Handle input for pan/zoom and tool controls
self.handle_input(ui, rect, shared); self.handle_input(ui, rect, shared);
// Handle asset drag-and-drop from Asset Library
if let Some(dragging) = shared.dragging_asset.clone() {
if let Some(pointer_pos) = ui.ctx().pointer_interact_pos() {
// Check if pointer is over the stage
if rect.contains(pointer_pos) {
// Visual feedback: draw ghost preview at cursor
let preview_size = egui::vec2(60.0, 40.0);
let preview_rect = egui::Rect::from_center_size(pointer_pos, preview_size);
ui.painter().rect_filled(
preview_rect,
4.0,
egui::Color32::from_rgba_unmultiplied(100, 150, 255, 100),
);
ui.painter().rect_stroke(
preview_rect,
4.0,
egui::Stroke::new(2.0, egui::Color32::WHITE),
egui::StrokeKind::Middle,
);
ui.painter().text(
preview_rect.center(),
egui::Align2::CENTER_CENTER,
&dragging.name,
egui::FontId::proportional(10.0),
egui::Color32::WHITE,
);
// Handle drop on mouse release
if ui.input(|i| i.pointer.any_released()) {
// Convert screen position to world coordinates
let canvas_pos = pointer_pos - rect.min;
let world_pos = (canvas_pos - self.pan_offset) / self.zoom;
// Use playhead time
let drop_time = *shared.playback_time;
// Find or create a compatible layer
let document = shared.action_executor.document();
let mut target_layer_id = None;
// Check if active layer is compatible
if let Some(active_id) = shared.active_layer_id {
if let Some(layer) = document.get_layer(active_id) {
if layer_matches_clip_type(layer, dragging.clip_type) {
target_layer_id = Some(*active_id);
}
}
}
// If no compatible active layer, we need to create a new layer
if target_layer_id.is_none() {
// Create new layer
let layer_name = format!("{} Layer", match dragging.clip_type {
DragClipType::Vector => "Vector",
DragClipType::Video => "Video",
DragClipType::AudioSampled => "Audio",
DragClipType::AudioMidi => "MIDI",
DragClipType::Image => "Image",
});
let new_layer = create_layer_for_clip_type(dragging.clip_type, &layer_name);
// Create add layer action
let mut add_layer_action = lightningbeam_core::actions::AddLayerAction::new(new_layer);
// Execute immediately to get the layer ID
add_layer_action.execute(shared.action_executor.document_mut());
target_layer_id = add_layer_action.created_layer_id();
// Update active layer to the new layer
if let Some(layer_id) = target_layer_id {
*shared.active_layer_id = Some(layer_id);
}
}
// Add clip instance or shape to the target layer
if let Some(layer_id) = target_layer_id {
// For images, create a shape with image fill instead of a clip instance
if dragging.clip_type == DragClipType::Image {
// Get image dimensions (from the dragging info)
let (width, height) = dragging.dimensions.unwrap_or((100.0, 100.0));
// Create a rectangle path at the origin (position handled by transform)
use kurbo::BezPath;
let mut path = BezPath::new();
path.move_to((0.0, 0.0));
path.line_to((width, 0.0));
path.line_to((width, height));
path.line_to((0.0, height));
path.close_path();
// Create shape with image fill (references the ImageAsset)
use lightningbeam_core::shape::Shape;
let shape = Shape::new(path).with_image_fill(dragging.clip_id);
// Create shape instance at drop position
use lightningbeam_core::object::ShapeInstance;
let shape_instance = ShapeInstance::new(shape.id)
.with_position(world_pos.x as f64, world_pos.y as f64);
// Create and queue action
let action = lightningbeam_core::actions::AddShapeAction::new(
layer_id,
shape,
shape_instance,
);
shared.pending_actions.push(Box::new(action));
} else {
// For clips, create a clip instance
let clip_instance = ClipInstance::new(dragging.clip_id)
.with_timeline_start(drop_time)
.with_position(world_pos.x as f64, world_pos.y as f64);
// Create and queue action
let action = lightningbeam_core::actions::AddClipInstanceAction::new(
layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
}
}
// Clear drag state
*shared.dragging_asset = None;
}
}
}
}
// Register handler for pending view actions (two-phase dispatch) // Register handler for pending view actions (two-phase dispatch)
// Priority: Mouse-over (0-99) > Fallback Stage(1000) > Fallback Timeline(1001) etc. // Priority: Mouse-over (0-99) > Fallback Stage(1000) > Fallback Timeline(1001) etc.
const STAGE_MOUSE_OVER_PRIORITY: u32 = 0; const STAGE_MOUSE_OVER_PRIORITY: u32 = 0;

View File

@ -7,8 +7,9 @@
/// - Basic layer visualization /// - Basic layer visualization
use eframe::egui; use eframe::egui;
use lightningbeam_core::layer::LayerTrait; use lightningbeam_core::clip::ClipInstance;
use super::{NodePath, PaneRenderer, SharedPaneState}; use lightningbeam_core::layer::{AnyLayer, AudioLayerType, LayerTrait};
use super::{DragClipType, NodePath, PaneRenderer, SharedPaneState};
const RULER_HEIGHT: f32 = 30.0; const RULER_HEIGHT: f32 = 30.0;
const LAYER_HEIGHT: f32 = 60.0; const LAYER_HEIGHT: f32 = 60.0;
@ -56,6 +57,21 @@ pub struct TimelinePane {
layer_control_clicked: bool, layer_control_clicked: bool,
} }
/// Whether an asset of `clip_type` may be dropped onto `layer`.
///
/// Vector and video clips only land on layers of the matching kind; audio
/// clips additionally require the audio layer's sub-type (sampled vs. MIDI)
/// to agree. Every other pairing — including images — is rejected here.
fn can_drop_on_layer(layer: &AnyLayer, clip_type: DragClipType) -> bool {
    match (layer, clip_type) {
        // Exact layer/clip kind matches with no further conditions.
        (AnyLayer::Vector(_), DragClipType::Vector)
        | (AnyLayer::Video(_), DragClipType::Video) => true,
        // Audio layers must also agree on sampled vs. MIDI.
        (AnyLayer::Audio(audio), DragClipType::AudioSampled) => {
            matches!(audio.audio_layer_type, AudioLayerType::Sampled)
        }
        (AnyLayer::Audio(audio), DragClipType::AudioMidi) => {
            matches!(audio.audio_layer_type, AudioLayerType::Midi)
        }
        // Anything else is not droppable on the timeline.
        _ => false,
    }
}
impl TimelinePane { impl TimelinePane {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@ -780,6 +796,7 @@ impl TimelinePane {
clip_rect, clip_rect,
3.0, 3.0,
egui::Stroke::new(3.0, bright_color), egui::Stroke::new(3.0, bright_color),
egui::StrokeKind::Middle,
); );
} }
@ -1543,6 +1560,79 @@ impl PaneRenderer for TimelinePane {
shared.audio_controller.as_mut().map(|c| &mut **c), shared.audio_controller.as_mut().map(|c| &mut **c),
); );
// Handle asset drag-and-drop from Asset Library
if let Some(dragging) = shared.dragging_asset.as_ref() {
if let Some(pointer_pos) = ui.ctx().pointer_interact_pos() {
// Check if pointer is in content area (not ruler or header column)
if content_rect.contains(pointer_pos) {
// Calculate which layer the pointer is over
let relative_y = pointer_pos.y - content_rect.min.y + self.viewport_scroll_y;
let hovered_layer_index = (relative_y / LAYER_HEIGHT) as usize;
// Get the layer at this index (accounting for reversed display order)
let layers: Vec<_> = document.root.children.iter().rev().collect();
if let Some(layer) = layers.get(hovered_layer_index) {
let is_compatible = can_drop_on_layer(layer, dragging.clip_type);
// Visual feedback: highlight compatible tracks
let layer_y = content_rect.min.y + hovered_layer_index as f32 * LAYER_HEIGHT - self.viewport_scroll_y;
let highlight_rect = egui::Rect::from_min_size(
egui::pos2(content_rect.min.x, layer_y),
egui::vec2(content_rect.width(), LAYER_HEIGHT),
);
let highlight_color = if is_compatible {
egui::Color32::from_rgba_unmultiplied(100, 255, 100, 40) // Green
} else {
egui::Color32::from_rgba_unmultiplied(255, 100, 100, 40) // Red
};
ui.painter().rect_filled(highlight_rect, 0.0, highlight_color);
// Show drop time indicator
let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x);
let drop_x = self.time_to_x(drop_time);
if drop_x >= 0.0 && drop_x <= content_rect.width() {
ui.painter().line_segment(
[
egui::pos2(content_rect.min.x + drop_x, layer_y),
egui::pos2(content_rect.min.x + drop_x, layer_y + LAYER_HEIGHT),
],
egui::Stroke::new(2.0, egui::Color32::WHITE),
);
}
// Handle drop on mouse release
if ui.input(|i| i.pointer.any_released()) && is_compatible {
let layer_id = layer.id();
let drop_time = self.x_to_time(pointer_pos.x - content_rect.min.x).max(0.0);
// Get document dimensions for centering
let doc = shared.action_executor.document();
let center_x = doc.width / 2.0;
let center_y = doc.height / 2.0;
// Create clip instance centered on stage, at drop time
let clip_instance = ClipInstance::new(dragging.clip_id)
.with_timeline_start(drop_time)
.with_position(center_x, center_y);
// Create and queue action
let action = lightningbeam_core::actions::AddClipInstanceAction::new(
layer_id,
clip_instance,
);
shared.pending_actions.push(Box::new(action));
// Clear drag state
*shared.dragging_asset = None;
}
}
}
}
}
// Register handler for pending view actions (two-phase dispatch) // Register handler for pending view actions (two-phase dispatch)
// Priority: Mouse-over (0-99) > Fallback Timeline(1001) // Priority: Mouse-over (0-99) > Fallback Timeline(1001)
const TIMELINE_MOUSE_OVER_PRIORITY: u32 = 0; const TIMELINE_MOUSE_OVER_PRIORITY: u32 = 0;

View File

@ -97,6 +97,7 @@ impl PaneRenderer for ToolbarPane {
button_rect, button_rect,
4.0, 4.0,
egui::Stroke::new(2.0, egui::Color32::from_gray(180)), egui::Stroke::new(2.0, egui::Color32::from_gray(180)),
egui::StrokeKind::Middle,
); );
} }
@ -109,6 +110,7 @@ impl PaneRenderer for ToolbarPane {
button_rect, button_rect,
4.0, 4.0,
egui::Stroke::new(2.0, egui::Color32::from_rgb(100, 150, 255)), egui::Stroke::new(2.0, egui::Color32::from_rgb(100, 150, 255)),
egui::StrokeKind::Middle,
); );
} }
@ -239,5 +241,6 @@ fn draw_color_button(ui: &mut egui::Ui, rect: egui::Rect, color: egui::Color32)
rect, rect,
2.0, 2.0,
egui::Stroke::new(1.0, egui::Color32::from_gray(80)), egui::Stroke::new(1.0, egui::Color32::from_gray(80)),
egui::StrokeKind::Middle,
); );
} }

View File

@ -0,0 +1,5 @@
//! Reusable UI widgets for the editor
mod text_field;
pub use text_field::ImeTextField;

View File

@ -0,0 +1,264 @@
//! Custom text field widget with IME workaround
//!
//! WORKAROUND for IBus Wayland bug (egui issue #7485):
//! https://github.com/emilk/egui/issues/7485
//!
//! IBus on Wayland only delivers one character through normal TextEdit handling.
//! This widget renders a custom text field and handles all input manually.
//!
//! TODO: Remove this workaround once the upstream issue is fixed.
use eframe::egui;
/// Convert egui Key to character for manual text input handling.
/// Uses egui's name()/symbol_or_name() where possible, with shift handling
/// for uppercase letters and shifted symbols (US keyboard layout).
///
/// Returns `None` for non-printable keys (arrows, function keys, etc.).
fn key_to_char(key: egui::Key, shift: bool) -> Option<char> {
    let symbol = key.symbol_or_name();
    // If it's a single character, we can use it (with shift handling)
    if symbol.chars().count() == 1 {
        // count() == 1 above guarantees next() succeeds.
        let c = symbol.chars().next().unwrap();
        // Handle letters - apply shift for case
        if c.is_ascii_alphabetic() {
            return Some(if shift { c.to_ascii_uppercase() } else { c.to_ascii_lowercase() });
        }
        // Handle digits with shift -> symbols (US keyboard layout)
        if c.is_ascii_digit() && shift {
            return Some(match c {
                '0' => ')',
                '1' => '!',
                '2' => '@',
                '3' => '#',
                '4' => '$',
                '5' => '%',
                '6' => '^',
                '7' => '&',
                '8' => '*',
                '9' => '(',
                _ => c, // unreachable for ASCII digits; kept as a safe fallback
            });
        }
        // Handle punctuation with shift (US keyboard layout)
        if shift {
            return Some(match c {
                // ASCII hyphen-minus, or U+2212 MINUS SIGN, which egui uses as
                // the symbol for Key::Minus. Written as an escape so the
                // otherwise-invisible non-ASCII character is explicit in source.
                '-' | '\u{2212}' => '_',
                '=' => '+',
                '[' => '{',
                ']' => '}',
                '\\' => '|',
                ';' => ':',
                '\'' => '"',
                ',' => '<',
                '.' => '>',
                '/' => '?',
                '`' => '~',
                // Shift does not change this character on a US layout.
                _ => c,
            });
        }
        // No shift: the symbol character is the typed character as-is.
        return Some(c);
    }
    // Special case: Space returns "Space" not " ", so it fails the
    // single-character check above and must be handled explicitly.
    if matches!(key, egui::Key::Space) {
        return Some(' ');
    }
    None // Non-printable keys (arrows, function keys, etc.)
}
/// Response from the IME text field widget
///
/// Returned by `ImeTextField::show`; the flags describe what happened
/// during this frame's input handling.
pub struct ImeTextFieldResponse {
    /// The egui response for the text field area
    pub response: egui::Response,
    /// Whether the text was changed (a character was typed or deleted)
    pub changed: bool,
    /// Whether Enter was pressed (for single-line fields)
    pub submitted: bool,
    /// Whether Escape was pressed (the field also gives up focus)
    pub cancelled: bool,
}
/// A text field widget that works around IBus Wayland IME issues.
///
/// This is a temporary workaround for egui issue #7485. Use this instead of
/// `egui::TextEdit` when you need text input to work on Wayland with IBus.
///
/// Builder-style API: construct with `ImeTextField::new`, chain the
/// configuration methods, then call `show(ui)`.
pub struct ImeTextField<'a> {
    // Buffer edited in place; the caller owns the String.
    text: &'a mut String,
    // Dim hint text drawn when `text` is empty and the field is unfocused.
    placeholder: Option<&'a str>,
    // Font size for the text (default 14.0; field height is font_size + 8.0).
    font_size: f32,
    // Fixed field width; None means use the full available width.
    desired_width: Option<f32>,
    // When true, the field grabs keyboard focus this frame.
    request_focus: bool,
}
impl<'a> ImeTextField<'a> {
    /// Create a new text field widget editing `text` in place.
    pub fn new(text: &'a mut String) -> Self {
        Self {
            text,
            placeholder: None,
            font_size: 14.0,
            desired_width: None,
            request_focus: false,
        }
    }
    /// Set placeholder text shown when the field is empty and unfocused
    pub fn placeholder(mut self, placeholder: &'a str) -> Self {
        self.placeholder = Some(placeholder);
        self
    }
    /// Set the font size (default: 14.0)
    pub fn font_size(mut self, size: f32) -> Self {
        self.font_size = size;
        self
    }
    /// Set the desired width of the field
    pub fn desired_width(mut self, width: f32) -> Self {
        self.desired_width = Some(width);
        self
    }
    /// Request focus on this field
    pub fn request_focus(mut self) -> Self {
        self.request_focus = true;
        self
    }
    /// Show the text field widget
    ///
    /// Allocates the field, draws the background/text/cursor, and processes
    /// keyboard events manually instead of using `egui::TextEdit` (which
    /// loses input under IBus on Wayland — see module docs). Editing is
    /// end-of-string only: typed characters are appended and Backspace pops
    /// the last character; there is no movable cursor or selection.
    pub fn show(self, ui: &mut egui::Ui) -> ImeTextFieldResponse {
        // Field height = font size plus 8px of vertical padding.
        let desired_size = egui::vec2(
            self.desired_width.unwrap_or(ui.available_width()),
            self.font_size + 8.0,
        );
        let (rect, response) = ui.allocate_exact_size(desired_size, egui::Sense::click());
        let id = response.id;
        // Handle click to focus
        if response.clicked() {
            ui.memory_mut(|m| m.request_focus(id));
        }
        // Handle focus request (programmatic, via the builder)
        if self.request_focus {
            ui.memory_mut(|m| m.request_focus(id));
        }
        let has_focus = ui.memory(|m| m.has_focus(id));
        // Draw the text field background: slightly lighter fill and a blue
        // border while focused, dark gray otherwise.
        let bg_color = if has_focus {
            egui::Color32::from_rgb(50, 50, 55)
        } else {
            egui::Color32::from_rgb(40, 40, 45)
        };
        let stroke = if has_focus {
            egui::Stroke::new(1.0, egui::Color32::from_rgb(100, 150, 255))
        } else {
            egui::Stroke::new(1.0, egui::Color32::from_rgb(60, 60, 65))
        };
        ui.painter().rect(rect, 3.0, bg_color, stroke, egui::StrokeKind::Middle);
        // Draw the text or placeholder, left-aligned and vertically centered.
        let text_pos = rect.min + egui::vec2(6.0, (rect.height() - self.font_size) / 2.0);
        if self.text.is_empty() && !has_focus {
            // Empty and unfocused: show the placeholder (if any) in dim gray.
            if let Some(placeholder) = self.placeholder {
                ui.painter().text(
                    text_pos,
                    egui::Align2::LEFT_TOP,
                    placeholder,
                    egui::FontId::proportional(self.font_size),
                    egui::Color32::from_gray(100),
                );
            }
        } else {
            ui.painter().text(
                text_pos,
                egui::Align2::LEFT_TOP,
                self.text.as_str(),
                egui::FontId::proportional(self.font_size),
                egui::Color32::from_gray(220),
            );
            // Draw cursor when focused. The cursor always sits just past the
            // last character (end-of-string editing only).
            if has_focus {
                // Measure the rendered text width to place the cursor.
                let text_width = ui.painter().layout_no_wrap(
                    self.text.clone(),
                    egui::FontId::proportional(self.font_size),
                    egui::Color32::WHITE,
                ).rect.width();
                let cursor_x = text_pos.x + text_width + 1.0;
                // 2 Hz blink derived from wall-clock time: visible half the cycle.
                let blink = (ui.input(|i| i.time) * 2.0).fract() < 0.5;
                if blink {
                    ui.painter().line_segment(
                        [
                            egui::pos2(cursor_x, rect.min.y + 4.0),
                            egui::pos2(cursor_x, rect.max.y - 4.0),
                        ],
                        egui::Stroke::new(1.0, egui::Color32::WHITE),
                    );
                }
                ui.ctx().request_repaint(); // For cursor blinking
            }
        }
        // Handle keyboard input when focused (raw Key events, not Event::Text —
        // that is the whole point of the IBus workaround; see module docs).
        let mut changed = false;
        let mut submitted = false;
        let mut cancelled = false;
        if has_focus {
            ui.input(|i| {
                for event in &i.events {
                    if let egui::Event::Key { key, pressed: true, modifiers, .. } = event {
                        // Skip if modifier keys are held (except shift), so
                        // shortcuts like Ctrl+S are not typed into the field.
                        if modifiers.ctrl || modifiers.alt || modifiers.command {
                            continue;
                        }
                        match key {
                            egui::Key::Backspace => {
                                // Delete the last character only.
                                if !self.text.is_empty() {
                                    self.text.pop();
                                    changed = true;
                                }
                            }
                            egui::Key::Enter => {
                                submitted = true;
                            }
                            egui::Key::Escape => {
                                cancelled = true;
                            }
                            _ => {
                                // Translate any other key to a printable char
                                // (US layout); non-printables yield None.
                                if let Some(c) = key_to_char(*key, modifiers.shift) {
                                    self.text.push(c);
                                    changed = true;
                                }
                            }
                        }
                    }
                }
            });
            // Lose focus on Escape
            if cancelled {
                ui.memory_mut(|m| m.surrender_focus(id));
            }
        }
        ImeTextFieldResponse {
            response,
            changed,
            submitted,
            cancelled,
        }
    }
}