Render audio clip waveforms

This commit is contained in:
Skyler Lehmkuhl 2025-12-02 00:57:20 -05:00
parent cffb61e5a8
commit c2f8969432
8 changed files with 879 additions and 147 deletions

View File

@ -646,14 +646,47 @@ impl AudioTrack {
/// Rebuild the effects graph from preset after deserialization.
///
/// The serialized preset (if any) is validated before use: it must have
/// at least one node and a designated output node. An empty or invalid
/// preset, or no preset at all, falls back to the default
/// AudioInput -> AudioOutput graph instead of failing the load.
///
/// Returns `Err` only when a structurally valid preset fails to
/// reconstruct via `AudioGraph::from_preset`.
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
    if let Some(preset) = &self.effects_graph_preset {
        // Check if preset is empty or missing required nodes
        let has_nodes = !preset.nodes.is_empty();
        let has_output = preset.output_node.is_some();
        if has_nodes && has_output {
            // Valid preset - rebuild from it
            self.effects_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
        } else {
            // Empty or invalid preset - create default graph
            self.effects_graph = Self::create_default_graph(sample_rate, buffer_size);
        }
    } else {
        // No preset - create default graph. (A redundant second
        // assignment via `AudioGraph::new` was removed here: the bare
        // graph was immediately overwritten by the default graph.)
        self.effects_graph = Self::create_default_graph(sample_rate, buffer_size);
    }
    Ok(())
}
/// Create a default effects graph with AudioInput -> AudioOutput
///
/// Builds a minimal two-node graph, positions the nodes left-to-right
/// in the editor canvas, wires them together, and marks the output
/// node as the graph's terminal.
fn create_default_graph(sample_rate: u32, buffer_size: usize) -> AudioGraph {
    let mut graph = AudioGraph::new(sample_rate, buffer_size);

    // Create both endpoint nodes (input first so it gets the lower id).
    let input_id = graph.add_node(Box::new(AudioInputNode::new("Audio Input")));
    let output_id = graph.add_node(Box::new(AudioOutputNode::new("Audio Output")));

    // Lay them out left-to-right at the same height.
    graph.set_node_position(input_id, 100.0, 150.0);
    graph.set_node_position(output_id, 500.0, 150.0);

    // Wire AudioInput -> AudioOutput; the result is intentionally
    // ignored, matching the original best-effort behavior.
    let _ = graph.connect(input_id, 0, output_id, 0);

    // Designate the AudioOutput node as the graph's output.
    graph.set_output_node(Some(output_id));
    graph
}
/// Add an automation lane to this track
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;

View File

@ -43,3 +43,6 @@ uuid = { version = "1.0", features = ["v4", "serde"] }
# Native file dialogs
rfd = "0.15"
# Cross-platform config paths
directories = "5.0"

View File

@ -0,0 +1,129 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
/// Application configuration (persistent)
///
/// `Default` is derived: a fresh config simply has an empty
/// recent-files list, so the hand-written `impl Default` was redundant
/// boilerplate and has been replaced by the derive.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct AppConfig {
    /// Recent files list (newest first, max 10 items)
    #[serde(default)]
    pub recent_files: Vec<PathBuf>,
}
impl AppConfig {
    /// Load config from standard location.
    ///
    /// Any failure (unresolvable config dir, unreadable file, bad JSON)
    /// is reported on stderr and the default configuration is returned.
    pub fn load() -> Self {
        Self::try_load().unwrap_or_else(|e| {
            eprintln!("⚠️ Failed to load config: {}", e);
            eprintln!(" Using default configuration");
            Self::default()
        })
    }

    /// Attempt to read and parse the config file.
    ///
    /// A missing file is not an error: first runs get the default.
    fn try_load() -> Result<Self, Box<dyn std::error::Error>> {
        let config_path = Self::config_path()?;
        if !config_path.exists() {
            return Ok(Self::default());
        }
        let contents = std::fs::read_to_string(&config_path)?;
        Ok(serde_json::from_str::<AppConfig>(&contents)?)
    }

    /// Save config to standard location.
    ///
    /// Errors are logged to stderr but never propagated, so saving can
    /// never block or abort the caller.
    pub fn save(&self) {
        if let Err(e) = self.try_save() {
            eprintln!("⚠️ Failed to save config: {}", e);
        }
    }

    /// Try to save config atomically: serialize, write to a temp file
    /// in the same directory, then rename over the real file.
    fn try_save(&self) -> Result<(), Box<dyn std::error::Error>> {
        let config_path = Self::config_path()?;

        // The config directory may not exist yet on first save.
        if let Some(parent) = config_path.parent() {
            std::fs::create_dir_all(parent)?;
        }

        let json = serde_json::to_string_pretty(self)?;

        // Temp-then-rename so a crash mid-write can't corrupt the file.
        let temp_path = config_path.with_extension("json.tmp");
        std::fs::write(&temp_path, json)?;
        std::fs::rename(temp_path, config_path)?;
        Ok(())
    }

    /// Get cross-platform config file path via the `directories` crate.
    fn config_path() -> Result<PathBuf, Box<dyn std::error::Error>> {
        use directories::ProjectDirs;
        ProjectDirs::from("", "", "lightningbeam")
            .map(|dirs| dirs.config_dir().join("config.json"))
            .ok_or_else(|| "Failed to determine config directory".into())
    }

    /// Add a file to recent files list.
    ///
    /// The path is canonicalized first (absolute, symlinks resolved);
    /// paths that cannot be canonicalized (unsaved/deleted files) are
    /// skipped. Duplicates move to the front, the list is capped at 10
    /// entries (LRU eviction), and the config is auto-saved.
    pub fn add_recent_file(&mut self, path: PathBuf) {
        let canonical = match path.canonicalize() {
            Ok(p) => p,
            Err(e) => {
                // Canonicalize can fail for unsaved files or deleted files
                eprintln!("⚠️ Could not canonicalize path {:?}: {}", path, e);
                return; // Don't add non-existent paths
            }
        };

        // De-duplicate, push to the front, trim to the cap, persist.
        self.recent_files.retain(|p| p != &canonical);
        self.recent_files.insert(0, canonical);
        self.recent_files.truncate(10);
        self.save();
    }

    /// Recent files that still exist on disk, newest first.
    pub fn get_recent_files(&self) -> Vec<PathBuf> {
        self.recent_files
            .iter()
            .filter(|p| p.exists())
            .cloned()
            .collect()
    }

    /// Clear all recent files and persist the change.
    pub fn clear_recent_files(&mut self) {
        self.recent_files.clear();
        self.save();
    }
}

View File

@ -18,6 +18,11 @@ use menu::{MenuAction, MenuSystem};
mod theme;
use theme::{Theme, ThemeMode};
mod waveform_image_cache;
mod config;
use config::AppConfig;
mod default_instrument;
/// Lightningbeam Editor - Animation and video editing software
@ -476,8 +481,13 @@ struct EditorApp {
/// Prevents repeated backend queries for the same audio file
/// Format: Vec of WaveformPeak (min/max pairs)
waveform_cache: HashMap<usize, Vec<daw_backend::WaveformPeak>>,
/// Cache for rendered waveform images (GPU textures)
/// Stores pre-rendered waveform tiles at various zoom levels for fast blitting
waveform_image_cache: waveform_image_cache::WaveformImageCache,
/// Current file path (None if not yet saved)
current_file_path: Option<std::path::PathBuf>,
/// Application configuration (recent files, etc.)
config: AppConfig,
/// File operations worker command sender
file_command_tx: std::sync::mpsc::Sender<FileCommand>,
@ -500,8 +510,17 @@ impl EditorApp {
fn new(cc: &eframe::CreationContext, layouts: Vec<LayoutDefinition>, theme: Theme) -> Self {
let current_layout = layouts[0].layout.clone();
// Load application config
let config = AppConfig::load();
// Initialize native menu system
let menu_system = MenuSystem::new().ok();
let mut menu_system = MenuSystem::new().ok();
// Populate recent files menu
if let Some(ref mut menu_sys) = menu_system {
let recent_files = config.get_recent_files();
menu_sys.update_recent_files(&recent_files);
}
// Create default document with a simple test scene
let mut document = lightningbeam_core::document::Document::with_size("Untitled Animation", 1920.0, 1080.0)
@ -600,7 +619,9 @@ impl EditorApp {
polygon_sides: 5, // Default to pentagon
midi_event_cache: HashMap::new(), // Initialize empty MIDI event cache
waveform_cache: HashMap::new(), // Initialize empty waveform cache
waveform_image_cache: waveform_image_cache::WaveformImageCache::new(), // Initialize waveform image cache
current_file_path: None, // No file loaded initially
config,
file_command_tx,
file_operation: None, // No file operation in progress initially
}
@ -838,6 +859,18 @@ impl EditorApp {
self.load_from_file(path);
}
}
MenuAction::OpenRecent(index) => {
let recent_files = self.config.get_recent_files();
if let Some(path) = recent_files.get(index) {
// TODO: Prompt to save current file if modified
self.load_from_file(path.clone());
}
}
MenuAction::ClearRecentFiles => {
self.config.clear_recent_files();
self.update_recent_files_menu();
}
MenuAction::Revert => {
println!("Menu: Revert");
// TODO: Implement revert
@ -1337,6 +1370,14 @@ impl EditorApp {
});
}
/// Update the "Open Recent" menu to reflect current config
fn update_recent_files_menu(&mut self) {
if let Some(menu_system) = &mut self.menu_system {
let recent_files = self.config.get_recent_files();
menu_system.update_recent_files(&recent_files);
}
}
/// Restore UI layout from loaded document
fn restore_layout_from_document(&mut self) {
let doc = self.action_executor.document();
@ -1394,33 +1435,26 @@ impl EditorApp {
self.restore_layout_from_document();
eprintln!("📊 [APPLY] Step 2: Restore UI layout took {:.2}ms", step2_start.elapsed().as_secs_f64() * 1000.0);
// Set project in audio engine via query
// Load audio pool FIRST (before setting project, so clips can reference pool entries)
let step3_start = std::time::Instant::now();
if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
let audio_pool_entries = loaded_project.audio_pool_entries;
eprintln!("📊 [APPLY] Step 3: Starting audio pool load...");
if let Err(e) = controller.load_audio_pool(audio_pool_entries, &path) {
eprintln!("❌ Failed to load audio pool: {}", e);
return;
}
eprintln!("📊 [APPLY] Step 3: Load audio pool took {:.2}ms", step3_start.elapsed().as_secs_f64() * 1000.0);
// Now set project (clips can now reference the loaded pool entries)
let step4_start = std::time::Instant::now();
if let Err(e) = controller.set_project(loaded_project.audio_project) {
eprintln!("❌ Failed to set project: {}", e);
return;
}
eprintln!("📊 [APPLY] Step 3: Set audio project took {:.2}ms", step3_start.elapsed().as_secs_f64() * 1000.0);
// Load audio pool asynchronously to avoid blocking UI
let step4_start = std::time::Instant::now();
let controller_clone = controller_arc.clone();
let path_clone = path.clone();
let audio_pool_entries = loaded_project.audio_pool_entries;
std::thread::spawn(move || {
eprintln!("📊 [APPLY] Step 4: Starting async audio pool load...");
let load_start = std::time::Instant::now();
let mut controller = controller_clone.lock().unwrap();
if let Err(e) = controller.load_audio_pool(audio_pool_entries, &path_clone) {
eprintln!("❌ Failed to load audio pool: {}", e);
} else {
eprintln!("📊 [APPLY] Step 4: Async audio pool load completed in {:.2}ms", load_start.elapsed().as_secs_f64() * 1000.0);
}
});
eprintln!("📊 [APPLY] Step 4: Spawned async audio pool load in {:.2}ms", step4_start.elapsed().as_secs_f64() * 1000.0);
eprintln!("📊 [APPLY] Step 4: Set audio project took {:.2}ms", step4_start.elapsed().as_secs_f64() * 1000.0);
}
// Reset state
@ -1461,6 +1495,10 @@ impl EditorApp {
self.is_playing = false;
self.current_file_path = Some(path.clone());
// Add to recent files
self.config.add_recent_file(path.clone());
self.update_recent_files_menu();
// Set active layer
if let Some(first) = self.action_executor.document().root.children.first() {
self.active_layer_id = Some(first.id());
@ -1694,6 +1732,7 @@ impl eframe::App for EditorApp {
// Poll for progress updates
let mut operation_complete = false;
let mut loaded_project_data: Option<(lightningbeam_core::file_io::LoadedProject, std::path::PathBuf)> = None;
let mut update_recent_menu = false; // Track if we need to update recent files menu
match operation {
FileOperation::Saving { ref mut progress_rx, ref path } => {
@ -1702,6 +1741,11 @@ impl eframe::App for EditorApp {
FileProgress::Done => {
println!("✅ Save complete!");
self.current_file_path = Some(path.clone());
// Add to recent files
self.config.add_recent_file(path.clone());
update_recent_menu = true;
operation_complete = true;
}
FileProgress::Error(e) => {
@ -1777,6 +1821,11 @@ impl eframe::App for EditorApp {
self.apply_loaded_project(loaded_project, path);
}
// Update recent files menu if needed
if update_recent_menu {
self.update_recent_files_menu();
}
// Request repaint to keep updating progress
ctx.request_repaint();
}
@ -1804,9 +1853,12 @@ impl eframe::App for EditorApp {
// Top menu bar (egui-rendered on all platforms)
egui::TopBottomPanel::top("menu_bar").show(ctx, |ui| {
if let Some(action) = MenuSystem::render_egui_menu_bar(ui) {
if let Some(menu_system) = &self.menu_system {
let recent_files = self.config.get_recent_files();
if let Some(action) = menu_system.render_egui_menu_bar(ui, &recent_files) {
self.handle_menu_action(action);
}
}
});
// Main pane area
@ -1858,6 +1910,7 @@ impl eframe::App for EditorApp {
layer_to_track_map: &self.layer_to_track_map,
midi_event_cache: &self.midi_event_cache,
waveform_cache: &self.waveform_cache,
waveform_image_cache: &mut self.waveform_image_cache,
};
render_layout_node(
@ -2028,6 +2081,8 @@ struct RenderContext<'a> {
midi_event_cache: &'a HashMap<u32, Vec<(f64, u8, bool)>>,
/// Cache of waveform data for rendering (keyed by audio_pool_index)
waveform_cache: &'a HashMap<usize, Vec<daw_backend::WaveformPeak>>,
/// Cache of rendered waveform images (GPU textures)
waveform_image_cache: &'a mut waveform_image_cache::WaveformImageCache,
}
/// Recursively render a layout node with drag support
@ -2500,6 +2555,7 @@ fn render_pane(
polygon_sides: ctx.polygon_sides,
midi_event_cache: ctx.midi_event_cache,
waveform_cache: ctx.waveform_cache,
waveform_image_cache: ctx.waveform_image_cache,
};
pane_instance.render_header(&mut header_ui, &mut shared);
}
@ -2555,6 +2611,7 @@ fn render_pane(
polygon_sides: ctx.polygon_sides,
midi_event_cache: ctx.midi_event_cache,
waveform_cache: ctx.waveform_cache,
waveform_image_cache: ctx.waveform_image_cache,
};
// Render pane content (header was already rendered above)

View File

@ -138,6 +138,8 @@ pub enum MenuAction {
Save,
SaveAs,
OpenFile,
OpenRecent(usize), // Index into recent files list
ClearRecentFiles, // Clear recent files list
Revert,
Import,
Export,
@ -440,6 +442,8 @@ pub struct MenuSystem {
#[allow(dead_code)]
menu: Menu,
items: Vec<(MenuItem, MenuAction)>,
/// Reference to "Open Recent" submenu for dynamic updates
open_recent_submenu: Option<Submenu>,
}
impl MenuSystem {
@ -447,19 +451,20 @@ impl MenuSystem {
/// Build the native menu system from the centralized menu structure.
///
/// While building, a handle to the "Open Recent" submenu (if present)
/// is captured so it can be repopulated later via
/// `update_recent_files`. (Leftover duplicate `build_submenu` calls
/// and a second `Ok(Self {...})` return — merge artifacts mixing the
/// old 3-argument and new 4-argument call forms — are removed.)
pub fn new() -> Result<Self, Box<dyn std::error::Error>> {
    let menu = Menu::new();
    let mut items = Vec::new();
    let mut open_recent_submenu: Option<Submenu> = None;

    // Platform-specific: Add "Lightningbeam" menu on macOS
    #[cfg(target_os = "macos")]
    {
        Self::build_submenu(&menu, &MenuItemDef::macos_app_menu(), &mut items, &mut open_recent_submenu)?;
    }

    // Build all menus from the centralized structure
    for menu_def in MenuItemDef::menu_structure() {
        Self::build_submenu(&menu, menu_def, &mut items, &mut open_recent_submenu)?;
    }

    Ok(Self { menu, items, open_recent_submenu })
}
/// Build a top-level submenu and append to menu
@ -467,11 +472,12 @@ impl MenuSystem {
menu: &Menu,
def: &MenuDef,
items: &mut Vec<(MenuItem, MenuAction)>,
open_recent_submenu: &mut Option<Submenu>,
) -> Result<(), Box<dyn std::error::Error>> {
if let MenuDef::Submenu { label, children } = def {
let submenu = Submenu::new(*label, true);
for child in *children {
Self::build_menu_item(&submenu, child, items)?;
Self::build_menu_item(&submenu, child, items, open_recent_submenu)?;
}
menu.append(&submenu)?;
}
@ -483,6 +489,7 @@ impl MenuSystem {
parent: &Submenu,
def: &MenuDef,
items: &mut Vec<(MenuItem, MenuAction)>,
open_recent_submenu: &mut Option<Submenu>,
) -> Result<(), Box<dyn std::error::Error>> {
match def {
MenuDef::Item(item_def) => {
@ -496,8 +503,14 @@ impl MenuSystem {
}
MenuDef::Submenu { label, children } => {
let submenu = Submenu::new(*label, true);
// Capture reference if this is "Open Recent"
if *label == "Open Recent" {
*open_recent_submenu = Some(submenu.clone());
}
for child in *children {
Self::build_menu_item(&submenu, child, items)?;
Self::build_menu_item(&submenu, child, items, open_recent_submenu)?;
}
parent.append(&submenu)?;
}
@ -505,6 +518,43 @@ impl MenuSystem {
Ok(())
}
/// Update "Open Recent" submenu with current recent files.
/// Call this after menu creation and whenever recent files change.
///
/// Fix: previous versions only cleared the native submenu but kept
/// appending new `(MenuItem, MenuAction)` pairs to `self.items` on
/// every rebuild, so the action-lookup table grew without bound and
/// retained every stale menu item. Stale dynamic entries are now
/// removed before repopulating.
pub fn update_recent_files(&mut self, recent_files: &[std::path::PathBuf]) {
    if let Some(submenu) = &self.open_recent_submenu {
        // Clear existing items from the native submenu
        while submenu.items().len() > 0 {
            let _ = submenu.remove_at(0);
        }

        // Drop stale action mappings from previous rebuilds; only the
        // dynamic entries are removed, static menu items are kept.
        self.items.retain(|(_, action)| {
            !matches!(action, MenuAction::OpenRecent(_) | MenuAction::ClearRecentFiles)
        });

        // Add one entry per recent file (newest first); the index maps
        // back into the recent-files list when the action fires.
        for (index, path) in recent_files.iter().enumerate() {
            let display_name = path
                .file_name()
                .and_then(|s| s.to_str())
                .unwrap_or("Unknown")
                .to_string();
            let item = MenuItem::new(&display_name, true, None);
            if submenu.append(&item).is_ok() {
                self.items.push((item.clone(), MenuAction::OpenRecent(index)));
            }
        }

        // Separate the file list from the clear action when non-empty
        if !recent_files.is_empty() {
            let _ = submenu.append(&PredefinedMenuItem::separator());
        }

        // Add "Clear Recent Files" item
        let clear_item = MenuItem::new("Clear Recent Files", true, None);
        if submenu.append(&clear_item).is_ok() {
            self.items.push((clear_item.clone(), MenuAction::ClearRecentFiles));
        }
    }
}
/// Initialize native menus for macOS (app-wide, doesn't require window handle)
#[cfg(target_os = "macos")]
pub fn init_for_macos(&self) {
@ -537,12 +587,12 @@ impl MenuSystem {
}
/// Render egui menu bar from the same menu structure (for Linux/Windows)
pub fn render_egui_menu_bar(ui: &mut egui::Ui) -> Option<MenuAction> {
pub fn render_egui_menu_bar(&self, ui: &mut egui::Ui, recent_files: &[std::path::PathBuf]) -> Option<MenuAction> {
let mut action = None;
egui::menu::bar(ui, |ui| {
for menu_def in MenuItemDef::menu_structure() {
if let Some(a) = Self::render_menu_def(ui, menu_def) {
if let Some(a) = self.render_menu_def(ui, menu_def, recent_files) {
action = Some(a);
}
}
@ -552,7 +602,7 @@ impl MenuSystem {
}
/// Recursively render a MenuDef as egui UI
fn render_menu_def(ui: &mut egui::Ui, def: &MenuDef) -> Option<MenuAction> {
fn render_menu_def(&self, ui: &mut egui::Ui, def: &MenuDef, recent_files: &[std::path::PathBuf]) -> Option<MenuAction> {
match def {
MenuDef::Item(item_def) => {
if Self::render_menu_item(ui, item_def) {
@ -568,12 +618,39 @@ impl MenuSystem {
MenuDef::Submenu { label, children } => {
let mut action = None;
ui.menu_button(*label, |ui| {
// Special handling for "Open Recent" submenu
if *label == "Open Recent" {
// Render dynamic recent files
for (index, path) in recent_files.iter().enumerate() {
let display_name = path
.file_name()
.and_then(|s| s.to_str())
.unwrap_or("Unknown");
if ui.button(display_name).clicked() {
action = Some(MenuAction::OpenRecent(index));
ui.close_menu();
}
}
// Add separator and clear option if we have items
if !recent_files.is_empty() {
ui.separator();
}
if ui.button("Clear Recent Files").clicked() {
action = Some(MenuAction::ClearRecentFiles);
ui.close_menu();
}
} else {
// Normal submenu rendering
for child in *children {
if let Some(a) = Self::render_menu_def(ui, child) {
if let Some(a) = self.render_menu_def(ui, child, recent_files) {
action = Some(a);
ui.close_menu();
}
}
}
});
action
}

View File

@ -129,6 +129,8 @@ pub struct SharedPaneState<'a> {
pub midi_event_cache: &'a std::collections::HashMap<u32, Vec<(f64, u8, bool)>>,
/// Cache of waveform data for rendering (keyed by audio_pool_index)
pub waveform_cache: &'a std::collections::HashMap<usize, Vec<daw_backend::WaveformPeak>>,
/// Cache of rendered waveform images (GPU textures) for fast blitting
pub waveform_image_cache: &'a mut crate::waveform_image_cache::WaveformImageCache,
}
/// Trait for pane rendering

View File

@ -456,133 +456,216 @@ impl TimelinePane {
}
}
/// Render waveform visualization for audio clips on timeline
/// Uses peak-based rendering: each waveform sample has a fixed pixel width that scales with zoom
/// Calculate which waveform tiles are visible in the viewport.
///
/// Tiles are addressed in clip-local time: each tile spans a fixed
/// number of seconds determined by its zoom bucket (not the live zoom
/// level), so cached tiles stay valid while the user zooms between
/// bucket boundaries. Returns an empty list when the clip is entirely
/// off-screen.
fn calculate_visible_tiles(
    audio_pool_index: usize,
    clip_start_time: f64,
    clip_duration: f64,
    clip_rect: egui::Rect,
    timeline_left_edge: f32,
    viewport_start_time: f64,
    pixels_per_second: f64,
    zoom_bucket: u32,
    height: u32,
) -> Vec<crate::waveform_image_cache::WaveformCacheKey> {
    use crate::waveform_image_cache::{WaveformCacheKey, TILE_WIDTH_PIXELS};

    // Clip position in screen space (timeline offset included).
    let screen_x = timeline_left_edge
        + ((clip_start_time - viewport_start_time) * pixels_per_second) as f32;
    let screen_w = (clip_duration * pixels_per_second) as f32;

    // Entirely off-screen clips need no tiles.
    if screen_x + screen_w < clip_rect.min.x || screen_x > clip_rect.max.x {
        return vec![];
    }

    // Seconds covered by one tile at this zoom bucket (one texture
    // pixel represents 1/zoom_bucket seconds).
    let tile_seconds = TILE_WIDTH_PIXELS as f64 * (1.0 / zoom_bucket as f64);

    // Tiles needed for the whole clip — time-based, never zero.
    let tile_count = ((clip_duration / tile_seconds).ceil() as u32).max(1);

    // Visible pixel span within the clip, converted to clip-local time.
    let first_visible_px = (clip_rect.min.x - screen_x).max(0.0);
    let last_visible_px = (clip_rect.max.x - screen_x).min(screen_w);
    let t0 = first_visible_px as f64 / pixels_per_second;
    let t1 = last_visible_px as f64 / pixels_per_second;

    // Tile index range covering [t0, t1], clamped to the clip's tiles.
    let start_tile = ((t0 / tile_seconds).floor() as u32).min(tile_count.saturating_sub(1));
    let end_tile = ((t1 / tile_seconds).ceil() as u32).min(tile_count);

    // One cache key per visible tile.
    (start_tile..end_tile)
        .map(|tile_index| WaveformCacheKey {
            audio_pool_index,
            zoom_bucket,
            tile_index,
            height,
        })
        .collect()
}
/// Calculate tiles for pre-caching (1-2 screens ahead/behind).
///
/// Given the currently visible tiles, returns cache keys for up to two
/// screens' worth of tiles on each side so scrolling stays smooth.
/// All emitted keys reuse the pool index / zoom bucket / height of the
/// first visible tile (all visible tiles share these fields).
fn calculate_precache_tiles(
    visible_tiles: &[crate::waveform_image_cache::WaveformCacheKey],
    viewport_width_pixels: f32,
) -> Vec<crate::waveform_image_cache::WaveformCacheKey> {
    use crate::waveform_image_cache::{WaveformCacheKey, TILE_WIDTH_PIXELS};

    // Nothing visible means nothing worth pre-caching.
    if visible_tiles.is_empty() {
        return vec![];
    }

    // Calculate how many tiles = 1-2 screens (ceiling division).
    let tiles_per_screen = ((viewport_width_pixels as usize + TILE_WIDTH_PIXELS - 1)
        / TILE_WIDTH_PIXELS) as u32;
    let precache_count = tiles_per_screen * 2; // 2 screens worth

    let first_visible = visible_tiles.first().unwrap();
    let last_visible = visible_tiles.last().unwrap();

    let mut precache = Vec::new();

    // Tiles before viewport; checked_sub naturally stops at tile 0.
    for i in 1..=precache_count {
        if let Some(tile_idx) = first_visible.tile_index.checked_sub(i) {
            precache.push(WaveformCacheKey {
                audio_pool_index: first_visible.audio_pool_index,
                zoom_bucket: first_visible.zoom_bucket,
                tile_index: tile_idx,
                height: first_visible.height,
            });
        }
    }

    // Tiles after viewport. NOTE(review): despite the earlier comment
    // claiming a "bounds check based on clip duration", no such check
    // exists here — indices past the clip's last tile are emitted.
    // Presumably the cache layer skips unrenderable tiles; confirm.
    for i in 1..=precache_count {
        let tile_idx = last_visible.tile_index + i;
        precache.push(WaveformCacheKey {
            audio_pool_index: first_visible.audio_pool_index,
            zoom_bucket: first_visible.zoom_bucket,
            tile_index: tile_idx,
            height: first_visible.height,
        });
    }

    precache
}
/// Render waveform visualization using cached texture tiles
/// This is much faster than line-based rendering for many clips
#[allow(clippy::too_many_arguments)]
fn render_audio_waveform(
painter: &egui::Painter,
clip_rect: egui::Rect,
clip_start_x: f32, // Absolute screen x where clip starts (can be offscreen)
clip_bg_color: egui::Color32, // Background color of the clip
waveform: &[daw_backend::WaveformPeak],
timeline_left_edge: f32,
audio_pool_index: usize,
clip_start_time: f64,
clip_duration: f64,
pixels_per_second: f32,
trim_start: f64,
theme: &crate::theme::Theme,
audio_file_duration: f64,
viewport_start_time: f64,
pixels_per_second: f64,
waveform_image_cache: &mut crate::waveform_image_cache::WaveformImageCache,
waveform_peaks: &[daw_backend::WaveformPeak],
ctx: &egui::Context,
tint_color: egui::Color32,
) {
if waveform.is_empty() {
use crate::waveform_image_cache::{calculate_zoom_bucket, TILE_WIDTH_PIXELS};
if waveform_peaks.is_empty() {
return;
}
let clip_height = clip_rect.height();
let center_y = clip_rect.center().y;
// Calculate zoom bucket
let zoom_bucket = calculate_zoom_bucket(pixels_per_second);
// Calculate waveform color: lighten the clip background color
// Blend clip background with white (70% white + 30% clip color) for subtle tint
// Use full opacity to prevent overlapping lines from blending lighter when zoomed out
let r = ((255.0 * 0.7) + (clip_bg_color.r() as f32 * 0.3)) as u8;
let g = ((255.0 * 0.7) + (clip_bg_color.g() as f32 * 0.3)) as u8;
let b = ((255.0 * 0.7) + (clip_bg_color.b() as f32 * 0.3)) as u8;
let waveform_color = egui::Color32::from_rgb(r, g, b);
// Calculate visible tiles
let visible_tiles = Self::calculate_visible_tiles(
audio_pool_index,
clip_start_time,
clip_duration,
clip_rect,
timeline_left_edge,
viewport_start_time,
pixels_per_second,
zoom_bucket,
clip_rect.height() as u32,
);
// Calculate how wide each peak should be at current zoom (mirrors JavaScript)
// fullSourceWidth = sourceDuration * pixelsPerSecond
// pixelsPerPeak = fullSourceWidth / waveformData.length
let full_source_width = clip_duration * pixels_per_second as f64;
let pixels_per_peak = full_source_width / waveform.len() as f64;
// Calculate the unclipped clip position (where the full clip would be on screen)
let clip_screen_x = timeline_left_edge + ((clip_start_time - viewport_start_time) * pixels_per_second) as f32;
// Calculate which peak corresponds to the clip's offset (trimmed left edge)
let offset_peak_index = ((trim_start / clip_duration) * waveform.len() as f64).floor() as usize;
let offset_peak_index = offset_peak_index.min(waveform.len().saturating_sub(1));
// Render each tile
for key in &visible_tiles {
let texture = waveform_image_cache.get_or_create(
*key,
ctx,
waveform_peaks,
audio_file_duration,
trim_start,
);
// Calculate visible peak range
// firstVisiblePeak = max(offsetPeakIndex, floor((visibleStart - startX) / pixelsPerPeak) + offsetPeakIndex)
let visible_start = clip_rect.min.x;
let visible_end = clip_rect.max.x;
// Calculate tile time offset and duration
// Each pixel in the tile texture represents (1.0 / zoom_bucket) seconds
let seconds_per_pixel_in_tile = 1.0 / key.zoom_bucket as f64;
let tile_time_offset = key.tile_index as f64 * TILE_WIDTH_PIXELS as f64 * seconds_per_pixel_in_tile;
let tile_duration_seconds = TILE_WIDTH_PIXELS as f64 * seconds_per_pixel_in_tile;
let first_visible_peak_from_viewport = if pixels_per_peak > 0.0 {
(((visible_start - clip_start_x) as f64 / pixels_per_peak).floor() as isize + offset_peak_index as isize).max(0)
// Clip tile duration to clip's actual duration
// At extreme zoom-out, a tile can represent more time than the clip contains
let tile_end_time = tile_time_offset + tile_duration_seconds;
let visible_tile_duration = if tile_end_time > clip_duration {
(clip_duration - tile_time_offset).max(0.0)
} else {
offset_peak_index as isize
tile_duration_seconds
};
let first_visible_peak = (first_visible_peak_from_viewport as usize).max(offset_peak_index);
let last_visible_peak_from_viewport = if pixels_per_peak > 0.0 {
((visible_end - clip_start_x) as f64 / pixels_per_peak).ceil() as isize + offset_peak_index as isize
} else {
offset_peak_index as isize
};
let last_visible_peak = (last_visible_peak_from_viewport as usize)
.min(waveform.len().saturating_sub(1));
if first_visible_peak > last_visible_peak || first_visible_peak >= waveform.len() {
return;
// Skip tiles completely outside clip bounds
if visible_tile_duration <= 0.0 {
continue;
}
println!("\n🎵 WAVEFORM RENDER:");
println!(" Waveform total peaks: {}", waveform.len());
println!(" Clip duration: {:.2}s", clip_duration);
println!(" Pixels per second: {}", pixels_per_second);
println!(" Pixels per peak: {:.4}", pixels_per_peak);
println!(" Trim start: {:.2}s", trim_start);
println!(" Offset peak index: {}", offset_peak_index);
println!(" Clip start X: {:.1}", clip_start_x);
println!(" Clip rect: x=[{:.1}, {:.1}], y=[{:.1}, {:.1}]",
clip_rect.min.x, clip_rect.max.x, clip_rect.min.y, clip_rect.max.y);
println!(" Visible start: {:.1}, end: {:.1}", visible_start, visible_end);
println!(" First visible peak: {} (time: {:.2}s)",
first_visible_peak, first_visible_peak as f64 * clip_duration / waveform.len() as f64);
println!(" Last visible peak: {} (time: {:.2}s)",
last_visible_peak, last_visible_peak as f64 * clip_duration / waveform.len() as f64);
println!(" Peak range size: {}", last_visible_peak - first_visible_peak + 1);
// Convert time to screen space using CURRENT zoom level
// This makes tiles stretch/squash smoothly when zooming between zoom buckets
let tile_screen_offset = (tile_time_offset * pixels_per_second) as f32;
let tile_screen_x = clip_screen_x + tile_screen_offset;
let tile_screen_width = (visible_tile_duration * pixels_per_second) as f32;
// Draw waveform as vertical lines from min to max
// Line width scales with zoom to avoid gaps between peaks
let line_width = if pixels_per_peak > 1.0 {
pixels_per_peak.ceil() as f32
// Calculate UV coordinates (clip texture if tile extends beyond clip)
let uv_max_x = if tile_duration_seconds > 0.0 {
(visible_tile_duration / tile_duration_seconds).min(1.0) as f32
} else {
1.0
};
let mut peaks_drawn = 0;
let mut lines = Vec::new();
let tile_rect = egui::Rect::from_min_size(
egui::pos2(tile_screen_x, clip_rect.min.y),
egui::vec2(tile_screen_width, clip_rect.height()),
);
for i in first_visible_peak..=last_visible_peak {
if i >= waveform.len() {
break;
}
let peak_x = clip_start_x + ((i as isize - offset_peak_index as isize) as f64 * pixels_per_peak) as f32;
let peak = &waveform[i];
// Calculate Y positions for min and max
let max_y = center_y + (peak.max * clip_height * 0.45);
let min_y = center_y + (peak.min * clip_height * 0.45);
if peaks_drawn < 3 {
println!(" PEAK[{}]: x={:.1}, min={:.3} (y={:.1}), max={:.3} (y={:.1})",
i, peak_x, peak.min, min_y, peak.max, max_y);
}
// Draw vertical line from min to max
lines.push((
egui::pos2(peak_x, max_y),
egui::pos2(peak_x, min_y),
));
peaks_drawn += 1;
}
println!(" Peaks drawn: {}, line width: {:.1}px", peaks_drawn, line_width);
// Draw all lines with clipping
for (start, end) in lines {
painter.with_clip_rect(clip_rect).line_segment(
[start, end],
egui::Stroke::new(line_width, waveform_color),
// Blit texture with adjusted UV coordinates
painter.image(
texture.id(),
tile_rect,
egui::Rect::from_min_max(egui::pos2(0.0, 0.0), egui::pos2(uv_max_x, 1.0)),
tint_color,
);
}
// Pre-cache adjacent tiles (non-blocking)
let precache_tiles = Self::calculate_precache_tiles(&visible_tiles, clip_rect.width());
// Create temporary HashMap with just this clip's waveform for pre-caching
let mut temp_waveform_cache = std::collections::HashMap::new();
temp_waveform_cache.insert(audio_pool_index, waveform_peaks.to_vec());
waveform_image_cache.precache_tiles(&precache_tiles, ctx, &temp_waveform_cache, audio_file_duration, trim_start);
}
/// Render layer header column (left side with track names and controls)
@ -870,6 +953,8 @@ impl TimelinePane {
selection: &lightningbeam_core::selection::Selection,
midi_event_cache: &std::collections::HashMap<u32, Vec<(f64, u8, bool)>>,
waveform_cache: &std::collections::HashMap<usize, Vec<daw_backend::WaveformPeak>>,
waveform_image_cache: &mut crate::waveform_image_cache::WaveformImageCache,
audio_controller: Option<&std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
) {
let painter = ui.painter();
@ -1068,20 +1153,32 @@ impl TimelinePane {
// Sampled Audio: Draw waveform
lightningbeam_core::clip::AudioClipType::Sampled { audio_pool_index } => {
if let Some(waveform) = waveform_cache.get(audio_pool_index) {
// Calculate absolute screen x where clip starts (can be offscreen)
let clip_start_x = rect.min.x + start_x;
// Get audio file duration from backend
let audio_file_duration = if let Some(ref controller_arc) = audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.get_pool_file_info(*audio_pool_index)
.ok()
.map(|(duration, _, _)| duration)
.unwrap_or(clip.duration) // Fallback to clip duration
} else {
clip.duration // Fallback if no controller
};
Self::render_audio_waveform(
painter,
clip_rect,
clip_start_x,
clip_color, // Pass clip background color for tinting
waveform,
rect.min.x,
*audio_pool_index,
instance_start,
clip.duration,
self.pixels_per_second,
clip_instance.trim_start,
theme,
audio_file_duration,
self.viewport_start_time,
self.pixels_per_second as f64,
waveform_image_cache,
waveform,
ui.ctx(),
bright_color, // Use bright color for waveform (lighter than background)
);
}
}
@ -1839,7 +1936,7 @@ impl PaneRenderer for TimelinePane {
// Render layer rows with clipping
ui.set_clip_rect(content_rect.intersect(original_clip_rect));
self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.waveform_cache);
self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache, shared.waveform_cache, shared.waveform_image_cache, shared.audio_controller);
// Render playhead on top (clip to timeline area)
ui.set_clip_rect(timeline_rect.intersect(original_clip_rect));

View File

@ -0,0 +1,334 @@
use eframe::egui;
use std::collections::{HashMap, VecDeque};
use std::time::Instant;
/// Width of each cached waveform image tile, in pixels.
///
/// Tiles are fixed-width horizontal slices of a clip's waveform; a constant
/// width lets a tile index map directly to a pixel (and time) offset.
pub const TILE_WIDTH_PIXELS: usize = 1024;
/// Unique identifier for a cached waveform image tile.
///
/// All four fields participate in `Hash`/`Eq`, so a change to any of them
/// (different clip, zoom level, tile position, or clip height) addresses a
/// distinct cache entry.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct WaveformCacheKey {
    /// Audio pool index from backend; identifies which audio file the tile renders
    pub audio_pool_index: usize,
    /// Zoom bucket (power of 2: 1, 2, 4, 8, 16, etc.) — pixels per second of audio
    pub zoom_bucket: u32,
    /// Tile index (which tile in the sequence for this audio clip)
    pub tile_index: u32,
    /// Clip height in pixels; keyed so resized clips don't reuse stale tiles
    pub height: u32,
}
/// A rendered waveform tile held in the cache, plus its bookkeeping metadata.
///
/// Dropping this struct drops the `TextureHandle`, which releases the GPU
/// texture once no other clones of the handle remain.
pub struct CachedWaveform {
    /// The rendered texture handle
    pub texture: egui::TextureHandle,
    /// Size in bytes (for memory tracking against the cache limit)
    pub size_bytes: usize,
    /// Last access time (for LRU eviction)
    pub last_accessed: Instant,
    /// Width of the image in pixels
    pub width_pixels: u32,
    /// Height of the image in pixels
    pub height_pixels: u32,
}
/// LRU-bounded cache of rendered waveform tile textures.
///
/// Invariants maintained by the methods in the `impl` block:
/// - `lru_queue` contains exactly the keys present in `cache`, ordered from
///   least recently used (front) to most recently used (back).
/// - `total_bytes` equals the sum of `size_bytes` over all cached entries.
pub struct WaveformImageCache {
    /// Map from cache key to rendered texture
    cache: HashMap<WaveformCacheKey, CachedWaveform>,
    /// LRU queue (most recent at back)
    lru_queue: VecDeque<WaveformCacheKey>,
    /// Current total memory usage in bytes
    total_bytes: usize,
    /// Maximum memory usage (100 MB default)
    max_bytes: usize,
    /// Statistics: number of cache hits
    hits: u64,
    /// Statistics: number of cache misses (tiles rendered + uploaded)
    misses: u64,
}
impl WaveformImageCache {
    /// Create a new, empty waveform image cache with a 100 MB memory limit.
    pub fn new() -> Self {
        Self {
            cache: HashMap::new(),
            lru_queue: VecDeque::new(),
            total_bytes: 0,
            max_bytes: 100 * 1024 * 1024, // 100 MB
            hits: 0,
            misses: 0,
        }
    }
    /// Clear all cached textures and reset memory accounting.
    ///
    /// GPU textures are freed as the `CachedWaveform` entries are dropped.
    pub fn clear(&mut self) {
        self.cache.clear();
        self.lru_queue.clear();
        self.total_bytes = 0;
        // Note: hits/misses preserved for debugging
    }
    /// Get cache statistics: (hits, misses, total_bytes, num_entries)
    pub fn stats(&self) -> (u64, u64, usize, usize) {
        (self.hits, self.misses, self.total_bytes, self.cache.len())
    }
    /// Evict least recently used entries until under memory limit.
    ///
    /// Pops from the front of `lru_queue` (oldest first) and keeps
    /// `total_bytes` in sync with the removed entries.
    fn evict_lru(&mut self) {
        while self.total_bytes > self.max_bytes && !self.lru_queue.is_empty() {
            if let Some(key) = self.lru_queue.pop_front() {
                if let Some(cached) = self.cache.remove(&key) {
                    self.total_bytes -= cached.size_bytes;
                    // Texture automatically freed when CachedWaveform dropped
                }
            }
        }
    }
    /// Mark `key` as most recently used by moving it to the back of the
    /// LRU queue. O(n) in queue length due to `retain`.
    fn touch(&mut self, key: WaveformCacheKey) {
        // Remove key from its current position in LRU queue
        self.lru_queue.retain(|&k| k != key);
        // Add to back (most recent)
        self.lru_queue.push_back(key);
    }
    /// Get the cached texture for `key`, rendering and uploading a new tile
    /// on a cache miss.
    ///
    /// `audio_file_duration` and `trim_start` are in seconds and are only
    /// used when a new tile must be rendered. The returned handle stays
    /// valid even if the entry is evicted, because `TextureHandle` clones
    /// keep the texture alive.
    pub fn get_or_create(
        &mut self,
        key: WaveformCacheKey,
        ctx: &egui::Context,
        waveform: &[daw_backend::WaveformPeak],
        audio_file_duration: f64,
        trim_start: f64,
    ) -> egui::TextureHandle {
        // Check if already cached. The hit path clones the handle out first
        // so the mutable borrow of `cache` ends before `touch` re-borrows self.
        let texture = if let Some(cached) = self.cache.get_mut(&key) {
            // Cache hit
            self.hits += 1;
            cached.last_accessed = Instant::now();
            Some(cached.texture.clone())
        } else {
            None
        };
        if let Some(texture) = texture {
            self.touch(key);
            return texture;
        }
        // Cache miss - generate new tile
        self.misses += 1;
        // Render waveform to image (CPU-side RGBA buffer)
        let color_image = render_waveform_to_image(
            waveform,
            key.tile_index,
            audio_file_duration,
            key.zoom_bucket,
            key.height,
            trim_start,
        );
        // Upload to GPU as texture; name is for debugging in egui's inspector
        let texture_name = format!(
            "waveform_{}_{}_{}",
            key.audio_pool_index, key.zoom_bucket, key.tile_index
        );
        let texture = ctx.load_texture(
            texture_name,
            color_image,
            egui::TextureOptions::LINEAR,
        );
        // Calculate memory usage: width * height * 4 bytes per RGBA pixel
        let size_bytes = TILE_WIDTH_PIXELS * key.height as usize * 4;
        // Store in cache
        let cached = CachedWaveform {
            texture: texture.clone(),
            size_bytes,
            last_accessed: Instant::now(),
            width_pixels: TILE_WIDTH_PIXELS as u32,
            height_pixels: key.height,
        };
        self.total_bytes += size_bytes;
        self.cache.insert(key, cached);
        self.touch(key);
        // Evict if over limit (new entry is at the back, so it is evicted last)
        self.evict_lru();
        texture
    }
    /// Pre-render tiles ahead of the viewport for smooth scrolling.
    ///
    /// Renders at most `MAX_PRECACHE_PER_FRAME` missing tiles per call to
    /// bound the per-frame cost; already-cached keys are skipped. All keys
    /// are assumed to share the same `audio_file_duration`/`trim_start`
    /// (the caller passes tiles for a single clip).
    pub fn precache_tiles(
        &mut self,
        keys: &[WaveformCacheKey],
        ctx: &egui::Context,
        waveform_peak_cache: &HashMap<usize, Vec<daw_backend::WaveformPeak>>,
        audio_file_duration: f64,
        trim_start: f64,
    ) {
        // Limit pre-caching to avoid frame time spike
        const MAX_PRECACHE_PER_FRAME: usize = 2;
        let mut precached = 0;
        for key in keys {
            if precached >= MAX_PRECACHE_PER_FRAME {
                break;
            }
            // Skip if already cached
            if self.cache.contains_key(key) {
                continue;
            }
            // Get waveform peaks
            if let Some(waveform) = waveform_peak_cache.get(&key.audio_pool_index) {
                // Generate and cache; result discarded, we only want the side effect
                let _ = self.get_or_create(*key, ctx, waveform, audio_file_duration, trim_start);
                precached += 1;
            }
        }
    }
    /// Remove all entries for a specific audio file (e.g. when its samples change).
    pub fn invalidate_audio(&mut self, audio_pool_index: usize) {
        // Collect first: can't remove from `cache` while iterating its keys
        let keys_to_remove: Vec<WaveformCacheKey> = self
            .cache
            .keys()
            .filter(|k| k.audio_pool_index == audio_pool_index)
            .copied()
            .collect();
        for key in keys_to_remove {
            if let Some(cached) = self.cache.remove(&key) {
                self.total_bytes -= cached.size_bytes;
            }
        }
        // Also clean up LRU queue
        self.lru_queue.retain(|key| key.audio_pool_index != audio_pool_index);
    }
    /// Remove all entries with a specific height (for window resize).
    pub fn invalidate_height(&mut self, old_height: u32) {
        // Collect first: can't remove from `cache` while iterating its keys
        let keys_to_remove: Vec<WaveformCacheKey> = self
            .cache
            .keys()
            .filter(|k| k.height == old_height)
            .copied()
            .collect();
        for key in keys_to_remove {
            if let Some(cached) = self.cache.remove(&key) {
                self.total_bytes -= cached.size_bytes;
            }
        }
        // Also clean up LRU queue
        self.lru_queue.retain(|key| key.height != old_height);
    }
}
impl Default for WaveformImageCache {
    /// Equivalent to [`WaveformImageCache::new`]: empty cache, 100 MB limit.
    fn default() -> Self {
        Self::new()
    }
}
/// Calculate the zoom bucket for a given `pixels_per_second` zoom level.
///
/// Rounds to the nearest power of 2 (1, 2, 4, 8, 16, ...) so that nearby
/// zoom levels share the same cached tiles. Values at or below 1.0
/// (including NaN/negative) map to bucket 1.
///
/// The exponent is clamped to 31 so the shift cannot overflow `u32`;
/// the previous `2u32.pow(rounded as u32)` panicked in debug builds for
/// extremely large zoom values.
pub fn calculate_zoom_bucket(pixels_per_second: f64) -> u32 {
    if pixels_per_second <= 1.0 {
        return 1;
    }
    // Round log2 to the nearest integer, clamped so `1 << exp` stays in range.
    let exponent = pixels_per_second.log2().round().clamp(0.0, 31.0) as u32;
    1u32 << exponent
}
/// Render a single waveform tile to an RGBA `ColorImage`.
///
/// The tile is `TILE_WIDTH_PIXELS` wide and `height` tall, with a transparent
/// background. Peaks are drawn in white so the caller can tint the texture
/// with the clip's color at blit time.
///
/// Parameters:
/// - `waveform`: min/max peak pairs covering the whole audio file
/// - `tile_index`: which tile in the clip's sequence (0-based)
/// - `audio_file_duration`: total source file duration, in seconds
/// - `zoom_bucket`: pixels per second of audio (power of 2)
/// - `height`: tile height in pixels
/// - `trim_start`: seconds trimmed off the start of the source file
///
/// Degenerate inputs (zero height, empty peaks, non-positive duration, or a
/// tile past the end of the audio) yield a fully transparent image instead of
/// panicking — the original `height - 1` underflowed when `height == 0`.
fn render_waveform_to_image(
    waveform: &[daw_backend::WaveformPeak],
    tile_index: u32,
    audio_file_duration: f64,
    zoom_bucket: u32,
    height: u32,
    trim_start: f64,
) -> egui::ColorImage {
    let width = TILE_WIDTH_PIXELS;
    let height = height as usize;
    // Transparent RGBA buffer; alpha stays 0 wherever nothing is drawn.
    let mut pixels = vec![0u8; width * height * 4];
    // Guard degenerate inputs before any arithmetic that would underflow
    // (`height - 1`) or divide by zero (`/ audio_file_duration`).
    if height == 0 || waveform.is_empty() || audio_file_duration <= 0.0 {
        return egui::ColorImage::from_rgba_unmultiplied([width, height], &pixels);
    }
    // Render as white - will be tinted at render time with clip background color.
    let waveform_color = egui::Color32::WHITE;
    // Each pixel covers (1 / zoom_bucket) seconds of audio.
    let seconds_per_pixel = 1.0 / zoom_bucket as f64;
    let tile_start_in_clip = tile_index as f64 * TILE_WIDTH_PIXELS as f64 * seconds_per_pixel;
    let tile_end_in_clip = tile_start_in_clip + width as f64 * seconds_per_pixel;
    // Add trim_start offset to get positions in the source audio file.
    let tile_start_time = trim_start + tile_start_in_clip;
    let tile_end_time = (trim_start + tile_end_in_clip).min(audio_file_duration);
    // Map the tile's time range onto indices into the peak array.
    let peak_start_idx = ((tile_start_time / audio_file_duration) * waveform.len() as f64) as usize;
    let peak_end_idx = ((tile_end_time / audio_file_duration) * waveform.len() as f64) as usize;
    let peak_end_idx = peak_end_idx.min(waveform.len());
    if peak_start_idx >= peak_end_idx {
        // Tile lies beyond the end of the audio (or covers no peaks) -
        // return a transparent image.
        return egui::ColorImage::from_rgba_unmultiplied([width, height], &pixels);
    }
    let tile_peaks = &waveform[peak_start_idx..peak_end_idx];
    // The audio may end partway through the tile; only render into the pixel
    // range that actually has audio behind it.
    let actual_time_covered = tile_end_time - tile_start_time;
    let actual_pixel_width = (actual_time_covered / seconds_per_pixel).min(width as f64);
    // Distribute peaks across the valid pixel range, not the full tile width.
    let pixels_per_peak = actual_pixel_width / tile_peaks.len() as f64;
    let center_y = height as f64 / 2.0;
    for (peak_idx, peak) in tile_peaks.iter().enumerate() {
        let x_start = (peak_idx as f64 * pixels_per_peak).floor() as usize;
        let x_end = (((peak_idx + 1) as f64 * pixels_per_peak).ceil() as usize).min(width);
        // Vertical span for this peak: peaks are nominally in [-1, 1];
        // the 0.45 factor leaves a small margin above and below.
        let max_y = ((center_y + peak.max as f64 * height as f64 * 0.45).round() as usize)
            .min(height - 1);
        let min_y = ((center_y + peak.min as f64 * height as f64 * 0.45).round() as usize)
            .min(height - 1);
        // Normalize ordering so an inverted min/max pair can't skip drawing.
        let (y_lo, y_hi) = (min_y.min(max_y), min_y.max(max_y));
        // Fill the vertical span for every column this peak covers.
        for x in x_start..x_end {
            for y in y_lo..=y_hi {
                let pixel_idx = (y * width + x) * 4;
                pixels[pixel_idx] = waveform_color.r();
                pixels[pixel_idx + 1] = waveform_color.g();
                pixels[pixel_idx + 2] = waveform_color.b();
                pixels[pixel_idx + 3] = waveform_color.a();
            }
        }
    }
    egui::ColorImage::from_rgba_unmultiplied([width, height], &pixels)
}