File save/load

This commit is contained in:
Skyler Lehmkuhl 2025-12-01 09:18:49 -05:00
parent 5379e3bc8c
commit ba9a4ee812
19 changed files with 1339 additions and 73 deletions

View File

@ -1,3 +1,5 @@
use serde::{Serialize, Deserialize};
/// Audio clip instance ID type /// Audio clip instance ID type
pub type AudioClipInstanceId = u32; pub type AudioClipInstanceId = u32;
@ -16,7 +18,7 @@ pub type ClipId = AudioClipInstanceId;
/// ## Looping /// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`, /// If `external_duration` is greater than `internal_end - internal_start`,
/// the clip will seamlessly loop back to `internal_start` when it reaches `internal_end`. /// the clip will seamlessly loop back to `internal_start` when it reaches `internal_end`.
#[derive(Debug, Clone)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AudioClipInstance { pub struct AudioClipInstance {
pub id: AudioClipInstanceId, pub id: AudioClipInstanceId,
pub audio_pool_index: usize, pub audio_pool_index: usize,

View File

@ -483,12 +483,19 @@ impl Engine {
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name)); let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
} }
Command::AddAudioFile(path, data, channels, sample_rate) => { Command::AddAudioFile(path, data, channels, sample_rate) => {
// Detect original format from file extension
let path_buf = std::path::PathBuf::from(path.clone());
let original_format = path_buf.extension()
.and_then(|ext| ext.to_str())
.map(|s| s.to_lowercase());
// Create AudioFile and add to pool // Create AudioFile and add to pool
let audio_file = crate::audio::pool::AudioFile::new( let audio_file = crate::audio::pool::AudioFile::with_format(
std::path::PathBuf::from(path.clone()), path_buf,
data, data,
channels, channels,
sample_rate, sample_rate,
original_format,
); );
let pool_index = self.audio_pool.add_file(audio_file); let pool_index = self.audio_pool.add_file(audio_file);
// Notify UI about the new audio file // Notify UI about the new audio file
@ -1730,6 +1737,22 @@ impl Engine {
Err(e) => QueryResponse::AudioClipInstanceAdded(Err(e.to_string())), Err(e) => QueryResponse::AudioClipInstanceAdded(Err(e.to_string())),
} }
} }
Query::GetProject => {
// Clone the entire project for serialization
QueryResponse::ProjectRetrieved(Ok(Box::new(self.project.clone())))
}
Query::SetProject(new_project) => {
// Replace the current project with the new one
// Need to rebuild audio graphs with current sample_rate and buffer_size
let mut project = *new_project;
match project.rebuild_audio_graphs(self.buffer_pool.buffer_size()) {
Ok(()) => {
self.project = project;
QueryResponse::ProjectSet(Ok(()))
}
Err(e) => QueryResponse::ProjectSet(Err(format!("Failed to rebuild audio graphs: {}", e))),
}
}
}; };
// Send response back // Send response back
@ -1850,11 +1873,13 @@ impl Engine {
frames_recorded, temp_file_path, waveform.len(), audio_data.len()); frames_recorded, temp_file_path, waveform.len(), audio_data.len());
// Add to pool using the in-memory audio data (no file loading needed!) // Add to pool using the in-memory audio data (no file loading needed!)
let pool_file = crate::audio::pool::AudioFile::new( // Recorded audio is always WAV format
let pool_file = crate::audio::pool::AudioFile::with_format(
temp_file_path.clone(), temp_file_path.clone(),
audio_data, audio_data,
channels, channels,
sample_rate, sample_rate,
Some("wav".to_string()),
); );
let pool_index = self.audio_pool.add_file(pool_file); let pool_index = self.audio_pool.add_file(pool_file);
eprintln!("[STOP_RECORDING] Added to pool at index {}", pool_index); eprintln!("[STOP_RECORDING] Added to pool at index {}", pool_index);
@ -2741,4 +2766,46 @@ impl EngineController {
Err("Export timeout".to_string()) Err("Export timeout".to_string())
} }
/// Get a clone of the current project for serialization
///
/// Pushes `Query::GetProject` onto the engine's query queue, then polls the
/// response queue every 10 ms until a `ProjectRetrieved` response arrives or
/// the 5-second timeout elapses.
///
/// # Errors
/// * The query queue is full.
/// * The engine reported a failure while retrieving the project.
/// * No response arrived within the timeout.
pub fn get_project(&mut self) -> Result<crate::audio::project::Project, String> {
    // Send query (non-blocking push; fails only if the queue is full)
    if self.query_tx.push(Query::GetProject).is_err() {
        return Err("Failed to send query - queue full".to_string());
    }
    // Wait for response (with timeout)
    let start = std::time::Instant::now();
    let timeout = std::time::Duration::from_secs(5);
    while start.elapsed() < timeout {
        // NOTE(review): any non-ProjectRetrieved response popped here is
        // silently discarded — confirm responses cannot interleave on this queue.
        if let Ok(QueryResponse::ProjectRetrieved(result)) = self.query_response_rx.pop() {
            // Unbox the project before handing it to the caller.
            return result.map(|boxed| *boxed);
        }
        std::thread::sleep(std::time::Duration::from_millis(10));
    }
    Err("Query timeout".to_string())
}
/// Set the project (replaces current project state)
///
/// Pushes `Query::SetProject` (boxed to keep the queue message small) onto
/// the engine's query queue, then polls the response queue every 10 ms until
/// a `ProjectSet` response arrives or the 10-second timeout elapses.
///
/// # Errors
/// * The query queue is full.
/// * The engine failed to rebuild the project's audio graphs.
/// * No response arrived within the timeout.
pub fn set_project(&mut self, project: crate::audio::project::Project) -> Result<(), String> {
    // Send query (non-blocking push; fails only if the queue is full)
    if self.query_tx.push(Query::SetProject(Box::new(project))).is_err() {
        return Err("Failed to send query - queue full".to_string());
    }
    // Wait for response (with timeout)
    let start = std::time::Instant::now();
    let timeout = std::time::Duration::from_secs(10); // Longer timeout for loading project
    while start.elapsed() < timeout {
        // NOTE(review): any non-ProjectSet response popped here is silently
        // discarded — confirm responses cannot interleave on this queue.
        if let Ok(QueryResponse::ProjectSet(result)) = self.query_response_rx.pop() {
            return result;
        }
        std::thread::sleep(std::time::Duration::from_millis(10));
    }
    Err("Query timeout".to_string())
}
} }

View File

@ -73,7 +73,7 @@ pub type MidiClipInstanceId = u32;
/// ///
/// This represents the content data stored in the MidiClipPool. /// This represents the content data stored in the MidiClipPool.
/// Events have timestamps relative to the start of the clip (0.0 = clip beginning). /// Events have timestamps relative to the start of the clip (0.0 = clip beginning).
#[derive(Debug, Clone)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct MidiClip { pub struct MidiClip {
pub id: MidiClipId, pub id: MidiClipId,
pub events: Vec<MidiEvent>, pub events: Vec<MidiEvent>,
@ -132,7 +132,7 @@ impl MidiClip {
/// ## Looping /// ## Looping
/// If `external_duration` is greater than `internal_end - internal_start`, /// If `external_duration` is greater than `internal_end - internal_start`,
/// the instance will seamlessly loop back to `internal_start` when it reaches `internal_end`. /// the instance will seamlessly loop back to `internal_start` when it reaches `internal_end`.
#[derive(Debug, Clone)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct MidiClipInstance { pub struct MidiClipInstance {
pub id: MidiClipInstanceId, pub id: MidiClipInstanceId,
pub clip_id: MidiClipId, // Reference to MidiClip in pool pub clip_id: MidiClipId, // Reference to MidiClip in pool

View File

@ -1,8 +1,10 @@
use serde::{Serialize, Deserialize};
use std::collections::HashMap; use std::collections::HashMap;
use super::midi::{MidiClip, MidiClipId, MidiEvent}; use super::midi::{MidiClip, MidiClipId, MidiEvent};
/// Pool for storing MIDI clip content /// Pool for storing MIDI clip content
/// Similar to AudioClipPool but for MIDI data /// Similar to AudioClipPool but for MIDI data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MidiClipPool { pub struct MidiClipPool {
clips: HashMap<MidiClipId, MidiClip>, clips: HashMap<MidiClipId, MidiClip>,
next_id: MidiClipId, next_id: MidiClipId,

View File

@ -22,6 +22,16 @@ pub struct GraphNode {
pub midi_output_buffers: Vec<Vec<MidiEvent>>, pub midi_output_buffers: Vec<Vec<MidiEvent>>,
} }
// Manual Debug impl: `Box<dyn AudioNode>` is not Debug, so the node is shown
// as an opaque placeholder and the buffers are summarized by length.
impl std::fmt::Debug for GraphNode {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = f.debug_struct("GraphNode");
        dbg.field("node", &"<AudioNode>");
        dbg.field("output_buffers_len", &self.output_buffers.len());
        dbg.field("midi_output_buffers_len", &self.midi_output_buffers.len());
        dbg.finish()
    }
}
impl GraphNode { impl GraphNode {
pub fn new(node: Box<dyn AudioNode>, buffer_size: usize) -> Self { pub fn new(node: Box<dyn AudioNode>, buffer_size: usize) -> Self {
let outputs = node.outputs(); let outputs = node.outputs();
@ -57,6 +67,7 @@ impl GraphNode {
} }
/// Audio processing graph for instruments/effects /// Audio processing graph for instruments/effects
#[derive(Debug)]
pub struct AudioGraph { pub struct AudioGraph {
/// The audio graph (StableGraph allows node removal without index invalidation) /// The audio graph (StableGraph allows node removal without index invalidation)
graph: StableGraph<GraphNode, Connection>, graph: StableGraph<GraphNode, Connection>,

View File

@ -59,6 +59,9 @@ pub struct AudioFile {
pub channels: u32, pub channels: u32,
pub sample_rate: u32, pub sample_rate: u32,
pub frames: u64, pub frames: u64,
/// Original file format (mp3, ogg, wav, flac, etc.)
/// Used to determine if we should preserve lossy encoding during save
pub original_format: Option<String>,
} }
impl AudioFile { impl AudioFile {
@ -71,6 +74,20 @@ impl AudioFile {
channels, channels,
sample_rate, sample_rate,
frames, frames,
original_format: None,
}
}
/// Create a new AudioFile with original format information
///
/// `frames` is derived from the interleaved sample count divided by the
/// channel count. A `channels` value of 0 yields 0 frames instead of
/// panicking on division by zero.
///
/// # Arguments
/// * `original_format` - lowercase file extension of the source file
///   (e.g. "mp3", "wav"), used to preserve lossy encodings on save
pub fn with_format(path: PathBuf, data: Vec<f32>, channels: u32, sample_rate: u32, original_format: Option<String>) -> Self {
    // Interleaved samples: one frame = one sample per channel.
    let frames = if channels == 0 {
        0
    } else {
        (data.len() / channels as usize) as u64
    };
    Self {
        path,
        data,
        channels,
        sample_rate,
        frames,
        original_format,
    }
}
} }
@ -452,7 +469,27 @@ impl AudioClipPool {
fn embed_from_memory(audio_file: &AudioFile) -> EmbeddedAudioData { fn embed_from_memory(audio_file: &AudioFile) -> EmbeddedAudioData {
use base64::{Engine as _, engine::general_purpose}; use base64::{Engine as _, engine::general_purpose};
// Convert the f32 interleaved samples to WAV format bytes // Check if this is a lossy format that should be preserved
let is_lossy = audio_file.original_format.as_ref().map_or(false, |fmt| {
let fmt_lower = fmt.to_lowercase();
fmt_lower == "mp3" || fmt_lower == "ogg" || fmt_lower == "aac"
|| fmt_lower == "m4a" || fmt_lower == "opus"
});
if is_lossy {
// For lossy formats, read the original file bytes (if it still exists)
if let Ok(original_bytes) = std::fs::read(&audio_file.path) {
let data_base64 = general_purpose::STANDARD.encode(&original_bytes);
return EmbeddedAudioData {
data_base64,
format: audio_file.original_format.clone().unwrap_or_else(|| "mp3".to_string()),
};
}
// If we can't read the original file, fall through to WAV conversion
}
// For lossless/PCM or if we couldn't read the original lossy file,
// convert the f32 interleaved samples to WAV format bytes
let wav_data = Self::encode_wav( let wav_data = Self::encode_wav(
&audio_file.data, &audio_file.data,
audio_file.channels, audio_file.channels,
@ -672,11 +709,17 @@ impl AudioClipPool {
} }
} }
let audio_file = AudioFile::new( // Detect original format from file extension
let original_format = file_path.extension()
.and_then(|ext| ext.to_str())
.map(|s| s.to_lowercase());
let audio_file = AudioFile::with_format(
file_path.to_path_buf(), file_path.to_path_buf(),
samples, samples,
channels, channels,
sample_rate, sample_rate,
original_format,
); );
if pool_index >= self.files.len() { if pool_index >= self.files.len() {

View File

@ -4,6 +4,7 @@ use super::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, Mi
use super::midi_pool::MidiClipPool; use super::midi_pool::MidiClipPool;
use super::pool::AudioClipPool; use super::pool::AudioClipPool;
use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode}; use super::track::{AudioTrack, Metatrack, MidiTrack, RenderContext, TrackId, TrackNode};
use serde::{Serialize, Deserialize};
use std::collections::HashMap; use std::collections::HashMap;
/// Project manages the hierarchical track structure and clip pools /// Project manages the hierarchical track structure and clip pools
@ -13,6 +14,7 @@ use std::collections::HashMap;
/// ///
/// Clip content is stored in pools (MidiClipPool), while tracks store /// Clip content is stored in pools (MidiClipPool), while tracks store
/// clip instances that reference the pool content. /// clip instances that reference the pool content.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Project { pub struct Project {
tracks: HashMap<TrackId, TrackNode>, tracks: HashMap<TrackId, TrackNode>,
next_track_id: TrackId, next_track_id: TrackId,
@ -515,6 +517,47 @@ impl Project {
track.queue_live_midi(event); track.queue_live_midi(event);
} }
} }
/// Prepare all tracks for serialization by saving their audio graphs as presets
pub fn prepare_for_save(&mut self) {
    for node in self.tracks.values_mut() {
        match node {
            TrackNode::Audio(t) => t.prepare_for_save(),
            TrackNode::Midi(t) => t.prepare_for_save(),
            // Group tracks carry no audio graph, so there is nothing to snapshot.
            TrackNode::Group(_) => {}
        }
    }
}
/// Rebuild all audio graphs from presets after deserialization
///
/// This should be called after deserializing a Project to reconstruct
/// the AudioGraph instances from their stored presets.
///
/// # Arguments
/// * `buffer_size` - Buffer size for audio processing (typically 8192)
pub fn rebuild_audio_graphs(&mut self, buffer_size: usize) -> Result<(), String> {
    // Copy the sample rate up front so the mutable track iteration below
    // doesn't need to touch any other field of `self`.
    let sample_rate = self.sample_rate;
    for node in self.tracks.values_mut() {
        match node {
            TrackNode::Audio(t) => t.rebuild_audio_graph(sample_rate, buffer_size)?,
            TrackNode::Midi(t) => t.rebuild_audio_graph(sample_rate, buffer_size)?,
            // Group tracks have no graph to rebuild.
            TrackNode::Group(_) => {}
        }
    }
    Ok(())
}
} }
impl Default for Project { impl Default for Project {

View File

@ -4,12 +4,19 @@ use super::midi::{MidiClipInstance, MidiClipInstanceId, MidiEvent};
use super::midi_pool::MidiClipPool; use super::midi_pool::MidiClipPool;
use super::node_graph::AudioGraph; use super::node_graph::AudioGraph;
use super::node_graph::nodes::{AudioInputNode, AudioOutputNode}; use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
use super::node_graph::preset::GraphPreset;
use super::pool::AudioClipPool; use super::pool::AudioClipPool;
use serde::{Serialize, Deserialize};
use std::collections::HashMap; use std::collections::HashMap;
/// Track ID type /// Track ID type
pub type TrackId = u32; pub type TrackId = u32;
/// Default function for creating empty AudioGraph during deserialization
fn default_audio_graph() -> AudioGraph {
AudioGraph::new(48000, 8192)
}
/// Type alias for backwards compatibility /// Type alias for backwards compatibility
pub type Track = AudioTrack; pub type Track = AudioTrack;
@ -59,6 +66,7 @@ impl RenderContext {
} }
/// Node in the track hierarchy - can be an audio track, MIDI track, or a metatrack /// Node in the track hierarchy - can be an audio track, MIDI track, or a metatrack
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TrackNode { pub enum TrackNode {
Audio(AudioTrack), Audio(AudioTrack),
Midi(MidiTrack), Midi(MidiTrack),
@ -145,6 +153,7 @@ impl TrackNode {
} }
/// Metatrack that contains other tracks with time transformation capabilities /// Metatrack that contains other tracks with time transformation capabilities
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metatrack { pub struct Metatrack {
pub id: TrackId, pub id: TrackId,
pub name: String, pub name: String,
@ -301,12 +310,21 @@ impl Metatrack {
} }
/// MIDI track with MIDI clip instances and a node-based instrument /// MIDI track with MIDI clip instances and a node-based instrument
#[derive(Debug, Serialize, Deserialize)]
pub struct MidiTrack { pub struct MidiTrack {
pub id: TrackId, pub id: TrackId,
pub name: String, pub name: String,
/// Clip instances placed on this track (reference clips in the MidiClipPool) /// Clip instances placed on this track (reference clips in the MidiClipPool)
pub clip_instances: Vec<MidiClipInstance>, pub clip_instances: Vec<MidiClipInstance>,
/// Serialized instrument graph (used for save/load)
#[serde(default, skip_serializing_if = "Option::is_none")]
instrument_graph_preset: Option<GraphPreset>,
/// Runtime instrument graph (rebuilt from preset on load)
#[serde(skip, default = "default_audio_graph")]
pub instrument_graph: AudioGraph, pub instrument_graph: AudioGraph,
pub volume: f32, pub volume: f32,
pub muted: bool, pub muted: bool,
pub solo: bool, pub solo: bool,
@ -314,9 +332,28 @@ pub struct MidiTrack {
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>, pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId, next_automation_id: AutomationLaneId,
/// Queue for live MIDI input (virtual keyboard, MIDI controllers) /// Queue for live MIDI input (virtual keyboard, MIDI controllers)
#[serde(skip)]
live_midi_queue: Vec<MidiEvent>, live_midi_queue: Vec<MidiEvent>,
} }
impl Clone for MidiTrack {
fn clone(&self) -> Self {
Self {
id: self.id,
name: self.name.clone(),
clip_instances: self.clip_instances.clone(),
instrument_graph_preset: self.instrument_graph_preset.clone(),
instrument_graph: default_audio_graph(), // Create fresh graph, not cloned
volume: self.volume,
muted: self.muted,
solo: self.solo,
automation_lanes: self.automation_lanes.clone(),
next_automation_id: self.next_automation_id,
live_midi_queue: Vec::new(), // Don't clone live MIDI queue
}
}
}
impl MidiTrack { impl MidiTrack {
/// Create a new MIDI track with default settings /// Create a new MIDI track with default settings
pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self { pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self {
@ -327,6 +364,7 @@ impl MidiTrack {
id, id,
name, name,
clip_instances: Vec::new(), clip_instances: Vec::new(),
instrument_graph_preset: None,
instrument_graph: AudioGraph::new(sample_rate, default_buffer_size), instrument_graph: AudioGraph::new(sample_rate, default_buffer_size),
volume: 1.0, volume: 1.0,
muted: false, muted: false,
@ -337,6 +375,22 @@ impl MidiTrack {
} }
} }
/// Prepare for serialization by saving the instrument graph as a preset
pub fn prepare_for_save(&mut self) {
    let preset = self.instrument_graph.to_preset("Instrument Graph");
    self.instrument_graph_preset = Some(preset);
}

/// Rebuild the instrument graph from preset after deserialization
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
    self.instrument_graph = match self.instrument_graph_preset.as_ref() {
        Some(preset) => AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?,
        // No stored preset — fall back to a default (empty) graph.
        None => AudioGraph::new(sample_rate, buffer_size),
    };
    Ok(())
}
/// Add an automation lane to this track /// Add an automation lane to this track
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId { pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id; let lane_id = self.next_automation_id;
@ -504,6 +558,7 @@ impl MidiTrack {
} }
/// Audio track with audio clip instances /// Audio track with audio clip instances
#[derive(Debug, Serialize, Deserialize)]
pub struct AudioTrack { pub struct AudioTrack {
pub id: TrackId, pub id: TrackId,
pub name: String, pub name: String,
@ -515,10 +570,33 @@ pub struct AudioTrack {
/// Automation lanes for this track /// Automation lanes for this track
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>, pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId, next_automation_id: AutomationLaneId,
/// Effects processing graph for this audio track
/// Serialized effects graph (used for save/load)
#[serde(default, skip_serializing_if = "Option::is_none")]
effects_graph_preset: Option<GraphPreset>,
/// Runtime effects processing graph (rebuilt from preset on load)
#[serde(skip, default = "default_audio_graph")]
pub effects_graph: AudioGraph, pub effects_graph: AudioGraph,
} }
impl Clone for AudioTrack {
fn clone(&self) -> Self {
Self {
id: self.id,
name: self.name.clone(),
clips: self.clips.clone(),
volume: self.volume,
muted: self.muted,
solo: self.solo,
automation_lanes: self.automation_lanes.clone(),
next_automation_id: self.next_automation_id,
effects_graph_preset: self.effects_graph_preset.clone(),
effects_graph: default_audio_graph(), // Create fresh graph, not cloned
}
}
}
impl AudioTrack { impl AudioTrack {
/// Create a new audio track with default settings /// Create a new audio track with default settings
pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self { pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self {
@ -555,10 +633,27 @@ impl AudioTrack {
solo: false, solo: false,
automation_lanes: HashMap::new(), automation_lanes: HashMap::new(),
next_automation_id: 0, next_automation_id: 0,
effects_graph_preset: None,
effects_graph, effects_graph,
} }
} }
/// Prepare for serialization by saving the effects graph as a preset
pub fn prepare_for_save(&mut self) {
    let preset = self.effects_graph.to_preset("Effects Graph");
    self.effects_graph_preset = Some(preset);
}

/// Rebuild the effects graph from preset after deserialization
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
    self.effects_graph = match self.effects_graph_preset.as_ref() {
        Some(preset) => AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?,
        // No stored preset — fall back to a default (empty) graph.
        None => AudioGraph::new(sample_rate, buffer_size),
    };
    Ok(())
}
/// Add an automation lane to this track /// Add an automation lane to this track
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId { pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id; let lane_id = self.next_automation_id;

View File

@ -272,6 +272,10 @@ pub enum Query {
AddMidiClipInstanceSync(TrackId, crate::audio::midi::MidiClipInstance), AddMidiClipInstanceSync(TrackId, crate::audio::midi::MidiClipInstance),
/// Add an audio clip to a track synchronously (track_id, pool_index, start_time, duration, offset) - returns instance ID /// Add an audio clip to a track synchronously (track_id, pool_index, start_time, duration, offset) - returns instance ID
AddAudioClipSync(TrackId, usize, f64, f64, f64), AddAudioClipSync(TrackId, usize, f64, f64, f64),
/// Get a clone of the current project for serialization
GetProject,
/// Set the project (replaces current project state)
SetProject(Box<crate::audio::project::Project>),
} }
/// Oscilloscope data from a node /// Oscilloscope data from a node
@ -335,4 +339,8 @@ pub enum QueryResponse {
MidiClipInstanceAdded(Result<MidiClipInstanceId, String>), MidiClipInstanceAdded(Result<MidiClipInstanceId, String>),
/// Audio clip instance added (returns instance ID) /// Audio clip instance added (returns instance ID)
AudioClipInstanceAdded(Result<AudioClipInstanceId, String>), AudioClipInstanceAdded(Result<AudioClipInstanceId, String>),
/// Project retrieved
ProjectRetrieved(Result<Box<crate::audio::project::Project>, String>),
/// Project set
ProjectSet(Result<(), String>),
} }

View File

@ -22,3 +22,13 @@ uuid = { version = "1.0", features = ["v4", "serde"] }
# Audio backend # Audio backend
daw-backend = { path = "../../daw-backend" } daw-backend = { path = "../../daw-backend" }
# File I/O
zip = "0.6"
chrono = "0.4"
base64 = "0.21"
pathdiff = "0.2"
# Audio encoding for embedded files
flacenc = "0.4" # For FLAC encoding (lossless)
claxon = "0.4" # For FLAC decoding

View File

@ -5,6 +5,7 @@
use crate::clip::{AudioClip, ImageAsset, VideoClip, VectorClip}; use crate::clip::{AudioClip, ImageAsset, VideoClip, VectorClip};
use crate::layer::AnyLayer; use crate::layer::AnyLayer;
use crate::layout::LayoutNode;
use crate::shape::ShapeColor; use crate::shape::ShapeColor;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
@ -106,6 +107,14 @@ pub struct Document {
/// Image asset library - static images for fill textures /// Image asset library - static images for fill textures
pub image_assets: HashMap<Uuid, ImageAsset>, pub image_assets: HashMap<Uuid, ImageAsset>,
/// Current UI layout state (serialized for save/load)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub ui_layout: Option<LayoutNode>,
/// Name of base layout this was derived from (for reference only)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub ui_layout_base: Option<String>,
/// Current playback time in seconds /// Current playback time in seconds
#[serde(skip)] #[serde(skip)]
pub current_time: f64, pub current_time: f64,
@ -126,6 +135,8 @@ impl Default for Document {
video_clips: HashMap::new(), video_clips: HashMap::new(),
audio_clips: HashMap::new(), audio_clips: HashMap::new(),
image_assets: HashMap::new(), image_assets: HashMap::new(),
ui_layout: None,
ui_layout_base: None,
current_time: 0.0, current_time: 0.0,
} }
} }

View File

@ -0,0 +1,438 @@
//! File I/O for .beam project files
//!
//! This module handles saving and loading Lightningbeam projects in the .beam format,
//! which is a ZIP archive containing:
//! - project.json (compressed) - Project metadata and structure
//! - media/ directory (uncompressed) - Embedded media files (FLAC for audio)
use crate::document::Document;
use daw_backend::audio::pool::AudioPoolEntry;
use daw_backend::audio::project::Project as AudioProject;
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use zip::write::FileOptions;
use zip::{CompressionMethod, ZipArchive, ZipWriter};
use flacenc::error::Verify;
/// File format version written into project.json ("major.minor.patch")
pub const BEAM_VERSION: &str = "1.0.0";
/// Default buffer size for audio processing (512 samples = ~10.7ms at 48kHz)
/// NOTE(review): other code in this commit assumes 8192-sample buffers when
/// rebuilding graphs — confirm which default is intended here.
pub const DEFAULT_BUFFER_SIZE: usize = 512;
/// Complete .beam project structure for serialization
///
/// Top-level object written as `project.json` inside the .beam ZIP archive.
/// Embedded media is stored separately in the archive's `media/` directory.
#[derive(Serialize, Deserialize)]
pub struct BeamProject {
/// File format version (compare against `BEAM_VERSION` when loading)
pub version: String,
/// Project creation timestamp (ISO 8601)
pub created: String,
/// Last modified timestamp (ISO 8601)
pub modified: String,
/// UI state (Document from lightningbeam-core)
pub ui_state: Document,
/// Audio backend state
pub audio_backend: SerializedAudioBackend,
}
/// Serialized audio backend state
///
/// Captured alongside the UI document so the audio engine can be restored
/// when the project is loaded.
#[derive(Serialize, Deserialize)]
pub struct SerializedAudioBackend {
/// Sample rate for audio processing
/// NOTE(review): save_beam currently hard-codes this to 48000 — confirm it
/// reflects the engine's actual sample rate before relying on it.
pub sample_rate: u32,
/// Audio project (tracks, MIDI clips, etc.)
pub project: AudioProject,
/// Audio pool entries (metadata and paths for audio files)
/// Note: embedded_data field from daw-backend is ignored; embedded files
/// are stored as FLAC in the ZIP's media/audio/ directory instead
pub audio_pool_entries: Vec<AudioPoolEntry>,
}
/// Settings for saving a project
///
/// `force_embed_all` and `force_link_all` are intended as mutually exclusive
/// overrides of the size-based auto-embed behavior.
/// NOTE(review): save_beam currently receives this as `_settings` and ignores
/// it — confirm these options are meant to take effect yet.
#[derive(Debug, Clone)]
pub struct SaveSettings {
/// Automatically embed files smaller than this size (in bytes)
pub auto_embed_threshold_bytes: u64,
/// Force embedding all media files
pub force_embed_all: bool,
/// Force linking all media files (don't embed any)
pub force_link_all: bool,
}
impl Default for SaveSettings {
fn default() -> Self {
Self {
auto_embed_threshold_bytes: 10_000_000, // 10 MB
force_embed_all: false,
force_link_all: false,
}
}
}
/// Result of loading a project
///
/// Returned by `load_beam`. A non-empty `missing_files` list is not fatal:
/// the project still loads, but the listed media must be relocated.
pub struct LoadedProject {
/// Deserialized document
pub document: Document,
/// Deserialized audio project
pub audio_project: AudioProject,
/// Loaded audio pool entries
pub audio_pool_entries: Vec<AudioPoolEntry>,
/// List of files that couldn't be found
pub missing_files: Vec<MissingFileInfo>,
}
/// Information about a missing file
///
/// Identifies a media asset referenced by the project that could not be
/// located on disk or inside the archive during load.
#[derive(Debug, Clone)]
pub struct MissingFileInfo {
/// Index in the audio pool
pub pool_index: usize,
/// Original file path
pub original_path: PathBuf,
/// Type of media file
pub file_type: MediaFileType,
}
/// Type of media file
///
/// NOTE(review): only `Audio` appears to be used in this chunk — confirm
/// `Video`/`Image` are wired up elsewhere.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MediaFileType {
Audio,
Video,
Image,
}
/// Save a project to a .beam file
///
/// This function:
/// 1. Prepares audio project for save (saves AudioGraph presets)
/// 2. Serializes project data to JSON
/// 3. Creates ZIP archive with compressed project.json
/// 4. Embeds media files as FLAC (for audio) in media/ directory
///
/// # Arguments
/// * `path` - Path to save the .beam file
/// * `document` - UI document state
/// * `audio_project` - Audio backend project
/// * `audio_pool_entries` - Serialized audio pool entries
/// * `settings` - Save settings (embedding preferences)
///
/// # Returns
/// Ok(()) on success, or error message
pub fn save_beam(
path: &Path,
document: &Document,
audio_project: &mut AudioProject,
audio_pool_entries: Vec<AudioPoolEntry>,
_settings: &SaveSettings,
) -> Result<(), String> {
// 1. Create backup if file exists
if path.exists() {
let backup_path = path.with_extension("beam.backup");
std::fs::copy(path, &backup_path)
.map_err(|e| format!("Failed to create backup: {}", e))?;
}
// 2. Prepare audio project for serialization (save AudioGraph presets)
audio_project.prepare_for_save();
// 3. Create ZIP writer
let file = File::create(path)
.map_err(|e| format!("Failed to create file: {}", e))?;
let mut zip = ZipWriter::new(file);
// 4. Process audio pool entries and write embedded audio files to ZIP
// Smart compression: lossy formats (mp3, ogg) stored as-is, lossless data as FLAC
let mut modified_entries = Vec::new();
for entry in &audio_pool_entries {
let mut modified_entry = entry.clone();
if let Some(ref embedded_data) = entry.embedded_data {
// Decode base64 audio data
let audio_bytes = base64::decode(&embedded_data.data_base64)
.map_err(|e| format!("Failed to decode base64 audio data for pool index {}: {}", entry.pool_index, e))?;
let format_lower = embedded_data.format.to_lowercase();
let is_lossy = format_lower == "mp3" || format_lower == "ogg"
|| format_lower == "aac" || format_lower == "m4a"
|| format_lower == "opus";
let zip_filename = if is_lossy {
// Store lossy formats directly (no transcoding)
format!("media/audio/{}.{}", entry.pool_index, embedded_data.format)
} else {
// Store lossless data as FLAC
format!("media/audio/{}.flac", entry.pool_index)
};
// Write to ZIP (uncompressed - audio is already compressed)
let file_options = FileOptions::default()
.compression_method(CompressionMethod::Stored);
zip.start_file(&zip_filename, file_options)
.map_err(|e| format!("Failed to create {} in ZIP: {}", zip_filename, e))?;
if is_lossy {
// Write lossy file directly
zip.write_all(&audio_bytes)
.map_err(|e| format!("Failed to write {}: {}", zip_filename, e))?;
} else {
// Decode PCM samples and encode to FLAC
// The audio_bytes are raw PCM samples (interleaved f32 little-endian)
let samples: Vec<f32> = audio_bytes
.chunks_exact(4)
.map(|chunk| f32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]))
.collect();
// Convert f32 samples to i32 for FLAC encoding (FLAC doesn't support f32)
// FLAC supports up to 24-bit samples: range [-8388608, 8388607]
let samples_i32: Vec<i32> = samples
.iter()
.map(|&s| {
// Clamp to [-1.0, 1.0] first, then scale to 24-bit range
let clamped = s.clamp(-1.0, 1.0);
(clamped * 8388607.0) as i32
})
.collect();
// Configure FLAC encoder
let config = flacenc::config::Encoder::default()
.into_verified()
.map_err(|(_, e)| format!("FLAC encoder config error: {:?}", e))?;
let source = flacenc::source::MemSource::from_samples(
&samples_i32,
entry.channels as usize,
24, // bits per sample (FLAC max is 24-bit)
entry.sample_rate as usize,
);
// Encode to FLAC
let flac_stream = flacenc::encode_with_fixed_block_size(
&config,
source,
config.block_size,
).map_err(|e| format!("FLAC encoding failed: {:?}", e))?;
// Convert stream to bytes
use flacenc::component::BitRepr;
let mut sink = flacenc::bitsink::ByteSink::new();
flac_stream.write(&mut sink)
.map_err(|e| format!("Failed to write FLAC stream: {:?}", e))?;
let flac_bytes = sink.as_slice();
zip.write_all(flac_bytes)
.map_err(|e| format!("Failed to write {}: {}", zip_filename, e))?;
}
// Update entry to point to ZIP file instead of embedding data
modified_entry.embedded_data = None;
modified_entry.relative_path = Some(zip_filename);
}
modified_entries.push(modified_entry);
}
// 5. Build BeamProject structure with modified entries
let now = chrono::Utc::now().to_rfc3339();
let beam_project = BeamProject {
version: BEAM_VERSION.to_string(),
created: now.clone(),
modified: now,
ui_state: document.clone(),
audio_backend: SerializedAudioBackend {
sample_rate: 48000, // TODO: Get from audio engine
project: audio_project.clone(),
audio_pool_entries: modified_entries,
},
};
// 6. Write project.json (compressed with DEFLATE)
let json_options = FileOptions::default()
.compression_method(CompressionMethod::Deflated)
.compression_level(Some(6));
zip.start_file("project.json", json_options)
.map_err(|e| format!("Failed to create project.json in ZIP: {}", e))?;
let json = serde_json::to_string_pretty(&beam_project)
.map_err(|e| format!("JSON serialization failed: {}", e))?;
zip.write_all(json.as_bytes())
.map_err(|e| format!("Failed to write project.json: {}", e))?;
// 7. Finalize ZIP
zip.finish()
.map_err(|e| format!("Failed to finalize ZIP: {}", e))?;
Ok(())
}
/// Load a project from a .beam file
///
/// This function:
/// 1. Opens ZIP archive and reads project.json
/// 2. Deserializes project data
/// 3. Loads embedded media files from archive
/// 4. Attempts to load external media files
/// 5. Rebuilds AudioGraphs from presets with correct sample_rate
///
/// # Arguments
/// * `path` - Path to the .beam file
///
/// # Returns
/// LoadedProject on success (with missing_files list), or error message
pub fn load_beam(path: &Path) -> Result<LoadedProject, String> {
    // 1. Open ZIP archive
    let file = File::open(path)
        .map_err(|e| format!("Failed to open file: {}", e))?;
    let mut zip = ZipArchive::new(file)
        .map_err(|e| format!("Failed to open ZIP archive: {}", e))?;

    // 2. Read project.json
    let mut project_file = zip.by_name("project.json")
        .map_err(|e| format!("Failed to find project.json in archive: {}", e))?;
    let mut json_data = String::new();
    project_file.read_to_string(&mut json_data)
        .map_err(|e| format!("Failed to read project.json: {}", e))?;

    // 3. Deserialize BeamProject
    let beam_project: BeamProject = serde_json::from_str(&json_data)
        .map_err(|e| format!("Failed to deserialize project.json: {}", e))?;

    // 4. Check version compatibility (exact match required for now)
    if beam_project.version != BEAM_VERSION {
        return Err(format!(
            "Unsupported file version: {} (expected {})",
            beam_project.version, BEAM_VERSION
        ));
    }

    // 5. Extract document and audio backend state
    let document = beam_project.ui_state;
    let mut audio_project = beam_project.audio_backend.project;
    let audio_pool_entries = beam_project.audio_backend.audio_pool_entries;

    // 6. Rebuild AudioGraphs from presets
    audio_project.rebuild_audio_graphs(DEFAULT_BUFFER_SIZE)
        .map_err(|e| format!("Failed to rebuild audio graphs: {}", e))?;

    // 7. Extract embedded audio files from ZIP and restore to entries
    drop(project_file); // Release the borrow on `zip` so we can open more entries

    let mut restored_entries = Vec::new();
    for entry in &audio_pool_entries {
        let mut restored_entry = entry.clone();

        // Check if this entry has a file in the ZIP (relative_path starts with "media/audio/")
        if let Some(ref rel_path) = entry.relative_path {
            if rel_path.starts_with("media/audio/") {
                // Extract file from ZIP
                match zip.by_name(rel_path) {
                    Ok(mut audio_file) => {
                        let mut audio_bytes = Vec::new();
                        audio_file.read_to_end(&mut audio_bytes)
                            .map_err(|e| format!("Failed to read {} from ZIP: {}", rel_path, e))?;

                        // Determine format from the file extension, lowercased to
                        // match how the save path derives formats. Using
                        // Path::extension (rather than split('.').last()) makes the
                        // "flac" fallback actually reachable for extension-less
                        // names instead of yielding the whole path string.
                        let format = std::path::Path::new(rel_path)
                            .extension()
                            .and_then(|ext| ext.to_str())
                            .map(|s| s.to_lowercase())
                            .unwrap_or_else(|| "flac".to_string());

                        // For lossless formats, decode back to PCM f32 samples
                        // For lossy formats, store the original bytes
                        let embedded_data = if format == "flac" {
                            // Decode FLAC to PCM f32 samples
                            let cursor = std::io::Cursor::new(&audio_bytes);
                            let mut reader = claxon::FlacReader::new(cursor)
                                .map_err(|e| format!("Failed to create FLAC reader: {:?}", e))?;

                            let stream_info = reader.streaminfo();
                            let bits_per_sample = stream_info.bits_per_sample;
                            // Full-scale divisor for the signed sample width
                            // (e.g. 8388608.0 for 24-bit input).
                            let max_value = (1i64 << (bits_per_sample - 1)) as f32;

                            // Read all samples and convert to f32
                            let mut samples_f32 = Vec::new();
                            for sample_result in reader.samples() {
                                let sample = sample_result
                                    .map_err(|e| format!("Failed to read FLAC sample: {:?}", e))?;
                                samples_f32.push(sample as f32 / max_value);
                            }

                            // Convert f32 samples to bytes (little-endian)
                            let mut pcm_bytes = Vec::with_capacity(samples_f32.len() * 4);
                            for sample in samples_f32 {
                                pcm_bytes.extend_from_slice(&sample.to_le_bytes());
                            }

                            Some(daw_backend::audio::pool::EmbeddedAudioData {
                                data_base64: base64::encode(&pcm_bytes),
                                format: "wav".to_string(), // Mark as WAV since it's now PCM
                            })
                        } else {
                            // Lossy format - store as-is
                            Some(daw_backend::audio::pool::EmbeddedAudioData {
                                data_base64: base64::encode(&audio_bytes),
                                format: format.clone(),
                            })
                        };

                        restored_entry.embedded_data = embedded_data;
                        restored_entry.relative_path = None; // Clear ZIP path
                    }
                    Err(_) => {
                        // File not found in ZIP, treat as external reference
                    }
                }
            }
        }

        restored_entries.push(restored_entry);
    }

    // 8. Check for missing external files
    // An entry is missing if it has a relative_path (external reference)
    // but no embedded_data and the file doesn't exist
    let project_dir = path.parent().unwrap_or_else(|| Path::new("."));
    let missing_files: Vec<MissingFileInfo> = restored_entries
        .iter()
        .enumerate()
        .filter_map(|(idx, entry)| {
            // Check if this entry references an external file that doesn't exist
            if entry.embedded_data.is_none() {
                if let Some(ref rel_path) = entry.relative_path {
                    let full_path = project_dir.join(rel_path);
                    if !full_path.exists() {
                        return Some(MissingFileInfo {
                            pool_index: idx,
                            original_path: full_path,
                            file_type: MediaFileType::Audio,
                        });
                    }
                }
            }
            None
        })
        .collect();

    Ok(LoadedProject {
        document,
        audio_project,
        audio_pool_entries: restored_entries,
        missing_files,
    })
}

View File

@ -29,3 +29,4 @@ pub mod intersection_graph;
pub mod segment_builder; pub mod segment_builder;
pub mod planar_graph; pub mod planar_graph;
pub mod file_types; pub mod file_types;
pub mod file_io;

View File

@ -7,6 +7,7 @@ edition = "2021"
lightningbeam-core = { path = "../lightningbeam-core" } lightningbeam-core = { path = "../lightningbeam-core" }
daw-backend = { path = "../../daw-backend" } daw-backend = { path = "../../daw-backend" }
rtrb = "0.3" rtrb = "0.3"
cpal = "0.15"
# UI Framework # UI Framework
eframe = { workspace = true } eframe = { workspace = true }

View File

@ -245,6 +245,164 @@ impl ToolIconCache {
} }
} }
/// Command sent to file operations worker thread
///
/// Each command carries its own `progress_tx` channel so the worker can
/// stream `FileProgress` updates back to the UI thread for that specific
/// operation.
enum FileCommand {
    /// Serialize `document` together with the audio backend state and
    /// write a .beam file at `path`.
    Save {
        path: std::path::PathBuf,
        document: lightningbeam_core::document::Document,
        progress_tx: std::sync::mpsc::Sender<FileProgress>,
    },
    /// Read and deserialize the .beam file at `path`.
    Load {
        path: std::path::PathBuf,
        progress_tx: std::sync::mpsc::Sender<FileProgress>,
    },
}
/// Progress updates from file operations worker
///
/// Sent over the per-operation channel; the UI polls these each frame to
/// drive the progress dialog.
enum FileProgress {
    /// Save: snapshotting the audio pool on the worker thread.
    SerializingAudioPool,
    /// Save: per-file audio encoding progress.
    /// NOTE(review): not currently emitted by the worker — appears reserved
    /// for finer-grained progress reporting.
    EncodingAudio { current: usize, total: usize },
    /// Save: writing the ZIP archive to disk.
    WritingZip,
    /// Load: reading and deserializing the project file.
    LoadingProject,
    /// Load: per-file audio decoding progress.
    /// NOTE(review): not currently emitted by the worker — appears reserved
    /// for finer-grained progress reporting.
    DecodingAudio { current: usize, total: usize },
    /// Load: terminal message carrying the loaded project. The worker only
    /// sends `Complete(Ok(..))`; load failures are reported via `Error`.
    Complete(Result<lightningbeam_core::file_io::LoadedProject, String>),
    /// Operation failed with the given message (save or load).
    Error(String),
    /// Save finished successfully.
    Done,
}
/// Active file operation state
///
/// Held by the UI while a background save/load is running; the receiving
/// end of the progress channel is polled each frame.
enum FileOperation {
    /// A save of the current document to `path` is in progress.
    Saving {
        path: std::path::PathBuf,
        progress_rx: std::sync::mpsc::Receiver<FileProgress>,
    },
    /// A load of the project at `path` is in progress.
    Loading {
        path: std::path::PathBuf,
        progress_rx: std::sync::mpsc::Receiver<FileProgress>,
    },
}
/// Worker thread for file operations (save/load)
///
/// Owns the receiving end of the command channel and a shared handle to
/// the audio engine controller, which it locks while snapshotting or
/// restoring audio state.
struct FileOperationsWorker {
    // Commands arrive from the UI thread; the worker loop exits once all
    // senders are dropped.
    command_rx: std::sync::mpsc::Receiver<FileCommand>,
    // Shared with the UI thread, which also locks the same controller.
    audio_controller: std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>,
}
impl FileOperationsWorker {
/// Create a new worker and spawn it on a background thread
fn spawn(audio_controller: std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>)
-> std::sync::mpsc::Sender<FileCommand>
{
let (command_tx, command_rx) = std::sync::mpsc::channel();
let worker = FileOperationsWorker {
command_rx,
audio_controller,
};
std::thread::spawn(move || {
worker.run();
});
command_tx
}
/// Main worker loop - processes file commands
fn run(self) {
while let Ok(command) = self.command_rx.recv() {
match command {
FileCommand::Save { path, document, progress_tx } => {
self.handle_save(path, document, progress_tx);
}
FileCommand::Load { path, progress_tx } => {
self.handle_load(path, progress_tx);
}
}
}
}
/// Handle save command
fn handle_save(
&self,
path: std::path::PathBuf,
document: lightningbeam_core::document::Document,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
) {
use lightningbeam_core::file_io::{save_beam, SaveSettings};
// Step 1: Serialize audio pool
let _ = progress_tx.send(FileProgress::SerializingAudioPool);
let audio_pool_entries = {
let mut controller = self.audio_controller.lock().unwrap();
match controller.serialize_audio_pool(&path) {
Ok(entries) => entries,
Err(e) => {
let _ = progress_tx.send(FileProgress::Error(format!("Failed to serialize audio pool: {}", e)));
return;
}
}
};
// Step 2: Get project
let mut audio_project = {
let mut controller = self.audio_controller.lock().unwrap();
match controller.get_project() {
Ok(p) => p,
Err(e) => {
let _ = progress_tx.send(FileProgress::Error(format!("Failed to get project: {}", e)));
return;
}
}
};
// Step 3: Save to file
let _ = progress_tx.send(FileProgress::WritingZip);
let settings = SaveSettings::default();
match save_beam(&path, &document, &mut audio_project, audio_pool_entries, &settings) {
Ok(()) => {
println!("✅ Saved to: {}", path.display());
let _ = progress_tx.send(FileProgress::Done);
}
Err(e) => {
let _ = progress_tx.send(FileProgress::Error(format!("Save failed: {}", e)));
}
}
}
/// Handle load command
fn handle_load(
&self,
path: std::path::PathBuf,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
) {
use lightningbeam_core::file_io::load_beam;
// Step 1: Load from file
let _ = progress_tx.send(FileProgress::LoadingProject);
let loaded_project = match load_beam(&path) {
Ok(p) => p,
Err(e) => {
let _ = progress_tx.send(FileProgress::Error(format!("Load failed: {}", e)));
return;
}
};
// Check for missing files
if !loaded_project.missing_files.is_empty() {
eprintln!("⚠️ {} missing files", loaded_project.missing_files.len());
for missing in &loaded_project.missing_files {
eprintln!(" - {}", missing.original_path.display());
}
}
// Send the loaded project back to UI thread for processing
let _ = progress_tx.send(FileProgress::Complete(Ok(loaded_project)));
}
}
struct EditorApp { struct EditorApp {
layouts: Vec<LayoutDefinition>, layouts: Vec<LayoutDefinition>,
current_layout_index: usize, current_layout_index: usize,
@ -272,7 +430,11 @@ struct EditorApp {
rdp_tolerance: f64, // RDP simplification tolerance (default: 10.0) rdp_tolerance: f64, // RDP simplification tolerance (default: 10.0)
schneider_max_error: f64, // Schneider curve fitting max error (default: 30.0) schneider_max_error: f64, // Schneider curve fitting max error (default: 30.0)
// Audio engine integration // Audio engine integration
audio_system: Option<daw_backend::AudioSystem>, // Audio system (must be kept alive for stream) audio_stream: Option<cpal::Stream>, // Audio stream (must be kept alive)
audio_controller: Option<std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>, // Shared audio controller
audio_event_rx: Option<rtrb::Consumer<daw_backend::AudioEvent>>, // Audio event receiver
audio_sample_rate: u32, // Audio sample rate
audio_channels: u32, // Audio channel count
// Track ID mapping (Document layer UUIDs <-> daw-backend TrackIds) // Track ID mapping (Document layer UUIDs <-> daw-backend TrackIds)
layer_to_track_map: HashMap<Uuid, daw_backend::TrackId>, layer_to_track_map: HashMap<Uuid, daw_backend::TrackId>,
track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>, track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>,
@ -293,6 +455,13 @@ struct EditorApp {
/// Prevents repeated backend queries for the same MIDI clip /// Prevents repeated backend queries for the same MIDI clip
/// Format: (timestamp, note_number, is_note_on) /// Format: (timestamp, note_number, is_note_on)
midi_event_cache: HashMap<u32, Vec<(f64, u8, bool)>>, midi_event_cache: HashMap<u32, Vec<(f64, u8, bool)>>,
/// Current file path (None if not yet saved)
current_file_path: Option<std::path::PathBuf>,
/// File operations worker command sender
file_command_tx: std::sync::mpsc::Sender<FileCommand>,
/// Current file operation in progress (if any)
file_operation: Option<FileOperation>,
} }
/// Import filter types for the file dialog /// Import filter types for the file dialog
@ -338,18 +507,35 @@ impl EditorApp {
// Wrap document in ActionExecutor // Wrap document in ActionExecutor
let action_executor = lightningbeam_core::action::ActionExecutor::new(document); let action_executor = lightningbeam_core::action::ActionExecutor::new(document);
// Initialize audio system (keep the whole system to maintain the audio stream) // Initialize audio system and destructure it for sharing
let audio_system = match daw_backend::AudioSystem::new(None, 256) { let (audio_stream, audio_controller, audio_event_rx, audio_sample_rate, audio_channels, file_command_tx) =
Ok(audio_system) => { match daw_backend::AudioSystem::new(None, 256) {
println!("✅ Audio engine initialized successfully"); Ok(audio_system) => {
Some(audio_system) println!("✅ Audio engine initialized successfully");
}
Err(e) => { // Extract components
eprintln!("❌ Failed to initialize audio engine: {}", e); let stream = audio_system.stream;
eprintln!(" Playback will be disabled"); let sample_rate = audio_system.sample_rate;
None let channels = audio_system.channels;
} let event_rx = audio_system.event_rx;
};
// Wrap controller in Arc<Mutex<>> for sharing with worker thread
let controller = std::sync::Arc::new(std::sync::Mutex::new(audio_system.controller));
// Spawn file operations worker
let file_command_tx = FileOperationsWorker::spawn(controller.clone());
(Some(stream), Some(controller), event_rx, sample_rate, channels, file_command_tx)
}
Err(e) => {
eprintln!("❌ Failed to initialize audio engine: {}", e);
eprintln!(" Playback will be disabled");
// Create a dummy channel for file operations (won't be used)
let (tx, _rx) = std::sync::mpsc::channel();
(None, None, None, 48000, 2, tx)
}
};
Self { Self {
layouts, layouts,
@ -376,7 +562,11 @@ impl EditorApp {
draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves draw_simplify_mode: lightningbeam_core::tool::SimplifyMode::Smooth, // Default to smooth curves
rdp_tolerance: 10.0, // Default RDP tolerance rdp_tolerance: 10.0, // Default RDP tolerance
schneider_max_error: 30.0, // Default Schneider max error schneider_max_error: 30.0, // Default Schneider max error
audio_system, audio_stream,
audio_controller,
audio_event_rx,
audio_sample_rate,
audio_channels,
layer_to_track_map: HashMap::new(), layer_to_track_map: HashMap::new(),
track_to_layer_map: HashMap::new(), track_to_layer_map: HashMap::new(),
playback_time: 0.0, // Start at beginning playback_time: 0.0, // Start at beginning
@ -388,6 +578,9 @@ impl EditorApp {
paint_bucket_gap_tolerance: 5.0, // Default gap tolerance paint_bucket_gap_tolerance: 5.0, // Default gap tolerance
polygon_sides: 5, // Default to pentagon polygon_sides: 5, // Default to pentagon
midi_event_cache: HashMap::new(), // Initialize empty MIDI event cache midi_event_cache: HashMap::new(), // Initialize empty MIDI event cache
current_file_path: None, // No file loaded initially
file_command_tx,
file_operation: None, // No file operation in progress initially
} }
} }
@ -419,15 +612,16 @@ impl EditorApp {
} }
// Create daw-backend MIDI track // Create daw-backend MIDI track
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
match audio_system.controller.create_midi_track_sync(layer_name.clone()) { let mut controller = controller_arc.lock().unwrap();
match controller.create_midi_track_sync(layer_name.clone()) {
Ok(track_id) => { Ok(track_id) => {
// Store bidirectional mapping // Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id); self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id); self.track_to_layer_map.insert(track_id, layer_id);
// Load default instrument // Load default instrument
if let Err(e) = default_instrument::load_default_instrument(&mut audio_system.controller, track_id) { if let Err(e) = default_instrument::load_default_instrument(&mut *controller, track_id) {
eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e); eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e);
} else { } else {
println!("✅ Synced MIDI layer '{}' to backend (TrackId: {})", layer_name, track_id); println!("✅ Synced MIDI layer '{}' to backend (TrackId: {})", layer_name, track_id);
@ -449,6 +643,9 @@ impl EditorApp {
fn switch_layout(&mut self, index: usize) { fn switch_layout(&mut self, index: usize) {
self.current_layout_index = index; self.current_layout_index = index;
self.current_layout = self.layouts[index].layout.clone(); self.current_layout = self.layouts[index].layout.clone();
// Clear pane instances so they rebuild with new layout
self.pane_instances.clear();
} }
fn current_layout_def(&self) -> &LayoutDefinition { fn current_layout_def(&self) -> &LayoutDefinition {
@ -488,23 +685,85 @@ impl EditorApp {
// File menu // File menu
MenuAction::NewFile => { MenuAction::NewFile => {
println!("Menu: New File"); println!("Menu: New File");
// TODO: Implement new file // TODO: Prompt to save current file if modified
// Create new document
let mut document = lightningbeam_core::document::Document::with_size("Untitled Animation", 1920.0, 1080.0)
.with_duration(10.0)
.with_framerate(60.0);
// Add default layer
use lightningbeam_core::layer::{AnyLayer, VectorLayer};
let vector_layer = VectorLayer::new("Layer 1");
let layer_id = document.root.add_child(AnyLayer::Vector(vector_layer));
// Replace action executor with new document
self.action_executor = lightningbeam_core::action::ActionExecutor::new(document);
self.active_layer_id = Some(layer_id);
// Reset audio project (send command to create new empty project)
// TODO: Add ResetProject command to EngineController
self.layer_to_track_map.clear();
self.track_to_layer_map.clear();
// Clear file path
self.current_file_path = None;
println!("Created new file");
} }
MenuAction::NewWindow => { MenuAction::NewWindow => {
println!("Menu: New Window"); println!("Menu: New Window");
// TODO: Implement new window // TODO: Implement new window (requires multi-window support)
} }
MenuAction::Save => { MenuAction::Save => {
println!("Menu: Save"); use rfd::FileDialog;
// TODO: Implement save
if let Some(path) = &self.current_file_path {
// Save to existing path
self.save_to_file(path.clone());
} else {
// No current path, fall through to Save As
if let Some(path) = FileDialog::new()
.add_filter("Lightningbeam Project", &["beam"])
.set_file_name("Untitled.beam")
.save_file()
{
self.save_to_file(path);
}
}
} }
MenuAction::SaveAs => { MenuAction::SaveAs => {
println!("Menu: Save As"); use rfd::FileDialog;
// TODO: Implement save as
let dialog = FileDialog::new()
.add_filter("Lightningbeam Project", &["beam"])
.set_file_name("Untitled.beam");
// Set initial directory if we have a current file
let dialog = if let Some(current_path) = &self.current_file_path {
if let Some(parent) = current_path.parent() {
dialog.set_directory(parent)
} else {
dialog
}
} else {
dialog
};
if let Some(path) = dialog.save_file() {
self.save_to_file(path);
}
} }
MenuAction::OpenFile => { MenuAction::OpenFile => {
println!("Menu: Open File"); use rfd::FileDialog;
// TODO: Implement open file
// TODO: Prompt to save current file if modified
if let Some(path) = FileDialog::new()
.add_filter("Lightningbeam Project", &["beam"])
.pick_file()
{
self.load_from_file(path);
}
} }
MenuAction::Revert => { MenuAction::Revert => {
println!("Menu: Revert"); println!("Menu: Revert");
@ -607,9 +866,10 @@ impl EditorApp {
// Edit menu // Edit menu
MenuAction::Undo => { MenuAction::Undo => {
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
let mut backend_context = lightningbeam_core::action::BackendContext { let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller), audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map, layer_to_track_map: &self.layer_to_track_map,
}; };
@ -627,9 +887,10 @@ impl EditorApp {
} }
} }
MenuAction::Redo => { MenuAction::Redo => {
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
let mut backend_context = lightningbeam_core::action::BackendContext { let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller), audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map, layer_to_track_map: &self.layer_to_track_map,
}; };
@ -727,15 +988,16 @@ impl EditorApp {
self.active_layer_id = Some(layer_id); self.active_layer_id = Some(layer_id);
// Create corresponding daw-backend MIDI track // Create corresponding daw-backend MIDI track
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
match audio_system.controller.create_midi_track_sync(layer_name.clone()) { let mut controller = controller_arc.lock().unwrap();
match controller.create_midi_track_sync(layer_name.clone()) {
Ok(track_id) => { Ok(track_id) => {
// Store bidirectional mapping // Store bidirectional mapping
self.layer_to_track_map.insert(layer_id, track_id); self.layer_to_track_map.insert(layer_id, track_id);
self.track_to_layer_map.insert(track_id, layer_id); self.track_to_layer_map.insert(track_id, layer_id);
// Load default instrument into the track // Load default instrument into the track
if let Err(e) = default_instrument::load_default_instrument(&mut audio_system.controller, track_id) { if let Err(e) = default_instrument::load_default_instrument(&mut *controller, track_id) {
eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e); eprintln!("⚠️ Failed to load default instrument for {}: {}", layer_name, e);
} else { } else {
println!("✅ Created {} (backend TrackId: {}, instrument: {})", println!("✅ Created {} (backend TrackId: {}, instrument: {})",
@ -893,6 +1155,170 @@ impl EditorApp {
} }
} }
/// Prepare document for saving by storing current UI layout
///
/// Snapshots the active layout (and the name of its base layout) into the
/// document so the workspace arrangement round-trips through save/load.
fn prepare_document_for_save(&mut self) {
    let layout_snapshot = self.current_layout.clone();
    // Record which built-in layout this one was derived from, if the
    // current index is still valid.
    let base_name = self
        .layouts
        .get(self.current_layout_index)
        .map(|def| def.name.clone());

    let doc = self.action_executor.document_mut();
    doc.ui_layout = Some(layout_snapshot);
    if let Some(name) = base_name {
        doc.ui_layout_base = Some(name);
    }
}
/// Save the current document to a .beam file
///
/// Kicks off an asynchronous save on the file-operations worker; progress
/// is polled from `self.file_operation` during the update loop.
fn save_to_file(&mut self, path: std::path::PathBuf) {
    println!("Saving to: {}", path.display());

    // Saving needs the audio engine for pool serialization.
    if self.audio_controller.is_none() {
        eprintln!("❌ Audio system not initialized");
        return;
    }

    // Capture the current UI layout into the document before cloning it.
    self.prepare_document_for_save();

    // One progress channel per operation; the receiver is polled each frame.
    let (progress_tx, progress_rx) = std::sync::mpsc::channel();
    let document_snapshot = self.action_executor.document().clone();

    let send_result = self.file_command_tx.send(FileCommand::Save {
        path: path.clone(),
        document: document_snapshot,
        progress_tx,
    });

    match send_result {
        Ok(()) => {
            // Remember the in-flight operation so the UI can show progress.
            self.file_operation = Some(FileOperation::Saving { path, progress_rx });
        }
        Err(e) => eprintln!("❌ Failed to send save command: {}", e),
    }
}
/// Load a document from a .beam file
///
/// Dispatches the load to the file-operations worker; the result arrives
/// through `FileProgress::Complete` and is applied on the UI thread.
fn load_from_file(&mut self, path: std::path::PathBuf) {
    println!("Loading from: {}", path.display());

    // Loading needs the audio engine to receive the restored project.
    if self.audio_controller.is_none() {
        eprintln!("❌ Audio system not initialized");
        return;
    }

    // One progress channel per operation; the receiver is polled each frame.
    let (progress_tx, progress_rx) = std::sync::mpsc::channel();

    let send_result = self.file_command_tx.send(FileCommand::Load {
        path: path.clone(),
        progress_tx,
    });

    match send_result {
        Ok(()) => {
            // Remember the in-flight operation so the UI can show progress.
            self.file_operation = Some(FileOperation::Loading { path, progress_rx });
        }
        Err(e) => eprintln!("❌ Failed to send load command: {}", e),
    }
}
/// Restore UI layout from loaded document
///
/// Falls back to the first built-in layout when the document carries no
/// saved layout (older file formats or fresh projects).
fn restore_layout_from_document(&mut self) {
    let doc = self.action_executor.document();

    match &doc.ui_layout {
        Some(saved_layout) => {
            self.current_layout = saved_layout.clone();
            // Re-associate with the named base layout when possible; an
            // unknown name (renamed/removed layout) falls back to index 0.
            // When no base name was saved, the current index is left as-is.
            if let Some(base_name) = &doc.ui_layout_base {
                self.current_layout_index = self
                    .layouts
                    .iter()
                    .position(|l| &l.name == base_name)
                    .unwrap_or(0);
            }
            println!("✅ Restored UI layout from save file");
        }
        None => {
            // No saved layout: reset to the default (first) layout.
            self.current_layout_index = 0;
            self.current_layout = self.layouts[0].layout.clone();
            println!(" No saved layout found, using default");
        }
    }

    // Pane instances cache layout-derived state; force a rebuild.
    self.pane_instances.clear();
}
/// Apply loaded project data (called after successful load in background)
///
/// Replaces the document, restores the saved UI layout, pushes the audio
/// project and pool into the engine, and resets mapping/transport state.
fn apply_loaded_project(&mut self, loaded_project: lightningbeam_core::file_io::LoadedProject, path: std::path::PathBuf) {
    use lightningbeam_core::action::ActionExecutor;

    // Surface missing external media to the user.
    if !loaded_project.missing_files.is_empty() {
        eprintln!("⚠️ {} missing files", loaded_project.missing_files.len());
        for info in &loaded_project.missing_files {
            eprintln!(" - {}", info.original_path.display());
        }
        // TODO Phase 5: Show recovery dialog
    }

    // Swap in the loaded document and its saved layout.
    self.action_executor = ActionExecutor::new(loaded_project.document);
    self.restore_layout_from_document();

    // Hand the audio state to the engine; abort on failure so we don't
    // continue with a half-applied project.
    if let Some(ref controller_arc) = self.audio_controller {
        let mut controller = controller_arc.lock().unwrap();
        if let Err(e) = controller.set_project(loaded_project.audio_project) {
            eprintln!("❌ Failed to set project: {}", e);
            return;
        }
        if let Err(e) = controller.load_audio_pool(
            loaded_project.audio_pool_entries,
            &path,
        ) {
            eprintln!("❌ Failed to load audio pool: {}", e);
            return;
        }
    }

    // Rebuild layer<->track mappings and reset transport state.
    self.layer_to_track_map.clear();
    self.track_to_layer_map.clear();
    self.sync_midi_layers_to_backend();
    self.playback_time = 0.0;
    self.is_playing = false;
    self.current_file_path = Some(path.clone());

    // Select the first top-level layer, if any.
    if let Some(first_layer) = self.action_executor.document().root.children.first() {
        self.active_layer_id = Some(first_layer.id());
    }

    println!("✅ Loaded from: {}", path.display());
}
/// Import an image file as an ImageAsset /// Import an image file as an ImageAsset
fn import_image(&mut self, path: &std::path::Path) { fn import_image(&mut self, path: &std::path::Path) {
use lightningbeam_core::clip::ImageAsset; use lightningbeam_core::clip::ImageAsset;
@ -949,10 +1375,11 @@ impl EditorApp {
let sample_rate = audio_file.sample_rate; let sample_rate = audio_file.sample_rate;
// Add to audio engine pool if available // Add to audio engine pool if available
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
// Send audio data to the engine // Send audio data to the engine
let path_str = path.to_string_lossy().to_string(); let path_str = path.to_string_lossy().to_string();
audio_system.controller.add_audio_file( controller.add_audio_file(
path_str.clone(), path_str.clone(),
audio_file.data, audio_file.data,
channels, channels,
@ -1011,8 +1438,9 @@ impl EditorApp {
let note_event_count = processed_events.len(); let note_event_count = processed_events.len();
// Add to backend MIDI clip pool FIRST and get the backend clip ID // Add to backend MIDI clip pool FIRST and get the backend clip ID
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
audio_system.controller.add_midi_clip_to_pool(midi_clip.clone()); let mut controller = controller_arc.lock().unwrap();
controller.add_midi_clip_to_pool(midi_clip.clone());
let backend_clip_id = midi_clip.id; // The backend clip ID let backend_clip_id = midi_clip.id; // The backend clip ID
// Cache MIDI events in frontend for rendering (thumbnails & timeline piano roll) // Cache MIDI events in frontend for rendering (thumbnails & timeline piano roll)
@ -1076,10 +1504,104 @@ impl eframe::App for EditorApp {
} }
} }
// Handle file operation progress
if let Some(ref mut operation) = self.file_operation {
// Set wait cursor
ctx.set_cursor_icon(egui::CursorIcon::Progress);
// Poll for progress updates
let mut operation_complete = false;
let mut loaded_project_data: Option<(lightningbeam_core::file_io::LoadedProject, std::path::PathBuf)> = None;
match operation {
FileOperation::Saving { ref mut progress_rx, ref path } => {
while let Ok(progress) = progress_rx.try_recv() {
match progress {
FileProgress::Done => {
println!("✅ Save complete!");
self.current_file_path = Some(path.clone());
operation_complete = true;
}
FileProgress::Error(e) => {
eprintln!("❌ Save error: {}", e);
operation_complete = true;
}
_ => {
// Other progress states - just keep going
}
}
}
// Render progress dialog
egui::Window::new("Saving Project")
.collapsible(false)
.resizable(false)
.anchor(egui::Align2::CENTER_CENTER, egui::vec2(0.0, 0.0))
.show(ctx, |ui| {
ui.vertical_centered(|ui| {
ui.add(egui::Spinner::new());
ui.add_space(8.0);
ui.label("Saving project...");
ui.label(format!("Path: {}", path.display()));
});
});
}
FileOperation::Loading { ref mut progress_rx, ref path } => {
while let Ok(progress) = progress_rx.try_recv() {
match progress {
FileProgress::Complete(Ok(loaded_project)) => {
println!("✅ Load complete!");
// Store data to apply after dialog closes
loaded_project_data = Some((loaded_project, path.clone()));
operation_complete = true;
}
FileProgress::Complete(Err(e)) => {
eprintln!("❌ Load error: {}", e);
operation_complete = true;
}
FileProgress::Error(e) => {
eprintln!("❌ Load error: {}", e);
operation_complete = true;
}
_ => {
// Other progress states - just keep going
}
}
}
// Render progress dialog
egui::Window::new("Loading Project")
.collapsible(false)
.resizable(false)
.anchor(egui::Align2::CENTER_CENTER, egui::vec2(0.0, 0.0))
.show(ctx, |ui| {
ui.vertical_centered(|ui| {
ui.add(egui::Spinner::new());
ui.add_space(8.0);
ui.label("Loading project...");
ui.label(format!("Path: {}", path.display()));
});
});
}
}
// Clear operation if complete
if operation_complete {
self.file_operation = None;
}
// Apply loaded project data if available
if let Some((loaded_project, path)) = loaded_project_data {
self.apply_loaded_project(loaded_project, path);
}
// Request repaint to keep updating progress
ctx.request_repaint();
}
// Poll audio events from the audio engine // Poll audio events from the audio engine
if let Some(audio_system) = &mut self.audio_system { if let Some(event_rx) = &mut self.audio_event_rx {
if let Some(event_rx) = &mut audio_system.event_rx { while let Ok(event) = event_rx.pop() {
while let Ok(event) = event_rx.pop() {
use daw_backend::AudioEvent; use daw_backend::AudioEvent;
match event { match event {
AudioEvent::PlaybackPosition(time) => { AudioEvent::PlaybackPosition(time) => {
@ -1091,7 +1613,6 @@ impl eframe::App for EditorApp {
_ => {} // Ignore other events for now _ => {} // Ignore other events for now
} }
} }
}
} }
// Request continuous repaints when playing to update time display // Request continuous repaints when playing to update time display
@ -1144,7 +1665,7 @@ impl eframe::App for EditorApp {
draw_simplify_mode: &mut self.draw_simplify_mode, draw_simplify_mode: &mut self.draw_simplify_mode,
rdp_tolerance: &mut self.rdp_tolerance, rdp_tolerance: &mut self.rdp_tolerance,
schneider_max_error: &mut self.schneider_max_error, schneider_max_error: &mut self.schneider_max_error,
audio_controller: self.audio_system.as_mut().map(|sys| &mut sys.controller), audio_controller: self.audio_controller.as_ref(),
playback_time: &mut self.playback_time, playback_time: &mut self.playback_time,
is_playing: &mut self.is_playing, is_playing: &mut self.is_playing,
dragging_asset: &mut self.dragging_asset, dragging_asset: &mut self.dragging_asset,
@ -1189,9 +1710,10 @@ impl eframe::App for EditorApp {
// Execute all pending actions (two-phase dispatch) // Execute all pending actions (two-phase dispatch)
for action in pending_actions { for action in pending_actions {
// Create backend context for actions that need backend sync // Create backend context for actions that need backend sync
if let Some(ref mut audio_system) = self.audio_system { if let Some(ref controller_arc) = self.audio_controller {
let mut controller = controller_arc.lock().unwrap();
let mut backend_context = lightningbeam_core::action::BackendContext { let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller), audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map, layer_to_track_map: &self.layer_to_track_map,
}; };
@ -1308,7 +1830,7 @@ struct RenderContext<'a> {
draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode, draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode,
rdp_tolerance: &'a mut f64, rdp_tolerance: &'a mut f64,
schneider_max_error: &'a mut f64, schneider_max_error: &'a mut f64,
audio_controller: Option<&'a mut daw_backend::EngineController>, audio_controller: Option<&'a std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
playback_time: &'a mut f64, playback_time: &'a mut f64,
is_playing: &'a mut bool, is_playing: &'a mut bool,
dragging_asset: &'a mut Option<panes::DraggingAsset>, dragging_asset: &'a mut Option<panes::DraggingAsset>,
@ -1782,7 +2304,7 @@ fn render_pane(
draw_simplify_mode: ctx.draw_simplify_mode, draw_simplify_mode: ctx.draw_simplify_mode,
rdp_tolerance: ctx.rdp_tolerance, rdp_tolerance: ctx.rdp_tolerance,
schneider_max_error: ctx.schneider_max_error, schneider_max_error: ctx.schneider_max_error,
audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c), audio_controller: ctx.audio_controller,
layer_to_track_map: ctx.layer_to_track_map, layer_to_track_map: ctx.layer_to_track_map,
playback_time: ctx.playback_time, playback_time: ctx.playback_time,
is_playing: ctx.is_playing, is_playing: ctx.is_playing,
@ -1836,7 +2358,7 @@ fn render_pane(
draw_simplify_mode: ctx.draw_simplify_mode, draw_simplify_mode: ctx.draw_simplify_mode,
rdp_tolerance: ctx.rdp_tolerance, rdp_tolerance: ctx.rdp_tolerance,
schneider_max_error: ctx.schneider_max_error, schneider_max_error: ctx.schneider_max_error,
audio_controller: ctx.audio_controller.as_mut().map(|c| &mut **c), audio_controller: ctx.audio_controller,
layer_to_track_map: ctx.layer_to_track_map, layer_to_track_map: ctx.layer_to_track_map,
playback_time: ctx.playback_time, playback_time: ctx.playback_time,
is_playing: ctx.is_playing, is_playing: ctx.is_playing,

View File

@ -1126,8 +1126,9 @@ impl AssetLibraryPane {
if asset_category == AssetCategory::Audio && !self.thumbnail_cache.has(&asset_id) { if asset_category == AssetCategory::Audio && !self.thumbnail_cache.has(&asset_id) {
if let Some(clip) = document.audio_clips.get(&asset_id) { if let Some(clip) = document.audio_clips.get(&asset_id) {
if let AudioClipType::Sampled { audio_pool_index } = &clip.clip_type { if let AudioClipType::Sampled { audio_pool_index } = &clip.clip_type {
if let Some(audio_controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
audio_controller.get_pool_waveform(*audio_pool_index, THUMBNAIL_SIZE as usize) let mut controller = controller_arc.lock().unwrap();
controller.get_pool_waveform(*audio_pool_index, THUMBNAIL_SIZE as usize)
.ok() .ok()
.map(|peaks| peaks.iter().map(|p| (p.min, p.max)).collect()) .map(|peaks| peaks.iter().map(|p| (p.min, p.max)).collect())
} else { } else {
@ -1397,8 +1398,9 @@ impl AssetLibraryPane {
if asset_category == AssetCategory::Audio && !self.thumbnail_cache.has(&asset_id) { if asset_category == AssetCategory::Audio && !self.thumbnail_cache.has(&asset_id) {
if let Some(clip) = document.audio_clips.get(&asset_id) { if let Some(clip) = document.audio_clips.get(&asset_id) {
if let AudioClipType::Sampled { audio_pool_index } = &clip.clip_type { if let AudioClipType::Sampled { audio_pool_index } = &clip.clip_type {
if let Some(audio_controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
audio_controller.get_pool_waveform(*audio_pool_index, THUMBNAIL_SIZE as usize) let mut controller = controller_arc.lock().unwrap();
controller.get_pool_waveform(*audio_pool_index, THUMBNAIL_SIZE as usize)
.ok() .ok()
.map(|peaks| peaks.iter().map(|p| (p.min, p.max)).collect()) .map(|peaks| peaks.iter().map(|p| (p.min, p.max)).collect())
} else { } else {

View File

@ -107,8 +107,8 @@ pub struct SharedPaneState<'a> {
pub draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode, pub draw_simplify_mode: &'a mut lightningbeam_core::tool::SimplifyMode,
pub rdp_tolerance: &'a mut f64, pub rdp_tolerance: &'a mut f64,
pub schneider_max_error: &'a mut f64, pub schneider_max_error: &'a mut f64,
/// Audio engine controller for playback control /// Audio engine controller for playback control (wrapped in Arc<Mutex<>> for thread safety)
pub audio_controller: Option<&'a mut daw_backend::EngineController>, pub audio_controller: Option<&'a std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
/// Mapping from Document layer UUIDs to daw-backend TrackIds /// Mapping from Document layer UUIDs to daw-backend TrackIds
pub layer_to_track_map: &'a std::collections::HashMap<Uuid, daw_backend::TrackId>, pub layer_to_track_map: &'a std::collections::HashMap<Uuid, daw_backend::TrackId>,
/// Global playback state /// Global playback state

View File

@ -975,7 +975,7 @@ impl TimelinePane {
pending_actions: &mut Vec<Box<dyn lightningbeam_core::action::Action>>, pending_actions: &mut Vec<Box<dyn lightningbeam_core::action::Action>>,
playback_time: &mut f64, playback_time: &mut f64,
is_playing: &mut bool, is_playing: &mut bool,
audio_controller: Option<&mut daw_backend::EngineController>, audio_controller: Option<&std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
) { ) {
// Don't allocate the header area for input - let widgets handle it directly // Don't allocate the header area for input - let widgets handle it directly
// Only allocate content area (ruler + layers) with click and drag // Only allocate content area (ruler + layers) with click and drag
@ -1382,7 +1382,8 @@ impl TimelinePane {
else if !response.dragged() && self.is_scrubbing { else if !response.dragged() && self.is_scrubbing {
self.is_scrubbing = false; self.is_scrubbing = false;
// Seek the audio engine to the new position // Seek the audio engine to the new position
if let Some(controller) = audio_controller { if let Some(controller_arc) = audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.seek(*playback_time); controller.seek(*playback_time);
} }
} }
@ -1492,7 +1493,8 @@ impl PaneRenderer for TimelinePane {
// Go to start // Go to start
if ui.add_sized(button_size, egui::Button::new("|◀")).clicked() { if ui.add_sized(button_size, egui::Button::new("|◀")).clicked() {
*shared.playback_time = 0.0; *shared.playback_time = 0.0;
if let Some(controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.seek(0.0); controller.seek(0.0);
} }
} }
@ -1500,7 +1502,8 @@ impl PaneRenderer for TimelinePane {
// Rewind (step backward) // Rewind (step backward)
if ui.add_sized(button_size, egui::Button::new("◀◀")).clicked() { if ui.add_sized(button_size, egui::Button::new("◀◀")).clicked() {
*shared.playback_time = (*shared.playback_time - 0.1).max(0.0); *shared.playback_time = (*shared.playback_time - 0.1).max(0.0);
if let Some(controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.seek(*shared.playback_time); controller.seek(*shared.playback_time);
} }
} }
@ -1512,7 +1515,8 @@ impl PaneRenderer for TimelinePane {
println!("🔘 Play/Pause button clicked! is_playing = {}", *shared.is_playing); println!("🔘 Play/Pause button clicked! is_playing = {}", *shared.is_playing);
// Send play/pause command to audio engine // Send play/pause command to audio engine
if let Some(controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
if *shared.is_playing { if *shared.is_playing {
controller.play(); controller.play();
println!("▶ Started playback"); println!("▶ Started playback");
@ -1528,7 +1532,8 @@ impl PaneRenderer for TimelinePane {
// Fast forward (step forward) // Fast forward (step forward)
if ui.add_sized(button_size, egui::Button::new("▶▶")).clicked() { if ui.add_sized(button_size, egui::Button::new("▶▶")).clicked() {
*shared.playback_time = (*shared.playback_time + 0.1).min(self.duration); *shared.playback_time = (*shared.playback_time + 0.1).min(self.duration);
if let Some(controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.seek(*shared.playback_time); controller.seek(*shared.playback_time);
} }
} }
@ -1536,7 +1541,8 @@ impl PaneRenderer for TimelinePane {
// Go to end // Go to end
if ui.add_sized(button_size, egui::Button::new("▶|")).clicked() { if ui.add_sized(button_size, egui::Button::new("▶|")).clicked() {
*shared.playback_time = self.duration; *shared.playback_time = self.duration;
if let Some(controller) = shared.audio_controller.as_mut() { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.seek(self.duration); controller.seek(self.duration);
} }
} }
@ -1690,7 +1696,7 @@ impl PaneRenderer for TimelinePane {
shared.pending_actions, shared.pending_actions,
shared.playback_time, shared.playback_time,
shared.is_playing, shared.is_playing,
shared.audio_controller.as_mut().map(|c| &mut **c), shared.audio_controller,
); );
// Handle asset drag-and-drop from Asset Library // Handle asset drag-and-drop from Asset Library

View File

@ -358,7 +358,8 @@ impl VirtualPianoPane {
if let Some(active_layer_id) = *shared.active_layer_id { if let Some(active_layer_id) = *shared.active_layer_id {
// Look up daw-backend track ID from layer ID // Look up daw-backend track ID from layer ID
if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) { if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) {
if let Some(ref mut controller) = shared.audio_controller { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.send_midi_note_on(track_id, note, velocity); controller.send_midi_note_on(track_id, note, velocity);
} }
} }
@ -380,7 +381,8 @@ impl VirtualPianoPane {
if let Some(active_layer_id) = *shared.active_layer_id { if let Some(active_layer_id) = *shared.active_layer_id {
if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) { if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) {
if let Some(ref mut controller) = shared.audio_controller { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.send_midi_note_off(track_id, note); controller.send_midi_note_off(track_id, note);
} }
} }
@ -560,7 +562,8 @@ impl VirtualPianoPane {
self.pressed_notes.remove(&note); self.pressed_notes.remove(&note);
if let Some(active_layer_id) = *shared.active_layer_id { if let Some(active_layer_id) = *shared.active_layer_id {
if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) { if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) {
if let Some(ref mut controller) = shared.audio_controller { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.send_midi_note_off(track_id, note); controller.send_midi_note_off(track_id, note);
} }
} }
@ -573,7 +576,8 @@ impl VirtualPianoPane {
self.pressed_notes.remove(note); self.pressed_notes.remove(note);
if let Some(active_layer_id) = *shared.active_layer_id { if let Some(active_layer_id) = *shared.active_layer_id {
if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) { if let Some(&track_id) = shared.layer_to_track_map.get(&active_layer_id) {
if let Some(ref mut controller) = shared.audio_controller { if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.send_midi_note_off(track_id, *note); controller.send_midi_note_off(track_id, *note);
} }
} }