Import .mid files to midi tracks

This commit is contained in:
Skyler Lehmkuhl 2025-12-01 05:35:53 -05:00
parent c09cd276a0
commit 5379e3bc8c
13 changed files with 683 additions and 84 deletions

View File

@ -1,7 +1,7 @@
use crate::audio::buffer_pool::BufferPool;
use crate::audio::clip::{AudioClipInstance, ClipId};
use crate::audio::clip::{AudioClipInstance, AudioClipInstanceId, ClipId};
use crate::audio::metronome::Metronome;
use crate::audio::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiEvent};
use crate::audio::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
use crate::audio::node_graph::{nodes::*, AudioGraph};
use crate::audio::pool::AudioClipPool;
use crate::audio::project::Project;
@ -611,6 +611,14 @@ impl Engine {
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
}
Command::RemoveMidiClip(track_id, instance_id) => {
// Remove a MIDI clip instance from a track (for undo/redo support)
let _ = self.project.remove_midi_clip(track_id, instance_id);
}
Command::RemoveAudioClip(track_id, instance_id) => {
// Remove an audio clip instance from a track (for undo/redo support)
let _ = self.project.remove_audio_clip(track_id, instance_id);
}
Command::RequestBufferPoolStats => {
// Send buffer pool statistics back to UI
let stats = self.buffer_pool.stats();
@ -1683,6 +1691,45 @@ impl Engine {
Err(e) => QueryResponse::AudioExported(Err(e)),
}
}
Query::AddMidiClipSync(track_id, clip, start_time) => {
// Add MIDI clip to track and return the instance ID
match self.project.add_midi_clip_at(track_id, clip, start_time) {
Ok(instance_id) => QueryResponse::MidiClipInstanceAdded(Ok(instance_id)),
Err(e) => QueryResponse::MidiClipInstanceAdded(Err(e.to_string())),
}
}
Query::AddMidiClipInstanceSync(track_id, mut instance) => {
// Add MIDI clip instance to track (clip must already be in pool)
// Assign instance ID
let instance_id = self.project.next_midi_clip_instance_id();
instance.id = instance_id;
match self.project.add_midi_clip_instance(track_id, instance) {
Ok(_) => QueryResponse::MidiClipInstanceAdded(Ok(instance_id)),
Err(e) => QueryResponse::MidiClipInstanceAdded(Err(e.to_string())),
}
}
Query::AddAudioClipSync(track_id, pool_index, start_time, duration, offset) => {
// Add audio clip to track and return the instance ID
// Create audio clip instance
let instance_id = self.next_clip_id;
self.next_clip_id += 1;
let clip = AudioClipInstance {
id: instance_id,
audio_pool_index: pool_index,
internal_start: offset,
internal_end: offset + duration,
external_start: start_time,
external_duration: duration,
gain: 1.0,
};
match self.project.add_clip(track_id, clip) {
Ok(instance_id) => QueryResponse::AudioClipInstanceAdded(Ok(instance_id)),
Err(e) => QueryResponse::AudioClipInstanceAdded(Err(e.to_string())),
}
}
};
// Send response back
@ -2156,6 +2203,16 @@ impl EngineController {
let _ = self.command_tx.push(Command::UpdateMidiClipNotes(track_id, clip_id, notes));
}
/// Remove a MIDI clip instance from a track (for undo/redo support)
///
/// Fire-and-forget: pushes a `Command::RemoveMidiClip` onto the lock-free
/// command queue; a full queue is silently ignored (`let _ =`).
pub fn remove_midi_clip(&mut self, track_id: TrackId, instance_id: MidiClipInstanceId) {
let _ = self.command_tx.push(Command::RemoveMidiClip(track_id, instance_id));
}
/// Remove an audio clip instance from a track (for undo/redo support)
///
/// Fire-and-forget: pushes a `Command::RemoveAudioClip` onto the lock-free
/// command queue; a full queue is silently ignored (`let _ =`).
pub fn remove_audio_clip(&mut self, track_id: TrackId, instance_id: AudioClipInstanceId) {
let _ = self.command_tx.push(Command::RemoveAudioClip(track_id, instance_id));
}
/// Request buffer pool statistics
/// The statistics will be sent via an AudioEvent::BufferPoolStats event
pub fn request_buffer_pool_stats(&mut self) {
@ -2360,6 +2417,30 @@ impl EngineController {
let _ = self.command_tx.push(Command::MultiSamplerRemoveLayer(track_id, node_id, layer_index));
}
/// Send a synchronous query and wait for the response
/// This blocks until the audio thread processes the query
/// Generic method that works with any Query/QueryResponse pair
///
/// # Errors
/// Returns an error if the query queue is full, or if no response arrives
/// within the 500 ms timeout.
pub fn send_query(&mut self, query: Query) -> Result<QueryResponse, String> {
    // Send query (idiomatic `.is_err()` instead of `if let Err(_)`)
    if self.query_tx.push(query).is_err() {
        return Err("Failed to send query - queue full".to_string());
    }
    // Wait for response (with timeout)
    // NOTE(review): responses carry no correlation ID, so a late response from a
    // previously timed-out query could be popped here and mis-attributed to this
    // query — confirm the queue is drained or queries are never re-entered after
    // a timeout.
    let start = std::time::Instant::now();
    let timeout = std::time::Duration::from_millis(500);
    while start.elapsed() < timeout {
        if let Ok(response) = self.query_response_rx.pop() {
            return Ok(response);
        }
        // Small sleep to avoid busy-waiting
        std::thread::sleep(std::time::Duration::from_micros(100));
    }
    Err("Query timeout".to_string())
}
/// Send a synchronous query and wait for the response
/// This blocks until the audio thread processes the query
pub fn query_graph_state(&mut self, track_id: TrackId) -> Result<String, String> {

View File

@ -1,5 +1,5 @@
use super::buffer_pool::BufferPool;
use super::clip::Clip;
use super::clip::{AudioClipInstanceId, Clip};
use super::midi::{MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent};
use super::midi_pool::MidiClipPool;
use super::pool::AudioClipPool;
@ -242,10 +242,11 @@ impl Project {
}
/// Add a clip to an audio track
pub fn add_clip(&mut self, track_id: TrackId, clip: Clip) -> Result<(), &'static str> {
pub fn add_clip(&mut self, track_id: TrackId, clip: Clip) -> Result<AudioClipInstanceId, &'static str> {
if let Some(TrackNode::Audio(track)) = self.tracks.get_mut(&track_id) {
let instance_id = clip.id;
track.add_clip(clip);
Ok(())
Ok(instance_id)
} else {
Err("Track not found or is not an audio track")
}
@ -302,12 +303,12 @@ impl Project {
}
/// Legacy method for backwards compatibility - creates clip and instance from old MidiClip format
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<(), &'static str> {
pub fn add_midi_clip(&mut self, track_id: TrackId, clip: MidiClip) -> Result<MidiClipInstanceId, &'static str> {
self.add_midi_clip_at(track_id, clip, 0.0)
}
/// Add a MIDI clip to the pool and create an instance at the given timeline position
pub fn add_midi_clip_at(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) -> Result<(), &'static str> {
pub fn add_midi_clip_at(&mut self, track_id: TrackId, clip: MidiClip, start_time: f64) -> Result<MidiClipInstanceId, &'static str> {
// Add the clip to the pool (it already has events and duration)
let duration = clip.duration;
let clip_id = clip.id;
@ -317,7 +318,28 @@ impl Project {
let instance_id = self.next_midi_clip_instance_id();
let instance = MidiClipInstance::from_full_clip(instance_id, clip_id, duration, start_time);
self.add_midi_clip_instance(track_id, instance)
self.add_midi_clip_instance(track_id, instance)?;
Ok(instance_id)
}
/// Remove a MIDI clip instance from a track (for undo/redo support)
///
/// Returns `Err` only when the track does not exist; removing a nonexistent
/// instance from an existing track is a silent no-op.
pub fn remove_midi_clip(&mut self, track_id: TrackId, instance_id: MidiClipInstanceId) -> Result<(), &'static str> {
    // Combinator form: map over the located track, error out if absent.
    self.get_track_mut(track_id)
        .map(|track| track.remove_midi_clip_instance(instance_id))
        .ok_or("Track not found")
}
/// Remove an audio clip instance from a track (for undo/redo support)
///
/// Returns `Err` only when the track does not exist; removing a nonexistent
/// instance from an existing track is a silent no-op.
pub fn remove_audio_clip(&mut self, track_id: TrackId, instance_id: AudioClipInstanceId) -> Result<(), &'static str> {
    // Combinator form: map over the located track, error out if absent.
    self.get_track_mut(track_id)
        .map(|track| track.remove_audio_clip_instance(instance_id))
        .ok_or("Track not found")
}
/// Render all root tracks into the output buffer

View File

@ -1,6 +1,6 @@
use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
use super::clip::AudioClipInstance;
use super::midi::{MidiClipInstance, MidiEvent};
use super::clip::{AudioClipInstance, AudioClipInstanceId};
use super::midi::{MidiClipInstance, MidiClipInstanceId, MidiEvent};
use super::midi_pool::MidiClipPool;
use super::node_graph::AudioGraph;
use super::node_graph::nodes::{AudioInputNode, AudioOutputNode};
@ -128,6 +128,20 @@ impl TrackNode {
TrackNode::Group(group) => group.set_solo(solo),
}
}
/// Remove a MIDI clip instance (only works on MIDI tracks)
///
/// Silently ignores tracks of any other kind, matching the original
/// `if let` behavior.
pub fn remove_midi_clip_instance(&mut self, instance_id: MidiClipInstanceId) {
    match self {
        TrackNode::Midi(midi_track) => midi_track.remove_midi_clip_instance(instance_id),
        _ => {}
    }
}
/// Remove an audio clip instance (only works on audio tracks)
///
/// Silently ignores tracks of any other kind, matching the original
/// `if let` behavior.
pub fn remove_audio_clip_instance(&mut self, instance_id: AudioClipInstanceId) {
    match self {
        TrackNode::Audio(audio_track) => audio_track.remove_audio_clip_instance(instance_id),
        _ => {}
    }
}
}
/// Metatrack that contains other tracks with time transformation capabilities
@ -353,6 +367,11 @@ impl MidiTrack {
self.clip_instances.push(instance);
}
/// Remove a MIDI clip instance from this track by instance ID (for undo/redo support)
///
/// Removes every instance whose `id` matches (IDs are expected to be unique);
/// no-op when no instance matches. O(n) in-place via `retain`.
pub fn remove_midi_clip_instance(&mut self, instance_id: MidiClipInstanceId) {
self.clip_instances.retain(|instance| instance.id != instance_id);
}
/// Set track volume
pub fn set_volume(&mut self, volume: f32) {
self.volume = volume.max(0.0);
@ -570,6 +589,11 @@ impl AudioTrack {
self.clips.push(clip);
}
/// Remove an audio clip instance from this track by instance ID (for undo/redo support)
///
/// Removes every clip whose `id` matches (IDs are expected to be unique);
/// no-op when no clip matches. O(n) in-place via `retain`.
pub fn remove_audio_clip_instance(&mut self, instance_id: AudioClipInstanceId) {
self.clips.retain(|instance| instance.id != instance_id);
}
/// Set track volume (0.0 = silence, 1.0 = unity gain, >1.0 = amplification)
pub fn set_volume(&mut self, volume: f32) {
self.volume = volume.max(0.0);

View File

@ -1,6 +1,6 @@
use crate::audio::{
AutomationLaneId, ClipId, CurveType, MidiClip, MidiClipId, ParameterId,
TrackId,
AudioClipInstanceId, AutomationLaneId, ClipId, CurveType, MidiClip, MidiClipId,
MidiClipInstanceId, ParameterId, TrackId,
};
use crate::audio::buffer_pool::BufferPoolStats;
use crate::audio::node_graph::nodes::LoopMode;
@ -78,6 +78,10 @@ pub enum Command {
/// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>)
/// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes
UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>),
/// Remove a MIDI clip instance from a track (track_id, instance_id) - for undo/redo support
RemoveMidiClip(TrackId, MidiClipInstanceId),
/// Remove an audio clip instance from a track (track_id, instance_id) - for undo/redo support
RemoveAudioClip(TrackId, AudioClipInstanceId),
// Diagnostics commands
/// Request buffer pool statistics
@ -261,6 +265,13 @@ pub enum Query {
GetPoolFileInfo(usize),
/// Export audio to file (settings, output_path)
ExportAudio(crate::audio::ExportSettings, std::path::PathBuf),
/// Add a MIDI clip to a track synchronously (track_id, clip, start_time) - returns instance ID
AddMidiClipSync(TrackId, crate::audio::midi::MidiClip, f64),
/// Add a MIDI clip instance to a track synchronously (track_id, instance) - returns instance ID
/// The clip must already exist in the MidiClipPool
AddMidiClipInstanceSync(TrackId, crate::audio::midi::MidiClipInstance),
/// Add an audio clip to a track synchronously (track_id, pool_index, start_time, duration, offset) - returns instance ID
AddAudioClipSync(TrackId, usize, f64, f64, f64),
}
/// Oscilloscope data from a node
@ -320,4 +331,8 @@ pub enum QueryResponse {
PoolFileInfo(Result<(f64, u32, u32), String>),
/// Audio exported
AudioExported(Result<(), String>),
/// MIDI clip instance added (returns instance ID)
MidiClipInstanceAdded(Result<MidiClipInstanceId, String>),
/// Audio clip instance added (returns instance ID)
AudioClipInstanceAdded(Result<AudioClipInstanceId, String>),
}

View File

@ -13,8 +13,8 @@ pub mod tui;
// Re-export commonly used types
pub use audio::{
AudioPool, AudioTrack, AutomationLane, AutomationLaneId, AutomationPoint, BufferPool, Clip, ClipId, CurveType, Engine, EngineController,
Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
AudioClipInstanceId, AudioPool, AudioTrack, AutomationLane, AutomationLaneId, AutomationPoint, BufferPool, Clip, ClipId, CurveType, Engine, EngineController,
Metatrack, MidiClip, MidiClipId, MidiClipInstance, MidiClipInstanceId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
TrackNode,
};
pub use audio::node_graph::{GraphPreset, AudioGraph, PresetMetadata, SerializedConnection, SerializedNode};

View File

@ -19,3 +19,6 @@ image = { workspace = true }
# Unique identifiers
uuid = { version = "1.0", features = ["v4", "serde"] }
# Audio backend
daw-backend = { path = "../../daw-backend" }

View File

@ -18,12 +18,34 @@
//! callbacks), the document is cloned before mutation, preserving their snapshot.
use crate::document::Document;
use std::collections::HashMap;
use std::sync::Arc;
use uuid::Uuid;
/// Backend context for actions that need to interact with external systems
///
/// This bundles all backend references (audio, future video) that actions
/// may need to synchronize state with external systems beyond the document.
///
/// Borrows are tied to `'a`, so a `BackendContext` is built fresh per
/// dispatch and never stored.
pub struct BackendContext<'a> {
/// Audio engine controller (optional - may not be initialized)
pub audio_controller: Option<&'a mut daw_backend::EngineController>,
/// Mapping from document layer UUIDs to backend track IDs
pub layer_to_track_map: &'a HashMap<Uuid, daw_backend::TrackId>,
// Future: pub video_controller: Option<&'a mut VideoController>,
}
/// Action trait for undo/redo operations
///
/// Each action must be able to execute (apply changes) and rollback (undo changes).
/// Actions are stored in the undo stack and can be re-executed from the redo stack.
///
/// ## Backend Integration
///
/// Actions can optionally implement backend synchronization via `execute_backend()`
/// and `rollback_backend()`. Default implementations do nothing, so actions that
/// only affect the document (vector graphics) don't need to implement these.
pub trait Action: Send {
/// Apply this action to the document
fn execute(&mut self, document: &mut Document);
@ -33,6 +55,33 @@ pub trait Action: Send {
/// Get a human-readable description of this action (for UI display)
fn description(&self) -> String;
/// Execute backend operations after document changes
///
/// Called AFTER execute() succeeds. If this returns an error, execute()
/// will be automatically rolled back to maintain atomicity.
///
/// # Arguments
/// * `backend` - Backend context with audio/video controllers
/// * `document` - Read-only document access for looking up clip data
///
/// Default: No backend operations
fn execute_backend(&mut self, _backend: &mut BackendContext, _document: &Document) -> Result<(), String> {
Ok(())
}
/// Rollback backend operations during undo
///
/// Called BEFORE rollback() to undo backend changes in reverse order.
///
/// # Arguments
/// * `backend` - Backend context with audio/video controllers
/// * `document` - Read-only document access (if needed)
///
/// Default: No backend operations
fn rollback_backend(&mut self, _backend: &mut BackendContext, _document: &Document) -> Result<(), String> {
Ok(())
}
}
/// Action executor that wraps the document and manages undo/redo
@ -195,6 +244,104 @@ impl ActionExecutor {
self.undo_stack.drain(0..remove_count);
}
}
/// Execute an action with backend synchronization
///
/// This performs atomic execution: if backend operations fail, the document
/// changes are automatically rolled back to maintain consistency.
///
/// # Arguments
/// * `action` - The action to execute
/// * `backend` - Backend context for audio/video operations
///
/// # Returns
/// * `Ok(())` if both document and backend operations succeeded
/// * `Err(msg)` if backend failed (document changes are rolled back)
pub fn execute_with_backend(
&mut self,
mut action: Box<dyn Action>,
backend: &mut BackendContext,
) -> Result<(), String> {
// 1. Execute document changes
// Arc::make_mut clones the document only if other Arc holders exist,
// preserving their snapshot (copy-on-write).
action.execute(Arc::make_mut(&mut self.document));
// 2. Execute backend changes (pass document for reading clip data)
if let Err(e) = action.execute_backend(backend, &self.document) {
// ATOMIC ROLLBACK: Backend failed → undo document
action.rollback(Arc::make_mut(&mut self.document));
return Err(e);
}
// 3. Push to undo stack (both succeeded)
// A new action invalidates any redo history.
self.redo_stack.clear();
self.undo_stack.push(action);
// Limit undo stack size
// remove(0) is O(n) but the stack is bounded by max_undo_depth.
if self.undo_stack.len() > self.max_undo_depth {
self.undo_stack.remove(0);
}
Ok(())
}
/// Undo the last action with backend synchronization
///
/// Rollback happens in reverse order: backend first, then document.
///
/// # Arguments
/// * `backend` - Backend context for audio/video operations
///
/// # Returns
/// * `Ok(true)` if an action was undone
/// * `Ok(false)` if undo stack is empty
/// * `Err(msg)` if backend rollback failed (the action stays on the undo stack)
pub fn undo_with_backend(&mut self, backend: &mut BackendContext) -> Result<bool, String> {
    if let Some(mut action) = self.undo_stack.pop() {
        // Rollback in REVERSE order: backend first, then document
        if let Err(e) = action.rollback_backend(backend, &self.document) {
            // BUG FIX: the previous `?` here dropped the popped action on
            // failure, silently losing it from the undo history even though
            // the document was never rolled back. Push it back so the undo
            // can be retried, mirroring redo_with_backend's failure path.
            self.undo_stack.push(action);
            return Err(e);
        }
        action.rollback(Arc::make_mut(&mut self.document));
        // Move to redo stack
        self.redo_stack.push(action);
        Ok(true)
    } else {
        Ok(false)
    }
}
/// Redo the last undone action with backend synchronization
///
/// Re-execution happens in normal order: document first, then backend.
///
/// # Arguments
/// * `backend` - Backend context for audio/video operations
///
/// # Returns
/// * `Ok(true)` if an action was redone
/// * `Ok(false)` if redo stack is empty
/// * `Err(msg)` if backend execution failed
pub fn redo_with_backend(&mut self, backend: &mut BackendContext) -> Result<bool, String> {
    // Guard clause: nothing to redo.
    let mut action = match self.redo_stack.pop() {
        Some(action) => action,
        None => return Ok(false),
    };
    // Re-execute in same order: document first, then backend
    action.execute(Arc::make_mut(&mut self.document));
    match action.execute_backend(backend, &self.document) {
        Ok(()) => {
            // Both phases succeeded — move the action back to the undo stack.
            self.undo_stack.push(action);
            Ok(true)
        }
        Err(e) => {
            // Backend failed: roll the document back and keep the action
            // on the redo stack so the redo can be retried.
            action.rollback(Arc::make_mut(&mut self.document));
            self.redo_stack.push(action);
            Err(e)
        }
    }
}
}
#[cfg(test)]

View File

@ -2,7 +2,7 @@
//!
//! Handles adding a clip instance to a layer.
use crate::action::Action;
use crate::action::{Action, BackendContext};
use crate::clip::ClipInstance;
use crate::document::Document;
use crate::layer::AnyLayer;
@ -18,6 +18,15 @@ pub struct AddClipInstanceAction {
/// Whether the action has been executed (for rollback)
executed: bool,
/// Backend track ID (stored during execute_backend for undo)
backend_track_id: Option<daw_backend::TrackId>,
/// Backend MIDI clip instance ID (stored during execute_backend for undo)
backend_midi_instance_id: Option<daw_backend::MidiClipInstanceId>,
/// Backend audio clip instance ID (stored during execute_backend for undo)
backend_audio_instance_id: Option<daw_backend::AudioClipInstanceId>,
}
impl AddClipInstanceAction {
@ -32,6 +41,9 @@ impl AddClipInstanceAction {
layer_id,
clip_instance,
executed: false,
backend_track_id: None,
backend_midi_instance_id: None,
backend_audio_instance_id: None,
}
}
@ -96,6 +108,109 @@ impl Action for AddClipInstanceAction {
fn description(&self) -> String {
"Add clip instance".to_string()
}
/// Sync the newly added clip instance to the audio backend.
///
/// Handles BOTH clip kinds: MIDI instances (referencing a clip already in the
/// backend MidiClipPool) and sampled audio (referencing an audio pool index).
/// On success, stores the backend track and instance IDs for rollback_backend.
fn execute_backend(&mut self, backend: &mut BackendContext, document: &Document) -> Result<(), String> {
// Sync this clip to the audio backend (both MIDI and sampled clips).
// Look up the clip from the document
let clip = document
.get_audio_clip(&self.clip_instance.clip_id)
.ok_or_else(|| "Audio clip not found".to_string())?;
// Look up backend track ID from layer mapping
let backend_track_id = backend
.layer_to_track_map
.get(&self.layer_id)
.ok_or_else(|| format!("Layer {} not mapped to backend track", self.layer_id))?;
// Get audio controller
let controller = backend
.audio_controller
.as_mut()
.ok_or_else(|| "Audio controller not available".to_string())?;
// Handle different clip types
use crate::clip::AudioClipType;
match &clip.clip_type {
AudioClipType::Midi { midi_clip_id } => {
// Create a MIDI clip instance referencing the existing clip in the backend pool
// No need to add to pool again - it was added during MIDI import
use daw_backend::command::{Query, QueryResponse};
// Calculate internal start/end from trim parameters
let internal_start = self.clip_instance.trim_start;
let internal_end = self.clip_instance.trim_end.unwrap_or(clip.duration);
let external_start = self.clip_instance.timeline_start;
// Calculate external duration (for looping if timeline_duration is set)
let external_duration = self.clip_instance.timeline_duration
.unwrap_or(internal_end - internal_start);
// Create MidiClipInstance
let instance = daw_backend::MidiClipInstance::new(
0, // Instance ID will be assigned by backend
*midi_clip_id,
internal_start,
internal_end,
external_start,
external_duration,
);
// Send query to add instance and get instance ID
let query = Query::AddMidiClipInstanceSync(*backend_track_id, instance);
match controller.send_query(query)? {
QueryResponse::MidiClipInstanceAdded(Ok(instance_id)) => {
// Record IDs so rollback_backend can undo this addition.
self.backend_track_id = Some(*backend_track_id);
self.backend_midi_instance_id = Some(instance_id);
Ok(())
}
QueryResponse::MidiClipInstanceAdded(Err(e)) => Err(e),
_ => Err("Unexpected query response".to_string()),
}
}
AudioClipType::Sampled { audio_pool_index } => {
// For sampled audio, send AddAudioClipSync query
use daw_backend::command::{Query, QueryResponse};
let duration = clip.duration;
let start_time = self.clip_instance.timeline_start;
let offset = self.clip_instance.trim_start;
let query =
Query::AddAudioClipSync(*backend_track_id, *audio_pool_index, start_time, duration, offset);
match controller.send_query(query)? {
QueryResponse::AudioClipInstanceAdded(Ok(instance_id)) => {
// Record IDs so rollback_backend can undo this addition.
self.backend_track_id = Some(*backend_track_id);
self.backend_audio_instance_id = Some(instance_id);
Ok(())
}
QueryResponse::AudioClipInstanceAdded(Err(e)) => Err(e),
_ => Err("Unexpected query response".to_string()),
}
}
}
}
/// Undo the backend-side clip addition recorded by execute_backend.
///
/// No-op when nothing was recorded or the audio controller is unavailable.
fn rollback_backend(&mut self, backend: &mut BackendContext, _document: &Document) -> Result<(), String> {
    // Only act when execute_backend stored a track ID AND a controller exists —
    // same condition as the original tuple-pattern `if let`.
    if let Some(track_id) = self.backend_track_id {
        if let Some(controller) = backend.audio_controller.as_mut() {
            // Exactly one of the two instance IDs is set per clip kind.
            if let Some(instance_id) = self.backend_midi_instance_id {
                controller.remove_midi_clip(track_id, instance_id);
            } else if let Some(instance_id) = self.backend_audio_instance_id {
                controller.remove_audio_clip(track_id, instance_id);
            }
            // Clear stored IDs
            self.backend_track_id = None;
            self.backend_midi_instance_id = None;
            self.backend_audio_instance_id = None;
        }
    }
    Ok(())
}
}
#[cfg(test)]

View File

@ -332,12 +332,12 @@ pub enum AudioClipType {
},
/// MIDI sequence
///
/// Compatible with daw-backend's MidiClip structure.
/// References MIDI data in the backend's MidiClipPool.
/// The clip content is stored in daw-backend, not duplicated here.
Midi {
/// MIDI events with timestamps
events: Vec<MidiEvent>,
/// Whether the clip loops
loop_enabled: bool,
/// Backend MIDI clip ID (references MidiClip in backend pool)
/// This allows sharing MIDI data between multiple clip instances
midi_clip_id: u32,
},
}
@ -379,20 +379,21 @@ impl AudioClip {
}
/// Create a new MIDI clip
///
/// # Arguments
/// * `name` - Clip name
/// * `midi_clip_id` - Backend MIDI clip ID (from daw-backend MidiClipPool)
/// * `duration` - Clip duration
pub fn new_midi(
name: impl Into<String>,
midi_clip_id: u32,
duration: f64,
events: Vec<MidiEvent>,
loop_enabled: bool,
) -> Self {
Self {
id: Uuid::new_v4(),
name: name.into(),
duration,
clip_type: AudioClipType::Midi {
events,
loop_enabled,
},
clip_type: AudioClipType::Midi { midi_clip_id },
}
}
@ -404,10 +405,10 @@ impl AudioClip {
}
}
/// Get MIDI events if this is a MIDI clip
pub fn midi_events(&self) -> Option<&[MidiEvent]> {
/// Get backend MIDI clip ID if this is a MIDI clip
pub fn midi_clip_id(&self) -> Option<u32> {
match &self.clip_type {
AudioClipType::Midi { events, .. } => Some(events),
AudioClipType::Midi { midi_clip_id } => Some(*midi_clip_id),
_ => None,
}
}

View File

@ -288,6 +288,11 @@ struct EditorApp {
fill_enabled: bool, // Whether to fill shapes (default: true)
paint_bucket_gap_tolerance: f64, // Fill gap tolerance for paint bucket (default: 5.0)
polygon_sides: u32, // Number of sides for polygon tool (default: 5)
/// Cache for MIDI event data (keyed by backend midi_clip_id)
/// Prevents repeated backend queries for the same MIDI clip
/// Format: (timestamp, note_number, is_note_on)
midi_event_cache: HashMap<u32, Vec<(f64, u8, bool)>>,
}
/// Import filter types for the file dialog
@ -382,6 +387,7 @@ impl EditorApp {
fill_enabled: true, // Default to filling shapes
paint_bucket_gap_tolerance: 5.0, // Default gap tolerance
polygon_sides: 5, // Default to pentagon
midi_event_cache: HashMap::new(), // Initialize empty MIDI event cache
}
}
@ -601,19 +607,45 @@ impl EditorApp {
// Edit menu
MenuAction::Undo => {
if let Some(ref mut audio_system) = self.audio_system {
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller),
layer_to_track_map: &self.layer_to_track_map,
};
match self.action_executor.undo_with_backend(&mut backend_context) {
Ok(true) => println!("Undid: {}", self.action_executor.redo_description().unwrap_or_default()),
Ok(false) => println!("Nothing to undo"),
Err(e) => eprintln!("Undo failed: {}", e),
}
} else {
if self.action_executor.undo() {
println!("Undid: {}", self.action_executor.redo_description().unwrap_or_default());
} else {
println!("Nothing to undo");
}
}
}
MenuAction::Redo => {
if let Some(ref mut audio_system) = self.audio_system {
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller),
layer_to_track_map: &self.layer_to_track_map,
};
match self.action_executor.redo_with_backend(&mut backend_context) {
Ok(true) => println!("Redid: {}", self.action_executor.undo_description().unwrap_or_default()),
Ok(false) => println!("Nothing to redo"),
Err(e) => eprintln!("Redo failed: {}", e),
}
} else {
if self.action_executor.redo() {
println!("Redid: {}", self.action_executor.undo_description().unwrap_or_default());
} else {
println!("Nothing to redo");
}
}
}
MenuAction::Cut => {
println!("Menu: Cut");
// TODO: Implement cut
@ -948,7 +980,7 @@ impl EditorApp {
/// Import a MIDI file via daw-backend
fn import_midi(&mut self, path: &std::path::Path) {
use lightningbeam_core::clip::{AudioClip, AudioClipType, MidiEvent};
use lightningbeam_core::clip::AudioClip;
let name = path.file_stem()
.and_then(|s| s.to_str())
@ -956,26 +988,45 @@ impl EditorApp {
.to_string();
// Load MIDI file via daw-backend
// Note: daw-backend's load_midi_file returns a MidiClip with events
match daw_backend::io::midi_file::load_midi_file(path, 0, 44100) {
Ok(midi_clip) => {
// Convert daw-backend MidiEvents to our MidiEvent type
let events: Vec<MidiEvent> = midi_clip.events.iter().map(|e| {
MidiEvent::new(e.timestamp, e.status, e.data1, e.data2)
}).collect();
let duration = midi_clip.duration;
let event_count = midi_clip.events.len();
// Create MIDI audio clip in document library
let clip = AudioClip::new_midi(&name, duration, events, false);
let clip_id = self.action_executor.document_mut().add_audio_clip(clip);
println!("Imported MIDI '{}' ({:.1}s, {} events) to library - ID: {}",
name, duration, midi_clip.events.len(), clip_id);
// Process MIDI events to cache format: (timestamp, note_number, is_note_on)
// Filter to note events only (status 0x90 = note-on, 0x80 = note-off)
let processed_events: Vec<(f64, u8, bool)> = midi_clip.events.iter()
.filter_map(|event| {
let status_type = event.status & 0xF0;
if status_type == 0x90 || status_type == 0x80 {
// Note-on is 0x90 with velocity > 0, Note-off is 0x80 or velocity = 0
let is_note_on = status_type == 0x90 && event.data2 > 0;
Some((event.timestamp, event.data1, is_note_on))
} else {
None // Ignore non-note events (CC, pitch bend, etc.)
}
})
.collect();
// Add to daw-backend MIDI clip pool (for playback when placed on timeline)
let note_event_count = processed_events.len();
// Add to backend MIDI clip pool FIRST and get the backend clip ID
if let Some(ref mut audio_system) = self.audio_system {
audio_system.controller.add_midi_clip_to_pool(midi_clip);
println!("✅ Added MIDI clip to backend pool");
audio_system.controller.add_midi_clip_to_pool(midi_clip.clone());
let backend_clip_id = midi_clip.id; // The backend clip ID
// Cache MIDI events in frontend for rendering (thumbnails & timeline piano roll)
self.midi_event_cache.insert(backend_clip_id, processed_events);
// Create frontend MIDI clip referencing the backend pool
let clip = AudioClip::new_midi(&name, backend_clip_id, duration);
let frontend_clip_id = self.action_executor.document_mut().add_audio_clip(clip);
println!("Imported MIDI '{}' ({:.1}s, {} total events, {} note events) - Frontend ID: {}, Backend ID: {}",
name, duration, event_count, note_event_count, frontend_clip_id, backend_clip_id);
println!("✅ Added MIDI clip to backend pool and cached {} note events", note_event_count);
} else {
eprintln!("⚠️ Cannot import MIDI: audio system not available");
}
}
Err(e) => {
@ -1102,6 +1153,7 @@ impl eframe::App for EditorApp {
paint_bucket_gap_tolerance: &mut self.paint_bucket_gap_tolerance,
polygon_sides: &mut self.polygon_sides,
layer_to_track_map: &self.layer_to_track_map,
midi_event_cache: &self.midi_event_cache,
};
render_layout_node(
@ -1136,8 +1188,22 @@ impl eframe::App for EditorApp {
// Execute all pending actions (two-phase dispatch)
for action in pending_actions {
// Create backend context for actions that need backend sync
if let Some(ref mut audio_system) = self.audio_system {
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut audio_system.controller),
layer_to_track_map: &self.layer_to_track_map,
};
// Execute action with backend synchronization
if let Err(e) = self.action_executor.execute_with_backend(action, &mut backend_context) {
eprintln!("Action execution failed: {}", e);
}
} else {
// No audio system available, execute without backend
self.action_executor.execute(action);
}
}
// Set cursor based on hover state
if let Some((_, is_horizontal)) = self.hovered_divider {
@ -1253,6 +1319,8 @@ struct RenderContext<'a> {
polygon_sides: &'a mut u32,
/// Mapping from Document layer UUIDs to daw-backend TrackIds
layer_to_track_map: &'a std::collections::HashMap<Uuid, daw_backend::TrackId>,
/// Cache of MIDI events for rendering (keyed by backend midi_clip_id)
midi_event_cache: &'a HashMap<u32, Vec<(f64, u8, bool)>>,
}
/// Recursively render a layout node with drag support
@ -1723,6 +1791,7 @@ fn render_pane(
fill_enabled: ctx.fill_enabled,
paint_bucket_gap_tolerance: ctx.paint_bucket_gap_tolerance,
polygon_sides: ctx.polygon_sides,
midi_event_cache: ctx.midi_event_cache,
};
pane_instance.render_header(&mut header_ui, &mut shared);
}
@ -1776,6 +1845,7 @@ fn render_pane(
fill_enabled: ctx.fill_enabled,
paint_bucket_gap_tolerance: ctx.paint_bucket_gap_tolerance,
polygon_sides: ctx.polygon_sides,
midi_event_cache: ctx.midi_event_cache,
};
// Render pane content (header was already rendered above)

View File

@ -1172,23 +1172,15 @@ impl AssetLibraryPane {
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
}
}
AudioClipType::Midi { events, .. } => {
let note_color = egui::Color32::from_rgb(100, 150, 255);
// Convert MIDI events to (timestamp, note, is_note_on) tuples
// Note on: 0x90-0x9F, Note off: 0x80-0x8F
let midi_events: Vec<(f64, u8, bool)> = events.iter()
.filter_map(|e| {
let msg_type = e.status & 0xF0;
let is_note_on = msg_type == 0x90 && e.data2 > 0;
let is_note_off = msg_type == 0x80 || (msg_type == 0x90 && e.data2 == 0);
if is_note_on || is_note_off {
Some((e.timestamp, e.data1, is_note_on))
AudioClipType::Midi { midi_clip_id } => {
let bg_color = egui::Color32::from_rgba_unmultiplied(40, 40, 40, 200);
let note_color = egui::Color32::from_rgb(100, 200, 100);
if let Some(events) = shared.midi_event_cache.get(midi_clip_id) {
Some(generate_midi_thumbnail(events, clip.duration, bg_color, note_color))
} else {
None
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
}
})
.collect();
Some(generate_midi_thumbnail(&midi_events, clip.duration, bg_color, note_color))
}
}
} else {
@ -1449,21 +1441,15 @@ impl AssetLibraryPane {
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
}
}
AudioClipType::Midi { events, .. } => {
let note_color = egui::Color32::from_rgb(100, 150, 255);
let midi_events: Vec<(f64, u8, bool)> = events.iter()
.filter_map(|e| {
let msg_type = e.status & 0xF0;
let is_note_on = msg_type == 0x90 && e.data2 > 0;
let is_note_off = msg_type == 0x80 || (msg_type == 0x90 && e.data2 == 0);
if is_note_on || is_note_off {
Some((e.timestamp, e.data1, is_note_on))
AudioClipType::Midi { midi_clip_id } => {
let bg_color = egui::Color32::from_rgba_unmultiplied(40, 40, 40, 200);
let note_color = egui::Color32::from_rgb(100, 200, 100);
if let Some(events) = shared.midi_event_cache.get(midi_clip_id) {
Some(generate_midi_thumbnail(events, clip.duration, bg_color, note_color))
} else {
None
Some(generate_placeholder_thumbnail(AssetCategory::Audio, 200))
}
})
.collect();
Some(generate_midi_thumbnail(&midi_events, clip.duration, bg_color, note_color))
}
}
} else {

View File

@ -125,6 +125,8 @@ pub struct SharedPaneState<'a> {
pub paint_bucket_gap_tolerance: &'a mut f64,
/// Number of sides for polygon tool
pub polygon_sides: &'a mut u32,
/// Cache of MIDI events for rendering (keyed by backend midi_clip_id)
pub midi_event_cache: &'a std::collections::HashMap<u32, Vec<(f64, u8, bool)>>,
}
/// Trait for pane rendering

View File

@ -347,6 +347,115 @@ impl TimelinePane {
}
}
/// Render mini piano roll visualization for MIDI clips on timeline.
/// Shows notes modulo 12 (one octave) matching the JavaScript reference implementation.
///
/// Coordinates: `events` timestamps are clip-local seconds; `trim_start` /
/// `visible_duration` select the visible slice of the clip; `timeline_start`,
/// `viewport_start_time` and `pixels_per_second` convert to screen X relative
/// to `rect_min_x` (the timeline panel's left edge). Actual pixel clipping is
/// assumed to be handled by the caller's painter clip rect — TODO confirm.
#[allow(clippy::too_many_arguments)]
fn render_midi_piano_roll(
    painter: &egui::Painter,
    clip_rect: egui::Rect,
    rect_min_x: f32, // Timeline panel left edge (for proper viewport-relative positioning)
    events: &[(f64, u8, bool)], // (timestamp, note_number, is_note_on)
    trim_start: f64,
    visible_duration: f64,
    timeline_start: f64,
    viewport_start_time: f64,
    pixels_per_second: f32,
    theme: &crate::theme::Theme,
    ctx: &egui::Context,
) {
    let clip_height = clip_rect.height();
    let note_height = clip_height / 12.0; // 12 semitones per octave

    // Get note color from theme CSS (fallback to black).
    let note_style = theme.style(".timeline-midi-note", ctx);
    let note_color = note_style.background_color.unwrap_or(egui::Color32::BLACK);

    // Shared geometry for both passes below: map a clip-local (start, duration,
    // pitch) triple to a screen-space rectangle. Previously this was duplicated
    // verbatim in the paired-notes and dangling-notes loops.
    let note_rect_for = |note_on_time: f64, duration: f64, note_number: u8| -> egui::Rect {
        // Convert the note's clip-local start into an absolute timeline position,
        // then to screen X using the same formula as clip positioning (time_to_x).
        let note_timeline_pos = timeline_start + (note_on_time - trim_start);
        let note_x = rect_min_x
            + ((note_timeline_pos - viewport_start_time) * pixels_per_second as f64) as f32;
        // Minimum 2px width so very short notes remain visible.
        let note_width = (duration as f32 * pixels_per_second).max(2.0);
        // Wrap pitch into one octave (modulo 12); higher pitch classes draw higher.
        let pitch_class = note_number % 12;
        let note_y = clip_rect.min.y + ((11 - pitch_class) as f32 * note_height);
        egui::Rect::from_min_size(
            egui::pos2(note_x, note_y),
            egui::vec2(note_width, note_height - 1.0), // -1 for spacing between notes
        )
    };

    // Note-ons awaiting their matching note-off (note_number -> note-on timestamp),
    // used to derive durations when the note-off arrives.
    let mut active_notes: std::collections::HashMap<u8, f64> = std::collections::HashMap::new();
    let mut note_rectangles: Vec<egui::Rect> = Vec::new();

    // First pass: pair note-ons with note-offs to calculate durations.
    for &(timestamp, note_number, is_note_on) in events {
        if is_note_on {
            // Store (or overwrite) the pending note-on timestamp for this pitch.
            active_notes.insert(note_number, timestamp);
        } else if let Some(note_on_time) = active_notes.remove(&note_number) {
            // Skip notes starting outside the visible trim range.
            if note_on_time < trim_start || note_on_time > trim_start + visible_duration {
                continue;
            }
            let rect = note_rect_for(note_on_time, timestamp - note_on_time, note_number);
            // Keep only rectangles that horizontally overlap the clip.
            if rect.right() >= clip_rect.left() && rect.left() <= clip_rect.right() {
                note_rectangles.push(rect);
            }
        }
        // Note-offs with no matching note-on are ignored.
    }

    // Handle any notes that never received a note-off (still active at end of clip):
    // give them a default duration — extend to the end of the visible area or
    // 0.5 seconds, whichever is shorter.
    for (&note_number, &note_on_time) in &active_notes {
        // Skip notes outside the visible trim range.
        if note_on_time < trim_start || note_on_time > trim_start + visible_duration {
            continue;
        }
        let max_end_time = (trim_start + visible_duration).min(note_on_time + 0.5);
        let rect = note_rect_for(note_on_time, max_end_time - note_on_time, note_number);
        if rect.right() >= clip_rect.left() && rect.left() <= clip_rect.right() {
            note_rectangles.push(rect);
        }
    }

    // Second pass: render all collected note rectangles.
    for rect in note_rectangles {
        painter.rect_filled(rect, 1.0, note_color);
    }
}
/// Render layer header column (left side with track names and controls)
fn render_layer_headers(
&mut self,
@ -625,6 +734,7 @@ impl TimelinePane {
document: &lightningbeam_core::document::Document,
active_layer_id: &Option<uuid::Uuid>,
selection: &lightningbeam_core::selection::Selection,
midi_event_cache: &std::collections::HashMap<u32, Vec<(f64, u8, bool)>>,
) {
let painter = ui.painter();
@ -790,6 +900,29 @@ impl TimelinePane {
clip_color,
);
// MIDI VISUALIZATION: Draw piano roll overlay for MIDI clips
if let lightningbeam_core::layer::AnyLayer::Audio(_) = layer {
if let Some(clip) = document.get_audio_clip(&clip_instance.clip_id) {
if let lightningbeam_core::clip::AudioClipType::Midi { midi_clip_id } = &clip.clip_type {
if let Some(events) = midi_event_cache.get(midi_clip_id) {
Self::render_midi_piano_roll(
painter,
clip_rect,
rect.min.x, // Pass timeline panel left edge for proper positioning
events,
clip_instance.trim_start,
instance_duration,
instance_start,
self.viewport_start_time,
self.pixels_per_second,
theme,
ui.ctx(),
);
}
}
}
}
// Draw border only if selected (brighter version of clip color)
if selection.contains_clip_instance(&clip_instance.id) {
painter.rect_stroke(
@ -1534,7 +1667,7 @@ impl PaneRenderer for TimelinePane {
// Render layer rows with clipping
ui.set_clip_rect(content_rect.intersect(original_clip_rect));
self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection);
self.render_layers(ui, content_rect, shared.theme, document, shared.active_layer_id, shared.selection, shared.midi_event_cache);
// Render playhead on top (clip to timeline area)
ui.set_clip_rect(timeline_rect.intersect(original_clip_rect));