Merge branch 'rust-ui' of https://git.skyler.io/skyler/Lightningbeam into rust-ui
This commit is contained in:
commit
434b488a4c
|
|
@ -35,6 +35,7 @@ dasp_peak = "0.11"
|
|||
dasp_rms = "0.11"
|
||||
petgraph = "0.6"
|
||||
serde_json = "1.0"
|
||||
zip = "0.6"
|
||||
|
||||
# BeamDSP scripting engine
|
||||
beamdsp = { path = "../lightningbeam-ui/beamdsp" }
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ pub struct Engine {
|
|||
project: Project,
|
||||
audio_pool: AudioClipPool,
|
||||
buffer_pool: BufferPool,
|
||||
playhead: u64, // Playhead position in samples
|
||||
playhead: i64, // Playhead position in samples (may be negative during count-in pre-roll)
|
||||
sample_rate: u32,
|
||||
playing: bool,
|
||||
channels: u32,
|
||||
|
|
@ -74,6 +74,10 @@ pub struct Engine {
|
|||
// MIDI recording state
|
||||
midi_recording_state: Option<MidiRecordingState>,
|
||||
|
||||
// Currently held MIDI notes per track (note -> velocity), updated on NoteOn/NoteOff
|
||||
// Used to inject held notes when recording starts mid-press (e.g. after count-in)
|
||||
midi_held_notes: HashMap<TrackId, HashMap<u8, u8>>,
|
||||
|
||||
// MIDI input manager for external MIDI devices
|
||||
midi_input_manager: Option<MidiInputManager>,
|
||||
|
||||
|
|
@ -160,6 +164,7 @@ impl Engine {
|
|||
recording_mirror_tx: None,
|
||||
recording_progress_counter: 0,
|
||||
midi_recording_state: None,
|
||||
midi_held_notes: HashMap::new(),
|
||||
midi_input_manager: None,
|
||||
metronome: Metronome::new(sample_rate),
|
||||
recording_sample_buffer: Vec::with_capacity(4096),
|
||||
|
|
@ -396,17 +401,18 @@ impl Engine {
|
|||
);
|
||||
|
||||
// Update playhead (convert total samples to frames)
|
||||
self.playhead += (output.len() / self.channels as usize) as u64;
|
||||
self.playhead += (output.len() / self.channels as usize) as i64;
|
||||
|
||||
// Update atomic playhead for UI reads
|
||||
// Update atomic playhead for UI reads (clamped to 0; negative = count-in pre-roll)
|
||||
self.playhead_atomic
|
||||
.store(self.playhead, Ordering::Relaxed);
|
||||
.store(self.playhead.max(0) as u64, Ordering::Relaxed);
|
||||
|
||||
// Send periodic position updates
|
||||
self.frames_since_last_event += output.len() / self.channels as usize;
|
||||
if self.frames_since_last_event >= self.event_interval_frames / self.channels as usize
|
||||
{
|
||||
let position_seconds = self.playhead as f64 / self.sample_rate as f64;
|
||||
// Clamp to 0 during count-in pre-roll (negative playhead = before project start)
|
||||
let position_seconds = self.playhead.max(0) as f64 / self.sample_rate as f64;
|
||||
let _ = self
|
||||
.event_tx
|
||||
.push(AudioEvent::PlaybackPosition(position_seconds));
|
||||
|
|
@ -692,17 +698,17 @@ impl Engine {
|
|||
self.project.stop_all_notes();
|
||||
}
|
||||
Command::Seek(seconds) => {
|
||||
let frames = (seconds * self.sample_rate as f64) as u64;
|
||||
self.playhead = frames;
|
||||
self.playhead_atomic
|
||||
.store(self.playhead, Ordering::Relaxed);
|
||||
self.playhead = (seconds * self.sample_rate as f64) as i64;
|
||||
// Clamp to 0 for atomic/disk-reader; negative = count-in pre-roll (no disk reads needed)
|
||||
let clamped = self.playhead.max(0) as u64;
|
||||
self.playhead_atomic.store(clamped, Ordering::Relaxed);
|
||||
// Stop all MIDI notes when seeking to prevent stuck notes
|
||||
self.project.stop_all_notes();
|
||||
// Reset all node graphs to clear effect buffers (echo, reverb, etc.)
|
||||
self.project.reset_all_graphs();
|
||||
// Notify disk reader to refill buffers from new position
|
||||
if let Some(ref mut dr) = self.disk_reader {
|
||||
dr.send(crate::audio::disk_reader::DiskReaderCommand::Seek { frame: frames });
|
||||
dr.send(crate::audio::disk_reader::DiskReaderCommand::Seek { frame: clamped });
|
||||
}
|
||||
}
|
||||
Command::SetTrackVolume(track_id, volume) => {
|
||||
|
|
@ -992,6 +998,13 @@ impl Engine {
|
|||
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
|
||||
}
|
||||
}
|
||||
Command::UpdateMidiClipEvents(_track_id, clip_id, events) => {
|
||||
// Replace all events in a MIDI clip (used for CC/pitch bend editing)
|
||||
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
|
||||
clip.events = events;
|
||||
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
|
||||
}
|
||||
}
|
||||
Command::RemoveMidiClip(track_id, instance_id) => {
|
||||
// Remove a MIDI clip instance from a track (for undo/redo support)
|
||||
let _ = self.project.remove_midi_clip(track_id, instance_id);
|
||||
|
|
@ -1205,6 +1218,9 @@ impl Engine {
|
|||
// Emit event to UI for visual feedback
|
||||
let _ = self.event_tx.push(AudioEvent::NoteOn(note, velocity));
|
||||
|
||||
// Track held notes so count-in recording can inject them at start_time
|
||||
self.midi_held_notes.entry(track_id).or_default().insert(note, velocity);
|
||||
|
||||
// If MIDI recording is active on this track, capture the event
|
||||
if let Some(recording) = &mut self.midi_recording_state {
|
||||
if recording.track_id == track_id {
|
||||
|
|
@ -1223,6 +1239,11 @@ impl Engine {
|
|||
// Emit event to UI for visual feedback
|
||||
let _ = self.event_tx.push(AudioEvent::NoteOff(note));
|
||||
|
||||
// Remove from held notes tracking
|
||||
if let Some(track_notes) = self.midi_held_notes.get_mut(&track_id) {
|
||||
track_notes.remove(¬e);
|
||||
}
|
||||
|
||||
// If MIDI recording is active on this track, capture the event
|
||||
if let Some(recording) = &mut self.midi_recording_state {
|
||||
if recording.track_id == track_id {
|
||||
|
|
@ -1655,7 +1676,7 @@ impl Engine {
|
|||
// Extract the directory path from the preset path for resolving relative sample paths
|
||||
let preset_base_path = std::path::Path::new(&preset_path).parent();
|
||||
|
||||
match AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path) {
|
||||
match AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path, None) {
|
||||
Ok(graph) => {
|
||||
// Replace the track's graph
|
||||
match self.project.get_track_mut(track_id) {
|
||||
|
|
@ -1705,6 +1726,80 @@ impl Engine {
|
|||
}
|
||||
}
|
||||
|
||||
Command::GraphLoadLbins(track_id, path) => {
|
||||
match crate::audio::node_graph::lbins::load_lbins(&path) {
|
||||
Ok((preset, assets)) => {
|
||||
match AudioGraph::from_preset(&preset, self.sample_rate, 8192, None, Some(&assets)) {
|
||||
Ok(graph) => {
|
||||
match self.project.get_track_mut(track_id) {
|
||||
Some(TrackNode::Midi(track)) => {
|
||||
track.instrument_graph = graph;
|
||||
track.graph_is_default = true;
|
||||
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
|
||||
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
|
||||
}
|
||||
Some(TrackNode::Audio(track)) => {
|
||||
track.effects_graph = graph;
|
||||
track.graph_is_default = true;
|
||||
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
|
||||
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
|
||||
}
|
||||
Some(TrackNode::Group(track)) => {
|
||||
track.audio_graph = graph;
|
||||
track.graph_is_default = true;
|
||||
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
|
||||
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
|
||||
track_id,
|
||||
format!("Failed to load .lbins graph: {}", e),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
|
||||
track_id,
|
||||
format!("Failed to open .lbins file: {}", e),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Command::GraphSaveLbins(track_id, path, preset_name, description, tags) => {
|
||||
let graph = match self.project.get_track(track_id) {
|
||||
Some(TrackNode::Midi(track)) => Some(&track.instrument_graph),
|
||||
Some(TrackNode::Audio(track)) => Some(&track.effects_graph),
|
||||
Some(TrackNode::Group(track)) => Some(&track.audio_graph),
|
||||
_ => None,
|
||||
};
|
||||
if let Some(graph) = graph {
|
||||
let mut preset = graph.to_preset(&preset_name);
|
||||
preset.metadata.description = description;
|
||||
preset.metadata.tags = tags;
|
||||
preset.metadata.author = String::from("User");
|
||||
|
||||
match crate::audio::node_graph::lbins::save_lbins(&path, &preset, None) {
|
||||
Ok(()) => {
|
||||
let _ = self.event_tx.push(AudioEvent::GraphPresetSaved(
|
||||
track_id,
|
||||
path.to_string_lossy().to_string(),
|
||||
));
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
|
||||
track_id,
|
||||
format!("Failed to save .lbins: {}", e),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Command::GraphSaveTemplatePreset(track_id, voice_allocator_id, preset_path, preset_name) => {
|
||||
use crate::audio::node_graph::nodes::VoiceAllocatorNode;
|
||||
|
||||
|
|
@ -2456,6 +2551,27 @@ impl Engine {
|
|||
}
|
||||
}
|
||||
|
||||
Query::GetAutomationRange(track_id, node_id) => {
|
||||
use crate::audio::node_graph::nodes::AutomationInputNode;
|
||||
|
||||
if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
|
||||
let graph = &track.instrument_graph;
|
||||
let node_idx = NodeIndex::new(node_id as usize);
|
||||
|
||||
if let Some(graph_node) = graph.get_graph_node(node_idx) {
|
||||
if let Some(auto_node) = graph_node.node.as_any().downcast_ref::<AutomationInputNode>() {
|
||||
QueryResponse::AutomationRange(Ok((auto_node.value_min, auto_node.value_max)))
|
||||
} else {
|
||||
QueryResponse::AutomationRange(Err(format!("Node {} is not an AutomationInputNode", node_id)))
|
||||
}
|
||||
} else {
|
||||
QueryResponse::AutomationRange(Err(format!("Node {} not found", node_id)))
|
||||
}
|
||||
} else {
|
||||
QueryResponse::AutomationRange(Err(format!("Track {} not found or is not a MIDI track", track_id)))
|
||||
}
|
||||
}
|
||||
|
||||
Query::SerializeAudioPool(project_path) => {
|
||||
QueryResponse::AudioPoolSerialized(self.audio_pool.serialize(&project_path))
|
||||
}
|
||||
|
|
@ -2509,12 +2625,12 @@ impl Engine {
|
|||
match track_node {
|
||||
TrackNode::Audio(track) => {
|
||||
// Load into effects graph with proper buffer size (8192 to handle any callback size)
|
||||
track.effects_graph = AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path)?;
|
||||
track.effects_graph = AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path, None)?;
|
||||
Ok(())
|
||||
}
|
||||
TrackNode::Midi(track) => {
|
||||
// Load into instrument graph with proper buffer size (8192 to handle any callback size)
|
||||
track.instrument_graph = AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path)?;
|
||||
track.instrument_graph = AudioGraph::from_preset(&preset, self.sample_rate, 8192, preset_base_path, None)?;
|
||||
Ok(())
|
||||
}
|
||||
TrackNode::Group(_) => {
|
||||
|
|
@ -2723,6 +2839,40 @@ impl Engine {
|
|||
};
|
||||
QueryResponse::GraphIsDefault(is_default)
|
||||
}
|
||||
|
||||
Query::GetPitchBendRange(track_id) => {
|
||||
use crate::audio::node_graph::nodes::{MidiToCVNode, MultiSamplerNode, VoiceAllocatorNode};
|
||||
use crate::audio::node_graph::AudioNode;
|
||||
let range = if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
|
||||
let graph = &track.instrument_graph;
|
||||
let mut found = None;
|
||||
for idx in graph.node_indices() {
|
||||
if let Some(gn) = graph.get_graph_node(idx) {
|
||||
if let Some(ms) = gn.node.as_any().downcast_ref::<MultiSamplerNode>() {
|
||||
found = Some(ms.get_parameter(4)); // PARAM_PITCH_BEND_RANGE
|
||||
break;
|
||||
}
|
||||
// Search inside VoiceAllocator template for MidiToCV
|
||||
if let Some(va) = gn.node.as_any().downcast_ref::<VoiceAllocatorNode>() {
|
||||
let tg = va.template_graph();
|
||||
for tidx in tg.node_indices() {
|
||||
if let Some(tgn) = tg.get_graph_node(tidx) {
|
||||
if let Some(mc) = tgn.node.as_any().downcast_ref::<MidiToCVNode>() {
|
||||
found = Some(mc.get_parameter(0)); // PARAM_PITCH_BEND_RANGE
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if found.is_some() { break; }
|
||||
}
|
||||
}
|
||||
}
|
||||
found.unwrap_or(2.0)
|
||||
} else {
|
||||
2.0
|
||||
};
|
||||
QueryResponse::PitchBendRange(range)
|
||||
}
|
||||
};
|
||||
|
||||
// Send response back
|
||||
|
|
@ -2902,7 +3052,17 @@ impl Engine {
|
|||
// Check if track exists and is a MIDI track
|
||||
if let Some(crate::audio::track::TrackNode::Midi(_)) = self.project.get_track_mut(track_id) {
|
||||
// Create MIDI recording state
|
||||
let recording_state = MidiRecordingState::new(track_id, clip_id, start_time);
|
||||
let mut recording_state = MidiRecordingState::new(track_id, clip_id, start_time);
|
||||
|
||||
// Inject any notes currently held on this track (pressed during count-in pre-roll)
|
||||
// so they start at t=0 of the recording rather than being lost
|
||||
if let Some(held) = self.midi_held_notes.get(&track_id) {
|
||||
for (¬e, &velocity) in held {
|
||||
eprintln!("[MIDI_RECORDING] Injecting held note {} vel {} at start_time {:.3}s", note, velocity, start_time);
|
||||
recording_state.note_on(note, velocity, start_time);
|
||||
}
|
||||
}
|
||||
|
||||
self.midi_recording_state = Some(recording_state);
|
||||
|
||||
eprintln!("[MIDI_RECORDING] Started MIDI recording on track {} for clip {}", track_id, clip_id);
|
||||
|
|
@ -3317,6 +3477,11 @@ impl EngineController {
|
|||
let _ = self.command_tx.push(Command::UpdateMidiClipNotes(track_id, clip_id, notes));
|
||||
}
|
||||
|
||||
/// Replace all events in a MIDI clip (used for CC/pitch bend editing from the piano roll)
|
||||
pub fn update_midi_clip_events(&mut self, track_id: TrackId, clip_id: MidiClipId, events: Vec<crate::audio::midi::MidiEvent>) {
|
||||
let _ = self.command_tx.push(Command::UpdateMidiClipEvents(track_id, clip_id, events));
|
||||
}
|
||||
|
||||
/// Remove a MIDI clip instance from a track (for undo/redo support)
|
||||
pub fn remove_midi_clip(&mut self, track_id: TrackId, instance_id: MidiClipInstanceId) {
|
||||
let _ = self.command_tx.push(Command::RemoveMidiClip(track_id, instance_id));
|
||||
|
|
@ -3396,6 +3561,25 @@ impl EngineController {
|
|||
));
|
||||
}
|
||||
|
||||
/// Add a keyframe to an AutomationInput node
|
||||
pub fn automation_add_keyframe(&mut self, track_id: TrackId, node_id: u32,
|
||||
time: f64, value: f32, interpolation: String,
|
||||
ease_out: (f32, f32), ease_in: (f32, f32)) {
|
||||
let _ = self.command_tx.push(Command::AutomationAddKeyframe(
|
||||
track_id, node_id, time, value, interpolation, ease_out, ease_in));
|
||||
}
|
||||
|
||||
/// Remove a keyframe from an AutomationInput node
|
||||
pub fn automation_remove_keyframe(&mut self, track_id: TrackId, node_id: u32, time: f64) {
|
||||
let _ = self.command_tx.push(Command::AutomationRemoveKeyframe(
|
||||
track_id, node_id, time));
|
||||
}
|
||||
|
||||
/// Set the display name of an AutomationInput node
|
||||
pub fn automation_set_name(&mut self, track_id: TrackId, node_id: u32, name: String) {
|
||||
let _ = self.command_tx.push(Command::AutomationSetName(track_id, node_id, name));
|
||||
}
|
||||
|
||||
/// Start recording on a track
|
||||
pub fn start_recording(&mut self, track_id: TrackId, start_time: f64) {
|
||||
let _ = self.command_tx.push(Command::StartRecording(track_id, start_time));
|
||||
|
|
@ -3542,6 +3726,16 @@ impl EngineController {
|
|||
let _ = self.command_tx.push(Command::GraphLoadPreset(track_id, preset_path));
|
||||
}
|
||||
|
||||
/// Load a `.lbins` instrument bundle into a track's graph
|
||||
pub fn graph_load_lbins(&mut self, track_id: TrackId, path: std::path::PathBuf) {
|
||||
let _ = self.command_tx.push(Command::GraphLoadLbins(track_id, path));
|
||||
}
|
||||
|
||||
/// Save a track's graph as a `.lbins` instrument bundle
|
||||
pub fn graph_save_lbins(&mut self, track_id: TrackId, path: std::path::PathBuf, preset_name: String, description: String, tags: Vec<String>) {
|
||||
let _ = self.command_tx.push(Command::GraphSaveLbins(track_id, path, preset_name, description, tags));
|
||||
}
|
||||
|
||||
/// Save a VoiceAllocator's template graph as a preset
|
||||
pub fn graph_save_template_preset(&mut self, track_id: TrackId, voice_allocator_id: u32, preset_path: String, preset_name: String) {
|
||||
let _ = self.command_tx.push(Command::GraphSaveTemplatePreset(track_id, voice_allocator_id, preset_path, preset_name));
|
||||
|
|
@ -3809,6 +4003,45 @@ impl EngineController {
|
|||
Err("Query timeout".to_string())
|
||||
}
|
||||
|
||||
/// Query automation node value range (min, max)
|
||||
pub fn query_automation_range(&mut self, track_id: TrackId, node_id: u32) -> Result<(f32, f32), String> {
|
||||
if let Err(_) = self.query_tx.push(Query::GetAutomationRange(track_id, node_id)) {
|
||||
return Err("Failed to send query - queue full".to_string());
|
||||
}
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
let timeout = std::time::Duration::from_millis(100);
|
||||
|
||||
while start.elapsed() < timeout {
|
||||
if let Ok(QueryResponse::AutomationRange(result)) = self.query_response_rx.pop() {
|
||||
return result;
|
||||
}
|
||||
std::thread::sleep(std::time::Duration::from_micros(50));
|
||||
}
|
||||
|
||||
Err("Query timeout".to_string())
|
||||
}
|
||||
|
||||
/// Query the pitch bend range (semitones) for the instrument on a MIDI track.
|
||||
/// Returns 2.0 (default) if the track or instrument cannot be found.
|
||||
pub fn query_pitch_bend_range(&mut self, track_id: TrackId) -> f32 {
|
||||
if let Err(_) = self.query_tx.push(Query::GetPitchBendRange(track_id)) {
|
||||
return 2.0;
|
||||
}
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
let timeout = std::time::Duration::from_millis(100);
|
||||
|
||||
while start.elapsed() < timeout {
|
||||
if let Ok(QueryResponse::PitchBendRange(range)) = self.query_response_rx.pop() {
|
||||
return range;
|
||||
}
|
||||
std::thread::sleep(std::time::Duration::from_micros(50));
|
||||
}
|
||||
|
||||
2.0 // default on timeout
|
||||
}
|
||||
|
||||
/// Serialize the audio pool for project saving
|
||||
pub fn serialize_audio_pool(&mut self, project_path: &std::path::Path) -> Result<Vec<crate::audio::pool::AudioPoolEntry>, String> {
|
||||
// Send query
|
||||
|
|
|
|||
|
|
@ -90,8 +90,9 @@ impl Metronome {
|
|||
self.last_beat = -1; // Reset beat tracking when disabled
|
||||
self.click_position = 0; // Stop any playing click
|
||||
} else {
|
||||
// When enabling, don't trigger a click until the next beat
|
||||
self.click_position = usize::MAX; // Set to max to prevent immediate click
|
||||
// Reset beat tracking so the next beat boundary (including beat 0) fires a click
|
||||
self.last_beat = -1;
|
||||
self.click_position = self.high_click.len(); // Idle (past end, nothing playing)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -106,7 +107,7 @@ impl Metronome {
|
|||
pub fn process(
|
||||
&mut self,
|
||||
output: &mut [f32],
|
||||
playhead_samples: u64,
|
||||
playhead_samples: i64,
|
||||
playing: bool,
|
||||
sample_rate: u32,
|
||||
channels: u32,
|
||||
|
|
@ -119,31 +120,23 @@ impl Metronome {
|
|||
let frames = output.len() / channels as usize;
|
||||
|
||||
for frame in 0..frames {
|
||||
let current_sample = playhead_samples + frame as u64;
|
||||
let current_sample = playhead_samples + frame as i64;
|
||||
|
||||
// Calculate current beat number
|
||||
let current_time_seconds = current_sample as f64 / sample_rate as f64;
|
||||
let beats_per_second = self.bpm as f64 / 60.0;
|
||||
let current_beat = (current_time_seconds * beats_per_second).floor() as i64;
|
||||
|
||||
// Check if we crossed a beat boundary
|
||||
if current_beat != self.last_beat && current_beat >= 0 {
|
||||
// Check if we crossed a beat boundary (including negative beats during count-in pre-roll)
|
||||
if current_beat != self.last_beat {
|
||||
self.last_beat = current_beat;
|
||||
|
||||
// Only trigger a click if we're not in the "just enabled" state
|
||||
if self.click_position != usize::MAX {
|
||||
// Determine which click to play
|
||||
// Beat 1 of each measure gets the accent (high click)
|
||||
let beat_in_measure = (current_beat as u32 % self.time_signature_numerator) as usize;
|
||||
let is_first_beat = beat_in_measure == 0;
|
||||
|
||||
// Start playing the appropriate click
|
||||
self.playing_high_click = is_first_beat;
|
||||
self.click_position = 0; // Start from beginning of click
|
||||
} else {
|
||||
// We just got enabled - reset position but don't play yet
|
||||
self.click_position = self.high_click.len(); // Set past end so no click plays
|
||||
}
|
||||
// Determine which click to play.
|
||||
// Beat 0 of each measure gets the accent (high click).
|
||||
// Use rem_euclid so negative beat numbers map correctly (e.g. -4 % 4 = 0).
|
||||
let beat_in_measure = current_beat.rem_euclid(self.time_signature_numerator as i64) as usize;
|
||||
self.playing_high_click = beat_in_measure == 0;
|
||||
self.click_position = 0; // Start from beginning of click
|
||||
}
|
||||
|
||||
// Continue playing click sample if we're currently in one
|
||||
|
|
|
|||
|
|
@ -1053,7 +1053,7 @@ impl AudioGraph {
|
|||
}
|
||||
|
||||
/// Deserialize a preset into the graph
|
||||
pub fn from_preset(preset: &crate::audio::node_graph::preset::GraphPreset, sample_rate: u32, buffer_size: usize, preset_base_path: Option<&std::path::Path>) -> Result<Self, String> {
|
||||
pub fn from_preset(preset: &crate::audio::node_graph::preset::GraphPreset, sample_rate: u32, buffer_size: usize, preset_base_path: Option<&std::path::Path>, embedded_assets: Option<&std::collections::HashMap<String, Vec<u8>>>) -> Result<Self, String> {
|
||||
use crate::audio::node_graph::nodes::*;
|
||||
use petgraph::stable_graph::NodeIndex;
|
||||
use std::collections::HashMap;
|
||||
|
|
@ -1124,7 +1124,7 @@ impl AudioGraph {
|
|||
if serialized_node.node_type == "VoiceAllocator" {
|
||||
if let Some(ref template_preset) = serialized_node.template_graph {
|
||||
if let Some(va) = node.as_any_mut().downcast_mut::<VoiceAllocatorNode>() {
|
||||
let template_graph = Self::from_preset(template_preset, sample_rate, buffer_size, preset_base_path)?;
|
||||
let template_graph = Self::from_preset(template_preset, sample_rate, buffer_size, preset_base_path, embedded_assets)?;
|
||||
*va.template_graph_mut() = template_graph;
|
||||
va.rebuild_voices();
|
||||
}
|
||||
|
|
@ -1182,10 +1182,28 @@ impl AudioGraph {
|
|||
sampler_node.set_sample(samples, embedded.sample_rate as f32);
|
||||
}
|
||||
} else if let Some(ref path) = file_path {
|
||||
// Fall back to loading from file (resolve path relative to preset)
|
||||
let resolved_path = resolve_sample_path(path);
|
||||
if let Err(e) = sampler_node.load_sample_from_file(&resolved_path) {
|
||||
eprintln!("Failed to load sample from {}: {}", resolved_path, e);
|
||||
// Check embedded assets map first (from .lbins bundle)
|
||||
let loaded = if let Some(assets) = embedded_assets {
|
||||
if let Some(bytes) = assets.get(path.as_str()) {
|
||||
match crate::audio::sample_loader::load_audio_from_bytes(bytes, path) {
|
||||
Ok(data) => {
|
||||
sampler_node.set_sample(data.samples, data.sample_rate as f32);
|
||||
true
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode bundled sample {}: {}", path, e);
|
||||
false
|
||||
}
|
||||
}
|
||||
} else { false }
|
||||
} else { false };
|
||||
|
||||
if !loaded {
|
||||
// Fall back to loading from filesystem
|
||||
let resolved_path = resolve_sample_path(path);
|
||||
if let Err(e) = sampler_node.load_sample_from_file(&resolved_path) {
|
||||
eprintln!("Failed to load sample from {}: {}", resolved_path, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1225,20 +1243,49 @@ impl AudioGraph {
|
|||
);
|
||||
}
|
||||
} else if let Some(ref path) = layer.file_path {
|
||||
// Fall back to loading from file (resolve path relative to preset)
|
||||
let resolved_path = resolve_sample_path(path);
|
||||
if let Err(e) = multi_sampler_node.load_layer_from_file(
|
||||
&resolved_path,
|
||||
layer.key_min,
|
||||
layer.key_max,
|
||||
layer.root_key,
|
||||
layer.velocity_min,
|
||||
layer.velocity_max,
|
||||
layer.loop_start,
|
||||
layer.loop_end,
|
||||
layer.loop_mode,
|
||||
) {
|
||||
eprintln!("Failed to load sample layer from {}: {}", resolved_path, e);
|
||||
// Check embedded assets map first (from .lbins bundle)
|
||||
let loaded = if let Some(assets) = embedded_assets {
|
||||
if let Some(bytes) = assets.get(path.as_str()) {
|
||||
match crate::audio::sample_loader::load_audio_from_bytes(bytes, path) {
|
||||
Ok(data) => {
|
||||
multi_sampler_node.add_layer(
|
||||
data.samples,
|
||||
data.sample_rate as f32,
|
||||
layer.key_min,
|
||||
layer.key_max,
|
||||
layer.root_key,
|
||||
layer.velocity_min,
|
||||
layer.velocity_max,
|
||||
layer.loop_start,
|
||||
layer.loop_end,
|
||||
layer.loop_mode,
|
||||
);
|
||||
true
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode bundled sample layer {}: {}", path, e);
|
||||
false
|
||||
}
|
||||
}
|
||||
} else { false }
|
||||
} else { false };
|
||||
|
||||
if !loaded {
|
||||
// Fall back to loading from filesystem
|
||||
let resolved_path = resolve_sample_path(path);
|
||||
if let Err(e) = multi_sampler_node.load_layer_from_file(
|
||||
&resolved_path,
|
||||
layer.key_min,
|
||||
layer.key_max,
|
||||
layer.root_key,
|
||||
layer.velocity_min,
|
||||
layer.velocity_max,
|
||||
layer.loop_start,
|
||||
layer.loop_end,
|
||||
layer.loop_mode,
|
||||
) {
|
||||
eprintln!("Failed to load sample layer from {}: {}", resolved_path, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1258,6 +1305,9 @@ impl AudioGraph {
|
|||
let result = if let Some(bundled_name) = model_path.strip_prefix("bundled:") {
|
||||
eprintln!("[AmpSim] Preset: loading bundled model {:?}", bundled_name);
|
||||
amp_sim.load_bundled_model(bundled_name)
|
||||
} else if let Some(bytes) = embedded_assets.and_then(|a| a.get(model_path.as_str())) {
|
||||
eprintln!("[AmpSim] Preset: loading from bundle {:?}", model_path);
|
||||
amp_sim.load_model_from_bytes(model_path, bytes)
|
||||
} else {
|
||||
let resolved_path = resolve_sample_path(model_path);
|
||||
eprintln!("[AmpSim] Preset: loading from file {:?}", resolved_path);
|
||||
|
|
|
|||
|
|
@ -0,0 +1,192 @@
|
|||
/// Load and save `.lbins` instrument bundle files.
|
||||
///
|
||||
/// A `.lbins` file is a ZIP archive with the following layout:
|
||||
///
|
||||
/// ```
|
||||
/// instrument.lbins (ZIP)
|
||||
/// ├── instrument.json ← GraphPreset JSON (existing schema)
|
||||
/// ├── samples/
|
||||
/// │ ├── kick.wav
|
||||
/// │ └── snare.flac
|
||||
/// └── models/
|
||||
/// └── amp.nam
|
||||
/// ```
|
||||
///
|
||||
/// All asset paths in `instrument.json` are ZIP-relative
|
||||
/// (e.g. `"samples/kick.wav"`, `"models/amp.nam"`).
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::audio::node_graph::preset::{GraphPreset, SampleData};
|
||||
|
||||
/// Load a `.lbins` file.
|
||||
///
|
||||
/// Returns the deserialized `GraphPreset` together with a map of all
|
||||
/// non-JSON entries keyed by their ZIP-relative path (e.g. `"samples/kick.wav"`).
|
||||
pub fn load_lbins(path: &Path) -> Result<(GraphPreset, HashMap<String, Vec<u8>>), String> {
|
||||
let file = std::fs::File::open(path)
|
||||
.map_err(|e| format!("Failed to open .lbins file: {}", e))?;
|
||||
|
||||
let mut archive = zip::ZipArchive::new(file)
|
||||
.map_err(|e| format!("Failed to read ZIP archive: {}", e))?;
|
||||
|
||||
// Read instrument.json first
|
||||
let preset_json = {
|
||||
let mut entry = archive
|
||||
.by_name("instrument.json")
|
||||
.map_err(|_| "Missing instrument.json in .lbins archive".to_string())?;
|
||||
let mut buf = String::new();
|
||||
entry
|
||||
.read_to_string(&mut buf)
|
||||
.map_err(|e| format!("Failed to read instrument.json: {}", e))?;
|
||||
buf
|
||||
};
|
||||
|
||||
let preset = GraphPreset::from_json(&preset_json)
|
||||
.map_err(|e| format!("Failed to parse instrument.json: {}", e))?;
|
||||
|
||||
// Read all other entries into memory
|
||||
let mut assets: HashMap<String, Vec<u8>> = HashMap::new();
|
||||
for i in 0..archive.len() {
|
||||
let mut entry = archive
|
||||
.by_index(i)
|
||||
.map_err(|e| format!("Failed to read ZIP entry {}: {}", i, e))?;
|
||||
|
||||
let entry_name = entry.name().to_string();
|
||||
if entry_name == "instrument.json" || entry.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut bytes = Vec::new();
|
||||
entry
|
||||
.read_to_end(&mut bytes)
|
||||
.map_err(|e| format!("Failed to read {}: {}", entry_name, e))?;
|
||||
|
||||
assets.insert(entry_name, bytes);
|
||||
}
|
||||
|
||||
Ok((preset, assets))
|
||||
}
|
||||
|
||||
/// Save a preset to a `.lbins` file.
|
||||
///
|
||||
/// Asset paths in `preset` are rewritten to ZIP-relative form
|
||||
/// (`samples/<basename>` or `models/<basename>`).
|
||||
/// If the path is already ZIP-relative (starts with `samples/` or `models/`)
|
||||
/// it is used as-is. Absolute / relative filesystem paths are resolved
|
||||
/// relative to `asset_base` (typically the directory that contained the
|
||||
/// original `.json` preset) and then read from disk.
|
||||
pub fn save_lbins(path: &Path, preset: &GraphPreset, asset_base: Option<&Path>) -> Result<(), String> {
|
||||
let file = std::fs::File::create(path)
|
||||
.map_err(|e| format!("Failed to create .lbins file: {}", e))?;
|
||||
|
||||
let mut zip = zip::ZipWriter::new(file);
|
||||
let options = zip::write::FileOptions::default()
|
||||
.compression_method(zip::CompressionMethod::Deflated);
|
||||
|
||||
// We'll build a rewritten copy of the preset while collecting assets
|
||||
let mut rewritten = preset.clone();
|
||||
// Map: original path → (zip_path, file_bytes)
|
||||
let mut asset_map: HashMap<String, (String, Vec<u8>)> = HashMap::new();
|
||||
|
||||
// Helper: given an original asset path string and a subdirectory ("samples" or "models"),
|
||||
// resolve the bytes and return the canonical ZIP-relative path.
|
||||
let mut resolve_asset = |orig_path: &str, subdir: &str| -> Result<String, String> {
|
||||
// Already a ZIP-relative path — no re-reading needed, caller stored bytes already
|
||||
// or the asset will be provided by a prior pass. Just normalise the subdirectory.
|
||||
if orig_path.starts_with(&format!("{}/", subdir)) {
|
||||
return Ok(orig_path.to_string());
|
||||
}
|
||||
|
||||
let basename = Path::new(orig_path)
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.ok_or_else(|| format!("Cannot determine filename for asset: {}", orig_path))?;
|
||||
|
||||
let zip_path = format!("{}/{}", subdir, basename);
|
||||
|
||||
if !asset_map.contains_key(orig_path) {
|
||||
// Resolve to an absolute filesystem path
|
||||
let fs_path = if Path::new(orig_path).is_absolute() {
|
||||
std::path::PathBuf::from(orig_path)
|
||||
} else if let Some(base) = asset_base {
|
||||
base.join(orig_path)
|
||||
} else {
|
||||
std::path::PathBuf::from(orig_path)
|
||||
};
|
||||
|
||||
let bytes = std::fs::read(&fs_path)
|
||||
.map_err(|e| format!("Failed to read asset {}: {}", fs_path.display(), e))?;
|
||||
|
||||
asset_map.insert(orig_path.to_string(), (zip_path.clone(), bytes));
|
||||
}
|
||||
|
||||
Ok(zip_path)
|
||||
};
|
||||
|
||||
// Rewrite paths in all nodes
|
||||
for node in &mut rewritten.nodes {
|
||||
// Sample data paths
|
||||
if let Some(ref mut sample_data) = node.sample_data {
|
||||
match sample_data {
|
||||
SampleData::SimpleSampler { ref mut file_path, .. } => {
|
||||
if let Some(ref orig) = file_path.clone() {
|
||||
if !orig.is_empty() {
|
||||
match resolve_asset(orig, "samples") {
|
||||
Ok(zip_path) => *file_path = Some(zip_path),
|
||||
Err(e) => eprintln!("Warning: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
SampleData::MultiSampler { ref mut layers } => {
|
||||
for layer in layers.iter_mut() {
|
||||
if let Some(ref orig) = layer.file_path.clone() {
|
||||
if !orig.is_empty() {
|
||||
match resolve_asset(orig, "samples") {
|
||||
Ok(zip_path) => layer.file_path = Some(zip_path),
|
||||
Err(e) => eprintln!("Warning: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NAM model path
|
||||
if let Some(ref orig) = node.nam_model_path.clone() {
|
||||
if !orig.starts_with("bundled:") && !orig.is_empty() {
|
||||
match resolve_asset(orig, "models") {
|
||||
Ok(zip_path) => node.nam_model_path = Some(zip_path),
|
||||
Err(e) => eprintln!("Warning: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write all collected assets to the ZIP
|
||||
for (_, (zip_path, bytes)) in &asset_map {
|
||||
zip.start_file(zip_path, options)
|
||||
.map_err(|e| format!("Failed to start ZIP entry {}: {}", zip_path, e))?;
|
||||
zip.write_all(bytes)
|
||||
.map_err(|e| format!("Failed to write {}: {}", zip_path, e))?;
|
||||
}
|
||||
|
||||
// Write instrument.json last (after assets so paths are already rewritten)
|
||||
let json = rewritten
|
||||
.to_json()
|
||||
.map_err(|e| format!("Failed to serialize preset: {}", e))?;
|
||||
|
||||
zip.start_file("instrument.json", options)
|
||||
.map_err(|e| format!("Failed to start instrument.json entry: {}", e))?;
|
||||
zip.write_all(json.as_bytes())
|
||||
.map_err(|e| format!("Failed to write instrument.json: {}", e))?;
|
||||
|
||||
zip.finish()
|
||||
.map_err(|e| format!("Failed to finalize ZIP: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
mod graph;
|
||||
mod node_trait;
|
||||
mod types;
|
||||
pub mod lbins;
|
||||
pub mod nodes;
|
||||
pub mod preset;
|
||||
|
||||
|
|
|
|||
|
|
@ -75,6 +75,21 @@ impl AmpSimNode {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Load a .nam model from in-memory bytes (used when loading from a .lbins bundle).
|
||||
/// `zip_path` is the ZIP-relative path stored back in `model_path` for serialization.
|
||||
pub fn load_model_from_bytes(&mut self, zip_path: &str, bytes: &[u8]) -> Result<(), String> {
|
||||
let basename = std::path::Path::new(zip_path)
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or(zip_path);
|
||||
let mut model = nam_ffi::NamModel::from_bytes(basename, bytes)
|
||||
.map_err(|e| format!("{}", e))?;
|
||||
model.set_max_buffer_size(1024);
|
||||
self.model = Some(model);
|
||||
self.model_path = Some(zip_path.to_string());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the loaded model path (for preset serialization).
|
||||
pub fn model_path(&self) -> Option<&str> {
|
||||
self.model_path.as_deref()
|
||||
|
|
|
|||
|
|
@ -49,6 +49,10 @@ pub struct AutomationInputNode {
|
|||
parameters: Vec<Parameter>,
|
||||
/// Shared playback time (set by the graph before processing)
|
||||
playback_time: Arc<RwLock<f64>>,
|
||||
/// Minimum output value (for UI display range)
|
||||
pub value_min: f32,
|
||||
/// Maximum output value (for UI display range)
|
||||
pub value_max: f32,
|
||||
}
|
||||
|
||||
impl AutomationInputNode {
|
||||
|
|
@ -62,10 +66,12 @@ impl AutomationInputNode {
|
|||
Self {
|
||||
name: name.clone(),
|
||||
display_name: "Automation".to_string(),
|
||||
keyframes: Vec::new(),
|
||||
keyframes: vec![AutomationKeyframe::new(0.0, 0.0)],
|
||||
outputs,
|
||||
parameters: Vec::new(),
|
||||
playback_time: Arc::new(RwLock::new(0.0)),
|
||||
value_min: -1.0,
|
||||
value_max: 1.0,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -275,6 +281,8 @@ impl AudioNode for AutomationInputNode {
|
|||
outputs: self.outputs.clone(),
|
||||
parameters: self.parameters.clone(),
|
||||
playback_time: Arc::new(RwLock::new(0.0)),
|
||||
value_min: self.value_min,
|
||||
value_max: self.value_max,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,14 +1,19 @@
|
|||
use crate::audio::midi::MidiEvent;
|
||||
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
|
||||
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
|
||||
|
||||
const PARAM_PITCH_BEND_RANGE: u32 = 0;
|
||||
|
||||
/// MIDI to CV converter
|
||||
/// Converts MIDI note events to control voltage signals
|
||||
pub struct MidiToCVNode {
|
||||
name: String,
|
||||
note: u8, // Current MIDI note number
|
||||
gate: f32, // Gate CV (1.0 when note on, 0.0 when off)
|
||||
velocity: f32, // Velocity CV (0.0-1.0)
|
||||
pitch_cv: f32, // Pitch CV (V/Oct: 0V = A4, ±1V per octave)
|
||||
note: u8, // Current MIDI note number
|
||||
gate: f32, // Gate CV (1.0 when note on, 0.0 when off)
|
||||
velocity: f32, // Velocity CV (0.0-1.0)
|
||||
pitch_cv: f32, // Pitch CV (V/Oct: 0V = A4, ±1V per octave), without bend
|
||||
pitch_bend_range: f32, // Pitch bend range in semitones (default 2.0)
|
||||
current_bend: f32, // Current pitch bend, normalised -1.0..=1.0 (0 = centre)
|
||||
current_mod: f32, // Current modulation (CC1), 0.0..=1.0
|
||||
inputs: Vec<NodePort>,
|
||||
outputs: Vec<NodePort>,
|
||||
parameters: Vec<Parameter>,
|
||||
|
|
@ -18,26 +23,41 @@ impl MidiToCVNode {
|
|||
pub fn new(name: impl Into<String>) -> Self {
|
||||
let name = name.into();
|
||||
|
||||
// MIDI input port for receiving MIDI through graph connections
|
||||
let inputs = vec![
|
||||
NodePort::new("MIDI In", SignalType::Midi, 0),
|
||||
NodePort::new("Bend CV", SignalType::CV, 0), // External pitch bend in semitones
|
||||
NodePort::new("Mod CV", SignalType::CV, 1), // External modulation 0.0..=1.0
|
||||
];
|
||||
|
||||
let outputs = vec![
|
||||
NodePort::new("V/Oct", SignalType::CV, 0), // V/Oct: 0V = A4, ±1V per octave
|
||||
NodePort::new("V/Oct", SignalType::CV, 0), // V/Oct: 0V = A4, ±1V per octave (with bend applied)
|
||||
NodePort::new("Gate", SignalType::CV, 1), // 1.0 = on, 0.0 = off
|
||||
NodePort::new("Velocity", SignalType::CV, 2), // 0.0-1.0
|
||||
NodePort::new("Bend", SignalType::CV, 3), // Total pitch bend in semitones (MIDI + CV)
|
||||
NodePort::new("Mod", SignalType::CV, 4), // Total modulation 0.0..=1.0 (MIDI CC1 + CV)
|
||||
];
|
||||
|
||||
let parameters = vec![
|
||||
Parameter::new(
|
||||
PARAM_PITCH_BEND_RANGE,
|
||||
"Pitch Bend Range",
|
||||
0.0, 48.0, 2.0,
|
||||
ParameterUnit::Generic,
|
||||
),
|
||||
];
|
||||
|
||||
Self {
|
||||
name,
|
||||
note: 60, // Middle C
|
||||
note: 60,
|
||||
gate: 0.0,
|
||||
velocity: 0.0,
|
||||
pitch_cv: Self::midi_note_to_voct(60),
|
||||
pitch_bend_range: 2.0,
|
||||
current_bend: 0.0,
|
||||
current_mod: 0.0,
|
||||
inputs,
|
||||
outputs,
|
||||
parameters: vec![], // No user parameters
|
||||
parameters,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -48,6 +68,37 @@ impl MidiToCVNode {
|
|||
// Standard V/Oct: 0V at A4, 1V per octave (12 semitones)
|
||||
(note as f32 - 69.0) / 12.0
|
||||
}
|
||||
|
||||
fn apply_midi_event(&mut self, event: &MidiEvent) {
|
||||
let status = event.status & 0xF0;
|
||||
match status {
|
||||
0x90 if event.data2 > 0 => {
|
||||
// Note on — reset per-note expression so previous note's bend doesn't bleed in
|
||||
self.note = event.data1;
|
||||
self.pitch_cv = Self::midi_note_to_voct(self.note);
|
||||
self.velocity = event.data2 as f32 / 127.0;
|
||||
self.gate = 1.0;
|
||||
self.current_bend = 0.0;
|
||||
self.current_mod = 0.0;
|
||||
}
|
||||
0x80 | 0x90 => {
|
||||
// Note off (or note on with velocity 0)
|
||||
if event.data1 == self.note {
|
||||
self.gate = 0.0;
|
||||
}
|
||||
}
|
||||
0xE0 => {
|
||||
// Pitch bend: 14-bit value, center = 8192
|
||||
let bend_raw = ((event.data2 as i16) << 7) | (event.data1 as i16);
|
||||
self.current_bend = (bend_raw - 8192) as f32 / 8192.0;
|
||||
}
|
||||
0xB0 if event.data1 == 1 => {
|
||||
// CC1 (modulation wheel)
|
||||
self.current_mod = event.data2 as f32 / 127.0;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNode for MidiToCVNode {
|
||||
|
|
@ -67,46 +118,27 @@ impl AudioNode for MidiToCVNode {
|
|||
&self.parameters
|
||||
}
|
||||
|
||||
fn set_parameter(&mut self, _id: u32, _value: f32) {
|
||||
// No parameters
|
||||
fn set_parameter(&mut self, id: u32, value: f32) {
|
||||
if id == PARAM_PITCH_BEND_RANGE {
|
||||
self.pitch_bend_range = value.clamp(0.0, 48.0);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_parameter(&self, _id: u32) -> f32 {
|
||||
0.0
|
||||
fn get_parameter(&self, id: u32) -> f32 {
|
||||
if id == PARAM_PITCH_BEND_RANGE {
|
||||
self.pitch_bend_range
|
||||
} else {
|
||||
0.0
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_midi(&mut self, event: &MidiEvent) {
|
||||
let status = event.status & 0xF0;
|
||||
|
||||
match status {
|
||||
0x90 => {
|
||||
// Note on
|
||||
if event.data2 > 0 {
|
||||
// Velocity > 0 means note on
|
||||
self.note = event.data1;
|
||||
self.pitch_cv = Self::midi_note_to_voct(self.note);
|
||||
self.velocity = event.data2 as f32 / 127.0;
|
||||
self.gate = 1.0;
|
||||
} else {
|
||||
// Velocity = 0 means note off
|
||||
if event.data1 == self.note {
|
||||
self.gate = 0.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
0x80 => {
|
||||
// Note off
|
||||
if event.data1 == self.note {
|
||||
self.gate = 0.0;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
self.apply_midi_event(event);
|
||||
}
|
||||
|
||||
fn process(
|
||||
&mut self,
|
||||
_inputs: &[&[f32]],
|
||||
inputs: &[&[f32]],
|
||||
outputs: &mut [&mut [f32]],
|
||||
midi_inputs: &[&[MidiEvent]],
|
||||
_midi_outputs: &mut [&mut Vec<MidiEvent>],
|
||||
|
|
@ -115,52 +147,56 @@ impl AudioNode for MidiToCVNode {
|
|||
// Process MIDI events from input buffer
|
||||
if !midi_inputs.is_empty() {
|
||||
for event in midi_inputs[0] {
|
||||
let status = event.status & 0xF0;
|
||||
match status {
|
||||
0x90 if event.data2 > 0 => {
|
||||
// Note on
|
||||
self.note = event.data1;
|
||||
self.pitch_cv = Self::midi_note_to_voct(self.note);
|
||||
self.velocity = event.data2 as f32 / 127.0;
|
||||
self.gate = 1.0;
|
||||
}
|
||||
0x80 | 0x90 => {
|
||||
// Note off (or note on with velocity 0)
|
||||
if event.data1 == self.note {
|
||||
self.gate = 0.0;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
self.apply_midi_event(event);
|
||||
}
|
||||
}
|
||||
|
||||
if outputs.len() < 3 {
|
||||
if outputs.len() < 5 {
|
||||
return;
|
||||
}
|
||||
|
||||
// CV signals are mono
|
||||
// Use split_at_mut to get multiple mutable references
|
||||
let (pitch_and_rest, rest) = outputs.split_at_mut(1);
|
||||
let (gate_and_rest, velocity_slice) = rest.split_at_mut(1);
|
||||
// Read CV inputs (use first sample of buffer). NaN = unconnected port → treat as 0.
|
||||
let bend_cv = inputs.get(0).and_then(|b| b.first().copied())
|
||||
.filter(|v| v.is_finite()).unwrap_or(0.0);
|
||||
let mod_cv = inputs.get(1).and_then(|b| b.first().copied())
|
||||
.filter(|v| v.is_finite()).unwrap_or(0.0);
|
||||
|
||||
let pitch_out = &mut pitch_and_rest[0];
|
||||
let gate_out = &mut gate_and_rest[0];
|
||||
let velocity_out = &mut velocity_slice[0];
|
||||
// Total bend in semitones: MIDI bend + CV bend
|
||||
let bend_semitones = self.current_bend * self.pitch_bend_range + bend_cv;
|
||||
// Total mod: MIDI CC1 + CV mod, clamped to 0..1
|
||||
let total_mod = (self.current_mod + mod_cv).clamp(0.0, 1.0);
|
||||
// Pitch output includes bend
|
||||
let pitch_out_val = self.pitch_cv + bend_semitones / 12.0;
|
||||
|
||||
// Use split_at_mut to get multiple mutable references
|
||||
let (v0, rest) = outputs.split_at_mut(1);
|
||||
let (v1, rest) = rest.split_at_mut(1);
|
||||
let (v2, rest) = rest.split_at_mut(1);
|
||||
let (v3, v4_slice) = rest.split_at_mut(1);
|
||||
|
||||
let pitch_out = &mut v0[0];
|
||||
let gate_out = &mut v1[0];
|
||||
let velocity_out = &mut v2[0];
|
||||
let bend_out = &mut v3[0];
|
||||
let mod_out = &mut v4_slice[0];
|
||||
|
||||
let frames = pitch_out.len();
|
||||
|
||||
// Output constant CV values for the entire buffer
|
||||
for frame in 0..frames {
|
||||
pitch_out[frame] = self.pitch_cv;
|
||||
gate_out[frame] = self.gate;
|
||||
pitch_out[frame] = pitch_out_val;
|
||||
gate_out[frame] = self.gate;
|
||||
velocity_out[frame] = self.velocity;
|
||||
bend_out[frame] = bend_semitones;
|
||||
mod_out[frame] = total_mod;
|
||||
}
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.gate = 0.0;
|
||||
self.velocity = 0.0;
|
||||
self.current_bend = 0.0;
|
||||
self.current_mod = 0.0;
|
||||
}
|
||||
|
||||
fn node_type(&self) -> &str {
|
||||
|
|
@ -174,10 +210,13 @@ impl AudioNode for MidiToCVNode {
|
|||
fn clone_node(&self) -> Box<dyn AudioNode> {
|
||||
Box::new(Self {
|
||||
name: self.name.clone(),
|
||||
note: 60, // Reset to middle C
|
||||
gate: 0.0, // Reset gate
|
||||
velocity: 0.0, // Reset velocity
|
||||
pitch_cv: Self::midi_note_to_voct(60), // Reset pitch
|
||||
note: 60,
|
||||
gate: 0.0,
|
||||
velocity: 0.0,
|
||||
pitch_cv: Self::midi_note_to_voct(60),
|
||||
pitch_bend_range: self.pitch_bend_range,
|
||||
current_bend: 0.0,
|
||||
current_mod: 0.0,
|
||||
inputs: self.inputs.clone(),
|
||||
outputs: self.outputs.clone(),
|
||||
parameters: self.parameters.clone(),
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ const PARAM_GAIN: u32 = 0;
|
|||
const PARAM_ATTACK: u32 = 1;
|
||||
const PARAM_RELEASE: u32 = 2;
|
||||
const PARAM_TRANSPOSE: u32 = 3;
|
||||
const PARAM_PITCH_BEND_RANGE: u32 = 4;
|
||||
|
||||
/// Loop playback mode
|
||||
#[derive(Clone, Copy, Debug, PartialEq, serde::Serialize, serde::Deserialize)]
|
||||
|
|
@ -201,6 +202,7 @@ struct Voice {
|
|||
layer_index: usize,
|
||||
playhead: f32,
|
||||
note: u8,
|
||||
channel: u8, // MIDI channel this voice was activated on
|
||||
velocity: u8,
|
||||
is_active: bool,
|
||||
|
||||
|
|
@ -221,11 +223,12 @@ enum EnvelopePhase {
|
|||
}
|
||||
|
||||
impl Voice {
|
||||
fn new(layer_index: usize, note: u8, velocity: u8) -> Self {
|
||||
fn new(layer_index: usize, note: u8, channel: u8, velocity: u8) -> Self {
|
||||
Self {
|
||||
layer_index,
|
||||
playhead: 0.0,
|
||||
note,
|
||||
channel,
|
||||
velocity,
|
||||
is_active: true,
|
||||
envelope_phase: EnvelopePhase::Attack,
|
||||
|
|
@ -250,9 +253,14 @@ pub struct MultiSamplerNode {
|
|||
|
||||
// Parameters
|
||||
gain: f32,
|
||||
attack_time: f32, // seconds
|
||||
release_time: f32, // seconds
|
||||
transpose: i8, // semitones
|
||||
attack_time: f32, // seconds
|
||||
release_time: f32, // seconds
|
||||
transpose: i8, // semitones
|
||||
pitch_bend_range: f32, // semitones (default 2.0)
|
||||
|
||||
// Live MIDI state
|
||||
bend_per_channel: [f32; 16], // Pitch bend per MIDI channel; ch0 = global broadcast
|
||||
current_mod: f32, // MIDI CC1 modulation 0.0..=1.0
|
||||
|
||||
inputs: Vec<NodePort>,
|
||||
outputs: Vec<NodePort>,
|
||||
|
|
@ -265,6 +273,8 @@ impl MultiSamplerNode {
|
|||
|
||||
let inputs = vec![
|
||||
NodePort::new("MIDI In", SignalType::Midi, 0),
|
||||
NodePort::new("Bend CV", SignalType::CV, 0), // External pitch bend in semitones
|
||||
NodePort::new("Mod CV", SignalType::CV, 1), // External modulation 0.0..=1.0
|
||||
];
|
||||
|
||||
let outputs = vec![
|
||||
|
|
@ -276,6 +286,7 @@ impl MultiSamplerNode {
|
|||
Parameter::new(PARAM_ATTACK, "Attack", 0.001, 1.0, 0.01, ParameterUnit::Time),
|
||||
Parameter::new(PARAM_RELEASE, "Release", 0.01, 5.0, 0.1, ParameterUnit::Time),
|
||||
Parameter::new(PARAM_TRANSPOSE, "Transpose", -24.0, 24.0, 0.0, ParameterUnit::Generic),
|
||||
Parameter::new(PARAM_PITCH_BEND_RANGE, "Pitch Bend Range", 0.0, 48.0, 2.0, ParameterUnit::Generic),
|
||||
];
|
||||
|
||||
Self {
|
||||
|
|
@ -288,6 +299,9 @@ impl MultiSamplerNode {
|
|||
attack_time: 0.01,
|
||||
release_time: 0.1,
|
||||
transpose: 0,
|
||||
pitch_bend_range: 2.0,
|
||||
bend_per_channel: [0.0; 16],
|
||||
current_mod: 0.0,
|
||||
inputs,
|
||||
outputs,
|
||||
parameters,
|
||||
|
|
@ -478,7 +492,9 @@ impl MultiSamplerNode {
|
|||
}
|
||||
|
||||
/// Trigger a note
|
||||
fn note_on(&mut self, note: u8, velocity: u8) {
|
||||
fn note_on(&mut self, note: u8, channel: u8, velocity: u8) {
|
||||
// Reset per-channel bend on note-on so a previous note's bend doesn't bleed in
|
||||
self.bend_per_channel[channel as usize] = 0.0;
|
||||
let transposed_note = (note as i16 + self.transpose as i16).clamp(0, 127) as u8;
|
||||
|
||||
if let Some(layer_index) = self.find_layer(transposed_note, velocity) {
|
||||
|
|
@ -496,7 +512,7 @@ impl MultiSamplerNode {
|
|||
}
|
||||
});
|
||||
|
||||
let voice = Voice::new(layer_index, note, velocity);
|
||||
let voice = Voice::new(layer_index, note, channel, velocity);
|
||||
|
||||
if voice_index < self.voices.len() {
|
||||
self.voices[voice_index] = voice;
|
||||
|
|
@ -547,6 +563,9 @@ impl AudioNode for MultiSamplerNode {
|
|||
PARAM_TRANSPOSE => {
|
||||
self.transpose = value.clamp(-24.0, 24.0) as i8;
|
||||
}
|
||||
PARAM_PITCH_BEND_RANGE => {
|
||||
self.pitch_bend_range = value.clamp(0.0, 48.0);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
|
@ -557,13 +576,14 @@ impl AudioNode for MultiSamplerNode {
|
|||
PARAM_ATTACK => self.attack_time,
|
||||
PARAM_RELEASE => self.release_time,
|
||||
PARAM_TRANSPOSE => self.transpose as f32,
|
||||
PARAM_PITCH_BEND_RANGE => self.pitch_bend_range,
|
||||
_ => 0.0,
|
||||
}
|
||||
}
|
||||
|
||||
fn process(
|
||||
&mut self,
|
||||
_inputs: &[&[f32]],
|
||||
inputs: &[&[f32]],
|
||||
outputs: &mut [&mut [f32]],
|
||||
midi_inputs: &[&[MidiEvent]],
|
||||
_midi_outputs: &mut [&mut Vec<MidiEvent>],
|
||||
|
|
@ -582,14 +602,32 @@ impl AudioNode for MultiSamplerNode {
|
|||
// Process MIDI events
|
||||
if !midi_inputs.is_empty() {
|
||||
for event in midi_inputs[0].iter() {
|
||||
if event.is_note_on() {
|
||||
self.note_on(event.data1, event.data2);
|
||||
} else if event.is_note_off() {
|
||||
self.note_off(event.data1);
|
||||
let status = event.status & 0xF0;
|
||||
match status {
|
||||
_ if event.is_note_on() => self.note_on(event.data1, event.status & 0x0F, event.data2),
|
||||
_ if event.is_note_off() => self.note_off(event.data1),
|
||||
0xE0 => {
|
||||
// Pitch bend: 14-bit value, center = 8192; stored per-channel
|
||||
let bend_raw = ((event.data2 as i16) << 7) | (event.data1 as i16);
|
||||
let ch = (event.status & 0x0F) as usize;
|
||||
self.bend_per_channel[ch] = (bend_raw - 8192) as f32 / 8192.0;
|
||||
}
|
||||
0xB0 if event.data1 == 1 => {
|
||||
// CC1 (modulation wheel)
|
||||
self.current_mod = event.data2 as f32 / 127.0;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read CV inputs. NaN = unconnected port → treat as 0.
|
||||
let bend_cv = inputs.get(0).and_then(|b| b.first().copied())
|
||||
.filter(|v| v.is_finite()).unwrap_or(0.0);
|
||||
// Global bend (channel 0) applies to all voices; per-channel bend is added per-voice below.
|
||||
let global_bend_norm = self.bend_per_channel[0];
|
||||
let bend_per_channel = self.bend_per_channel;
|
||||
|
||||
// Extract parameters needed for processing
|
||||
let gain = self.gain;
|
||||
let attack_time = self.attack_time;
|
||||
|
|
@ -607,9 +645,12 @@ impl AudioNode for MultiSamplerNode {
|
|||
|
||||
let layer = &self.layers[voice.layer_index];
|
||||
|
||||
// Calculate playback speed
|
||||
// Calculate playback speed (includes pitch bend)
|
||||
// Channel-0 = global; voice's own channel bend is added on top.
|
||||
let voice_bend_norm = global_bend_norm + bend_per_channel[voice.channel as usize];
|
||||
let total_bend_semitones = voice_bend_norm * self.pitch_bend_range + bend_cv;
|
||||
let semitone_diff = voice.note as i16 - layer.root_key as i16;
|
||||
let speed = 2.0_f32.powf(semitone_diff as f32 / 12.0);
|
||||
let speed = 2.0_f32.powf((semitone_diff as f32 + total_bend_semitones) / 12.0);
|
||||
let speed_adjusted = speed * (layer.sample_rate / sample_rate as f32);
|
||||
|
||||
for frame in 0..frames {
|
||||
|
|
@ -765,6 +806,8 @@ impl AudioNode for MultiSamplerNode {
|
|||
|
||||
fn reset(&mut self) {
|
||||
self.voices.clear();
|
||||
self.bend_per_channel = [0.0; 16];
|
||||
self.current_mod = 0.0;
|
||||
}
|
||||
|
||||
fn node_type(&self) -> &str {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ struct VoiceState {
|
|||
active: bool,
|
||||
releasing: bool, // Note-off received, still processing (e.g. ADSR release)
|
||||
note: u8,
|
||||
note_channel: u8, // MIDI channel this voice was allocated on (0 = global/unset)
|
||||
age: u32, // For voice stealing
|
||||
pending_events: Vec<MidiEvent>, // MIDI events to send to this voice
|
||||
}
|
||||
|
|
@ -21,6 +22,7 @@ impl VoiceState {
|
|||
active: false,
|
||||
releasing: false,
|
||||
note: 0,
|
||||
note_channel: 0,
|
||||
age: 0,
|
||||
pending_events: Vec::new(),
|
||||
}
|
||||
|
|
@ -273,6 +275,7 @@ impl AudioNode for VoiceAllocatorNode {
|
|||
self.voices[voice_idx].active = true;
|
||||
self.voices[voice_idx].releasing = false;
|
||||
self.voices[voice_idx].note = event.data1;
|
||||
self.voices[voice_idx].note_channel = event.status & 0x0F;
|
||||
self.voices[voice_idx].age = 0;
|
||||
|
||||
// Store MIDI event for this voice to process
|
||||
|
|
@ -295,10 +298,12 @@ impl AudioNode for VoiceAllocatorNode {
|
|||
}
|
||||
}
|
||||
_ => {
|
||||
// Other MIDI events (CC, pitch bend, etc.) - send to all active voices
|
||||
// Route to matching-channel voices; channel 0 = global broadcast
|
||||
let event_channel = event.status & 0x0F;
|
||||
for voice_idx in 0..self.voice_count {
|
||||
if self.voices[voice_idx].active {
|
||||
self.voices[voice_idx].pending_events.push(*event);
|
||||
let voice = &mut self.voices[voice_idx];
|
||||
if voice.active && (event_channel == 0 || voice.note_channel == event_channel) {
|
||||
voice.pending_events.push(*event);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -213,7 +213,6 @@ impl MidiRecordingState {
|
|||
|
||||
/// Handle a MIDI note on event
|
||||
pub fn note_on(&mut self, note: u8, velocity: u8, absolute_time: f64) {
|
||||
// Store this note as active
|
||||
self.active_notes.insert(note, ActiveMidiNote {
|
||||
note,
|
||||
velocity,
|
||||
|
|
@ -225,14 +224,21 @@ impl MidiRecordingState {
|
|||
pub fn note_off(&mut self, note: u8, absolute_time: f64) {
|
||||
// Find the matching noteOn
|
||||
if let Some(active_note) = self.active_notes.remove(¬e) {
|
||||
// Calculate relative time offset and duration
|
||||
let time_offset = active_note.start_time - self.start_time;
|
||||
let duration = absolute_time - active_note.start_time;
|
||||
// If the note was fully released before the recording start (e.g. during count-in
|
||||
// pre-roll), discard it — only notes still held at the clip start are kept.
|
||||
if absolute_time <= self.start_time {
|
||||
return;
|
||||
}
|
||||
|
||||
// Clamp note start to clip start: notes held across the recording boundary
|
||||
// are treated as starting at the clip position.
|
||||
let note_start = active_note.start_time.max(self.start_time);
|
||||
let time_offset = note_start - self.start_time;
|
||||
let duration = absolute_time - note_start;
|
||||
|
||||
eprintln!("[MIDI_RECORDING_STATE] Completing note {}: note_start={:.3}s, note_end={:.3}s, recording_start={:.3}s, time_offset={:.3}s, duration={:.3}s",
|
||||
note, active_note.start_time, absolute_time, self.start_time, time_offset, duration);
|
||||
note, note_start, absolute_time, self.start_time, time_offset, duration);
|
||||
|
||||
// Add to completed notes
|
||||
self.completed_notes.push((
|
||||
time_offset,
|
||||
active_note.note,
|
||||
|
|
@ -258,8 +264,9 @@ impl MidiRecordingState {
|
|||
pub fn get_notes_with_active(&self, current_time: f64) -> Vec<(f64, u8, u8, f64)> {
|
||||
let mut notes = self.completed_notes.clone();
|
||||
for active in self.active_notes.values() {
|
||||
let time_offset = active.start_time - self.start_time;
|
||||
let provisional_dur = (current_time - active.start_time).max(0.0);
|
||||
let note_start = active.start_time.max(self.start_time);
|
||||
let time_offset = note_start - self.start_time;
|
||||
let provisional_dur = (current_time - note_start).max(0.0);
|
||||
notes.push((time_offset, active.note, active.velocity, provisional_dur));
|
||||
}
|
||||
notes
|
||||
|
|
@ -273,15 +280,13 @@ impl MidiRecordingState {
|
|||
/// Close out all active notes at the given time
|
||||
/// This should be called when stopping recording to end any held notes
|
||||
pub fn close_active_notes(&mut self, end_time: f64) {
|
||||
// Collect all active notes and close them
|
||||
let active_notes: Vec<_> = self.active_notes.drain().collect();
|
||||
|
||||
for (_note_num, active_note) in active_notes {
|
||||
// Calculate relative time offset and duration
|
||||
let time_offset = active_note.start_time - self.start_time;
|
||||
let duration = end_time - active_note.start_time;
|
||||
let note_start = active_note.start_time.max(self.start_time);
|
||||
let time_offset = note_start - self.start_time;
|
||||
let duration = end_time - note_start;
|
||||
|
||||
// Add to completed notes
|
||||
self.completed_notes.push((
|
||||
time_offset,
|
||||
active_note.note,
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ use symphonia::core::io::MediaSourceStream;
|
|||
use symphonia::core::meta::MetadataOptions;
|
||||
use symphonia::core::probe::Hint;
|
||||
use std::fs::File;
|
||||
use std::io::Cursor;
|
||||
use std::path::Path;
|
||||
|
||||
/// Loaded audio sample data
|
||||
|
|
@ -20,33 +21,36 @@ pub struct SampleData {
|
|||
/// Load an audio file and decode it to mono f32 samples
|
||||
pub fn load_audio_file(path: impl AsRef<Path>) -> Result<SampleData, String> {
|
||||
let path = path.as_ref();
|
||||
|
||||
// Open the file
|
||||
let file = File::open(path)
|
||||
.map_err(|e| format!("Failed to open file: {}", e))?;
|
||||
|
||||
// Create a media source stream
|
||||
let file = File::open(path).map_err(|e| format!("Failed to open file: {}", e))?;
|
||||
let mss = MediaSourceStream::new(Box::new(file), Default::default());
|
||||
|
||||
// Create a hint to help the format registry guess the format
|
||||
let mut hint = Hint::new();
|
||||
if let Some(extension) = path.extension() {
|
||||
if let Some(ext_str) = extension.to_str() {
|
||||
hint.with_extension(ext_str);
|
||||
}
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
hint.with_extension(ext);
|
||||
}
|
||||
decode_mss(mss, hint)
|
||||
}
|
||||
|
||||
// Probe the media source for a format
|
||||
let format_opts = FormatOptions::default();
|
||||
let metadata_opts = MetadataOptions::default();
|
||||
/// Load audio from an in-memory byte slice and decode it to mono f32 samples.
|
||||
/// Supports WAV, FLAC, MP3, AAC, and any other format Symphonia recognises.
|
||||
/// `filename_hint` is used to help Symphonia detect the format (e.g. "kick.wav").
|
||||
pub fn load_audio_from_bytes(bytes: &[u8], filename_hint: &str) -> Result<SampleData, String> {
|
||||
let cursor = Cursor::new(bytes.to_vec());
|
||||
let mss = MediaSourceStream::new(Box::new(cursor), Default::default());
|
||||
let mut hint = Hint::new();
|
||||
if let Some(ext) = std::path::Path::new(filename_hint).extension().and_then(|e| e.to_str()) {
|
||||
hint.with_extension(ext);
|
||||
}
|
||||
decode_mss(mss, hint)
|
||||
}
|
||||
|
||||
/// Shared decode logic: probe `mss`, find the first audio track, decode to mono f32.
|
||||
fn decode_mss(mss: MediaSourceStream, hint: Hint) -> Result<SampleData, String> {
|
||||
let probed = symphonia::default::get_probe()
|
||||
.format(&hint, mss, &format_opts, &metadata_opts)
|
||||
.format(&hint, mss, &FormatOptions::default(), &MetadataOptions::default())
|
||||
.map_err(|e| format!("Failed to probe format: {}", e))?;
|
||||
|
||||
let mut format = probed.format;
|
||||
|
||||
// Find the first audio track
|
||||
let track = format
|
||||
.tracks()
|
||||
.iter()
|
||||
|
|
@ -56,47 +60,33 @@ pub fn load_audio_file(path: impl AsRef<Path>) -> Result<SampleData, String> {
|
|||
let track_id = track.id;
|
||||
let sample_rate = track.codec_params.sample_rate.unwrap_or(48000);
|
||||
|
||||
// Create a decoder for the track
|
||||
let dec_opts = DecoderOptions::default();
|
||||
let mut decoder = symphonia::default::get_codecs()
|
||||
.make(&track.codec_params, &dec_opts)
|
||||
.make(&track.codec_params, &DecoderOptions::default())
|
||||
.map_err(|e| format!("Failed to create decoder: {}", e))?;
|
||||
|
||||
// Decode all packets
|
||||
let mut all_samples = Vec::new();
|
||||
|
||||
loop {
|
||||
// Get the next packet
|
||||
let packet = match format.next_packet() {
|
||||
Ok(packet) => packet,
|
||||
Err(SymphoniaError::IoError(e)) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
|
||||
// End of stream
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Error reading packet: {}", e));
|
||||
}
|
||||
Err(e) => return Err(format!("Error reading packet: {}", e)),
|
||||
};
|
||||
|
||||
// Skip packets that don't belong to the selected track
|
||||
if packet.track_id() != track_id {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Decode the packet
|
||||
let decoded = decoder
|
||||
.decode(&packet)
|
||||
.map_err(|e| format!("Failed to decode packet: {}", e))?;
|
||||
|
||||
// Convert to f32 samples and mix to mono
|
||||
let samples = convert_to_mono_f32(&decoded);
|
||||
all_samples.extend_from_slice(&samples);
|
||||
all_samples.extend_from_slice(&convert_to_mono_f32(&decoded));
|
||||
}
|
||||
|
||||
Ok(SampleData {
|
||||
samples: all_samples,
|
||||
sample_rate,
|
||||
})
|
||||
Ok(SampleData { samples: all_samples, sample_rate })
|
||||
}
|
||||
|
||||
/// Convert an audio buffer to mono f32 samples
|
||||
|
|
|
|||
|
|
@ -438,7 +438,7 @@ impl Metatrack {
|
|||
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
|
||||
if let Some(preset) = &self.audio_graph_preset {
|
||||
if !preset.nodes.is_empty() && preset.output_node.is_some() {
|
||||
self.audio_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
|
||||
self.audio_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None, None)?;
|
||||
// graph_is_default remains as serialized (false for user-modified graphs)
|
||||
} else {
|
||||
self.audio_graph = Self::create_empty_graph(sample_rate, buffer_size);
|
||||
|
|
@ -703,7 +703,7 @@ impl MidiTrack {
|
|||
/// Rebuild the instrument graph from preset after deserialization
|
||||
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
|
||||
if let Some(preset) = &self.instrument_graph_preset {
|
||||
self.instrument_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
|
||||
self.instrument_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None, None)?;
|
||||
} else {
|
||||
// No preset - create default graph
|
||||
self.instrument_graph = AudioGraph::new(sample_rate, buffer_size);
|
||||
|
|
@ -985,7 +985,7 @@ impl AudioTrack {
|
|||
|
||||
if has_nodes && has_output {
|
||||
// Valid preset - rebuild from it
|
||||
self.effects_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
|
||||
self.effects_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None, None)?;
|
||||
} else {
|
||||
// Empty or invalid preset - create default graph
|
||||
self.effects_graph = Self::create_default_graph(sample_rate, buffer_size);
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ use crate::audio::{
|
|||
AudioClipInstanceId, AutomationLaneId, ClipId, CurveType, MidiClip, MidiClipId,
|
||||
MidiClipInstanceId, ParameterId, TrackId,
|
||||
};
|
||||
use crate::audio::midi::MidiEvent;
|
||||
use crate::audio::buffer_pool::BufferPoolStats;
|
||||
use crate::audio::node_graph::nodes::LoopMode;
|
||||
use crate::io::WaveformPeak;
|
||||
|
|
@ -85,6 +86,8 @@ pub enum Command {
|
|||
/// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>)
|
||||
/// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes
|
||||
UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>),
|
||||
/// Replace all events in a MIDI clip (track_id, clip_id, events). Used for CC/pitch bend editing.
|
||||
UpdateMidiClipEvents(TrackId, MidiClipId, Vec<MidiEvent>),
|
||||
/// Remove a MIDI clip instance from a track (track_id, instance_id) - for undo/redo support
|
||||
RemoveMidiClip(TrackId, MidiClipInstanceId),
|
||||
/// Remove an audio clip instance from a track (track_id, instance_id) - for undo/redo support
|
||||
|
|
@ -181,6 +184,10 @@ pub enum Command {
|
|||
GraphSavePreset(TrackId, String, String, String, Vec<String>),
|
||||
/// Load a preset into a track's graph (track_id, preset_path)
|
||||
GraphLoadPreset(TrackId, String),
|
||||
/// Load a .lbins instrument bundle into a track's graph (track_id, path)
|
||||
GraphLoadLbins(TrackId, std::path::PathBuf),
|
||||
/// Save a track's graph as a .lbins instrument bundle (track_id, path, preset_name, description, tags)
|
||||
GraphSaveLbins(TrackId, std::path::PathBuf, String, String, Vec<String>),
|
||||
|
||||
// Metatrack subtrack graph commands
|
||||
/// Replace a metatrack's mixing graph with the default SubtrackInputs→Mixer→Output layout.
|
||||
|
|
@ -392,6 +399,8 @@ pub enum Query {
|
|||
GetAutomationKeyframes(TrackId, u32),
|
||||
/// Get the display name of an AutomationInput node (track_id, node_id)
|
||||
GetAutomationName(TrackId, u32),
|
||||
/// Get the value range (min, max) of an AutomationInput node (track_id, node_id)
|
||||
GetAutomationRange(TrackId, u32),
|
||||
/// Serialize audio pool for project saving (project_path)
|
||||
SerializeAudioPool(std::path::PathBuf),
|
||||
/// Load audio pool from serialized entries (entries, project_path)
|
||||
|
|
@ -439,6 +448,9 @@ pub enum Query {
|
|||
DuplicateMidiClipSync(MidiClipId),
|
||||
/// Get whether a track's graph is still the auto-generated default
|
||||
GetGraphIsDefault(TrackId),
|
||||
/// Get the pitch bend range (in semitones) for the instrument on a MIDI track.
|
||||
/// Searches for MidiToCVNode (in VA templates) or MultiSamplerNode (direct).
|
||||
GetPitchBendRange(TrackId),
|
||||
}
|
||||
|
||||
/// Oscilloscope data from a node
|
||||
|
|
@ -480,6 +492,8 @@ pub enum QueryResponse {
|
|||
AutomationKeyframes(Result<Vec<AutomationKeyframeData>, String>),
|
||||
/// Automation node name
|
||||
AutomationName(Result<String, String>),
|
||||
/// Automation node value range (min, max)
|
||||
AutomationRange(Result<(f32, f32), String>),
|
||||
/// Serialized audio pool entries
|
||||
AudioPoolSerialized(Result<Vec<crate::audio::pool::AudioPoolEntry>, String>),
|
||||
/// Audio pool loaded (returns list of missing pool indices)
|
||||
|
|
@ -514,4 +528,6 @@ pub enum QueryResponse {
|
|||
MidiClipDuplicated(Result<MidiClipId, String>),
|
||||
/// Whether a track's graph is the auto-generated default
|
||||
GraphIsDefault(bool),
|
||||
/// Pitch bend range in semitones for the track's instrument
|
||||
PitchBendRange(f32),
|
||||
}
|
||||
|
|
|
|||
|
|
@ -109,6 +109,17 @@ pub trait Action: Send {
|
|||
    fn midi_notes_after_rollback(&self) -> Option<(u32, &[(f64, u8, u8, f64)])> {
        // Default: this action type has no simplified-note payload to report
        // after an undo; actions that edit notes override this.
        None
    }
|
||||
|
||||
    /// Return full MIDI event data (CC, pitch bend, etc.) reflecting the state after execute/redo.
    /// Used to keep the frontend MIDI event cache in sync after undo/redo.
    fn midi_events_after_execute(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        // Default: no raw-event payload; actions that edit full MIDI events override this.
        None
    }
|
||||
|
||||
    /// Return full MIDI event data reflecting the state after rollback/undo.
    /// Counterpart of `midi_events_after_execute` for the undo direction.
    fn midi_events_after_rollback(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        // Default: no raw-event payload to restore.
        None
    }
|
||||
}
|
||||
|
||||
/// Action executor that wraps the document and manages undo/redo
|
||||
|
|
@ -280,6 +291,16 @@ impl ActionExecutor {
|
|||
self.redo_stack.last().and_then(|a| a.midi_notes_after_rollback())
|
||||
}
|
||||
|
||||
    /// Get full MIDI event data from the last action on the undo stack (after redo).
    /// Returns `(backend midi_clip_id, events)` when that action carries a raw-event
    /// payload (see `Action::midi_events_after_execute`), otherwise `None`.
    pub fn last_undo_midi_events(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        self.undo_stack.last().and_then(|a| a.midi_events_after_execute())
    }
|
||||
|
||||
    /// Get full MIDI event data from the last action on the redo stack (after undo).
    /// Returns `(backend midi_clip_id, events)` when that action carries a raw-event
    /// payload (see `Action::midi_events_after_rollback`), otherwise `None`.
    pub fn last_redo_midi_events(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        self.redo_stack.last().and_then(|a| a.midi_events_after_rollback())
    }
|
||||
|
||||
/// Get the description of the next action to redo
|
||||
pub fn redo_description(&self) -> Option<String> {
|
||||
self.redo_stack.last().map(|a| a.description())
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ pub mod rename_folder;
|
|||
pub mod delete_folder;
|
||||
pub mod move_asset_to_folder;
|
||||
pub mod update_midi_notes;
|
||||
pub mod update_midi_events;
|
||||
pub mod loop_clip_instances;
|
||||
pub mod remove_clip_instances;
|
||||
pub mod set_keyframe;
|
||||
|
|
@ -56,6 +57,7 @@ pub use rename_folder::RenameFolderAction;
|
|||
pub use delete_folder::{DeleteFolderAction, DeleteStrategy};
|
||||
pub use move_asset_to_folder::MoveAssetToFolderAction;
|
||||
pub use update_midi_notes::UpdateMidiNotesAction;
|
||||
pub use update_midi_events::UpdateMidiEventsAction;
|
||||
pub use loop_clip_instances::LoopClipInstancesAction;
|
||||
pub use remove_clip_instances::RemoveClipInstancesAction;
|
||||
pub use set_keyframe::SetKeyframeAction;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,76 @@
|
|||
use crate::action::Action;
|
||||
use crate::document::Document;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Action to replace all MIDI events in a clip (CC, pitch bend, notes, etc.) with undo/redo.
///
/// Used when editing per-note CC or pitch bend from the piano roll. Stores full
/// `MidiEvent` lists rather than the simplified note-tuple format of `UpdateMidiNotesAction`.
pub struct UpdateMidiEventsAction {
    /// Layer containing the MIDI clip
    pub layer_id: Uuid,
    /// Backend MIDI clip ID
    pub midi_clip_id: u32,
    /// Full event list before the edit (pushed to the backend on undo)
    pub old_events: Vec<daw_backend::audio::midi::MidiEvent>,
    /// Full event list after the edit (pushed to the backend on execute/redo)
    pub new_events: Vec<daw_backend::audio::midi::MidiEvent>,
    /// Human-readable description, returned by `Action::description()`
    pub description_text: String,
}
|
||||
|
||||
impl Action for UpdateMidiEventsAction {
|
||||
fn execute(&mut self, _document: &mut Document) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rollback(&mut self, _document: &mut Document) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
self.description_text.clone()
|
||||
}
|
||||
|
||||
fn execute_backend(
|
||||
&mut self,
|
||||
backend: &mut crate::action::BackendContext,
|
||||
_document: &Document,
|
||||
) -> Result<(), String> {
|
||||
let controller = match backend.audio_controller.as_mut() {
|
||||
Some(c) => c,
|
||||
None => return Ok(()),
|
||||
};
|
||||
let track_id = backend
|
||||
.layer_to_track_map
|
||||
.get(&self.layer_id)
|
||||
.ok_or_else(|| format!("Layer {} not mapped to backend track", self.layer_id))?;
|
||||
controller.update_midi_clip_events(*track_id, self.midi_clip_id, self.new_events.clone());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rollback_backend(
|
||||
&mut self,
|
||||
backend: &mut crate::action::BackendContext,
|
||||
_document: &Document,
|
||||
) -> Result<(), String> {
|
||||
let controller = match backend.audio_controller.as_mut() {
|
||||
Some(c) => c,
|
||||
None => return Ok(()),
|
||||
};
|
||||
let track_id = backend
|
||||
.layer_to_track_map
|
||||
.get(&self.layer_id)
|
||||
.ok_or_else(|| format!("Layer {} not mapped to backend track", self.layer_id))?;
|
||||
controller.update_midi_clip_events(*track_id, self.midi_clip_id, self.old_events.clone());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn midi_events_after_execute(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
|
||||
Some((self.midi_clip_id, &self.new_events))
|
||||
}
|
||||
|
||||
fn midi_events_after_rollback(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
|
||||
Some((self.midi_clip_id, &self.old_events))
|
||||
}
|
||||
}
|
||||
|
|
@ -133,6 +133,15 @@ impl Default for TimeSignature {
|
|||
|
||||
// Fallback BPM when a serialized document omits the field — presumably referenced
// by a #[serde(default = "default_bpm")] attribute; confirm at the field definition.
fn default_bpm() -> f64 { 120.0 }

/// How time is displayed in the timeline
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub enum TimelineMode {
    // Seconds is both the serde default and the `Default::default()` value.
    #[default]
    Seconds,
    Measures,
    Frames,
}
|
||||
|
||||
/// Asset category for folder tree access
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum AssetCategory {
|
||||
|
|
@ -226,6 +235,10 @@ pub struct Document {
|
|||
#[serde(default)]
|
||||
pub script_folders: AssetFolderTree,
|
||||
|
||||
/// How time is displayed in the timeline (saved with document)
|
||||
#[serde(default)]
|
||||
pub timeline_mode: TimelineMode,
|
||||
|
||||
/// Current UI layout state (serialized for save/load)
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub ui_layout: Option<LayoutNode>,
|
||||
|
|
@ -270,6 +283,7 @@ impl Default for Document {
|
|||
effect_folders: AssetFolderTree::new(),
|
||||
script_definitions: HashMap::new(),
|
||||
script_folders: AssetFolderTree::new(),
|
||||
timeline_mode: TimelineMode::Seconds,
|
||||
ui_layout: None,
|
||||
ui_layout_base: None,
|
||||
current_time: 0.0,
|
||||
|
|
|
|||
|
|
@ -24,6 +24,8 @@ use vello::Scene;
|
|||
/// Cache for decoded image data to avoid re-decoding every frame
pub struct ImageCache {
    // Decoded peniko image brushes keyed by image asset id — presumably the
    // GPU (vello) render path, counterpart of `cpu_cache`.
    cache: HashMap<Uuid, Arc<ImageBrush>>,
    /// CPU path: tiny-skia pixmaps decoded from the same assets (premultiplied RGBA8)
    cpu_cache: HashMap<Uuid, Arc<tiny_skia::Pixmap>>,
}
|
||||
|
||||
impl ImageCache {
|
||||
|
|
@ -31,6 +33,7 @@ impl ImageCache {
|
|||
    /// Create a cache with no entries on either path.
    pub fn new() -> Self {
        Self {
            cache: HashMap::new(),
            cpu_cache: HashMap::new(),
        }
    }
|
||||
|
||||
|
|
@ -47,14 +50,28 @@ impl ImageCache {
|
|||
Some(arc_image)
|
||||
}
|
||||
|
||||
/// Get or decode an image as a premultiplied tiny-skia Pixmap (CPU render path).
|
||||
pub fn get_or_decode_cpu(&mut self, asset: &ImageAsset) -> Option<Arc<tiny_skia::Pixmap>> {
|
||||
if let Some(cached) = self.cpu_cache.get(&asset.id) {
|
||||
return Some(Arc::clone(cached));
|
||||
}
|
||||
|
||||
let pixmap = decode_image_to_pixmap(asset)?;
|
||||
let arc = Arc::new(pixmap);
|
||||
self.cpu_cache.insert(asset.id, Arc::clone(&arc));
|
||||
Some(arc)
|
||||
}
|
||||
|
||||
    /// Clear cache entry when an image asset is deleted or modified
    pub fn invalidate(&mut self, id: &Uuid) {
        // Drop both cached representations so the next lookup re-decodes.
        self.cache.remove(id);
        self.cpu_cache.remove(id);
    }
|
||||
|
||||
    /// Clear all cached images
    pub fn clear(&mut self) {
        self.cache.clear();
        self.cpu_cache.clear();
    }
|
||||
}
|
||||
|
||||
|
|
@ -64,6 +81,25 @@ impl Default for ImageCache {
|
|||
}
|
||||
}
|
||||
|
||||
/// Decode an image asset to a premultiplied tiny-skia Pixmap (CPU render path).
|
||||
fn decode_image_to_pixmap(asset: &ImageAsset) -> Option<tiny_skia::Pixmap> {
|
||||
let data = asset.data.as_ref()?;
|
||||
let img = image::load_from_memory(data).ok()?;
|
||||
let rgba = img.to_rgba8();
|
||||
let mut pixmap = tiny_skia::Pixmap::new(asset.width, asset.height)?;
|
||||
for (dst, src) in pixmap.pixels_mut().iter_mut().zip(rgba.pixels()) {
|
||||
let [r, g, b, a] = src.0;
|
||||
// Convert straight alpha (image crate output) to premultiplied (tiny-skia internal format)
|
||||
let af = a as f32 / 255.0;
|
||||
let pr = (r as f32 * af).round() as u8;
|
||||
let pg = (g as f32 * af).round() as u8;
|
||||
let pb = (b as f32 * af).round() as u8;
|
||||
// from_rgba only fails when channel > alpha; premultiplied values are always ≤ alpha
|
||||
*dst = tiny_skia::PremultipliedColorU8::from_rgba(pr, pg, pb, a).unwrap();
|
||||
}
|
||||
Some(pixmap)
|
||||
}
|
||||
|
||||
/// Decode an image asset to peniko ImageBrush
|
||||
fn decode_image_asset(asset: &ImageAsset) -> Option<ImageBrush> {
|
||||
// Get the raw file data
|
||||
|
|
@ -1368,8 +1404,8 @@ fn render_vector_graph_cpu(
|
|||
pixmap: &mut tiny_skia::PixmapMut<'_>,
|
||||
transform: tiny_skia::Transform,
|
||||
opacity: f32,
|
||||
_document: &Document,
|
||||
_image_cache: &mut ImageCache,
|
||||
document: &Document,
|
||||
image_cache: &mut ImageCache,
|
||||
) {
|
||||
// 1. Fills
|
||||
for (i, fill) in graph.fills.iter().enumerate() {
|
||||
|
|
@ -1412,8 +1448,25 @@ fn render_vector_graph_cpu(
|
|||
}
|
||||
}
|
||||
|
||||
// Image fill — not yet implemented for CPU renderer; fall through to solid or skip
|
||||
// TODO: decode image to Pixmap and use as Pattern shader
|
||||
// Image fill — decode to Pixmap and use as a Pattern shader
|
||||
if let Some(image_asset_id) = face.image_fill {
|
||||
if let Some(asset) = document.get_image_asset(&image_asset_id) {
|
||||
if let Some(img_pixmap) = image_cache.get_or_decode_cpu(asset) {
|
||||
let pattern = tiny_skia::Pattern::new(
|
||||
tiny_skia::Pixmap::as_ref(&img_pixmap),
|
||||
tiny_skia::SpreadMode::Pad,
|
||||
tiny_skia::FilterQuality::Bilinear,
|
||||
opacity,
|
||||
tiny_skia::Transform::identity(),
|
||||
);
|
||||
let mut paint = tiny_skia::Paint::default();
|
||||
paint.shader = pattern;
|
||||
paint.anti_alias = true;
|
||||
pixmap.fill_path(&ts_path, &paint, fill_type, transform, None);
|
||||
filled = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Solid colour fill
|
||||
if !filled {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,323 @@
|
|||
/// Generic curve lane widget — renders a keyframe curve and handles editing interactions.
|
||||
///
|
||||
/// Used for audio automation lanes (AutomationInput nodes) and, in future, for visual
|
||||
/// property animation lanes on vector/raster layers.
|
||||
|
||||
use eframe::egui::{self, Color32, Pos2, Rect, Shape, Stroke, Vec2};
|
||||
|
||||
// ─── Data types ──────────────────────────────────────────────────────────────
|
||||
|
||||
/// A single keyframe. Values are in the caller's raw unit space (not normalised).
/// Convert from `AutomationKeyframeData` or `lightningbeam_core::animation::Keyframe`
/// before passing in.
#[derive(Clone, Debug)]
pub struct CurvePoint {
    pub time: f64,
    pub value: f32,
    pub interpolation: CurveInterpolation,
    /// Outgoing Bezier tangent (x, y) relative to this keyframe, range 0–1
    pub ease_out: (f32, f32),
    /// Incoming Bezier tangent (x, y) relative to next keyframe, range 0–1
    pub ease_in: (f32, f32),
}

/// Interpolation mode applied between a keyframe and its successor.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CurveInterpolation {
    Linear,
    Bezier,
    Step,
    Hold,
}

/// Edit action the user performed during one frame, returned from [`render_curve_lane`].
#[derive(Debug)]
pub enum CurveEditAction {
    None,
    AddKeyframe { time: f64, value: f32 },
    MoveKeyframe { index: usize, new_time: f64, new_value: f32 },
    DeleteKeyframe { index: usize },
}

/// Drag state for an in-progress keyframe move.
/// Stored by the caller alongside the lane's cached keyframe list.
#[derive(Clone, Debug)]
pub struct CurveDragState {
    pub keyframe_index: usize,
    pub original_time: f64,
    pub original_value: f32,
    pub current_time: f64,
    pub current_value: f32,
}

// ─── Curve evaluation ────────────────────────────────────────────────────────

/// Evaluate the curve defined by `keyframes` at the given `time`.
///
/// `keyframes` are expected sorted by time. Before the first keyframe (or with
/// a single keyframe) the first value is returned; at or past the last keyframe
/// the last value is returned; an empty list evaluates to 0.0.
///
/// Matches the interpolation logic of `AutomationInputNode::evaluate_at_time()`.
pub fn evaluate_curve(keyframes: &[CurvePoint], time: f64) -> f32 {
    let Some(first) = keyframes.first() else {
        return 0.0;
    };
    if keyframes.len() == 1 || time <= first.time {
        return first.value;
    }
    let last = &keyframes[keyframes.len() - 1];
    if time >= last.time {
        return last.value;
    }

    // Index of the first keyframe strictly after `time`; its predecessor starts the segment.
    let hi = keyframes.partition_point(|kf| kf.time <= time);
    let seg_start = &keyframes[hi - 1];
    let seg_end = &keyframes[hi];

    // Normalised position within the segment (guard against coincident times).
    let span = seg_end.time - seg_start.time;
    let t = if span == 0.0 {
        0.0f32
    } else {
        ((time - seg_start.time) / span) as f32
    };

    match seg_start.interpolation {
        CurveInterpolation::Linear => seg_start.value + (seg_end.value - seg_start.value) * t,
        CurveInterpolation::Bezier => {
            let shaped = cubic_bezier_ease(t, seg_start.ease_out, seg_end.ease_in);
            seg_start.value + (seg_end.value - seg_start.value) * shaped
        }
        // Step/Hold keep the segment's starting value until the next keyframe.
        CurveInterpolation::Step | CurveInterpolation::Hold => seg_start.value,
    }
}

/// Simplified cubic Bezier easing (0,0 → ease_out → ease_in → 1,1).
/// Only the Y components of the tangents shape the output.
/// Identical to `AutomationInputNode::cubic_bezier_ease`.
fn cubic_bezier_ease(t: f32, ease_out: (f32, f32), ease_in: (f32, f32)) -> f32 {
    let u = 1.0 - t;
    let b1 = 3.0 * u * u * t;
    let b2 = 3.0 * u * t * t;
    let b3 = t * t * t;
    b1 * ease_out.1 + b2 * ease_in.1 + b3
}
|
||||
|
||||
// ─── Rendering ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Screen radius (px) of a keyframe diamond; hit-testing uses 1.5× this radius.
const DIAMOND_RADIUS: f32 = 5.0;

/// Render a curve lane within `rect` and return any edit action the user performed.
///
/// `drag_state` is an in/out reference; the caller is responsible for storing it between
/// frames alongside the lane's keyframe list.
///
/// `value_min` and `value_max` define the displayed value range (bottom to top of rect).
/// Keyframe values outside this range are clamped visually.
///
/// `time_to_x` maps a project time (seconds) to an **absolute** screen X coordinate.
/// `x_to_time` maps an **absolute** screen X coordinate to project time.
pub fn render_curve_lane(
    ui: &mut egui::Ui,
    rect: Rect,
    keyframes: &[CurvePoint],
    drag_state: &mut Option<CurveDragState>,
    playback_time: f64,
    accent_color: Color32,
    id: egui::Id,
    value_min: f32,
    value_max: f32,
    time_to_x: impl Fn(f64) -> f32,
    x_to_time: impl Fn(f32) -> f64,
) -> CurveEditAction {
    let painter = ui.painter_at(rect);

    // Helper: raw value → normalised [0,1] for screen-Y mapping
    let normalize = |v: f32| -> f32 {
        // Degenerate (zero-width) range pins everything to the vertical centre
        if (value_max - value_min).abs() < f32::EPSILON {
            0.5
        } else {
            (v - value_min) / (value_max - value_min)
        }
    };
    // Helper: normalised [0,1] → raw value
    let denormalize = |n: f32| -> f32 {
        value_min + n * (value_max - value_min)
    };

    // ── Background ──────────────────────────────────────────────────────────
    painter.rect_filled(rect, 0.0, Color32::from_rgba_premultiplied(20, 20, 25, 230));

    // Zero-line (value = 0, or mid-line if range doesn't include 0)
    let zero_norm = normalize(0.0).clamp(0.0, 1.0);
    let zero_y = value_to_y(zero_norm, rect);
    painter.line_segment(
        [Pos2::new(rect.min.x, zero_y), Pos2::new(rect.max.x, zero_y)],
        Stroke::new(1.0, Color32::from_rgba_premultiplied(80, 80, 80, 120)),
    );

    // ── Curve polyline ───────────────────────────────────────────────────────
    // Build a working keyframe list with any in-progress drag preview applied
    let display_keyframes: Vec<CurvePoint> = if let Some(ref ds) = drag_state {
        let mut kfs = keyframes.to_vec();
        if ds.keyframe_index < kfs.len() {
            kfs[ds.keyframe_index].time = ds.current_time;
            kfs[ds.keyframe_index].value = ds.current_value;
            // Re-sort so the dragged keyframe can cross its neighbours without kinks
            kfs.sort_by(|a, b| a.time.partial_cmp(&b.time).unwrap_or(std::cmp::Ordering::Equal));
        }
        kfs
    } else {
        keyframes.to_vec()
    };

    if !display_keyframes.is_empty() {
        let step = 2.0f32; // sample every 2 screen pixels
        let num_steps = ((rect.width() / step) as usize).max(1);
        let mut points: Vec<Pos2> = Vec::with_capacity(num_steps + 1);

        for i in 0..=num_steps {
            let x = rect.min.x + i as f32 * step;
            let t = x_to_time(x.min(rect.max.x));
            let v = evaluate_curve(&display_keyframes, t);
            let y = value_to_y(normalize(v), rect);
            points.push(Pos2::new(x.min(rect.max.x), y));
        }

        let curve_color = accent_color.linear_multiply(0.8);
        painter.add(Shape::line(points, Stroke::new(1.5, curve_color)));
    }

    // ── Playhead ─────────────────────────────────────────────────────────────
    let ph_x = time_to_x(playback_time);
    if ph_x >= rect.min.x && ph_x <= rect.max.x {
        painter.line_segment(
            [Pos2::new(ph_x, rect.min.y), Pos2::new(ph_x, rect.max.y)],
            Stroke::new(1.0, Color32::from_rgb(255, 80, 80)),
        );
    }

    // ── Interaction ──────────────────────────────────────────────────────────
    let sense = egui::Sense::click_and_drag();
    let response = ui.interact(rect, id, sense);

    // latest_pos() works whether the pointer button is up or down (unlike interact_pos).
    let pointer_pos: Option<Pos2> = ui.input(|i| i.pointer.latest_pos());

    // Find which keyframe (if any) the pointer is near
    // NOTE(review): this picks the FIRST keyframe within hit radius, not the nearest —
    // with densely packed keyframes the earlier one always wins; confirm intended.
    let hovered_kf: Option<usize> = pointer_pos.and_then(|pos| {
        keyframes.iter().enumerate().find(|(_, kf)| {
            let kx = time_to_x(kf.time);
            let ky = value_to_y(normalize(kf.value), rect);
            let d = Vec2::new(pos.x - kx, pos.y - ky).length();
            d <= DIAMOND_RADIUS * 1.5
        }).map(|(i, _)| i)
    });

    // Draw keyframe diamonds (after interaction setup so hover color works)
    for (idx, kf) in keyframes.iter().enumerate() {
        let kx = time_to_x(kf.time);
        // Cull diamonds entirely outside the lane horizontally
        if kx < rect.min.x - DIAMOND_RADIUS || kx > rect.max.x + DIAMOND_RADIUS {
            continue;
        }
        let ky = value_to_y(normalize(kf.value), rect);

        // During drag, show this diamond at its preview position
        let (draw_x, draw_y) = if let Some(ref ds) = drag_state {
            if ds.keyframe_index == idx {
                (time_to_x(ds.current_time), value_to_y(normalize(ds.current_value), rect))
            } else {
                (kx, ky)
            }
        } else {
            (kx, ky)
        };

        let is_hovered = hovered_kf == Some(idx);
        let is_dragging = drag_state.as_ref().map_or(false, |d| d.keyframe_index == idx);

        // Priority: dragging > hovered > idle
        let fill = if is_dragging {
            Color32::WHITE
        } else if is_hovered {
            accent_color
        } else {
            accent_color.linear_multiply(0.7)
        };

        draw_diamond(&painter, Pos2::new(draw_x, draw_y), DIAMOND_RADIUS, fill);
    }

    // ── Interaction logic ────────────────────────────────────────────────────

    // Right-click → delete keyframe
    if response.secondary_clicked() {
        if let Some(idx) = hovered_kf {
            return CurveEditAction::DeleteKeyframe { index: idx };
        }
    }

    // Left drag start → begin dragging a keyframe
    if response.drag_started() {
        if let Some(idx) = hovered_kf {
            let kf = &keyframes[idx];
            *drag_state = Some(CurveDragState {
                keyframe_index: idx,
                original_time: kf.time,
                original_value: kf.value,
                current_time: kf.time,
                current_value: kf.value,
            });
        }
    }

    // Drag in progress → update preview position
    if let Some(ref mut ds) = drag_state {
        if response.dragged() {
            if let Some(pos) = pointer_pos {
                // Keep the preview inside the lane rect
                let clamped_x = pos.x.clamp(rect.min.x, rect.max.x);
                let clamped_y = pos.y.clamp(rect.min.y, rect.max.y);
                ds.current_time = x_to_time(clamped_x);
                ds.current_value = denormalize(y_to_value(clamped_y, rect));
            }
        }
        // Drag released → commit
        if response.drag_stopped() {
            // unwrap is safe: we are inside the `Some` branch of drag_state
            let ds = drag_state.take().unwrap();
            return CurveEditAction::MoveKeyframe {
                index: ds.keyframe_index,
                new_time: ds.current_time,
                new_value: ds.current_value,
            };
        }
    }

    // Left click on empty space → add keyframe
    // Use interact_pointer_pos() here: it captures the click position even after button release.
    if response.clicked() && hovered_kf.is_none() && drag_state.is_none() {
        if let Some(pos) = response.interact_pointer_pos() {
            let t = x_to_time(pos.x);
            let v = denormalize(y_to_value(pos.y, rect));
            return CurveEditAction::AddKeyframe { time: t, value: v };
        }
    }

    CurveEditAction::None
}
|
||||
|
||||
// ─── Coordinate helpers ───────────────────────────────────────────────────────
|
||||
|
||||
/// Map a normalised value (0=bottom, 1=top) to a Y screen coordinate within `rect`.
|
||||
pub fn value_to_y(value: f32, rect: Rect) -> f32 {
|
||||
rect.max.y - value.clamp(0.0, 1.0) * rect.height()
|
||||
}
|
||||
|
||||
/// Map a screen Y coordinate within `rect` to a normalised value (0=bottom, 1=top).
|
||||
pub fn y_to_value(y: f32, rect: Rect) -> f32 {
|
||||
((rect.max.y - y) / rect.height()).clamp(0.0, 1.0)
|
||||
}
|
||||
|
||||
// ─── Drawing utilities ────────────────────────────────────────────────────────
|
||||
|
||||
fn draw_diamond(painter: &egui::Painter, center: Pos2, radius: f32, fill: Color32) {
|
||||
let points = vec![
|
||||
Pos2::new(center.x, center.y - radius), // top
|
||||
Pos2::new(center.x + radius, center.y), // right
|
||||
Pos2::new(center.x, center.y + radius), // bottom
|
||||
Pos2::new(center.x - radius, center.y), // left
|
||||
];
|
||||
painter.add(Shape::convex_polygon(
|
||||
points,
|
||||
fill,
|
||||
Stroke::new(1.0, Color32::from_rgba_premultiplied(0, 0, 0, 180)),
|
||||
));
|
||||
}
|
||||
|
|
@ -339,6 +339,7 @@ impl From<MenuAction> for AppAction {
|
|||
MenuAction::AddShapeTween => Self::AddShapeTween,
|
||||
MenuAction::ReturnToStart => Self::ReturnToStart,
|
||||
MenuAction::Play => Self::Play,
|
||||
MenuAction::ToggleCountIn => Self::Play, // not directly mappable to AppAction
|
||||
MenuAction::ZoomIn => Self::ZoomIn,
|
||||
MenuAction::ZoomOut => Self::ZoomOut,
|
||||
MenuAction::ActualSize => Self::ActualSize,
|
||||
|
|
|
|||
|
|
@ -57,6 +57,8 @@ mod test_mode;
|
|||
mod sample_import;
|
||||
mod sample_import_dialog;
|
||||
|
||||
mod curve_editor;
|
||||
|
||||
/// Lightningbeam Editor - Animation and video editing software
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(name = "Lightningbeam Editor")]
|
||||
|
|
@ -294,7 +296,7 @@ enum SplitPreviewMode {
|
|||
}
|
||||
|
||||
/// Rasterize an embedded SVG and upload it as an egui texture
|
||||
fn rasterize_svg(svg_data: &[u8], name: &str, render_size: u32, ctx: &egui::Context) -> Option<egui::TextureHandle> {
|
||||
pub(crate) fn rasterize_svg(svg_data: &[u8], name: &str, render_size: u32, ctx: &egui::Context) -> Option<egui::TextureHandle> {
|
||||
let tree = resvg::usvg::Tree::from_data(svg_data, &resvg::usvg::Options::default()).ok()?;
|
||||
let pixmap_size = tree.size().to_int_size();
|
||||
let scale_x = render_size as f32 / pixmap_size.width() as f32;
|
||||
|
|
@ -840,6 +842,8 @@ struct EditorApp {
|
|||
track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>,
|
||||
/// Generation counter - incremented on project load to force UI components to reload
|
||||
project_generation: u64,
|
||||
/// Incremented whenever node graph topology changes (add/remove node or connection)
|
||||
graph_topology_generation: u64,
|
||||
// Clip instance ID mapping (Document clip instance UUIDs <-> backend clip instance IDs)
|
||||
clip_instance_to_backend_map: HashMap<Uuid, lightningbeam_core::action::BackendClipInstanceId>,
|
||||
// Playback state (global for all panes)
|
||||
|
|
@ -851,6 +855,8 @@ struct EditorApp {
|
|||
#[allow(dead_code)]
|
||||
armed_layers: HashSet<Uuid>,
|
||||
is_recording: bool, // Whether recording is currently active
|
||||
metronome_enabled: bool, // Whether metronome clicks during recording
|
||||
count_in_enabled: bool, // Whether count-in fires before recording
|
||||
recording_clips: HashMap<Uuid, u32>, // layer_id -> backend clip_id during recording
|
||||
recording_start_time: f64, // Playback time when recording started
|
||||
recording_layer_ids: Vec<Uuid>, // Layers being recorded to (for creating clips)
|
||||
|
|
@ -882,10 +888,9 @@ struct EditorApp {
|
|||
output_level: (f32, f32),
|
||||
track_levels: HashMap<daw_backend::TrackId, f32>,
|
||||
|
||||
/// Cache for MIDI event data (keyed by backend midi_clip_id)
|
||||
/// Prevents repeated backend queries for the same MIDI clip
|
||||
/// Format: (timestamp, note_number, velocity, is_note_on)
|
||||
midi_event_cache: HashMap<u32, Vec<(f64, u8, u8, bool)>>,
|
||||
/// Cache for MIDI event data (keyed by backend midi_clip_id).
|
||||
/// Stores full raw MidiEvents (note on/off, CC, pitch bend, etc.)
|
||||
midi_event_cache: HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
|
||||
/// Cache for audio file durations to avoid repeated queries
|
||||
/// Format: pool_index -> duration in seconds
|
||||
audio_duration_cache: HashMap<usize, f64>,
|
||||
|
|
@ -1116,12 +1121,15 @@ impl EditorApp {
|
|||
layer_to_track_map: HashMap::new(),
|
||||
track_to_layer_map: HashMap::new(),
|
||||
project_generation: 0,
|
||||
graph_topology_generation: 0,
|
||||
clip_instance_to_backend_map: HashMap::new(),
|
||||
playback_time: 0.0, // Start at beginning
|
||||
is_playing: false, // Start paused
|
||||
recording_arm_mode: RecordingArmMode::default(), // Auto mode by default
|
||||
armed_layers: HashSet::new(), // No layers explicitly armed
|
||||
is_recording: false, // Not recording initially
|
||||
metronome_enabled: false, // Metronome off by default
|
||||
count_in_enabled: false, // Count-in off by default
|
||||
recording_clips: HashMap::new(), // No active recording clips
|
||||
recording_start_time: 0.0, // Will be set when recording starts
|
||||
recording_layer_ids: Vec::new(), // Will be populated when recording starts
|
||||
|
|
@ -1482,6 +1490,13 @@ impl EditorApp {
|
|||
}
|
||||
};
|
||||
|
||||
// Set default timeline mode based on activity
|
||||
document.timeline_mode = match layout_index {
|
||||
2 => lightningbeam_core::document::TimelineMode::Measures, // Music
|
||||
1 => lightningbeam_core::document::TimelineMode::Seconds, // Video
|
||||
_ => lightningbeam_core::document::TimelineMode::Frames, // Animation, Painting, etc.
|
||||
};
|
||||
|
||||
// Reset action executor with new document
|
||||
self.action_executor = lightningbeam_core::action::ActionExecutor::new(document);
|
||||
|
||||
|
|
@ -3099,10 +3114,16 @@ impl EditorApp {
|
|||
};
|
||||
// Rebuild MIDI cache after undo (backend_context dropped, borrows released)
|
||||
if undo_succeeded {
|
||||
let midi_update = self.action_executor.last_redo_midi_notes()
|
||||
.map(|(id, notes)| (id, notes.to_vec()));
|
||||
if let Some((clip_id, notes)) = midi_update {
|
||||
self.rebuild_midi_cache_entry(clip_id, ¬es);
|
||||
if let Some((clip_id, events)) = self.action_executor.last_redo_midi_events()
|
||||
.map(|(id, ev)| (id, ev.to_vec()))
|
||||
{
|
||||
self.midi_event_cache.insert(clip_id, events);
|
||||
} else {
|
||||
let midi_update = self.action_executor.last_redo_midi_notes()
|
||||
.map(|(id, notes)| (id, notes.to_vec()));
|
||||
if let Some((clip_id, notes)) = midi_update {
|
||||
self.rebuild_midi_cache_entry(clip_id, ¬es);
|
||||
}
|
||||
}
|
||||
// Stale vertex/edge/face IDs from before the undo would
|
||||
// crash selection rendering on the restored (smaller) DCEL.
|
||||
|
|
@ -3137,10 +3158,16 @@ impl EditorApp {
|
|||
};
|
||||
// Rebuild MIDI cache after redo (backend_context dropped, borrows released)
|
||||
if redo_succeeded {
|
||||
let midi_update = self.action_executor.last_undo_midi_notes()
|
||||
.map(|(id, notes)| (id, notes.to_vec()));
|
||||
if let Some((clip_id, notes)) = midi_update {
|
||||
self.rebuild_midi_cache_entry(clip_id, ¬es);
|
||||
if let Some((clip_id, events)) = self.action_executor.last_undo_midi_events()
|
||||
.map(|(id, ev)| (id, ev.to_vec()))
|
||||
{
|
||||
self.midi_event_cache.insert(clip_id, events);
|
||||
} else {
|
||||
let midi_update = self.action_executor.last_undo_midi_notes()
|
||||
.map(|(id, notes)| (id, notes.to_vec()));
|
||||
if let Some((clip_id, notes)) = midi_update {
|
||||
self.rebuild_midi_cache_entry(clip_id, ¬es);
|
||||
}
|
||||
}
|
||||
self.selection.clear_geometry_selection();
|
||||
}
|
||||
|
|
@ -3505,6 +3532,12 @@ impl EditorApp {
|
|||
println!("Menu: Play");
|
||||
// TODO: Implement play/pause
|
||||
}
|
||||
MenuAction::ToggleCountIn => {
|
||||
// Only effective when metronome is enabled (count-in requires a click track)
|
||||
if self.metronome_enabled {
|
||||
self.count_in_enabled = !self.count_in_enabled;
|
||||
}
|
||||
}
|
||||
|
||||
// View menu
|
||||
MenuAction::ZoomIn => {
|
||||
|
|
@ -3804,18 +3837,7 @@ impl EditorApp {
|
|||
// track_id is unused by the query, pass 0
|
||||
match controller.query_midi_clip(0, clip_id) {
|
||||
Ok(clip_data) => {
|
||||
let processed_events: Vec<(f64, u8, u8, bool)> = clip_data.events.iter()
|
||||
.filter_map(|event| {
|
||||
let status_type = event.status & 0xF0;
|
||||
if status_type == 0x90 || status_type == 0x80 {
|
||||
let is_note_on = status_type == 0x90 && event.data2 > 0;
|
||||
Some((event.timestamp, event.data1, event.data2, is_note_on))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
self.midi_event_cache.insert(clip_id, processed_events);
|
||||
self.midi_event_cache.insert(clip_id, clip_data.events);
|
||||
midi_fetched += 1;
|
||||
}
|
||||
Err(e) => eprintln!("Failed to fetch MIDI clip {}: {}", clip_id, e),
|
||||
|
|
@ -3954,12 +3976,12 @@ impl EditorApp {
|
|||
/// Rebuild a MIDI event cache entry from backend note format.
|
||||
/// Called after undo/redo to keep the cache consistent with the backend.
|
||||
fn rebuild_midi_cache_entry(&mut self, clip_id: u32, notes: &[(f64, u8, u8, f64)]) {
|
||||
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2);
|
||||
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
|
||||
for &(start_time, note, velocity, duration) in notes {
|
||||
events.push((start_time, note, velocity, true));
|
||||
events.push((start_time + duration, note, velocity, false));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + duration, 0, note, 0));
|
||||
}
|
||||
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
|
||||
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
|
||||
self.midi_event_cache.insert(clip_id, events);
|
||||
}
|
||||
|
||||
|
|
@ -3978,22 +4000,7 @@ impl EditorApp {
|
|||
let duration = midi_clip.duration;
|
||||
let event_count = midi_clip.events.len();
|
||||
|
||||
// Process MIDI events to cache format: (timestamp, note_number, velocity, is_note_on)
|
||||
// Filter to note events only (status 0x90 = note-on, 0x80 = note-off)
|
||||
let processed_events: Vec<(f64, u8, u8, bool)> = midi_clip.events.iter()
|
||||
.filter_map(|event| {
|
||||
let status_type = event.status & 0xF0;
|
||||
if status_type == 0x90 || status_type == 0x80 {
|
||||
// Note-on is 0x90 with velocity > 0, Note-off is 0x80 or velocity = 0
|
||||
let is_note_on = status_type == 0x90 && event.data2 > 0;
|
||||
Some((event.timestamp, event.data1, event.data2, is_note_on))
|
||||
} else {
|
||||
None // Ignore non-note events (CC, pitch bend, etc.)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let note_event_count = processed_events.len();
|
||||
let processed_events = midi_clip.events.clone();
|
||||
|
||||
// Add to backend MIDI clip pool FIRST and get the backend clip ID
|
||||
if let Some(ref controller_arc) = self.audio_controller {
|
||||
|
|
@ -4008,9 +4015,8 @@ impl EditorApp {
|
|||
let clip = AudioClip::new_midi(&name, backend_clip_id, duration);
|
||||
let frontend_clip_id = self.action_executor.document_mut().add_audio_clip(clip);
|
||||
|
||||
println!("Imported MIDI '{}' ({:.1}s, {} total events, {} note events) - Frontend ID: {}, Backend ID: {}",
|
||||
name, duration, event_count, note_event_count, frontend_clip_id, backend_clip_id);
|
||||
println!("✅ Added MIDI clip to backend pool and cached {} note events", note_event_count);
|
||||
println!("Imported MIDI '{}' ({:.1}s, {} total events) - Frontend ID: {}, Backend ID: {}",
|
||||
name, duration, event_count, frontend_clip_id, backend_clip_id);
|
||||
|
||||
Some(ImportedAssetInfo {
|
||||
clip_id: frontend_clip_id,
|
||||
|
|
@ -5112,40 +5118,57 @@ impl eframe::App for EditorApp {
|
|||
if let Some(layer_id) = midi_layer_id {
|
||||
// Lazily create the doc clip + instance on the first progress event
|
||||
// (there is no MidiRecordingStarted event from the backend).
|
||||
let already_exists = self.clip_instance_to_backend_map.values().any(|v| {
|
||||
matches!(v, lightningbeam_core::action::BackendClipInstanceId::Midi(id) if *id == clip_id)
|
||||
});
|
||||
if !already_exists {
|
||||
use lightningbeam_core::clip::{AudioClip, ClipInstance};
|
||||
let clip = AudioClip::new_recording("Recording...");
|
||||
let doc_clip_id = self.action_executor.document_mut().add_audio_clip(clip);
|
||||
let clip_instance = ClipInstance::new(doc_clip_id)
|
||||
.with_timeline_start(self.recording_start_time);
|
||||
let clip_instance_id = clip_instance.id;
|
||||
if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) {
|
||||
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
|
||||
audio_layer.clip_instances.push(clip_instance);
|
||||
}
|
||||
}
|
||||
self.clip_instance_to_backend_map.insert(
|
||||
clip_instance_id,
|
||||
lightningbeam_core::action::BackendClipInstanceId::Midi(clip_id),
|
||||
);
|
||||
}
|
||||
|
||||
let doc_clip_id = {
|
||||
let document = self.action_executor.document();
|
||||
document.get_layer(&layer_id)
|
||||
.and_then(|layer| {
|
||||
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
|
||||
audio_layer.clip_instances.last().map(|i| i.clip_id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
//
|
||||
// MidiClipId (clip_id) is the content ID; MidiClipInstanceId is
|
||||
// the placement ID used in the snapshot and backend operations.
|
||||
// We need to store the instance ID, not the content ID, so that
|
||||
// build_audio_clip_cache can correlate mc.id → doc UUID.
|
||||
// Command::CreateMidiClip has already been processed and the
|
||||
// snapshot refreshed by the time this event arrives.
|
||||
let backend_instance_id: u32 = if let Some(ref controller_arc) = self.audio_controller {
|
||||
let controller = controller_arc.lock().unwrap();
|
||||
let snap = controller.clip_snapshot();
|
||||
let snap = snap.read().unwrap();
|
||||
snap.midi.get(&_track_id)
|
||||
.and_then(|instances| instances.iter().find(|mc| mc.clip_id == clip_id))
|
||||
.map(|mc| mc.id)
|
||||
.unwrap_or(clip_id)
|
||||
} else {
|
||||
clip_id
|
||||
};
|
||||
|
||||
// Find the Midi-typed clip instance the timeline already created.
|
||||
// Register it in the map (using the correct instance ID, not the
|
||||
// content ID) so trim/move actions can find it via the snapshot.
|
||||
let already_mapped = self.clip_instance_to_backend_map.values().any(|v| {
|
||||
matches!(v, lightningbeam_core::action::BackendClipInstanceId::Midi(id) if *id == backend_instance_id)
|
||||
});
|
||||
let doc_clip_id = {
|
||||
let doc = self.action_executor.document();
|
||||
doc.audio_clip_by_midi_clip_id(clip_id).map(|(id, _)| id)
|
||||
};
|
||||
if let Some(doc_clip_id) = doc_clip_id {
|
||||
if !already_mapped {
|
||||
// Find the clip instance for this clip on the layer
|
||||
let instance_id = {
|
||||
let doc = self.action_executor.document();
|
||||
doc.get_layer(&layer_id)
|
||||
.and_then(|l| {
|
||||
if let lightningbeam_core::layer::AnyLayer::Audio(al) = l {
|
||||
al.clip_instances.iter()
|
||||
.find(|ci| ci.clip_id == doc_clip_id)
|
||||
.map(|ci| ci.id)
|
||||
} else { None }
|
||||
})
|
||||
};
|
||||
if let Some(instance_id) = instance_id {
|
||||
self.clip_instance_to_backend_map.insert(
|
||||
instance_id,
|
||||
lightningbeam_core::action::BackendClipInstanceId::Midi(backend_instance_id),
|
||||
);
|
||||
}
|
||||
}
|
||||
// Update the clip's duration so the timeline bar grows
|
||||
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
|
||||
clip.duration = duration;
|
||||
}
|
||||
|
|
@ -5153,15 +5176,14 @@ impl eframe::App for EditorApp {
|
|||
}
|
||||
|
||||
// Update midi_event_cache with notes captured so far
|
||||
// (inlined instead of calling rebuild_midi_cache_entry to avoid
|
||||
// conflicting &mut self borrow with event_rx loop)
|
||||
// (inlined to avoid conflicting &mut self borrow)
|
||||
{
|
||||
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2);
|
||||
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
|
||||
for &(start_time, note, velocity, dur) in ¬es {
|
||||
events.push((start_time, note, velocity, true));
|
||||
events.push((start_time + dur, note, velocity, false));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + dur, 0, note, 0));
|
||||
}
|
||||
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
|
||||
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
|
||||
self.midi_event_cache.insert(clip_id, events);
|
||||
}
|
||||
ctx.request_repaint();
|
||||
|
|
@ -5175,42 +5197,17 @@ impl eframe::App for EditorApp {
|
|||
let mut controller = controller_arc.lock().unwrap();
|
||||
match controller.query_midi_clip(track_id, clip_id) {
|
||||
Ok(midi_clip_data) => {
|
||||
// Convert backend MidiEvent format to cache format
|
||||
let cache_events: Vec<(f64, u8, u8, bool)> = midi_clip_data.events.iter()
|
||||
.filter_map(|event| {
|
||||
let status_type = event.status & 0xF0;
|
||||
if status_type == 0x90 || status_type == 0x80 {
|
||||
let is_note_on = status_type == 0x90 && event.data2 > 0;
|
||||
Some((event.timestamp, event.data1, event.data2, is_note_on))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
drop(controller);
|
||||
self.midi_event_cache.insert(clip_id, cache_events);
|
||||
self.midi_event_cache.insert(clip_id, midi_clip_data.events.clone());
|
||||
|
||||
// Update document clip with final duration and name
|
||||
let midi_layer_id = self.track_to_layer_map.get(&track_id)
|
||||
.filter(|lid| self.recording_layer_ids.contains(lid))
|
||||
.copied();
|
||||
if let Some(layer_id) = midi_layer_id {
|
||||
let doc_clip_id = {
|
||||
let document = self.action_executor.document();
|
||||
document.get_layer(&layer_id)
|
||||
.and_then(|layer| {
|
||||
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
|
||||
audio_layer.clip_instances.last().map(|i| i.clip_id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
if let Some(doc_clip_id) = doc_clip_id {
|
||||
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
|
||||
clip.duration = midi_clip_data.duration;
|
||||
clip.name = format!("MIDI Recording {}", clip_id);
|
||||
}
|
||||
let doc_clip_id = self.action_executor.document()
|
||||
.audio_clip_by_midi_clip_id(clip_id)
|
||||
.map(|(id, _)| id);
|
||||
if let Some(doc_clip_id) = doc_clip_id {
|
||||
if let Some(clip) = self.action_executor.document_mut().audio_clips.get_mut(&doc_clip_id) {
|
||||
clip.duration = midi_clip_data.duration;
|
||||
clip.name = format!("MIDI Recording {}", clip_id);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -5596,9 +5593,28 @@ impl eframe::App for EditorApp {
|
|||
if let Some(menu_system) = &self.menu_system {
|
||||
let recent_files = self.config.get_recent_files();
|
||||
let layout_names: Vec<String> = self.layouts.iter().map(|l| l.name.clone()).collect();
|
||||
|
||||
// Determine timeline measures mode for conditional menu items
|
||||
let timeline_is_measures = self.pane_instances.values().any(|p| {
|
||||
if let panes::PaneInstance::Timeline(t) = p { t.is_measures_mode() } else { false }
|
||||
});
|
||||
|
||||
// Checked actions show "✔ Label"; hidden actions are not rendered at all
|
||||
let checked: &[crate::menu::MenuAction] = if self.count_in_enabled && self.metronome_enabled {
|
||||
&[crate::menu::MenuAction::ToggleCountIn]
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
let hidden: &[crate::menu::MenuAction] = if timeline_is_measures && self.metronome_enabled {
|
||||
&[]
|
||||
} else {
|
||||
&[crate::menu::MenuAction::ToggleCountIn]
|
||||
};
|
||||
|
||||
if let Some(action) = menu_system.render_egui_menu_bar(
|
||||
ui, &recent_files, Some(&self.keymap),
|
||||
&layout_names, self.current_layout_index,
|
||||
checked, hidden,
|
||||
) {
|
||||
self.handle_menu_action(action);
|
||||
}
|
||||
|
|
@ -5737,6 +5753,8 @@ impl eframe::App for EditorApp {
|
|||
playback_time: &mut self.playback_time,
|
||||
is_playing: &mut self.is_playing,
|
||||
is_recording: &mut self.is_recording,
|
||||
metronome_enabled: &mut self.metronome_enabled,
|
||||
count_in_enabled: &mut self.count_in_enabled,
|
||||
recording_clips: &mut self.recording_clips,
|
||||
recording_start_time: &mut self.recording_start_time,
|
||||
recording_layer_ids: &mut self.recording_layer_ids,
|
||||
|
|
@ -5769,6 +5787,7 @@ impl eframe::App for EditorApp {
|
|||
track_to_layer_map: &self.track_to_layer_map,
|
||||
waveform_stereo: self.config.waveform_stereo,
|
||||
project_generation: &mut self.project_generation,
|
||||
graph_topology_generation: &mut self.graph_topology_generation,
|
||||
script_to_edit: &mut self.script_to_edit,
|
||||
script_saved: &mut self.script_saved,
|
||||
region_selection: &mut self.region_selection,
|
||||
|
|
|
|||
|
|
@ -324,6 +324,7 @@ pub enum MenuAction {
|
|||
AddShapeTween,
|
||||
ReturnToStart,
|
||||
Play,
|
||||
ToggleCountIn,
|
||||
|
||||
// View menu
|
||||
ZoomIn,
|
||||
|
|
@ -422,6 +423,7 @@ impl MenuItemDef {
|
|||
const ADD_SHAPE_TWEEN: Self = Self { label: "Add Shape Tween", action: MenuAction::AddShapeTween, shortcut: None };
|
||||
const RETURN_TO_START: Self = Self { label: "Return to start", action: MenuAction::ReturnToStart, shortcut: None };
|
||||
const PLAY: Self = Self { label: "Play", action: MenuAction::Play, shortcut: None };
|
||||
const COUNT_IN: Self = Self { label: "Count In", action: MenuAction::ToggleCountIn, shortcut: None };
|
||||
|
||||
// View menu items
|
||||
const ZOOM_IN: Self = Self { label: "Zoom In", action: MenuAction::ZoomIn, shortcut: Some(Shortcut::new(ShortcutKey::Equals, CTRL, NO_SHIFT, NO_ALT)) };
|
||||
|
|
@ -548,6 +550,8 @@ impl MenuItemDef {
|
|||
MenuDef::Separator,
|
||||
MenuDef::Item(&Self::RETURN_TO_START),
|
||||
MenuDef::Item(&Self::PLAY),
|
||||
MenuDef::Separator,
|
||||
MenuDef::Item(&Self::COUNT_IN),
|
||||
],
|
||||
},
|
||||
// View menu
|
||||
|
|
@ -805,6 +809,8 @@ impl MenuSystem {
|
|||
keymap: Option<&crate::keymap::KeymapManager>,
|
||||
layout_names: &[String],
|
||||
current_layout_index: usize,
|
||||
checked_actions: &[MenuAction],
|
||||
hidden_actions: &[MenuAction],
|
||||
) -> Option<MenuAction> {
|
||||
let mut action = None;
|
||||
let ctx = ui.ctx().clone();
|
||||
|
|
@ -819,7 +825,7 @@ impl MenuSystem {
|
|||
let response = ui.button(*label);
|
||||
let popup_id = egui::Popup::default_response_id(&response);
|
||||
button_entries.push((response, popup_id, menu_def));
|
||||
} else if let Some(a) = self.render_menu_def(ui, menu_def, recent_files, keymap, layout_names, current_layout_index) {
|
||||
} else if let Some(a) = self.render_menu_def(ui, menu_def, recent_files, keymap, layout_names, current_layout_index, checked_actions, hidden_actions) {
|
||||
action = Some(a);
|
||||
}
|
||||
}
|
||||
|
|
@ -847,7 +853,7 @@ impl MenuSystem {
|
|||
ui.set_width(min_width);
|
||||
let mut a = None;
|
||||
for child in *children {
|
||||
if let Some(result) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index) {
|
||||
if let Some(result) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index, checked_actions, hidden_actions) {
|
||||
a = Some(result);
|
||||
ui.close();
|
||||
}
|
||||
|
|
@ -875,10 +881,15 @@ impl MenuSystem {
|
|||
keymap: Option<&crate::keymap::KeymapManager>,
|
||||
layout_names: &[String],
|
||||
current_layout_index: usize,
|
||||
checked_actions: &[MenuAction],
|
||||
hidden_actions: &[MenuAction],
|
||||
) -> Option<MenuAction> {
|
||||
match def {
|
||||
MenuDef::Item(item_def) => {
|
||||
if Self::render_menu_item(ui, item_def, keymap) {
|
||||
if hidden_actions.contains(&item_def.action) {
|
||||
return None;
|
||||
}
|
||||
if Self::render_menu_item(ui, item_def, keymap, checked_actions) {
|
||||
Some(item_def.action)
|
||||
} else {
|
||||
None
|
||||
|
|
@ -914,7 +925,7 @@ impl MenuSystem {
|
|||
} else if *label == "Layout" {
|
||||
let mut action = None;
|
||||
for child in *children {
|
||||
if let Some(a) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index) {
|
||||
if let Some(a) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index, checked_actions, hidden_actions) {
|
||||
action = Some(a);
|
||||
ui.close();
|
||||
}
|
||||
|
|
@ -937,7 +948,7 @@ impl MenuSystem {
|
|||
} else {
|
||||
let mut action = None;
|
||||
for child in *children {
|
||||
if let Some(a) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index) {
|
||||
if let Some(a) = self.render_menu_def(ui, child, recent_files, keymap, layout_names, current_layout_index, checked_actions, hidden_actions) {
|
||||
action = Some(a);
|
||||
ui.close();
|
||||
}
|
||||
|
|
@ -951,7 +962,7 @@ impl MenuSystem {
|
|||
}
|
||||
|
||||
/// Render a single menu item with label and shortcut
|
||||
fn render_menu_item(ui: &mut egui::Ui, def: &MenuItemDef, keymap: Option<&crate::keymap::KeymapManager>) -> bool {
|
||||
fn render_menu_item(ui: &mut egui::Ui, def: &MenuItemDef, keymap: Option<&crate::keymap::KeymapManager>, checked_actions: &[MenuAction]) -> bool {
|
||||
// Look up shortcut from keymap if available, otherwise use static default
|
||||
let effective_shortcut = if let Some(km) = keymap {
|
||||
if let Ok(app_action) = crate::keymap::AppAction::try_from(def.action) {
|
||||
|
|
@ -987,10 +998,15 @@ impl MenuSystem {
|
|||
ui.visuals().widgets.inactive.text_color()
|
||||
};
|
||||
let label_pos = rect.min + egui::vec2(4.0, (rect.height() - 14.0) / 2.0);
|
||||
let label = if checked_actions.contains(&def.action) {
|
||||
format!("✔ {}", def.label)
|
||||
} else {
|
||||
def.label.to_owned()
|
||||
};
|
||||
ui.painter().text(
|
||||
label_pos,
|
||||
egui::Align2::LEFT_TOP,
|
||||
def.label,
|
||||
label,
|
||||
egui::FontId::proportional(14.0),
|
||||
text_color,
|
||||
);
|
||||
|
|
|
|||
|
|
@ -372,7 +372,7 @@ fn generate_video_thumbnail(
|
|||
/// Generate a piano roll thumbnail for MIDI clips
|
||||
/// Shows notes as horizontal bars with Y position = note % 12 (one octave)
|
||||
fn generate_midi_thumbnail(
|
||||
events: &[(f64, u8, u8, bool)], // (timestamp, note_number, velocity, is_note_on)
|
||||
events: &[daw_backend::audio::midi::MidiEvent],
|
||||
duration: f64,
|
||||
bg_color: egui::Color32,
|
||||
note_color: egui::Color32,
|
||||
|
|
@ -390,10 +390,11 @@ fn generate_midi_thumbnail(
|
|||
}
|
||||
|
||||
// Draw note events
|
||||
for &(timestamp, note_number, _velocity, is_note_on) in events {
|
||||
if !is_note_on || timestamp > preview_duration {
|
||||
for event in events {
|
||||
if !event.is_note_on() || event.timestamp > preview_duration {
|
||||
continue;
|
||||
}
|
||||
let (timestamp, note_number) = (event.timestamp, event.data1);
|
||||
|
||||
let x = ((timestamp / preview_duration) * size as f64) as usize;
|
||||
|
||||
|
|
|
|||
|
|
@ -1178,14 +1178,16 @@ impl InfopanelPane {
|
|||
if indices.len() == 1 {
|
||||
// Single note — show details if we can resolve from the event cache
|
||||
if let Some(events) = shared.midi_event_cache.get(&midi_clip_id) {
|
||||
// Events are (time, note, velocity, is_on) — resolve to notes
|
||||
let mut notes: Vec<(f64, u8, u8, f64)> = Vec::new(); // (time, note, vel, dur)
|
||||
// Resolve note-on/off pairs to (time, note, vel, dur) tuples
|
||||
let mut notes: Vec<(f64, u8, u8, f64)> = Vec::new();
|
||||
let mut pending: std::collections::HashMap<u8, (f64, u8)> = std::collections::HashMap::new();
|
||||
for &(time, note, vel, is_on) in events {
|
||||
if is_on {
|
||||
pending.insert(note, (time, vel));
|
||||
} else if let Some((start, v)) = pending.remove(¬e) {
|
||||
notes.push((start, note, v, time - start));
|
||||
for event in events {
|
||||
if event.is_note_on() {
|
||||
pending.insert(event.data1, (event.timestamp, event.data2));
|
||||
} else if event.is_note_off() {
|
||||
if let Some((start, v)) = pending.remove(&event.data1) {
|
||||
notes.push((start, event.data1, v, event.timestamp - start));
|
||||
}
|
||||
}
|
||||
}
|
||||
notes.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));
|
||||
|
|
|
|||
|
|
@ -212,6 +212,8 @@ pub struct SharedPaneState<'a> {
|
|||
pub is_playing: &'a mut bool, // Whether playback is currently active
|
||||
/// Recording state
|
||||
pub is_recording: &'a mut bool, // Whether recording is currently active
|
||||
pub metronome_enabled: &'a mut bool, // Whether metronome clicks during recording
|
||||
pub count_in_enabled: &'a mut bool, // Whether count-in fires before recording
|
||||
pub recording_clips: &'a mut std::collections::HashMap<uuid::Uuid, u32>, // layer_id -> clip_id
|
||||
pub recording_start_time: &'a mut f64, // Playback time when recording started
|
||||
pub recording_layer_ids: &'a mut Vec<uuid::Uuid>, // Layers being recorded to
|
||||
|
|
@ -233,7 +235,7 @@ pub struct SharedPaneState<'a> {
|
|||
/// NOTE: If an action later fails during execution, the cache may be out of sync with the
|
||||
/// backend. This is acceptable because MIDI note edits are simple and unlikely to fail.
|
||||
/// Undo/redo rebuilds affected entries from the backend to restore consistency.
|
||||
pub midi_event_cache: &'a mut std::collections::HashMap<u32, Vec<(f64, u8, u8, bool)>>,
|
||||
pub midi_event_cache: &'a mut std::collections::HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
|
||||
/// Audio pool indices that got new raw audio data this frame (for thumbnail invalidation)
|
||||
pub audio_pools_with_new_waveforms: &'a std::collections::HashSet<usize>,
|
||||
/// Raw audio samples for GPU waveform rendering (pool_index -> (samples, sample_rate, channels))
|
||||
|
|
@ -268,6 +270,9 @@ pub struct SharedPaneState<'a> {
|
|||
pub waveform_stereo: bool,
|
||||
/// Generation counter - incremented on project load to force reloads
|
||||
pub project_generation: &'a mut u64,
|
||||
/// Incremented whenever node graph topology changes (add/remove node or connection).
|
||||
/// Used by the timeline to know when to refresh automation lane caches.
|
||||
pub graph_topology_generation: &'a mut u64,
|
||||
/// Script ID to open in the script editor (set by node graph "Edit Script" action)
|
||||
pub script_to_edit: &'a mut Option<Uuid>,
|
||||
/// Script ID that was just saved (triggers auto-recompile of nodes using it)
|
||||
|
|
|
|||
|
|
@ -301,6 +301,12 @@ pub struct GraphState {
|
|||
pub available_nam_models: Vec<NamModelInfo>,
|
||||
/// Search text for the NAM model picker popup
|
||||
pub nam_search_text: String,
|
||||
/// Edit buffers for AutomationInput display names, keyed by frontend NodeId
|
||||
pub automation_name_edits: HashMap<NodeId, String>,
|
||||
/// Pending automation name changes (node_id, backend_node_id, new_name)
|
||||
pub pending_automation_name_changes: Vec<(NodeId, u32, String)>,
|
||||
/// AutomationInput nodes whose display name still needs to be queried from backend
|
||||
pub pending_automation_name_queries: Vec<(NodeId, u32)>,
|
||||
}
|
||||
|
||||
impl Default for GraphState {
|
||||
|
|
@ -327,6 +333,9 @@ impl Default for GraphState {
|
|||
pending_amp_sim_load: None,
|
||||
available_nam_models: Vec::new(),
|
||||
nam_search_text: String::new(),
|
||||
automation_name_edits: HashMap::new(),
|
||||
pending_automation_name_changes: Vec::new(),
|
||||
pending_automation_name_queries: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1511,6 +1520,21 @@ impl NodeDataTrait for NodeData {
|
|||
if close_popup {
|
||||
egui::Popup::close_id(ui.ctx(), popup_id);
|
||||
}
|
||||
} else if self.template == NodeTemplate::AutomationInput {
|
||||
let backend_node_id = user_state.node_backend_ids.get(&node_id).copied().unwrap_or(0);
|
||||
let edit_buf = user_state.automation_name_edits
|
||||
.entry(node_id)
|
||||
.or_insert_with(String::new);
|
||||
let resp = ui.add(
|
||||
egui::TextEdit::singleline(edit_buf)
|
||||
.hint_text("Lane name...")
|
||||
.desired_width(f32::INFINITY),
|
||||
);
|
||||
if resp.lost_focus() {
|
||||
user_state.pending_automation_name_changes.push(
|
||||
(node_id, backend_node_id, edit_buf.clone()),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
ui.label("");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -681,6 +681,9 @@ impl NodeGraphPane {
|
|||
if let Err(e) = shared.action_executor.execute_with_backend(action, &mut backend_context) {
|
||||
eprintln!("Failed to execute node graph action: {}", e);
|
||||
} else {
|
||||
// Notify other panes (e.g. timeline automation cache) that graph topology changed
|
||||
*shared.graph_topology_generation += 1;
|
||||
|
||||
// If this was a node addition, query backend to get the new node's ID
|
||||
if let Some((frontend_id, node_type, position)) = self.pending_node_addition.take() {
|
||||
if let Some(track_id) = self.track_id {
|
||||
|
|
@ -1432,6 +1435,7 @@ impl NodeGraphPane {
|
|||
|
||||
// Create nodes in frontend
|
||||
self.pending_script_resolutions.clear();
|
||||
self.user_state.pending_automation_name_queries.clear();
|
||||
for node in &graph_state.nodes {
|
||||
let node_template = match NodeTemplate::from_backend_name(&node.node_type) {
|
||||
Some(t) => t,
|
||||
|
|
@ -1456,6 +1460,13 @@ impl NodeGraphPane {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For AutomationInput nodes: queue a name query to populate the edit buffer
|
||||
if node.node_type == "AutomationInput" {
|
||||
if let Some(fid) = frontend_id {
|
||||
self.user_state.pending_automation_name_queries.push((fid, node.id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create connections in frontend
|
||||
|
|
@ -2780,6 +2791,36 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
|
|||
}
|
||||
}
|
||||
|
||||
// Populate automation name edit buffers (deferred after load)
|
||||
if !self.user_state.pending_automation_name_queries.is_empty() {
|
||||
let queries: Vec<_> = self.user_state.pending_automation_name_queries.drain(..).collect();
|
||||
if let Some(backend_track_id) = self.track_id.and_then(|tid| shared.layer_to_track_map.get(&tid).copied()) {
|
||||
if let Some(controller_arc) = &shared.audio_controller {
|
||||
let mut controller = controller_arc.lock().unwrap();
|
||||
for (node_id, backend_node_id) in queries {
|
||||
if let Ok(name) = controller.query_automation_name(backend_track_id, backend_node_id) {
|
||||
self.user_state.automation_name_edits.insert(node_id, name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle pending automation name changes
|
||||
if !self.user_state.pending_automation_name_changes.is_empty() {
|
||||
let changes: Vec<_> = self.user_state.pending_automation_name_changes.drain(..).collect();
|
||||
if let Some(backend_track_id) = self.track_id.and_then(|tid| shared.layer_to_track_map.get(&tid).copied()) {
|
||||
if let Some(controller_arc) = &shared.audio_controller {
|
||||
let mut controller = controller_arc.lock().unwrap();
|
||||
for (_node_id, backend_node_id, name) in changes {
|
||||
controller.automation_set_name(backend_track_id, backend_node_id, name);
|
||||
}
|
||||
// Invalidate timeline automation cache so renamed lanes appear immediately
|
||||
*shared.graph_topology_generation += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle param changes from draw block (canvas knob drag etc.)
|
||||
if !self.user_state.pending_draw_param_changes.is_empty() {
|
||||
let changes: Vec<_> = self.user_state.pending_draw_param_changes.drain(..).collect();
|
||||
|
|
|
|||
|
|
@ -27,12 +27,30 @@ const DEFAULT_VELOCITY: u8 = 100;
|
|||
|
||||
// ── Types ────────────────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
enum PitchBendZone {
|
||||
Start, // First 30% of note: ramp from bend → 0
|
||||
Middle, // Middle 40%: bell curve 0 → bend → 0
|
||||
End, // Last 30%: ramp from 0 → bend
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
enum DragMode {
|
||||
MoveNotes { start_time_offset: f64, start_note_offset: i32 },
|
||||
ResizeNote { note_index: usize, original_duration: f64 },
|
||||
CreateNote,
|
||||
SelectRect,
|
||||
/// Alt-drag pitch bend editing on a note
|
||||
PitchBend {
|
||||
note_index: usize,
|
||||
zone: PitchBendZone,
|
||||
note_pitch: u8,
|
||||
note_channel: u8,
|
||||
note_start: f64,
|
||||
note_duration: f64,
|
||||
origin_y: f32,
|
||||
current_semitones: f32,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
@ -47,6 +65,7 @@ struct TempNote {
|
|||
#[derive(Debug, Clone)]
|
||||
struct ResolvedNote {
|
||||
note: u8,
|
||||
channel: u8,
|
||||
start_time: f64,
|
||||
duration: f64,
|
||||
velocity: u8,
|
||||
|
|
@ -94,6 +113,15 @@ pub struct PianoRollPane {
|
|||
|
||||
// Spectrogram gamma (power curve for colormap)
|
||||
spectrogram_gamma: f32,
|
||||
|
||||
// Header slider values — persist across frames during drag
|
||||
header_vel: f32,
|
||||
header_mod: f32,
|
||||
|
||||
// Instrument pitch bend range in semitones (queried from backend when layer changes)
|
||||
pitch_bend_range: f32,
|
||||
// Layer ID for which pitch_bend_range was last queried
|
||||
pitch_bend_range_layer: Option<uuid::Uuid>,
|
||||
}
|
||||
|
||||
impl PianoRollPane {
|
||||
|
|
@ -123,6 +151,10 @@ impl PianoRollPane {
|
|||
user_scrolled_since_play: false,
|
||||
cached_clip_id: None,
|
||||
spectrogram_gamma: 0.8,
|
||||
header_vel: 100.0,
|
||||
header_mod: 0.0,
|
||||
pitch_bend_range: 2.0,
|
||||
pitch_bend_range_layer: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -166,28 +198,33 @@ impl PianoRollPane {
|
|||
|
||||
// ── Note resolution ──────────────────────────────────────────────────
|
||||
|
||||
fn resolve_notes(events: &[(f64, u8, u8, bool)]) -> Vec<ResolvedNote> {
|
||||
let mut active: HashMap<u8, (f64, u8)> = HashMap::new(); // note -> (start_time, velocity)
|
||||
fn resolve_notes(events: &[daw_backend::audio::midi::MidiEvent]) -> Vec<ResolvedNote> {
|
||||
let mut active: HashMap<u8, (f64, u8, u8)> = HashMap::new(); // note -> (start_time, velocity, channel)
|
||||
let mut notes = Vec::new();
|
||||
|
||||
for &(timestamp, note_number, velocity, is_note_on) in events {
|
||||
if is_note_on {
|
||||
active.insert(note_number, (timestamp, velocity));
|
||||
} else if let Some((start, vel)) = active.remove(¬e_number) {
|
||||
let duration = (timestamp - start).max(MIN_NOTE_DURATION);
|
||||
notes.push(ResolvedNote {
|
||||
note: note_number,
|
||||
start_time: start,
|
||||
duration,
|
||||
velocity: vel,
|
||||
});
|
||||
for event in events {
|
||||
let channel = event.status & 0x0F;
|
||||
if event.is_note_on() {
|
||||
active.insert(event.data1, (event.timestamp, event.data2, channel));
|
||||
} else if event.is_note_off() {
|
||||
if let Some((start, vel, ch)) = active.remove(&event.data1) {
|
||||
let duration = (event.timestamp - start).max(MIN_NOTE_DURATION);
|
||||
notes.push(ResolvedNote {
|
||||
note: event.data1,
|
||||
channel: ch,
|
||||
start_time: start,
|
||||
duration,
|
||||
velocity: vel,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle unterminated notes
|
||||
for (¬e_number, &(start, vel)) in &active {
|
||||
for (¬e_number, &(start, vel, ch)) in &active {
|
||||
notes.push(ResolvedNote {
|
||||
note: note_number,
|
||||
channel: ch,
|
||||
start_time: start,
|
||||
duration: 0.5, // default duration for unterminated
|
||||
velocity: vel,
|
||||
|
|
@ -251,16 +288,51 @@ impl PianoRollPane {
|
|||
None => return,
|
||||
};
|
||||
|
||||
// Query pitch bend range from backend when the layer changes
|
||||
if self.pitch_bend_range_layer != Some(layer_id) {
|
||||
if let Some(track_id) = shared.layer_to_track_map.get(&layer_id) {
|
||||
if let Some(ctrl) = shared.audio_controller.as_ref() {
|
||||
if let Ok(mut c) = ctrl.lock() {
|
||||
self.pitch_bend_range = c.query_pitch_bend_range(*track_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.pitch_bend_range_layer = Some(layer_id);
|
||||
}
|
||||
|
||||
let document = shared.action_executor.document();
|
||||
|
||||
// Collect clip data we need before borrowing shared mutably
|
||||
// Collect clip data using the engine snapshot (source of truth), which reflects
|
||||
// recorded clips immediately. Falls back to document if snapshot is empty/absent.
|
||||
let mut clip_data: Vec<(u32, f64, f64, f64, Uuid)> = Vec::new(); // (midi_clip_id, timeline_start, trim_start, duration, instance_id)
|
||||
if let Some(AnyLayer::Audio(audio_layer)) = document.get_layer(&layer_id) {
|
||||
for instance in &audio_layer.clip_instances {
|
||||
if let Some(clip) = document.audio_clips.get(&instance.clip_id) {
|
||||
if let AudioClipType::Midi { midi_clip_id } = clip.clip_type {
|
||||
let duration = instance.effective_duration(clip.duration);
|
||||
clip_data.push((midi_clip_id, instance.timeline_start, instance.trim_start, duration, instance.id));
|
||||
|
||||
let snapshot_clips: Option<Vec<daw_backend::audio::midi::MidiClipInstance>> =
|
||||
shared.clip_snapshot.as_ref().and_then(|arc| {
|
||||
let snap = arc.read().ok()?;
|
||||
let track_id = shared.layer_to_track_map.get(&layer_id)?;
|
||||
snap.midi.get(track_id).cloned()
|
||||
});
|
||||
|
||||
if let Some(midi_instances) = snapshot_clips.filter(|v| !v.is_empty()) {
|
||||
// Use snapshot data (engine is source of truth)
|
||||
for mc in &midi_instances {
|
||||
if let Some((clip_doc_id, _)) = document.audio_clip_by_midi_clip_id(mc.clip_id) {
|
||||
let clip_doc_id = clip_doc_id; // doc-side AudioClip uuid
|
||||
let duration = mc.external_duration;
|
||||
let instance_uuid = Uuid::nil(); // no doc-side instance uuid yet
|
||||
clip_data.push((mc.clip_id, mc.external_start, mc.internal_start, duration, instance_uuid));
|
||||
let _ = clip_doc_id; // used above for the if-let pattern
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Fall back to document (handles recording-in-progress and pre-snapshot clips)
|
||||
if let Some(AnyLayer::Audio(audio_layer)) = document.get_layer(&layer_id) {
|
||||
for instance in &audio_layer.clip_instances {
|
||||
if let Some(clip) = document.audio_clips.get(&instance.clip_id) {
|
||||
if let AudioClipType::Midi { midi_clip_id } = clip.clip_type {
|
||||
let duration = instance.effective_duration(clip.duration);
|
||||
clip_data.push((midi_clip_id, instance.timeline_start, instance.trim_start, duration, instance.id));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -337,7 +409,7 @@ impl PianoRollPane {
|
|||
// Render notes
|
||||
if let Some(events) = shared.midi_event_cache.get(&midi_clip_id) {
|
||||
let resolved = Self::resolve_notes(events);
|
||||
self.render_notes(&grid_painter, grid_rect, &resolved, timeline_start, trim_start, duration, opacity, is_selected);
|
||||
self.render_notes(&grid_painter, grid_rect, &resolved, events, timeline_start, trim_start, duration, opacity, is_selected, midi_clip_id);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -508,16 +580,159 @@ impl PianoRollPane {
|
|||
}
|
||||
}
|
||||
|
||||
/// Find the peak pitch bend value (in semitones) for a note in the event list.
|
||||
/// Returns 0.0 if no pitch bend events are present in the note's time range.
|
||||
fn find_peak_pitch_bend_semitones(
|
||||
events: &[daw_backend::audio::midi::MidiEvent],
|
||||
note_start: f64,
|
||||
note_end: f64,
|
||||
channel: u8,
|
||||
pitch_bend_range: f32,
|
||||
) -> f32 {
|
||||
let mut peak = 0.0f32;
|
||||
for ev in events {
|
||||
if ev.timestamp > note_end + 0.01 { break; }
|
||||
if ev.timestamp >= note_start - 0.01
|
||||
&& (ev.status & 0xF0) == 0xE0
|
||||
&& (ev.status & 0x0F) == channel
|
||||
{
|
||||
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
|
||||
let normalized = (raw - 8192) as f32 / 8192.0;
|
||||
let semitones = normalized * pitch_bend_range;
|
||||
if semitones.abs() > peak.abs() {
|
||||
peak = semitones;
|
||||
}
|
||||
}
|
||||
}
|
||||
peak
|
||||
}
|
||||
|
||||
/// Determine which zone of a note was clicked based on the X position within the note rect.
|
||||
fn pitch_bend_zone_from_x(click_x: f32, note_left: f32, note_right: f32) -> PitchBendZone {
|
||||
let t = (click_x - note_left) / (note_right - note_left).max(1.0);
|
||||
if t < 0.3 {
|
||||
PitchBendZone::Start
|
||||
} else if t < 0.7 {
|
||||
PitchBendZone::Middle
|
||||
} else {
|
||||
PitchBendZone::End
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate pitch bend MIDI events for a note based on the zone and target semitones.
|
||||
fn generate_pitch_bend_events(
|
||||
note_start: f64,
|
||||
note_duration: f64,
|
||||
zone: PitchBendZone,
|
||||
semitones: f32,
|
||||
channel: u8,
|
||||
pitch_bend_range: f32,
|
||||
) -> Vec<daw_backend::audio::midi::MidiEvent> {
|
||||
use daw_backend::audio::midi::MidiEvent;
|
||||
let num_steps: usize = 128;
|
||||
let mut events = Vec::new();
|
||||
let encode_bend = |normalized: f32| -> (u8, u8) {
|
||||
let value_14 = (normalized * 8191.0 + 8192.0).clamp(0.0, 16383.0) as i16;
|
||||
((value_14 & 0x7F) as u8, ((value_14 >> 7) & 0x7F) as u8)
|
||||
};
|
||||
// Use t directly (0..=1 across the full note) — same formula as the visual ghost.
|
||||
// Start: peak → 0 (ramps down over full note)
|
||||
// Middle: 0 → peak → 0 (sine arch, peaks at center)
|
||||
// End: 0 → peak (ramps up over full note)
|
||||
for i in 0..=num_steps {
|
||||
let t = i as f64 / num_steps as f64;
|
||||
let t_f32 = t as f32;
|
||||
// Cosine ease curves: Start+End at equal value = perfectly flat (partition of unity).
|
||||
// Start: (1+cos(πt))/2 — peaks at t=0, smooth decay to 0 at t=1
|
||||
// End: (1-cos(πt))/2 — 0 at t=0, smooth rise to peak at t=1
|
||||
// Middle: sin(πt) — arch peaking at t=0.5
|
||||
let normalized = match zone {
|
||||
PitchBendZone::Start => semitones / pitch_bend_range * (1.0 + (std::f32::consts::PI * t_f32).cos()) * 0.5,
|
||||
PitchBendZone::Middle => semitones / pitch_bend_range * (std::f32::consts::PI * t_f32).sin(),
|
||||
PitchBendZone::End => semitones / pitch_bend_range * (1.0 - (std::f32::consts::PI * t_f32).cos()) * 0.5,
|
||||
};
|
||||
let timestamp = note_start + t * note_duration;
|
||||
let (lsb, msb) = encode_bend(normalized);
|
||||
events.push(MidiEvent { timestamp, status: 0xE0 | channel, data1: lsb, data2: msb });
|
||||
}
|
||||
events
|
||||
}
|
||||
|
||||
/// Find the lowest available MIDI channel (1–15) not already used by any note
|
||||
/// overlapping [note_start, note_end], excluding the note being assigned itself.
|
||||
/// Returns the note's current channel unchanged if it is already uniquely assigned (non-zero).
|
||||
fn find_or_assign_channel(
|
||||
events: &[daw_backend::audio::midi::MidiEvent],
|
||||
note_start: f64,
|
||||
note_end: f64,
|
||||
note_pitch: u8,
|
||||
current_channel: u8,
|
||||
) -> u8 {
|
||||
use std::collections::HashMap;
|
||||
let mut used = [false; 16];
|
||||
// Walk events to find which channels have notes overlapping the target range.
|
||||
// key = (pitch, channel), value = note_start_time
|
||||
let mut active: HashMap<(u8, u8), f64> = HashMap::new();
|
||||
for ev in events {
|
||||
let ch = ev.status & 0x0F;
|
||||
let msg = ev.status & 0xF0;
|
||||
if msg == 0x90 && ev.data2 > 0 {
|
||||
active.insert((ev.data1, ch), ev.timestamp);
|
||||
} else if msg == 0x80 || (msg == 0x90 && ev.data2 == 0) {
|
||||
if let Some(start) = active.remove(&(ev.data1, ch)) {
|
||||
// Overlaps target range and is NOT the note we're assigning
|
||||
if start < note_end && ev.timestamp > note_start
|
||||
&& !(ev.data1 == note_pitch && ch == current_channel)
|
||||
{
|
||||
used[ch as usize] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Mark still-active (no note-off seen) notes
|
||||
for ((pitch, ch), start) in &active {
|
||||
if *start < note_end && !(*pitch == note_pitch && *ch == current_channel) {
|
||||
used[*ch as usize] = true;
|
||||
}
|
||||
}
|
||||
// Keep current channel if already uniquely assigned and non-zero
|
||||
if current_channel != 0 && !used[current_channel as usize] {
|
||||
return current_channel;
|
||||
}
|
||||
// Find lowest free channel in 1..15
|
||||
for ch in 1u8..16 {
|
||||
if !used[ch as usize] { return ch; }
|
||||
}
|
||||
current_channel // fallback (>15 simultaneous notes)
|
||||
}
|
||||
|
||||
/// Find the CC1 (modulation) value for a note in the event list.
|
||||
/// Searches for a CC1 event at or just before the note's start time on the same channel.
|
||||
fn find_cc1_for_note(events: &[daw_backend::audio::midi::MidiEvent], note_start: f64, note_end: f64, channel: u8) -> u8 {
|
||||
let mut cc1 = 0u8;
|
||||
for ev in events {
|
||||
if ev.timestamp > note_end { break; }
|
||||
if (ev.status & 0xF0) == 0xB0 && (ev.status & 0x0F) == channel && ev.data1 == 1 {
|
||||
if ev.timestamp <= note_start {
|
||||
cc1 = ev.data2;
|
||||
}
|
||||
}
|
||||
}
|
||||
cc1
|
||||
}
|
||||
|
||||
fn render_notes(
|
||||
&self,
|
||||
painter: &egui::Painter,
|
||||
grid_rect: Rect,
|
||||
notes: &[ResolvedNote],
|
||||
events: &[daw_backend::audio::midi::MidiEvent],
|
||||
clip_timeline_start: f64,
|
||||
trim_start: f64,
|
||||
clip_duration: f64,
|
||||
opacity: f32,
|
||||
is_selected_clip: bool,
|
||||
clip_id: u32,
|
||||
) {
|
||||
for (i, note) in notes.iter().enumerate() {
|
||||
// Skip notes entirely outside the visible trim window
|
||||
|
|
@ -588,6 +803,107 @@ impl PianoRollPane {
|
|||
if clipped.is_positive() {
|
||||
painter.rect_filled(clipped, 1.0, color);
|
||||
painter.rect_stroke(clipped, 1.0, Stroke::new(1.0, Color32::from_rgba_unmultiplied(0, 0, 0, (76.0 * opacity) as u8)), StrokeKind::Middle);
|
||||
|
||||
// Modulation (CC1) bar: 3px column on left edge of note, fills from bottom
|
||||
let cc1 = Self::find_cc1_for_note(events, note.start_time, note.start_time + note.duration, note.channel);
|
||||
if cc1 > 0 {
|
||||
let bar_width = 3.0_f32.min(clipped.width());
|
||||
let bar_height = (cc1 as f32 / 127.0) * clipped.height();
|
||||
let bar_rect = Rect::from_min_size(
|
||||
pos2(clipped.min.x, clipped.max.y - bar_height),
|
||||
vec2(bar_width, bar_height),
|
||||
);
|
||||
let bar_alpha = (128.0 * opacity) as u8;
|
||||
painter.rect_filled(bar_rect, 0.0, Color32::from_rgba_unmultiplied(255, 255, 255, bar_alpha));
|
||||
}
|
||||
|
||||
// Pitch bend ghost overlay — contour-following filled band
|
||||
// Build a curve of semitone values sampled across the note width.
|
||||
// For live drag: existing bend + new zone contribution (additive).
|
||||
// For persisted: sample actual events.
|
||||
const N_SAMPLES: usize = 24;
|
||||
let bend_curve: Option<[f32; N_SAMPLES + 1]> =
|
||||
if let Some(DragMode::PitchBend { note_index: drag_idx, current_semitones, zone, note_channel: drag_ch, .. }) = self.drag_mode {
|
||||
if drag_idx == i && is_selected_clip && Some(clip_id) == self.selected_clip_id {
|
||||
let mut curve = [0.0f32; N_SAMPLES + 1];
|
||||
let pi = std::f32::consts::PI;
|
||||
for s in 0..=N_SAMPLES {
|
||||
let t = s as f32 / N_SAMPLES as f32;
|
||||
// Sample existing bend at this time position
|
||||
let ts = note.start_time + t as f64 * note.duration;
|
||||
let mut existing_norm = 0.0f32;
|
||||
for ev in events {
|
||||
if ev.timestamp > ts { break; }
|
||||
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == drag_ch {
|
||||
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
|
||||
existing_norm = (raw - 8192) as f32 / 8192.0;
|
||||
}
|
||||
}
|
||||
let existing_semi = existing_norm * self.pitch_bend_range;
|
||||
// New zone contribution
|
||||
let zone_semi = match zone {
|
||||
PitchBendZone::Start => current_semitones * (1.0 + (pi * t).cos()) * 0.5,
|
||||
PitchBendZone::Middle => current_semitones * (pi * t).sin(),
|
||||
PitchBendZone::End => current_semitones * (1.0 - (pi * t).cos()) * 0.5,
|
||||
};
|
||||
curve[s] = existing_semi + zone_semi;
|
||||
}
|
||||
// Only show ghost if there's any meaningful bend at all
|
||||
if curve.iter().any(|v| v.abs() >= 0.05) {
|
||||
Some(curve)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// For persisted notes (no live drag), sample actual pitch bend events
|
||||
let bend_curve = bend_curve.or_else(|| {
|
||||
let peak = Self::find_peak_pitch_bend_semitones(
|
||||
events, note.start_time, note.start_time + note.duration,
|
||||
note.channel, self.pitch_bend_range);
|
||||
if peak.abs() < 0.05 { return None; }
|
||||
let mut curve = [0.0f32; N_SAMPLES + 1];
|
||||
for s in 0..=N_SAMPLES {
|
||||
let t = s as f64 / N_SAMPLES as f64;
|
||||
let ts = note.start_time + t * note.duration;
|
||||
// Find last pitch bend event at or before ts
|
||||
let mut bend_norm = 0.0f32;
|
||||
for ev in events {
|
||||
if ev.timestamp > ts { break; }
|
||||
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == note.channel {
|
||||
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
|
||||
bend_norm = (raw - 8192) as f32 / 8192.0;
|
||||
}
|
||||
}
|
||||
curve[s] = bend_norm * self.pitch_bend_range;
|
||||
}
|
||||
Some(curve)
|
||||
});
|
||||
|
||||
if let Some(curve) = bend_curve {
|
||||
// Draw a stroked curve relative to the note's centerline.
|
||||
let note_center_y = y + h * 0.5;
|
||||
// Brighten toward white for visibility
|
||||
let brighten = |c: u8| -> u8 { (c as u16 + (255 - c as u16) * 3 / 4) as u8 };
|
||||
let stroke_color = Color32::from_rgba_unmultiplied(
|
||||
brighten(r), brighten(g), brighten(b), (220.0 * opacity) as u8,
|
||||
);
|
||||
|
||||
let points: Vec<egui::Pos2> = (0..=N_SAMPLES).map(|s| {
|
||||
let t = s as f32 / N_SAMPLES as f32;
|
||||
let px = (x + t * w).clamp(grid_rect.min.x, grid_rect.max.x);
|
||||
let bend_px = (curve[s] * self.note_height)
|
||||
.clamp(-(grid_rect.height()), grid_rect.height());
|
||||
let py = (note_center_y - bend_px).clamp(grid_rect.min.y, grid_rect.max.y);
|
||||
pos2(px, py)
|
||||
}).collect();
|
||||
painter.add(egui::Shape::line(points, egui::Stroke::new(3.0, stroke_color)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -654,6 +970,7 @@ impl PianoRollPane {
|
|||
let response = ui.allocate_rect(full_rect, egui::Sense::click_and_drag());
|
||||
let shift_held = ui.input(|i| i.modifiers.shift);
|
||||
let ctrl_held = ui.input(|i| i.modifiers.ctrl);
|
||||
let alt_held = ui.input(|i| i.modifiers.alt);
|
||||
let now = ui.input(|i| i.time);
|
||||
|
||||
// Auto-release preview note after its duration expires.
|
||||
|
|
@ -784,7 +1101,7 @@ impl PianoRollPane {
|
|||
if full_rect.contains(pos) {
|
||||
let in_grid = pos.x >= grid_rect.min.x;
|
||||
if in_grid {
|
||||
self.on_grid_press(pos, grid_rect, shift_held, ctrl_held, now, shared, clip_data);
|
||||
self.on_grid_press(pos, grid_rect, shift_held, ctrl_held, alt_held, now, shared, clip_data);
|
||||
} else {
|
||||
// Keyboard click - preview note (hold until mouse-up)
|
||||
let note = self.y_to_note(pos.y, keyboard_rect);
|
||||
|
|
@ -807,10 +1124,14 @@ impl PianoRollPane {
|
|||
}
|
||||
|
||||
// Update cursor
|
||||
if let Some(hover_pos) = response.hover_pos() {
|
||||
if matches!(self.drag_mode, Some(DragMode::PitchBend { .. })) {
|
||||
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeVertical);
|
||||
} else if let Some(hover_pos) = response.hover_pos() {
|
||||
if hover_pos.x >= grid_rect.min.x {
|
||||
if shift_held {
|
||||
ui.ctx().set_cursor_icon(egui::CursorIcon::Crosshair);
|
||||
} else if alt_held && self.hit_test_note(hover_pos, grid_rect, shared, clip_data).is_some() {
|
||||
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeVertical);
|
||||
} else if self.hit_test_note_edge(hover_pos, grid_rect, shared, clip_data).is_some() {
|
||||
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeHorizontal);
|
||||
} else if self.hit_test_note(hover_pos, grid_rect, shared, clip_data).is_some() {
|
||||
|
|
@ -831,6 +1152,7 @@ impl PianoRollPane {
|
|||
grid_rect: Rect,
|
||||
shift_held: bool,
|
||||
ctrl_held: bool,
|
||||
alt_held: bool,
|
||||
now: f64,
|
||||
shared: &mut SharedPaneState,
|
||||
clip_data: &[(u32, f64, f64, f64, Uuid)],
|
||||
|
|
@ -841,6 +1163,35 @@ impl PianoRollPane {
|
|||
self.drag_start_time = time;
|
||||
self.drag_start_note = note;
|
||||
|
||||
// Alt+click on a note: start pitch bend drag
|
||||
if alt_held {
|
||||
if let Some(note_idx) = self.hit_test_note(pos, grid_rect, shared, clip_data) {
|
||||
if let Some(clip_id) = self.selected_clip_id {
|
||||
if let Some(events) = shared.midi_event_cache.get(&clip_id) {
|
||||
let resolved = Self::resolve_notes(events);
|
||||
if note_idx < resolved.len() {
|
||||
let n = &resolved[note_idx];
|
||||
// Determine zone from X position within note rect
|
||||
let note_x = self.time_to_x(n.start_time, grid_rect);
|
||||
let note_w = (n.duration as f32 * self.pixels_per_second).max(2.0);
|
||||
let zone = Self::pitch_bend_zone_from_x(pos.x, note_x, note_x + note_w);
|
||||
self.drag_mode = Some(DragMode::PitchBend {
|
||||
note_index: note_idx,
|
||||
zone,
|
||||
note_pitch: n.note,
|
||||
note_channel: n.channel,
|
||||
note_start: n.start_time,
|
||||
note_duration: n.duration,
|
||||
origin_y: pos.y,
|
||||
current_semitones: 0.0, // additive delta; existing bend shown separately
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if clicking on a note edge (resize)
|
||||
if let Some(note_idx) = self.hit_test_note_edge(pos, grid_rect, shared, clip_data) {
|
||||
if let Some(clip_id) = self.selected_clip_id {
|
||||
|
|
@ -971,8 +1322,19 @@ impl PianoRollPane {
|
|||
self.update_selection_from_rect(grid_rect, shared, clip_data);
|
||||
}
|
||||
}
|
||||
Some(DragMode::PitchBend { .. }) => {
|
||||
// Handled below (needs mutable access to self.drag_mode and self.pitch_bend_range)
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
// Pitch bend drag: update current_semitones based on Y movement
|
||||
if let Some(DragMode::PitchBend { ref mut current_semitones, ref mut origin_y, .. }) = self.drag_mode {
|
||||
let range = self.pitch_bend_range;
|
||||
let delta_semitones = (*origin_y - pos.y) / self.note_height;
|
||||
*current_semitones = (*current_semitones + delta_semitones).clamp(-range, range);
|
||||
*origin_y = pos.y;
|
||||
}
|
||||
}
|
||||
|
||||
fn on_grid_release(
|
||||
|
|
@ -1012,6 +1374,84 @@ impl PianoRollPane {
|
|||
self.selection_rect = None;
|
||||
self.update_focus(shared);
|
||||
}
|
||||
Some(DragMode::PitchBend { note_pitch, note_channel, note_start, note_duration, zone, current_semitones, .. }) => {
|
||||
// Only commit if the drag added a meaningful new contribution
|
||||
if current_semitones.abs() >= 0.05 {
|
||||
if let Some(clip_id) = self.selected_clip_id {
|
||||
let range = self.pitch_bend_range;
|
||||
let old_events = shared.midi_event_cache.get(&clip_id).cloned().unwrap_or_default();
|
||||
let mut new_events = old_events.clone();
|
||||
|
||||
// Assign a unique channel to this note so bend only affects it
|
||||
let target_channel = Self::find_or_assign_channel(
|
||||
&new_events, note_start, note_start + note_duration,
|
||||
note_pitch, note_channel,
|
||||
);
|
||||
|
||||
// Re-stamp note-on/off for this specific note if channel changed
|
||||
if target_channel != note_channel {
|
||||
for ev in &mut new_events {
|
||||
let msg = ev.status & 0xF0;
|
||||
let ch = ev.status & 0x0F;
|
||||
if (msg == 0x90 || msg == 0x80) && ev.data1 == note_pitch && ch == note_channel {
|
||||
ev.status = (ev.status & 0xF0) | target_channel;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sample existing bend (normalised -1..1) at each step, then add the
|
||||
// new zone contribution additively and write back as combined events.
|
||||
let num_steps: usize = 128;
|
||||
let pi = std::f32::consts::PI;
|
||||
let existing_norm: Vec<f32> = (0..=num_steps).map(|i| {
|
||||
let t = i as f64 / num_steps as f64;
|
||||
let ts = note_start + t * note_duration;
|
||||
let mut bend = 0.0f32;
|
||||
for ev in &new_events {
|
||||
if ev.timestamp > ts { break; }
|
||||
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == target_channel {
|
||||
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
|
||||
bend = (raw - 8192) as f32 / 8192.0;
|
||||
}
|
||||
}
|
||||
bend
|
||||
}).collect();
|
||||
|
||||
// Remove old bend events in range before writing combined
|
||||
new_events.retain(|ev| {
|
||||
let is_bend = (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == target_channel;
|
||||
let in_range = ev.timestamp >= note_start - 0.001 && ev.timestamp <= note_start + note_duration + 0.01;
|
||||
!(is_bend && in_range)
|
||||
});
|
||||
|
||||
let encode_bend = |normalized: f32| -> (u8, u8) {
|
||||
let v = (normalized * 8191.0 + 8192.0).clamp(0.0, 16383.0) as i16;
|
||||
((v & 0x7F) as u8, ((v >> 7) & 0x7F) as u8)
|
||||
};
|
||||
for i in 0..=num_steps {
|
||||
let t = i as f32 / num_steps as f32;
|
||||
let zone_norm = match zone {
|
||||
PitchBendZone::Start => current_semitones / range * (1.0 + (pi * t).cos()) * 0.5,
|
||||
PitchBendZone::Middle => current_semitones / range * (pi * t).sin(),
|
||||
PitchBendZone::End => current_semitones / range * (1.0 - (pi * t).cos()) * 0.5,
|
||||
};
|
||||
let combined = (existing_norm[i] + zone_norm).clamp(-1.0, 1.0);
|
||||
let (lsb, msb) = encode_bend(combined);
|
||||
let ts = note_start + i as f64 / num_steps as f64 * note_duration;
|
||||
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: ts, status: 0xE0 | target_channel, data1: lsb, data2: msb });
|
||||
}
|
||||
// For End zone: reset just after note ends so it doesn't bleed into next note
|
||||
if zone == PitchBendZone::End {
|
||||
let (lsb, msb) = encode_bend(0.0);
|
||||
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: note_start + note_duration + 0.005, status: 0xE0 | target_channel, data1: lsb, data2: msb });
|
||||
}
|
||||
|
||||
new_events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap_or(std::cmp::Ordering::Equal));
|
||||
self.push_events_action("Set pitch bend", clip_id, old_events, new_events.clone(), shared);
|
||||
shared.midi_event_cache.insert(clip_id, new_events);
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
|
|
@ -1160,12 +1600,12 @@ impl PianoRollPane {
|
|||
/// simple operations unlikely to fail, and undo/redo rebuilds cache from the action's
|
||||
/// stored note data to restore consistency.
|
||||
fn update_cache_from_resolved(clip_id: u32, resolved: &[ResolvedNote], shared: &mut SharedPaneState) {
|
||||
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(resolved.len() * 2);
|
||||
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(resolved.len() * 2);
|
||||
for n in resolved {
|
||||
events.push((n.start_time, n.note, n.velocity, true));
|
||||
events.push((n.start_time + n.duration, n.note, n.velocity, false));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_on(n.start_time, 0, n.note, n.velocity));
|
||||
events.push(daw_backend::audio::midi::MidiEvent::note_off(n.start_time + n.duration, 0, n.note, 0));
|
||||
}
|
||||
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
|
||||
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
|
||||
shared.midi_event_cache.insert(clip_id, events);
|
||||
}
|
||||
|
||||
|
|
@ -1185,6 +1625,7 @@ impl PianoRollPane {
|
|||
|
||||
resolved.push(ResolvedNote {
|
||||
note: temp.note,
|
||||
channel: 0,
|
||||
start_time: temp.start_time,
|
||||
duration: temp.duration,
|
||||
velocity: temp.velocity,
|
||||
|
|
@ -1346,6 +1787,7 @@ impl PianoRollPane {
|
|||
for &(rel_time, note, velocity, duration) in ¬es_to_paste {
|
||||
resolved.push(ResolvedNote {
|
||||
note,
|
||||
channel: 0,
|
||||
start_time: paste_time + rel_time,
|
||||
duration,
|
||||
velocity,
|
||||
|
|
@ -1389,6 +1831,28 @@ impl PianoRollPane {
|
|||
shared.pending_actions.push(Box::new(action));
|
||||
}
|
||||
|
||||
fn push_events_action(
|
||||
&self,
|
||||
description: &str,
|
||||
clip_id: u32,
|
||||
old_events: Vec<daw_backend::audio::midi::MidiEvent>,
|
||||
new_events: Vec<daw_backend::audio::midi::MidiEvent>,
|
||||
shared: &mut SharedPaneState,
|
||||
) {
|
||||
let layer_id = match *shared.active_layer_id {
|
||||
Some(id) => id,
|
||||
None => return,
|
||||
};
|
||||
let action = lightningbeam_core::actions::UpdateMidiEventsAction {
|
||||
layer_id,
|
||||
midi_clip_id: clip_id,
|
||||
old_events,
|
||||
new_events,
|
||||
description_text: description.to_string(),
|
||||
};
|
||||
shared.pending_actions.push(Box::new(action));
|
||||
}
|
||||
|
||||
// ── Note preview ─────────────────────────────────────────────────────
|
||||
|
||||
fn preview_note_on(&mut self, note: u8, velocity: u8, duration: Option<f64>, time: f64, shared: &mut SharedPaneState) {
|
||||
|
|
@ -1664,20 +2128,105 @@ impl PaneRenderer for PianoRollPane {
|
|||
);
|
||||
}
|
||||
|
||||
// Velocity display for selected notes
|
||||
if self.selected_note_indices.len() == 1 {
|
||||
// Velocity + modulation sliders for selected note(s)
|
||||
if !self.selected_note_indices.is_empty() {
|
||||
if let Some(clip_id) = self.selected_clip_id {
|
||||
if let Some(events) = shared.midi_event_cache.get(&clip_id) {
|
||||
let resolved = Self::resolve_notes(events);
|
||||
if let Some(&idx) = self.selected_note_indices.iter().next() {
|
||||
if let Some(events) = shared.midi_event_cache.get(&clip_id).cloned() {
|
||||
let resolved = Self::resolve_notes(&events);
|
||||
// Pick the first selected note as the representative value
|
||||
let first_idx = self.selected_note_indices.iter().copied().next();
|
||||
if let Some(idx) = first_idx {
|
||||
if idx < resolved.len() {
|
||||
ui.separator();
|
||||
let n = &resolved[idx];
|
||||
ui.label(
|
||||
egui::RichText::new(format!("{} vel:{}", Self::note_name(n.note), n.velocity))
|
||||
.color(header_secondary)
|
||||
.size(10.0),
|
||||
|
||||
// ── Velocity ──────────────────────────────
|
||||
ui.separator();
|
||||
ui.label(egui::RichText::new("Vel").color(header_secondary).size(10.0));
|
||||
let vel_resp = ui.add(
|
||||
egui::DragValue::new(&mut self.header_vel)
|
||||
.range(1.0..=127.0)
|
||||
.max_decimals(0)
|
||||
.speed(1.0),
|
||||
);
|
||||
// Commit before syncing so header_vel isn't overwritten first
|
||||
if vel_resp.drag_stopped() || vel_resp.lost_focus() {
|
||||
let new_vel = self.header_vel.round().clamp(1.0, 127.0) as u8;
|
||||
if new_vel != n.velocity {
|
||||
let old_notes = Self::notes_to_backend_format(&resolved);
|
||||
let mut new_resolved = resolved.clone();
|
||||
for &i in &self.selected_note_indices {
|
||||
if i < new_resolved.len() {
|
||||
new_resolved[i].velocity = new_vel;
|
||||
}
|
||||
}
|
||||
let new_notes = Self::notes_to_backend_format(&new_resolved);
|
||||
self.push_update_action("Set velocity", clip_id, old_notes, new_notes, shared, &[]);
|
||||
// Patch the event cache immediately so next frame sees the new velocity
|
||||
if let Some(cached) = shared.midi_event_cache.get_mut(&clip_id) {
|
||||
for &i in &self.selected_note_indices {
|
||||
if i >= resolved.len() { continue; }
|
||||
let sn = &resolved[i];
|
||||
for ev in cached.iter_mut() {
|
||||
if ev.is_note_on() && ev.data1 == sn.note
|
||||
&& (ev.status & 0x0F) == sn.channel
|
||||
&& (ev.timestamp - sn.start_time).abs() < 1e-6
|
||||
{
|
||||
ev.data2 = new_vel;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Sync from note only when idle (not on commit frames)
|
||||
if !vel_resp.dragged() && !vel_resp.has_focus() && !vel_resp.drag_stopped() && !vel_resp.lost_focus() {
|
||||
self.header_vel = n.velocity as f32;
|
||||
}
|
||||
|
||||
// ── Modulation (CC1) ──────────────────────
|
||||
ui.separator();
|
||||
ui.label(egui::RichText::new("Mod").color(header_secondary).size(10.0));
|
||||
let current_cc1 = Self::find_cc1_for_note(&events, n.start_time, n.start_time + n.duration, n.channel);
|
||||
let mod_resp = ui.add(
|
||||
egui::DragValue::new(&mut self.header_mod)
|
||||
.range(0.0..=127.0)
|
||||
.max_decimals(0)
|
||||
.speed(1.0),
|
||||
);
|
||||
// Commit before syncing
|
||||
if mod_resp.drag_stopped() || mod_resp.lost_focus() {
|
||||
let new_cc1 = self.header_mod.round().clamp(0.0, 127.0) as u8;
|
||||
if new_cc1 != current_cc1 {
|
||||
let old_events = events.clone();
|
||||
let mut new_events = events.clone();
|
||||
for &i in &self.selected_note_indices {
|
||||
if i >= resolved.len() { continue; }
|
||||
let sn = &resolved[i];
|
||||
new_events.retain(|ev| {
|
||||
let is_cc1 = (ev.status & 0xF0) == 0xB0
|
||||
&& (ev.status & 0x0F) == sn.channel
|
||||
&& ev.data1 == 1;
|
||||
let at_start = (ev.timestamp - sn.start_time).abs() < 0.001;
|
||||
!(is_cc1 && at_start)
|
||||
});
|
||||
if new_cc1 > 0 {
|
||||
new_events.push(daw_backend::audio::midi::MidiEvent {
|
||||
timestamp: sn.start_time,
|
||||
status: 0xB0 | sn.channel,
|
||||
data1: 1,
|
||||
data2: new_cc1,
|
||||
});
|
||||
}
|
||||
}
|
||||
new_events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap_or(std::cmp::Ordering::Equal));
|
||||
self.push_events_action("Set modulation", clip_id, old_events, new_events.clone(), shared);
|
||||
shared.midi_event_cache.insert(clip_id, new_events);
|
||||
}
|
||||
}
|
||||
// Sync from note only when idle
|
||||
if !mod_resp.dragged() && !mod_resp.has_focus() && !mod_resp.drag_stopped() && !mod_resp.lost_focus() {
|
||||
self.header_mod = current_cc1 as f32;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,10 +7,18 @@ use eframe::egui;
|
|||
use std::path::PathBuf;
|
||||
use super::{NodePath, PaneRenderer, SharedPaneState};
|
||||
|
||||
/// Format of a preset file
|
||||
#[derive(Clone, Copy, PartialEq)]
|
||||
enum PresetFormat {
|
||||
Json,
|
||||
Lbins,
|
||||
}
|
||||
|
||||
/// Metadata extracted from a preset file
|
||||
struct PresetInfo {
|
||||
name: String,
|
||||
path: PathBuf,
|
||||
format: PresetFormat,
|
||||
category: String,
|
||||
description: String,
|
||||
author: String,
|
||||
|
|
@ -120,19 +128,29 @@ impl PresetBrowserPane {
|
|||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
self.scan_directory(&path, base_dir, is_factory);
|
||||
} else if path.extension().is_some_and(|e| e == "json") {
|
||||
if let Some(info) = self.load_preset_info(&path, base_dir, is_factory) {
|
||||
self.presets.push(info);
|
||||
} else {
|
||||
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
|
||||
if ext == "json" || ext == "lbins" {
|
||||
if let Some(info) = self.load_preset_info(&path, base_dir, is_factory) {
|
||||
self.presets.push(info);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Load metadata from a preset JSON file
|
||||
/// Load metadata from a preset file (.json or .lbins)
|
||||
fn load_preset_info(&self, path: &std::path::Path, base_dir: &std::path::Path, is_factory: bool) -> Option<PresetInfo> {
|
||||
let contents = std::fs::read_to_string(path).ok()?;
|
||||
let preset: daw_backend::audio::node_graph::GraphPreset =
|
||||
serde_json::from_str(&contents).ok()?;
|
||||
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
|
||||
let (preset, format) = if ext == "lbins" {
|
||||
let (p, _assets) = daw_backend::audio::node_graph::lbins::load_lbins(path).ok()?;
|
||||
(p, PresetFormat::Lbins)
|
||||
} else {
|
||||
let contents = std::fs::read_to_string(path).ok()?;
|
||||
let p: daw_backend::audio::node_graph::GraphPreset =
|
||||
serde_json::from_str(&contents).ok()?;
|
||||
(p, PresetFormat::Json)
|
||||
};
|
||||
|
||||
// Category = first directory component relative to base_dir
|
||||
let relative = path.strip_prefix(base_dir).ok()?;
|
||||
|
|
@ -144,6 +162,7 @@ impl PresetBrowserPane {
|
|||
Some(PresetInfo {
|
||||
name: preset.metadata.name,
|
||||
path: path.to_path_buf(),
|
||||
format,
|
||||
category,
|
||||
description: preset.metadata.description,
|
||||
author: preset.metadata.author,
|
||||
|
|
@ -189,7 +208,14 @@ impl PresetBrowserPane {
|
|||
|
||||
if let Some(audio_controller) = &shared.audio_controller {
|
||||
let mut controller = audio_controller.lock().unwrap();
|
||||
controller.graph_load_preset(track_id, preset.path.to_string_lossy().to_string());
|
||||
match preset.format {
|
||||
PresetFormat::Json => {
|
||||
controller.graph_load_preset(track_id, preset.path.to_string_lossy().to_string());
|
||||
}
|
||||
PresetFormat::Lbins => {
|
||||
controller.graph_load_lbins(track_id, preset.path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: project_generation is incremented by the GraphPresetLoaded event handler
|
||||
// in main.rs, which fires after the audio thread has actually processed the load.
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue