Add pitch bend support

This commit is contained in:
Skyler Lehmkuhl 2026-03-18 23:11:24 -04:00
parent 4f3da810d0
commit 6b6ae230a1
14 changed files with 891 additions and 199 deletions

View File

@ -992,6 +992,13 @@ impl Engine {
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
}
Command::UpdateMidiClipEvents(_track_id, clip_id, events) => {
// Replace all events in a MIDI clip (used for CC/pitch bend editing)
if let Some(clip) = self.project.midi_clip_pool.get_clip_mut(clip_id) {
clip.events = events;
clip.events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
}
}
Command::RemoveMidiClip(track_id, instance_id) => {
// Remove a MIDI clip instance from a track (for undo/redo support)
let _ = self.project.remove_midi_clip(track_id, instance_id);
@ -2818,6 +2825,40 @@ impl Engine {
};
QueryResponse::GraphIsDefault(is_default)
}
Query::GetPitchBendRange(track_id) => {
use crate::audio::node_graph::nodes::{MidiToCVNode, MultiSamplerNode, VoiceAllocatorNode};
use crate::audio::node_graph::AudioNode;
let range = if let Some(TrackNode::Midi(track)) = self.project.get_track(track_id) {
let graph = &track.instrument_graph;
let mut found = None;
for idx in graph.node_indices() {
if let Some(gn) = graph.get_graph_node(idx) {
if let Some(ms) = gn.node.as_any().downcast_ref::<MultiSamplerNode>() {
found = Some(ms.get_parameter(4)); // PARAM_PITCH_BEND_RANGE
break;
}
// Search inside VoiceAllocator template for MidiToCV
if let Some(va) = gn.node.as_any().downcast_ref::<VoiceAllocatorNode>() {
let tg = va.template_graph();
for tidx in tg.node_indices() {
if let Some(tgn) = tg.get_graph_node(tidx) {
if let Some(mc) = tgn.node.as_any().downcast_ref::<MidiToCVNode>() {
found = Some(mc.get_parameter(0)); // PARAM_PITCH_BEND_RANGE
break;
}
}
}
if found.is_some() { break; }
}
}
}
found.unwrap_or(2.0)
} else {
2.0
};
QueryResponse::PitchBendRange(range)
}
};
// Send response back
@ -3412,6 +3453,11 @@ impl EngineController {
let _ = self.command_tx.push(Command::UpdateMidiClipNotes(track_id, clip_id, notes));
}
/// Replace all events in a MIDI clip (used for CC/pitch bend editing from the piano roll).
///
/// Fire-and-forget: pushes `Command::UpdateMidiClipEvents` onto the engine's
/// command queue. A failed push (queue full) is deliberately ignored —
/// the `let _ =` discards the error rather than blocking the UI thread.
pub fn update_midi_clip_events(&mut self, track_id: TrackId, clip_id: MidiClipId, events: Vec<crate::audio::midi::MidiEvent>) {
let _ = self.command_tx.push(Command::UpdateMidiClipEvents(track_id, clip_id, events));
}
/// Remove a MIDI clip instance from a track (for undo/redo support)
pub fn remove_midi_clip(&mut self, track_id: TrackId, instance_id: MidiClipInstanceId) {
let _ = self.command_tx.push(Command::RemoveMidiClip(track_id, instance_id));
@ -3952,6 +3998,26 @@ impl EngineController {
Err("Query timeout".to_string())
}
/// Query the pitch bend range (in semitones) for the instrument on a MIDI track.
///
/// Sends `Query::GetPitchBendRange` to the engine thread and busy-waits
/// (polling every 50 µs, up to 100 ms) for the matching
/// `QueryResponse::PitchBendRange`.
///
/// Returns 2.0 — the conventional MIDI default — if the query cannot be
/// queued, the track or instrument cannot be found, or the wait times out.
pub fn query_pitch_bend_range(&mut self, track_id: TrackId) -> f32 {
    /// Conventional MIDI pitch-bend range fallback (±2 semitones).
    const DEFAULT_RANGE: f32 = 2.0;

    // `is_err()` instead of `if let Err(_)` (clippy: redundant_pattern_matching).
    // A failed push means the query queue is full; fall back to the default.
    if self.query_tx.push(Query::GetPitchBendRange(track_id)).is_err() {
        return DEFAULT_RANGE;
    }
    let start = std::time::Instant::now();
    let timeout = std::time::Duration::from_millis(100);
    while start.elapsed() < timeout {
        // NOTE(review): any non-matching response popped here is silently
        // discarded — this assumes queries are serialized by the caller,
        // matching the pattern of the other query_* helpers in this file.
        if let Ok(QueryResponse::PitchBendRange(range)) = self.query_response_rx.pop() {
            return range;
        }
        std::thread::sleep(std::time::Duration::from_micros(50));
    }
    DEFAULT_RANGE // timed out waiting for the engine
}
/// Serialize the audio pool for project saving
pub fn serialize_audio_pool(&mut self, project_path: &std::path::Path) -> Result<Vec<crate::audio::pool::AudioPoolEntry>, String> {
// Send query

View File

@ -1,14 +1,19 @@
use crate::audio::midi::MidiEvent;
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
const PARAM_PITCH_BEND_RANGE: u32 = 0;
/// MIDI to CV converter
/// Converts MIDI note events to control voltage signals
pub struct MidiToCVNode {
name: String,
note: u8, // Current MIDI note number
gate: f32, // Gate CV (1.0 when note on, 0.0 when off)
velocity: f32, // Velocity CV (0.0-1.0)
pitch_cv: f32, // Pitch CV (V/Oct: 0V = A4, ±1V per octave)
note: u8, // Current MIDI note number
gate: f32, // Gate CV (1.0 when note on, 0.0 when off)
velocity: f32, // Velocity CV (0.0-1.0)
pitch_cv: f32, // Pitch CV (V/Oct: 0V = A4, ±1V per octave), without bend
pitch_bend_range: f32, // Pitch bend range in semitones (default 2.0)
current_bend: f32, // Current pitch bend, normalised -1.0..=1.0 (0 = centre)
current_mod: f32, // Current modulation (CC1), 0.0..=1.0
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
@ -18,26 +23,41 @@ impl MidiToCVNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
// MIDI input port for receiving MIDI through graph connections
let inputs = vec![
NodePort::new("MIDI In", SignalType::Midi, 0),
NodePort::new("Bend CV", SignalType::CV, 0), // External pitch bend in semitones
NodePort::new("Mod CV", SignalType::CV, 1), // External modulation 0.0..=1.0
];
let outputs = vec![
NodePort::new("V/Oct", SignalType::CV, 0), // V/Oct: 0V = A4, ±1V per octave
NodePort::new("V/Oct", SignalType::CV, 0), // V/Oct: 0V = A4, ±1V per octave (with bend applied)
NodePort::new("Gate", SignalType::CV, 1), // 1.0 = on, 0.0 = off
NodePort::new("Velocity", SignalType::CV, 2), // 0.0-1.0
NodePort::new("Bend", SignalType::CV, 3), // Total pitch bend in semitones (MIDI + CV)
NodePort::new("Mod", SignalType::CV, 4), // Total modulation 0.0..=1.0 (MIDI CC1 + CV)
];
let parameters = vec![
Parameter::new(
PARAM_PITCH_BEND_RANGE,
"Pitch Bend Range",
0.0, 48.0, 2.0,
ParameterUnit::Generic,
),
];
Self {
name,
note: 60, // Middle C
note: 60,
gate: 0.0,
velocity: 0.0,
pitch_cv: Self::midi_note_to_voct(60),
pitch_bend_range: 2.0,
current_bend: 0.0,
current_mod: 0.0,
inputs,
outputs,
parameters: vec![], // No user parameters
parameters,
}
}
@ -48,6 +68,37 @@ impl MidiToCVNode {
// Standard V/Oct: 0V at A4, 1V per octave (12 semitones)
(note as f32 - 69.0) / 12.0
}
/// Update the node's CV state from one raw MIDI event
/// (note on/off, pitch bend, or CC1 modulation).
fn apply_midi_event(&mut self, event: &MidiEvent) {
    match event.status & 0xF0 {
        // Note on (velocity > 0): latch pitch and velocity, open the gate,
        // and clear per-note expression so the previous note's bend/mod
        // doesn't bleed into the new note.
        0x90 if event.data2 > 0 => {
            let note = event.data1;
            self.note = note;
            self.pitch_cv = Self::midi_note_to_voct(note);
            self.velocity = f32::from(event.data2) / 127.0;
            self.gate = 1.0;
            self.current_bend = 0.0;
            self.current_mod = 0.0;
        }
        // Note off, or note on with velocity 0 (running-status note off).
        // Close the gate only if this is the currently sounding note.
        0x80 | 0x90 => {
            if self.note == event.data1 {
                self.gate = 0.0;
            }
        }
        // Pitch bend: 14-bit value (LSB in data1, MSB in data2),
        // centre 8192, normalised to -1.0..=1.0.
        0xE0 => {
            let raw = (i32::from(event.data2) << 7) | i32::from(event.data1);
            self.current_bend = (raw - 8192) as f32 / 8192.0;
        }
        // CC1 (modulation wheel), normalised to 0.0..=1.0.
        0xB0 if event.data1 == 1 => {
            self.current_mod = f32::from(event.data2) / 127.0;
        }
        _ => {}
    }
}
}
impl AudioNode for MidiToCVNode {
@ -67,46 +118,27 @@ impl AudioNode for MidiToCVNode {
&self.parameters
}
fn set_parameter(&mut self, _id: u32, _value: f32) {
// No parameters
fn set_parameter(&mut self, id: u32, value: f32) {
if id == PARAM_PITCH_BEND_RANGE {
self.pitch_bend_range = value.clamp(0.0, 48.0);
}
}
fn get_parameter(&self, _id: u32) -> f32 {
0.0
fn get_parameter(&self, id: u32) -> f32 {
if id == PARAM_PITCH_BEND_RANGE {
self.pitch_bend_range
} else {
0.0
}
}
fn handle_midi(&mut self, event: &MidiEvent) {
let status = event.status & 0xF0;
match status {
0x90 => {
// Note on
if event.data2 > 0 {
// Velocity > 0 means note on
self.note = event.data1;
self.pitch_cv = Self::midi_note_to_voct(self.note);
self.velocity = event.data2 as f32 / 127.0;
self.gate = 1.0;
} else {
// Velocity = 0 means note off
if event.data1 == self.note {
self.gate = 0.0;
}
}
}
0x80 => {
// Note off
if event.data1 == self.note {
self.gate = 0.0;
}
}
_ => {}
}
self.apply_midi_event(event);
}
fn process(
&mut self,
_inputs: &[&[f32]],
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
@ -115,52 +147,56 @@ impl AudioNode for MidiToCVNode {
// Process MIDI events from input buffer
if !midi_inputs.is_empty() {
for event in midi_inputs[0] {
let status = event.status & 0xF0;
match status {
0x90 if event.data2 > 0 => {
// Note on
self.note = event.data1;
self.pitch_cv = Self::midi_note_to_voct(self.note);
self.velocity = event.data2 as f32 / 127.0;
self.gate = 1.0;
}
0x80 | 0x90 => {
// Note off (or note on with velocity 0)
if event.data1 == self.note {
self.gate = 0.0;
}
}
_ => {}
}
self.apply_midi_event(event);
}
}
if outputs.len() < 3 {
if outputs.len() < 5 {
return;
}
// CV signals are mono
// Use split_at_mut to get multiple mutable references
let (pitch_and_rest, rest) = outputs.split_at_mut(1);
let (gate_and_rest, velocity_slice) = rest.split_at_mut(1);
// Read CV inputs (use first sample of buffer). NaN = unconnected port → treat as 0.
let bend_cv = inputs.get(0).and_then(|b| b.first().copied())
.filter(|v| v.is_finite()).unwrap_or(0.0);
let mod_cv = inputs.get(1).and_then(|b| b.first().copied())
.filter(|v| v.is_finite()).unwrap_or(0.0);
let pitch_out = &mut pitch_and_rest[0];
let gate_out = &mut gate_and_rest[0];
let velocity_out = &mut velocity_slice[0];
// Total bend in semitones: MIDI bend + CV bend
let bend_semitones = self.current_bend * self.pitch_bend_range + bend_cv;
// Total mod: MIDI CC1 + CV mod, clamped to 0..1
let total_mod = (self.current_mod + mod_cv).clamp(0.0, 1.0);
// Pitch output includes bend
let pitch_out_val = self.pitch_cv + bend_semitones / 12.0;
// Use split_at_mut to get multiple mutable references
let (v0, rest) = outputs.split_at_mut(1);
let (v1, rest) = rest.split_at_mut(1);
let (v2, rest) = rest.split_at_mut(1);
let (v3, v4_slice) = rest.split_at_mut(1);
let pitch_out = &mut v0[0];
let gate_out = &mut v1[0];
let velocity_out = &mut v2[0];
let bend_out = &mut v3[0];
let mod_out = &mut v4_slice[0];
let frames = pitch_out.len();
// Output constant CV values for the entire buffer
for frame in 0..frames {
pitch_out[frame] = self.pitch_cv;
gate_out[frame] = self.gate;
pitch_out[frame] = pitch_out_val;
gate_out[frame] = self.gate;
velocity_out[frame] = self.velocity;
bend_out[frame] = bend_semitones;
mod_out[frame] = total_mod;
}
}
fn reset(&mut self) {
// Silence the node and clear transient MIDI state (bend/mod).
// `note`, `pitch_cv`, and `pitch_bend_range` are not reset here:
// pitch simply holds its last value, and bend range is a user setting.
self.gate = 0.0;
self.velocity = 0.0;
self.current_bend = 0.0;
self.current_mod = 0.0;
}
fn node_type(&self) -> &str {
@ -174,10 +210,13 @@ impl AudioNode for MidiToCVNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
note: 60, // Reset to middle C
gate: 0.0, // Reset gate
velocity: 0.0, // Reset velocity
pitch_cv: Self::midi_note_to_voct(60), // Reset pitch
note: 60,
gate: 0.0,
velocity: 0.0,
pitch_cv: Self::midi_note_to_voct(60),
pitch_bend_range: self.pitch_bend_range,
current_bend: 0.0,
current_mod: 0.0,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),

View File

@ -6,6 +6,7 @@ const PARAM_GAIN: u32 = 0;
const PARAM_ATTACK: u32 = 1;
const PARAM_RELEASE: u32 = 2;
const PARAM_TRANSPOSE: u32 = 3;
const PARAM_PITCH_BEND_RANGE: u32 = 4;
/// Loop playback mode
#[derive(Clone, Copy, Debug, PartialEq, serde::Serialize, serde::Deserialize)]
@ -201,6 +202,7 @@ struct Voice {
layer_index: usize,
playhead: f32,
note: u8,
channel: u8, // MIDI channel this voice was activated on
velocity: u8,
is_active: bool,
@ -221,11 +223,12 @@ enum EnvelopePhase {
}
impl Voice {
fn new(layer_index: usize, note: u8, velocity: u8) -> Self {
fn new(layer_index: usize, note: u8, channel: u8, velocity: u8) -> Self {
Self {
layer_index,
playhead: 0.0,
note,
channel,
velocity,
is_active: true,
envelope_phase: EnvelopePhase::Attack,
@ -250,9 +253,14 @@ pub struct MultiSamplerNode {
// Parameters
gain: f32,
attack_time: f32, // seconds
release_time: f32, // seconds
transpose: i8, // semitones
attack_time: f32, // seconds
release_time: f32, // seconds
transpose: i8, // semitones
pitch_bend_range: f32, // semitones (default 2.0)
// Live MIDI state
bend_per_channel: [f32; 16], // Pitch bend per MIDI channel; ch0 = global broadcast
current_mod: f32, // MIDI CC1 modulation 0.0..=1.0
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
@ -265,6 +273,8 @@ impl MultiSamplerNode {
let inputs = vec![
NodePort::new("MIDI In", SignalType::Midi, 0),
NodePort::new("Bend CV", SignalType::CV, 0), // External pitch bend in semitones
NodePort::new("Mod CV", SignalType::CV, 1), // External modulation 0.0..=1.0
];
let outputs = vec![
@ -276,6 +286,7 @@ impl MultiSamplerNode {
Parameter::new(PARAM_ATTACK, "Attack", 0.001, 1.0, 0.01, ParameterUnit::Time),
Parameter::new(PARAM_RELEASE, "Release", 0.01, 5.0, 0.1, ParameterUnit::Time),
Parameter::new(PARAM_TRANSPOSE, "Transpose", -24.0, 24.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_PITCH_BEND_RANGE, "Pitch Bend Range", 0.0, 48.0, 2.0, ParameterUnit::Generic),
];
Self {
@ -288,6 +299,9 @@ impl MultiSamplerNode {
attack_time: 0.01,
release_time: 0.1,
transpose: 0,
pitch_bend_range: 2.0,
bend_per_channel: [0.0; 16],
current_mod: 0.0,
inputs,
outputs,
parameters,
@ -478,7 +492,9 @@ impl MultiSamplerNode {
}
/// Trigger a note
fn note_on(&mut self, note: u8, velocity: u8) {
fn note_on(&mut self, note: u8, channel: u8, velocity: u8) {
// Reset per-channel bend on note-on so a previous note's bend doesn't bleed in
self.bend_per_channel[channel as usize] = 0.0;
let transposed_note = (note as i16 + self.transpose as i16).clamp(0, 127) as u8;
if let Some(layer_index) = self.find_layer(transposed_note, velocity) {
@ -496,7 +512,7 @@ impl MultiSamplerNode {
}
});
let voice = Voice::new(layer_index, note, velocity);
let voice = Voice::new(layer_index, note, channel, velocity);
if voice_index < self.voices.len() {
self.voices[voice_index] = voice;
@ -547,6 +563,9 @@ impl AudioNode for MultiSamplerNode {
PARAM_TRANSPOSE => {
self.transpose = value.clamp(-24.0, 24.0) as i8;
}
PARAM_PITCH_BEND_RANGE => {
self.pitch_bend_range = value.clamp(0.0, 48.0);
}
_ => {}
}
}
@ -557,13 +576,14 @@ impl AudioNode for MultiSamplerNode {
PARAM_ATTACK => self.attack_time,
PARAM_RELEASE => self.release_time,
PARAM_TRANSPOSE => self.transpose as f32,
PARAM_PITCH_BEND_RANGE => self.pitch_bend_range,
_ => 0.0,
}
}
fn process(
&mut self,
_inputs: &[&[f32]],
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
@ -582,14 +602,32 @@ impl AudioNode for MultiSamplerNode {
// Process MIDI events
if !midi_inputs.is_empty() {
for event in midi_inputs[0].iter() {
if event.is_note_on() {
self.note_on(event.data1, event.data2);
} else if event.is_note_off() {
self.note_off(event.data1);
let status = event.status & 0xF0;
match status {
_ if event.is_note_on() => self.note_on(event.data1, event.status & 0x0F, event.data2),
_ if event.is_note_off() => self.note_off(event.data1),
0xE0 => {
// Pitch bend: 14-bit value, center = 8192; stored per-channel
let bend_raw = ((event.data2 as i16) << 7) | (event.data1 as i16);
let ch = (event.status & 0x0F) as usize;
self.bend_per_channel[ch] = (bend_raw - 8192) as f32 / 8192.0;
}
0xB0 if event.data1 == 1 => {
// CC1 (modulation wheel)
self.current_mod = event.data2 as f32 / 127.0;
}
_ => {}
}
}
}
// Read CV inputs. NaN = unconnected port → treat as 0.
let bend_cv = inputs.get(0).and_then(|b| b.first().copied())
.filter(|v| v.is_finite()).unwrap_or(0.0);
// Global bend (channel 0) applies to all voices; per-channel bend is added per-voice below.
let global_bend_norm = self.bend_per_channel[0];
let bend_per_channel = self.bend_per_channel;
// Extract parameters needed for processing
let gain = self.gain;
let attack_time = self.attack_time;
@ -607,9 +645,12 @@ impl AudioNode for MultiSamplerNode {
let layer = &self.layers[voice.layer_index];
// Calculate playback speed
// Calculate playback speed (includes pitch bend)
// Channel-0 = global; voice's own channel bend is added on top.
let voice_bend_norm = global_bend_norm + bend_per_channel[voice.channel as usize];
let total_bend_semitones = voice_bend_norm * self.pitch_bend_range + bend_cv;
let semitone_diff = voice.note as i16 - layer.root_key as i16;
let speed = 2.0_f32.powf(semitone_diff as f32 / 12.0);
let speed = 2.0_f32.powf((semitone_diff as f32 + total_bend_semitones) / 12.0);
let speed_adjusted = speed * (layer.sample_rate / sample_rate as f32);
for frame in 0..frames {
@ -765,6 +806,8 @@ impl AudioNode for MultiSamplerNode {
fn reset(&mut self) {
self.voices.clear();
self.bend_per_channel = [0.0; 16];
self.current_mod = 0.0;
}
fn node_type(&self) -> &str {

View File

@ -11,6 +11,7 @@ struct VoiceState {
active: bool,
releasing: bool, // Note-off received, still processing (e.g. ADSR release)
note: u8,
note_channel: u8, // MIDI channel this voice was allocated on (0 = global/unset)
age: u32, // For voice stealing
pending_events: Vec<MidiEvent>, // MIDI events to send to this voice
}
@ -21,6 +22,7 @@ impl VoiceState {
active: false,
releasing: false,
note: 0,
note_channel: 0,
age: 0,
pending_events: Vec::new(),
}
@ -273,6 +275,7 @@ impl AudioNode for VoiceAllocatorNode {
self.voices[voice_idx].active = true;
self.voices[voice_idx].releasing = false;
self.voices[voice_idx].note = event.data1;
self.voices[voice_idx].note_channel = event.status & 0x0F;
self.voices[voice_idx].age = 0;
// Store MIDI event for this voice to process
@ -295,10 +298,12 @@ impl AudioNode for VoiceAllocatorNode {
}
}
_ => {
// Other MIDI events (CC, pitch bend, etc.) - send to all active voices
// Route to matching-channel voices; channel 0 = global broadcast
let event_channel = event.status & 0x0F;
for voice_idx in 0..self.voice_count {
if self.voices[voice_idx].active {
self.voices[voice_idx].pending_events.push(*event);
let voice = &mut self.voices[voice_idx];
if voice.active && (event_channel == 0 || voice.note_channel == event_channel) {
voice.pending_events.push(*event);
}
}
}

View File

@ -2,6 +2,7 @@ use crate::audio::{
AudioClipInstanceId, AutomationLaneId, ClipId, CurveType, MidiClip, MidiClipId,
MidiClipInstanceId, ParameterId, TrackId,
};
use crate::audio::midi::MidiEvent;
use crate::audio::buffer_pool::BufferPoolStats;
use crate::audio::node_graph::nodes::LoopMode;
use crate::io::WaveformPeak;
@ -85,6 +86,8 @@ pub enum Command {
/// Update MIDI clip notes (track_id, clip_id, notes: Vec<(start_time, note, velocity, duration)>)
/// NOTE: May need to switch to individual note operations if this becomes slow on clips with many notes
UpdateMidiClipNotes(TrackId, MidiClipId, Vec<(f64, u8, u8, f64)>),
/// Replace all events in a MIDI clip (track_id, clip_id, events). Used for CC/pitch bend editing.
UpdateMidiClipEvents(TrackId, MidiClipId, Vec<MidiEvent>),
/// Remove a MIDI clip instance from a track (track_id, instance_id) - for undo/redo support
RemoveMidiClip(TrackId, MidiClipInstanceId),
/// Remove an audio clip instance from a track (track_id, instance_id) - for undo/redo support
@ -445,6 +448,9 @@ pub enum Query {
DuplicateMidiClipSync(MidiClipId),
/// Get whether a track's graph is still the auto-generated default
GetGraphIsDefault(TrackId),
/// Get the pitch bend range (in semitones) for the instrument on a MIDI track.
/// Searches for MidiToCVNode (in VA templates) or MultiSamplerNode (direct).
GetPitchBendRange(TrackId),
}
/// Oscilloscope data from a node
@ -522,4 +528,6 @@ pub enum QueryResponse {
MidiClipDuplicated(Result<MidiClipId, String>),
/// Whether a track's graph is the auto-generated default
GraphIsDefault(bool),
/// Pitch bend range in semitones for the track's instrument
PitchBendRange(f32),
}

View File

@ -109,6 +109,17 @@ pub trait Action: Send {
fn midi_notes_after_rollback(&self) -> Option<(u32, &[(f64, u8, u8, f64)])> {
None
}
/// Return full MIDI event data (CC, pitch bend, etc.) reflecting the state after execute/redo.
/// Used to keep the frontend MIDI event cache in sync after undo/redo.
fn midi_events_after_execute(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
None
}
/// Return full MIDI event data reflecting the state after rollback/undo.
fn midi_events_after_rollback(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
None
}
}
/// Action executor that wraps the document and manages undo/redo
@ -280,6 +291,16 @@ impl ActionExecutor {
self.redo_stack.last().and_then(|a| a.midi_notes_after_rollback())
}
/// Get full MIDI event data from the last action on the undo stack (after redo).
pub fn last_undo_midi_events(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
self.undo_stack.last().and_then(|a| a.midi_events_after_execute())
}
/// Get full MIDI event data from the last action on the redo stack (after undo).
pub fn last_redo_midi_events(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
self.redo_stack.last().and_then(|a| a.midi_events_after_rollback())
}
/// Get the description of the next action to redo
pub fn redo_description(&self) -> Option<String> {
self.redo_stack.last().map(|a| a.description())

View File

@ -23,6 +23,7 @@ pub mod rename_folder;
pub mod delete_folder;
pub mod move_asset_to_folder;
pub mod update_midi_notes;
pub mod update_midi_events;
pub mod loop_clip_instances;
pub mod remove_clip_instances;
pub mod set_keyframe;
@ -56,6 +57,7 @@ pub use rename_folder::RenameFolderAction;
pub use delete_folder::{DeleteFolderAction, DeleteStrategy};
pub use move_asset_to_folder::MoveAssetToFolderAction;
pub use update_midi_notes::UpdateMidiNotesAction;
pub use update_midi_events::UpdateMidiEventsAction;
pub use loop_clip_instances::LoopClipInstancesAction;
pub use remove_clip_instances::RemoveClipInstancesAction;
pub use set_keyframe::SetKeyframeAction;

View File

@ -0,0 +1,76 @@
use crate::action::Action;
use crate::document::Document;
use uuid::Uuid;
/// Action to replace all MIDI events in a clip (CC, pitch bend, notes, etc.) with undo/redo.
///
/// Used when editing per-note CC or pitch bend from the piano roll. Stores full
/// `MidiEvent` lists rather than the simplified note-tuple format of `UpdateMidiNotesAction`,
/// so non-note events (CC, pitch bend) survive the round trip. Both the pre- and
/// post-edit lists are captured eagerly, making undo/redo a straightforward swap.
pub struct UpdateMidiEventsAction {
/// Layer containing the MIDI clip
pub layer_id: Uuid,
/// Backend MIDI clip ID
pub midi_clip_id: u32,
/// Full event list before the edit (sent to the backend on undo)
pub old_events: Vec<daw_backend::audio::midi::MidiEvent>,
/// Full event list after the edit (sent to the backend on execute/redo)
pub new_events: Vec<daw_backend::audio::midi::MidiEvent>,
/// Human-readable description shown in the undo/redo menu
pub description_text: String,
}
impl Action for UpdateMidiEventsAction {
    // The frontend document holds no copy of the raw event list, so the
    // document-side execute/rollback steps are no-ops; all real work
    // happens in the *_backend methods below.
    fn execute(&mut self, _document: &mut Document) -> Result<(), String> {
        Ok(())
    }

    fn rollback(&mut self, _document: &mut Document) -> Result<(), String> {
        Ok(())
    }

    fn description(&self) -> String {
        self.description_text.clone()
    }

    /// Push the post-edit event list to the engine (execute / redo).
    fn execute_backend(
        &mut self,
        backend: &mut crate::action::BackendContext,
        _document: &Document,
    ) -> Result<(), String> {
        // A missing audio controller (e.g. audio disabled) is not an error.
        let controller = if let Some(c) = backend.audio_controller.as_mut() {
            c
        } else {
            return Ok(());
        };
        let track_id = *backend
            .layer_to_track_map
            .get(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not mapped to backend track", self.layer_id))?;
        controller.update_midi_clip_events(track_id, self.midi_clip_id, self.new_events.clone());
        Ok(())
    }

    /// Push the pre-edit event list back to the engine (undo).
    fn rollback_backend(
        &mut self,
        backend: &mut crate::action::BackendContext,
        _document: &Document,
    ) -> Result<(), String> {
        let controller = if let Some(c) = backend.audio_controller.as_mut() {
            c
        } else {
            return Ok(());
        };
        let track_id = *backend
            .layer_to_track_map
            .get(&self.layer_id)
            .ok_or_else(|| format!("Layer {} not mapped to backend track", self.layer_id))?;
        controller.update_midi_clip_events(track_id, self.midi_clip_id, self.old_events.clone());
        Ok(())
    }

    // Expose post-execute / post-rollback state so the frontend can keep
    // its MIDI event cache in sync after undo/redo.
    fn midi_events_after_execute(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        Some((self.midi_clip_id, &self.new_events))
    }

    fn midi_events_after_rollback(&self) -> Option<(u32, &[daw_backend::audio::midi::MidiEvent])> {
        Some((self.midi_clip_id, &self.old_events))
    }
}

View File

@ -886,10 +886,9 @@ struct EditorApp {
output_level: (f32, f32),
track_levels: HashMap<daw_backend::TrackId, f32>,
/// Cache for MIDI event data (keyed by backend midi_clip_id)
/// Prevents repeated backend queries for the same MIDI clip
/// Format: (timestamp, note_number, velocity, is_note_on)
midi_event_cache: HashMap<u32, Vec<(f64, u8, u8, bool)>>,
/// Cache for MIDI event data (keyed by backend midi_clip_id).
/// Stores full raw MidiEvents (note on/off, CC, pitch bend, etc.)
midi_event_cache: HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
/// Cache for audio file durations to avoid repeated queries
/// Format: pool_index -> duration in seconds
audio_duration_cache: HashMap<usize, f64>,
@ -3158,10 +3157,16 @@ impl EditorApp {
};
// Rebuild MIDI cache after undo (backend_context dropped, borrows released)
if undo_succeeded {
let midi_update = self.action_executor.last_redo_midi_notes()
.map(|(id, notes)| (id, notes.to_vec()));
if let Some((clip_id, notes)) = midi_update {
self.rebuild_midi_cache_entry(clip_id, &notes);
if let Some((clip_id, events)) = self.action_executor.last_redo_midi_events()
.map(|(id, ev)| (id, ev.to_vec()))
{
self.midi_event_cache.insert(clip_id, events);
} else {
let midi_update = self.action_executor.last_redo_midi_notes()
.map(|(id, notes)| (id, notes.to_vec()));
if let Some((clip_id, notes)) = midi_update {
self.rebuild_midi_cache_entry(clip_id, &notes);
}
}
// Stale vertex/edge/face IDs from before the undo would
// crash selection rendering on the restored (smaller) DCEL.
@ -3196,10 +3201,16 @@ impl EditorApp {
};
// Rebuild MIDI cache after redo (backend_context dropped, borrows released)
if redo_succeeded {
let midi_update = self.action_executor.last_undo_midi_notes()
.map(|(id, notes)| (id, notes.to_vec()));
if let Some((clip_id, notes)) = midi_update {
self.rebuild_midi_cache_entry(clip_id, &notes);
if let Some((clip_id, events)) = self.action_executor.last_undo_midi_events()
.map(|(id, ev)| (id, ev.to_vec()))
{
self.midi_event_cache.insert(clip_id, events);
} else {
let midi_update = self.action_executor.last_undo_midi_notes()
.map(|(id, notes)| (id, notes.to_vec()));
if let Some((clip_id, notes)) = midi_update {
self.rebuild_midi_cache_entry(clip_id, &notes);
}
}
self.selection.clear_dcel_selection();
}
@ -3863,18 +3874,7 @@ impl EditorApp {
// track_id is unused by the query, pass 0
match controller.query_midi_clip(0, clip_id) {
Ok(clip_data) => {
let processed_events: Vec<(f64, u8, u8, bool)> = clip_data.events.iter()
.filter_map(|event| {
let status_type = event.status & 0xF0;
if status_type == 0x90 || status_type == 0x80 {
let is_note_on = status_type == 0x90 && event.data2 > 0;
Some((event.timestamp, event.data1, event.data2, is_note_on))
} else {
None
}
})
.collect();
self.midi_event_cache.insert(clip_id, processed_events);
self.midi_event_cache.insert(clip_id, clip_data.events);
midi_fetched += 1;
}
Err(e) => eprintln!("Failed to fetch MIDI clip {}: {}", clip_id, e),
@ -4013,12 +4013,12 @@ impl EditorApp {
/// Rebuild a MIDI event cache entry from backend note format.
/// Called after undo/redo to keep the cache consistent with the backend.
fn rebuild_midi_cache_entry(&mut self, clip_id: u32, notes: &[(f64, u8, u8, f64)]) {
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2);
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
for &(start_time, note, velocity, duration) in notes {
events.push((start_time, note, velocity, true));
events.push((start_time + duration, note, velocity, false));
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + duration, 0, note, 0));
}
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
self.midi_event_cache.insert(clip_id, events);
}
@ -4037,22 +4037,7 @@ impl EditorApp {
let duration = midi_clip.duration;
let event_count = midi_clip.events.len();
// Process MIDI events to cache format: (timestamp, note_number, velocity, is_note_on)
// Filter to note events only (status 0x90 = note-on, 0x80 = note-off)
let processed_events: Vec<(f64, u8, u8, bool)> = midi_clip.events.iter()
.filter_map(|event| {
let status_type = event.status & 0xF0;
if status_type == 0x90 || status_type == 0x80 {
// Note-on is 0x90 with velocity > 0, Note-off is 0x80 or velocity = 0
let is_note_on = status_type == 0x90 && event.data2 > 0;
Some((event.timestamp, event.data1, event.data2, is_note_on))
} else {
None // Ignore non-note events (CC, pitch bend, etc.)
}
})
.collect();
let note_event_count = processed_events.len();
let processed_events = midi_clip.events.clone();
// Add to backend MIDI clip pool FIRST and get the backend clip ID
if let Some(ref controller_arc) = self.audio_controller {
@ -4067,9 +4052,8 @@ impl EditorApp {
let clip = AudioClip::new_midi(&name, backend_clip_id, duration);
let frontend_clip_id = self.action_executor.document_mut().add_audio_clip(clip);
println!("Imported MIDI '{}' ({:.1}s, {} total events, {} note events) - Frontend ID: {}, Backend ID: {}",
name, duration, event_count, note_event_count, frontend_clip_id, backend_clip_id);
println!("✅ Added MIDI clip to backend pool and cached {} note events", note_event_count);
println!("Imported MIDI '{}' ({:.1}s, {} total events) - Frontend ID: {}, Backend ID: {}",
name, duration, event_count, frontend_clip_id, backend_clip_id);
Some(ImportedAssetInfo {
clip_id: frontend_clip_id,
@ -5212,15 +5196,14 @@ impl eframe::App for EditorApp {
}
// Update midi_event_cache with notes captured so far
// (inlined instead of calling rebuild_midi_cache_entry to avoid
// conflicting &mut self borrow with event_rx loop)
// (inlined to avoid conflicting &mut self borrow)
{
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(notes.len() * 2);
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(notes.len() * 2);
for &(start_time, note, velocity, dur) in &notes {
events.push((start_time, note, velocity, true));
events.push((start_time + dur, note, velocity, false));
events.push(daw_backend::audio::midi::MidiEvent::note_on(start_time, 0, note, velocity));
events.push(daw_backend::audio::midi::MidiEvent::note_off(start_time + dur, 0, note, 0));
}
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
self.midi_event_cache.insert(clip_id, events);
}
ctx.request_repaint();
@ -5234,20 +5217,8 @@ impl eframe::App for EditorApp {
let mut controller = controller_arc.lock().unwrap();
match controller.query_midi_clip(track_id, clip_id) {
Ok(midi_clip_data) => {
// Convert backend MidiEvent format to cache format
let cache_events: Vec<(f64, u8, u8, bool)> = midi_clip_data.events.iter()
.filter_map(|event| {
let status_type = event.status & 0xF0;
if status_type == 0x90 || status_type == 0x80 {
let is_note_on = status_type == 0x90 && event.data2 > 0;
Some((event.timestamp, event.data1, event.data2, is_note_on))
} else {
None
}
})
.collect();
drop(controller);
self.midi_event_cache.insert(clip_id, cache_events);
self.midi_event_cache.insert(clip_id, midi_clip_data.events.clone());
// Update document clip with final duration and name
let midi_layer_id = self.track_to_layer_map.get(&track_id)

View File

@ -372,7 +372,7 @@ fn generate_video_thumbnail(
/// Generate a piano roll thumbnail for MIDI clips
/// Shows notes as horizontal bars with Y position = note % 12 (one octave)
fn generate_midi_thumbnail(
events: &[(f64, u8, u8, bool)], // (timestamp, note_number, velocity, is_note_on)
events: &[daw_backend::audio::midi::MidiEvent],
duration: f64,
bg_color: egui::Color32,
note_color: egui::Color32,
@ -390,10 +390,11 @@ fn generate_midi_thumbnail(
}
// Draw note events
for &(timestamp, note_number, _velocity, is_note_on) in events {
if !is_note_on || timestamp > preview_duration {
for event in events {
if !event.is_note_on() || event.timestamp > preview_duration {
continue;
}
let (timestamp, note_number) = (event.timestamp, event.data1);
let x = ((timestamp / preview_duration) * size as f64) as usize;

View File

@ -1176,14 +1176,16 @@ impl InfopanelPane {
if indices.len() == 1 {
// Single note — show details if we can resolve from the event cache
if let Some(events) = shared.midi_event_cache.get(&midi_clip_id) {
// Events are (time, note, velocity, is_on) — resolve to notes
let mut notes: Vec<(f64, u8, u8, f64)> = Vec::new(); // (time, note, vel, dur)
// Resolve note-on/off pairs to (time, note, vel, dur) tuples
let mut notes: Vec<(f64, u8, u8, f64)> = Vec::new();
let mut pending: std::collections::HashMap<u8, (f64, u8)> = std::collections::HashMap::new();
for &(time, note, vel, is_on) in events {
if is_on {
pending.insert(note, (time, vel));
} else if let Some((start, v)) = pending.remove(&note) {
notes.push((start, note, v, time - start));
for event in events {
if event.is_note_on() {
pending.insert(event.data1, (event.timestamp, event.data2));
} else if event.is_note_off() {
if let Some((start, v)) = pending.remove(&event.data1) {
notes.push((start, event.data1, v, event.timestamp - start));
}
}
}
notes.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

View File

@ -233,7 +233,7 @@ pub struct SharedPaneState<'a> {
/// NOTE: If an action later fails during execution, the cache may be out of sync with the
/// backend. This is acceptable because MIDI note edits are simple and unlikely to fail.
/// Undo/redo rebuilds affected entries from the backend to restore consistency.
pub midi_event_cache: &'a mut std::collections::HashMap<u32, Vec<(f64, u8, u8, bool)>>,
pub midi_event_cache: &'a mut std::collections::HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
/// Audio pool indices that got new raw audio data this frame (for thumbnail invalidation)
pub audio_pools_with_new_waveforms: &'a std::collections::HashSet<usize>,
/// Raw audio samples for GPU waveform rendering (pool_index -> (samples, sample_rate, channels))

View File

@ -27,12 +27,30 @@ const DEFAULT_VELOCITY: u8 = 100;
// ── Types ────────────────────────────────────────────────────────────────────
/// Which portion of a note was alt-clicked to begin a pitch-bend drag.
/// The zone determines the SHAPE of the generated bend curve; the curve
/// itself always spans the full note duration (cosine-ease / sine formulas
/// where the bend events are generated).
#[derive(Debug, Clone, Copy, PartialEq)]
enum PitchBendZone {
Start, // Click in first 30% of note width: curve ramps peak → 0 across the note
Middle, // Click in middle 40%: sine arch 0 → peak → 0, peaking at note center
End, // Click in last 30%: curve ramps 0 → peak across the note
}
/// Current mouse-drag gesture in the piano roll grid.
#[derive(Debug, Clone, Copy, PartialEq)]
enum DragMode {
/// Dragging selected notes (offsets captured at drag start).
MoveNotes { start_time_offset: f64, start_note_offset: i32 },
/// Dragging a note edge to change its duration.
ResizeNote { note_index: usize, original_duration: f64 },
/// Click-dragging on empty grid to create a new note.
CreateNote,
/// Rubber-band rectangle selection.
SelectRect,
/// Alt-drag pitch bend editing on a note
PitchBend {
note_index: usize, // index into the clip's resolved-notes list
zone: PitchBendZone, // curve shape, chosen from the click X position
note_pitch: u8, // MIDI note number of the target note
note_channel: u8, // MIDI channel the note currently lives on
note_start: f64, // note-on timestamp (seconds)
note_duration: f64, // note length (seconds)
origin_y: f32, // last pointer Y; deltas against it accumulate the bend
current_semitones: f32, // accumulated bend delta for this drag, additive on top of any existing bend
},
}
#[derive(Debug, Clone)]
@ -47,6 +65,7 @@ struct TempNote {
#[derive(Debug, Clone)]
struct ResolvedNote {
note: u8,
channel: u8,
start_time: f64,
duration: f64,
velocity: u8,
@ -94,6 +113,11 @@ pub struct PianoRollPane {
// Spectrogram gamma (power curve for colormap)
spectrogram_gamma: f32,
// Instrument pitch bend range in semitones (queried from backend when layer changes)
pitch_bend_range: f32,
// Layer ID for which pitch_bend_range was last queried
pitch_bend_range_layer: Option<uuid::Uuid>,
}
impl PianoRollPane {
@ -123,6 +147,8 @@ impl PianoRollPane {
user_scrolled_since_play: false,
cached_clip_id: None,
spectrogram_gamma: 0.8,
pitch_bend_range: 2.0,
pitch_bend_range_layer: None,
}
}
@ -166,28 +192,33 @@ impl PianoRollPane {
// ── Note resolution ──────────────────────────────────────────────────
fn resolve_notes(events: &[(f64, u8, u8, bool)]) -> Vec<ResolvedNote> {
let mut active: HashMap<u8, (f64, u8)> = HashMap::new(); // note -> (start_time, velocity)
fn resolve_notes(events: &[daw_backend::audio::midi::MidiEvent]) -> Vec<ResolvedNote> {
let mut active: HashMap<u8, (f64, u8, u8)> = HashMap::new(); // note -> (start_time, velocity, channel)
let mut notes = Vec::new();
for &(timestamp, note_number, velocity, is_note_on) in events {
if is_note_on {
active.insert(note_number, (timestamp, velocity));
} else if let Some((start, vel)) = active.remove(&note_number) {
let duration = (timestamp - start).max(MIN_NOTE_DURATION);
notes.push(ResolvedNote {
note: note_number,
start_time: start,
duration,
velocity: vel,
});
for event in events {
let channel = event.status & 0x0F;
if event.is_note_on() {
active.insert(event.data1, (event.timestamp, event.data2, channel));
} else if event.is_note_off() {
if let Some((start, vel, ch)) = active.remove(&event.data1) {
let duration = (event.timestamp - start).max(MIN_NOTE_DURATION);
notes.push(ResolvedNote {
note: event.data1,
channel: ch,
start_time: start,
duration,
velocity: vel,
});
}
}
}
// Handle unterminated notes
for (&note_number, &(start, vel)) in &active {
for (&note_number, &(start, vel, ch)) in &active {
notes.push(ResolvedNote {
note: note_number,
channel: ch,
start_time: start,
duration: 0.5, // default duration for unterminated
velocity: vel,
@ -251,16 +282,51 @@ impl PianoRollPane {
None => return,
};
// Query pitch bend range from backend when the layer changes
if self.pitch_bend_range_layer != Some(layer_id) {
if let Some(track_id) = shared.layer_to_track_map.get(&layer_id) {
if let Some(ctrl) = shared.audio_controller.as_ref() {
if let Ok(mut c) = ctrl.lock() {
self.pitch_bend_range = c.query_pitch_bend_range(*track_id);
}
}
}
self.pitch_bend_range_layer = Some(layer_id);
}
let document = shared.action_executor.document();
// Collect clip data we need before borrowing shared mutably
// Collect clip data using the engine snapshot (source of truth), which reflects
// recorded clips immediately. Falls back to document if snapshot is empty/absent.
let mut clip_data: Vec<(u32, f64, f64, f64, Uuid)> = Vec::new(); // (midi_clip_id, timeline_start, trim_start, duration, instance_id)
if let Some(AnyLayer::Audio(audio_layer)) = document.get_layer(&layer_id) {
for instance in &audio_layer.clip_instances {
if let Some(clip) = document.audio_clips.get(&instance.clip_id) {
if let AudioClipType::Midi { midi_clip_id } = clip.clip_type {
let duration = instance.effective_duration(clip.duration);
clip_data.push((midi_clip_id, instance.timeline_start, instance.trim_start, duration, instance.id));
let snapshot_clips: Option<Vec<daw_backend::audio::midi::MidiClipInstance>> =
shared.clip_snapshot.as_ref().and_then(|arc| {
let snap = arc.read().ok()?;
let track_id = shared.layer_to_track_map.get(&layer_id)?;
snap.midi.get(track_id).cloned()
});
if let Some(midi_instances) = snapshot_clips.filter(|v| !v.is_empty()) {
// Use snapshot data (engine is source of truth)
for mc in &midi_instances {
if let Some((clip_doc_id, _)) = document.audio_clip_by_midi_clip_id(mc.clip_id) {
let clip_doc_id = clip_doc_id; // doc-side AudioClip uuid
let duration = mc.external_duration;
let instance_uuid = Uuid::nil(); // no doc-side instance uuid yet
clip_data.push((mc.clip_id, mc.external_start, mc.internal_start, duration, instance_uuid));
let _ = clip_doc_id; // used above for the if-let pattern
}
}
} else {
// Fall back to document (handles recording-in-progress and pre-snapshot clips)
if let Some(AnyLayer::Audio(audio_layer)) = document.get_layer(&layer_id) {
for instance in &audio_layer.clip_instances {
if let Some(clip) = document.audio_clips.get(&instance.clip_id) {
if let AudioClipType::Midi { midi_clip_id } = clip.clip_type {
let duration = instance.effective_duration(clip.duration);
clip_data.push((midi_clip_id, instance.timeline_start, instance.trim_start, duration, instance.id));
}
}
}
}
@ -337,7 +403,7 @@ impl PianoRollPane {
// Render notes
if let Some(events) = shared.midi_event_cache.get(&midi_clip_id) {
let resolved = Self::resolve_notes(events);
self.render_notes(&grid_painter, grid_rect, &resolved, timeline_start, trim_start, duration, opacity, is_selected);
self.render_notes(&grid_painter, grid_rect, &resolved, events, timeline_start, trim_start, duration, opacity, is_selected, midi_clip_id);
}
}
@ -508,16 +574,159 @@ impl PianoRollPane {
}
}
/// Find the pitch bend value with the largest magnitude (in semitones) among
/// bend events inside a note's time range on the given channel.
///
/// Events are assumed sorted by timestamp: the scan stops once it passes the
/// note's end (with a 10 ms tolerance on both edges). Returns 0.0 when no
/// pitch bend events fall in the range.
fn find_peak_pitch_bend_semitones(
    events: &[daw_backend::audio::midi::MidiEvent],
    note_start: f64,
    note_end: f64,
    channel: u8,
    pitch_bend_range: f32,
) -> f32 {
    let mut peak = 0.0f32;
    for ev in events {
        // Sorted input: nothing after this point can be in range.
        if ev.timestamp > note_end + 0.01 {
            break;
        }
        let is_bend_on_channel =
            (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == channel;
        if is_bend_on_channel && ev.timestamp >= note_start - 0.01 {
            // Decode the 14-bit bend value (data2 = MSB, data1 = LSB),
            // centered at 8192, then scale to semitones.
            let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
            let semitones = ((raw - 8192) as f32 / 8192.0) * pitch_bend_range;
            if semitones.abs() > peak.abs() {
                peak = semitones;
            }
        }
    }
    peak
}
/// Map a click X position within a note rect to a pitch bend zone.
/// The note is split 30% / 40% / 30% into Start / Middle / End zones.
fn pitch_bend_zone_from_x(click_x: f32, note_left: f32, note_right: f32) -> PitchBendZone {
    // Normalised click position within the note; width is floored at 1px so
    // a degenerate (zero-width) note cannot divide by zero.
    let width = (note_right - note_left).max(1.0);
    let rel = (click_x - note_left) / width;
    match rel {
        r if r < 0.3 => PitchBendZone::Start,
        r if r < 0.7 => PitchBendZone::Middle,
        _ => PitchBendZone::End,
    }
}
/// Generate a curve of pitch bend MIDI events spanning a whole note.
///
/// Emits 129 events (128 steps, both endpoints inclusive). The curve shape
/// depends on the zone — all use t in 0..=1 across the full note, matching
/// the visual ghost overlay:
/// - Start:  peak → 0        — (1+cos(πt))/2, smooth decay from t=0
/// - Middle: 0 → peak → 0    — sin(πt) arch, peaking at t=0.5
/// - End:    0 → peak        — (1-cos(πt))/2, smooth rise to t=1
/// Start and End form a partition of unity: applied together at equal
/// amplitude they sum to a perfectly flat line.
fn generate_pitch_bend_events(
    note_start: f64,
    note_duration: f64,
    zone: PitchBendZone,
    semitones: f32,
    channel: u8,
    pitch_bend_range: f32,
) -> Vec<daw_backend::audio::midi::MidiEvent> {
    use daw_backend::audio::midi::MidiEvent;
    const NUM_STEPS: usize = 128;
    let pi = std::f32::consts::PI;
    // Peak amplitude normalised to the instrument's bend range (-1..1).
    let amplitude = semitones / pitch_bend_range;
    // Encode a normalised bend as the 14-bit LSB/MSB pair (centre = 8192).
    let encode_bend = |normalized: f32| -> (u8, u8) {
        let value_14 = (normalized * 8191.0 + 8192.0).clamp(0.0, 16383.0) as i16;
        ((value_14 & 0x7F) as u8, ((value_14 >> 7) & 0x7F) as u8)
    };
    (0..=NUM_STEPS)
        .map(|i| {
            let t = i as f64 / NUM_STEPS as f64;
            let t_f32 = t as f32;
            let normalized = match zone {
                PitchBendZone::Start => amplitude * (1.0 + (pi * t_f32).cos()) * 0.5,
                PitchBendZone::Middle => amplitude * (pi * t_f32).sin(),
                PitchBendZone::End => amplitude * (1.0 - (pi * t_f32).cos()) * 0.5,
            };
            let (lsb, msb) = encode_bend(normalized);
            MidiEvent {
                timestamp: note_start + t * note_duration,
                status: 0xE0 | channel,
                data1: lsb,
                data2: msb,
            }
        })
        .collect()
}
/// Find the lowest available MIDI channel (1-15) not already used by any note
/// overlapping [note_start, note_end], excluding the note being assigned itself.
///
/// Returns the note's current channel unchanged when it is non-zero and no
/// other overlapping note occupies it; falls back to the current channel when
/// channels 1-15 are all taken (more than 15 simultaneous notes).
fn find_or_assign_channel(
    events: &[daw_backend::audio::midi::MidiEvent],
    note_start: f64,
    note_end: f64,
    note_pitch: u8,
    current_channel: u8,
) -> u8 {
    use std::collections::HashMap;
    let mut used = [false; 16];
    // Pair note-ons with note-offs to discover which channels carry notes
    // overlapping the target range. key = (pitch, channel), value = on-time.
    let mut active: HashMap<(u8, u8), f64> = HashMap::new();
    for ev in events {
        let ch = ev.status & 0x0F;
        let msg = ev.status & 0xF0;
        if msg == 0x90 && ev.data2 > 0 {
            active.insert((ev.data1, ch), ev.timestamp);
        } else if msg == 0x80 || (msg == 0x90 && ev.data2 == 0) {
            // A note-on with velocity 0 counts as a note-off.
            let Some(start) = active.remove(&(ev.data1, ch)) else { continue };
            let overlaps = start < note_end && ev.timestamp > note_start;
            let is_self = ev.data1 == note_pitch && ch == current_channel;
            if overlaps && !is_self {
                used[ch as usize] = true;
            }
        }
    }
    // Notes still active at the end of the list (no note-off seen) extend
    // indefinitely, so only their start time matters for overlap.
    for ((pitch, ch), start) in &active {
        let is_self = *pitch == note_pitch && *ch == current_channel;
        if *start < note_end && !is_self {
            used[*ch as usize] = true;
        }
    }
    // Keep the current channel if it is already uniquely assigned and non-zero
    // (channel 0 is the shared default, never considered "assigned").
    if current_channel != 0 && !used[current_channel as usize] {
        return current_channel;
    }
    // Otherwise pick the lowest free channel among 1-15, falling back to the
    // current channel when everything is occupied.
    (1u8..16)
        .find(|&ch| !used[ch as usize])
        .unwrap_or(current_channel)
}
/// Find the CC1 (modulation) value in effect at a note's start.
///
/// Keeps the last CC1 event on the note's channel whose timestamp is at or
/// before `note_start`; the scan stops once events pass `note_end` (input is
/// assumed time-sorted). Returns 0 when no CC1 event applies.
fn find_cc1_for_note(events: &[daw_backend::audio::midi::MidiEvent], note_start: f64, note_end: f64, channel: u8) -> u8 {
    let mut cc1 = 0u8;
    for ev in events {
        if ev.timestamp > note_end {
            break;
        }
        // Control Change (0xB0) on this channel, controller number 1 (mod wheel).
        let is_cc1_on_channel = (ev.status & 0xF0) == 0xB0
            && (ev.status & 0x0F) == channel
            && ev.data1 == 1;
        if is_cc1_on_channel && ev.timestamp <= note_start {
            cc1 = ev.data2;
        }
    }
    cc1
}
fn render_notes(
&self,
painter: &egui::Painter,
grid_rect: Rect,
notes: &[ResolvedNote],
events: &[daw_backend::audio::midi::MidiEvent],
clip_timeline_start: f64,
trim_start: f64,
clip_duration: f64,
opacity: f32,
is_selected_clip: bool,
clip_id: u32,
) {
for (i, note) in notes.iter().enumerate() {
// Skip notes entirely outside the visible trim window
@ -588,6 +797,107 @@ impl PianoRollPane {
if clipped.is_positive() {
painter.rect_filled(clipped, 1.0, color);
painter.rect_stroke(clipped, 1.0, Stroke::new(1.0, Color32::from_rgba_unmultiplied(0, 0, 0, (76.0 * opacity) as u8)), StrokeKind::Middle);
// Modulation (CC1) bar: 3px column on left edge of note, fills from bottom
let cc1 = Self::find_cc1_for_note(events, note.start_time, note.start_time + note.duration, note.channel);
if cc1 > 0 {
let bar_width = 3.0_f32.min(clipped.width());
let bar_height = (cc1 as f32 / 127.0) * clipped.height();
let bar_rect = Rect::from_min_size(
pos2(clipped.min.x, clipped.max.y - bar_height),
vec2(bar_width, bar_height),
);
let bar_alpha = (128.0 * opacity) as u8;
painter.rect_filled(bar_rect, 0.0, Color32::from_rgba_unmultiplied(255, 255, 255, bar_alpha));
}
// Pitch bend ghost overlay — contour-following filled band
// Build a curve of semitone values sampled across the note width.
// For live drag: existing bend + new zone contribution (additive).
// For persisted: sample actual events.
const N_SAMPLES: usize = 24;
let bend_curve: Option<[f32; N_SAMPLES + 1]> =
if let Some(DragMode::PitchBend { note_index: drag_idx, current_semitones, zone, note_channel: drag_ch, .. }) = self.drag_mode {
if drag_idx == i && is_selected_clip && Some(clip_id) == self.selected_clip_id {
let mut curve = [0.0f32; N_SAMPLES + 1];
let pi = std::f32::consts::PI;
for s in 0..=N_SAMPLES {
let t = s as f32 / N_SAMPLES as f32;
// Sample existing bend at this time position
let ts = note.start_time + t as f64 * note.duration;
let mut existing_norm = 0.0f32;
for ev in events {
if ev.timestamp > ts { break; }
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == drag_ch {
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
existing_norm = (raw - 8192) as f32 / 8192.0;
}
}
let existing_semi = existing_norm * self.pitch_bend_range;
// New zone contribution
let zone_semi = match zone {
PitchBendZone::Start => current_semitones * (1.0 + (pi * t).cos()) * 0.5,
PitchBendZone::Middle => current_semitones * (pi * t).sin(),
PitchBendZone::End => current_semitones * (1.0 - (pi * t).cos()) * 0.5,
};
curve[s] = existing_semi + zone_semi;
}
// Only show ghost if there's any meaningful bend at all
if curve.iter().any(|v| v.abs() >= 0.05) {
Some(curve)
} else {
None
}
} else {
None
}
} else {
None
};
// For persisted notes (no live drag), sample actual pitch bend events
let bend_curve = bend_curve.or_else(|| {
let peak = Self::find_peak_pitch_bend_semitones(
events, note.start_time, note.start_time + note.duration,
note.channel, self.pitch_bend_range);
if peak.abs() < 0.05 { return None; }
let mut curve = [0.0f32; N_SAMPLES + 1];
for s in 0..=N_SAMPLES {
let t = s as f64 / N_SAMPLES as f64;
let ts = note.start_time + t * note.duration;
// Find last pitch bend event at or before ts
let mut bend_norm = 0.0f32;
for ev in events {
if ev.timestamp > ts { break; }
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == note.channel {
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
bend_norm = (raw - 8192) as f32 / 8192.0;
}
}
curve[s] = bend_norm * self.pitch_bend_range;
}
Some(curve)
});
if let Some(curve) = bend_curve {
// Draw a stroked curve relative to the note's centerline.
let note_center_y = y + h * 0.5;
// Brighten toward white for visibility
let brighten = |c: u8| -> u8 { (c as u16 + (255 - c as u16) * 3 / 4) as u8 };
let stroke_color = Color32::from_rgba_unmultiplied(
brighten(r), brighten(g), brighten(b), (220.0 * opacity) as u8,
);
let points: Vec<egui::Pos2> = (0..=N_SAMPLES).map(|s| {
let t = s as f32 / N_SAMPLES as f32;
let px = (x + t * w).clamp(grid_rect.min.x, grid_rect.max.x);
let bend_px = (curve[s] * self.note_height)
.clamp(-(grid_rect.height()), grid_rect.height());
let py = (note_center_y - bend_px).clamp(grid_rect.min.y, grid_rect.max.y);
pos2(px, py)
}).collect();
painter.add(egui::Shape::line(points, egui::Stroke::new(3.0, stroke_color)));
}
}
}
}
@ -654,6 +964,7 @@ impl PianoRollPane {
let response = ui.allocate_rect(full_rect, egui::Sense::click_and_drag());
let shift_held = ui.input(|i| i.modifiers.shift);
let ctrl_held = ui.input(|i| i.modifiers.ctrl);
let alt_held = ui.input(|i| i.modifiers.alt);
let now = ui.input(|i| i.time);
// Auto-release preview note after its duration expires.
@ -784,7 +1095,7 @@ impl PianoRollPane {
if full_rect.contains(pos) {
let in_grid = pos.x >= grid_rect.min.x;
if in_grid {
self.on_grid_press(pos, grid_rect, shift_held, ctrl_held, now, shared, clip_data);
self.on_grid_press(pos, grid_rect, shift_held, ctrl_held, alt_held, now, shared, clip_data);
} else {
// Keyboard click - preview note (hold until mouse-up)
let note = self.y_to_note(pos.y, keyboard_rect);
@ -807,10 +1118,14 @@ impl PianoRollPane {
}
// Update cursor
if let Some(hover_pos) = response.hover_pos() {
if matches!(self.drag_mode, Some(DragMode::PitchBend { .. })) {
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeVertical);
} else if let Some(hover_pos) = response.hover_pos() {
if hover_pos.x >= grid_rect.min.x {
if shift_held {
ui.ctx().set_cursor_icon(egui::CursorIcon::Crosshair);
} else if alt_held && self.hit_test_note(hover_pos, grid_rect, shared, clip_data).is_some() {
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeVertical);
} else if self.hit_test_note_edge(hover_pos, grid_rect, shared, clip_data).is_some() {
ui.ctx().set_cursor_icon(egui::CursorIcon::ResizeHorizontal);
} else if self.hit_test_note(hover_pos, grid_rect, shared, clip_data).is_some() {
@ -831,6 +1146,7 @@ impl PianoRollPane {
grid_rect: Rect,
shift_held: bool,
ctrl_held: bool,
alt_held: bool,
now: f64,
shared: &mut SharedPaneState,
clip_data: &[(u32, f64, f64, f64, Uuid)],
@ -841,6 +1157,35 @@ impl PianoRollPane {
self.drag_start_time = time;
self.drag_start_note = note;
// Alt+click on a note: start pitch bend drag
if alt_held {
if let Some(note_idx) = self.hit_test_note(pos, grid_rect, shared, clip_data) {
if let Some(clip_id) = self.selected_clip_id {
if let Some(events) = shared.midi_event_cache.get(&clip_id) {
let resolved = Self::resolve_notes(events);
if note_idx < resolved.len() {
let n = &resolved[note_idx];
// Determine zone from X position within note rect
let note_x = self.time_to_x(n.start_time, grid_rect);
let note_w = (n.duration as f32 * self.pixels_per_second).max(2.0);
let zone = Self::pitch_bend_zone_from_x(pos.x, note_x, note_x + note_w);
self.drag_mode = Some(DragMode::PitchBend {
note_index: note_idx,
zone,
note_pitch: n.note,
note_channel: n.channel,
note_start: n.start_time,
note_duration: n.duration,
origin_y: pos.y,
current_semitones: 0.0, // additive delta; existing bend shown separately
});
return;
}
}
}
}
}
// Check if clicking on a note edge (resize)
if let Some(note_idx) = self.hit_test_note_edge(pos, grid_rect, shared, clip_data) {
if let Some(clip_id) = self.selected_clip_id {
@ -971,8 +1316,19 @@ impl PianoRollPane {
self.update_selection_from_rect(grid_rect, shared, clip_data);
}
}
Some(DragMode::PitchBend { .. }) => {
// Handled below (needs mutable access to self.drag_mode and self.pitch_bend_range)
}
None => {}
}
// Pitch bend drag: update current_semitones based on Y movement
if let Some(DragMode::PitchBend { ref mut current_semitones, ref mut origin_y, .. }) = self.drag_mode {
let range = self.pitch_bend_range;
let delta_semitones = (*origin_y - pos.y) / self.note_height;
*current_semitones = (*current_semitones + delta_semitones).clamp(-range, range);
*origin_y = pos.y;
}
}
fn on_grid_release(
@ -1012,6 +1368,84 @@ impl PianoRollPane {
self.selection_rect = None;
self.update_focus(shared);
}
Some(DragMode::PitchBend { note_pitch, note_channel, note_start, note_duration, zone, current_semitones, .. }) => {
// Only commit if the drag added a meaningful new contribution
if current_semitones.abs() >= 0.05 {
if let Some(clip_id) = self.selected_clip_id {
let range = self.pitch_bend_range;
let old_events = shared.midi_event_cache.get(&clip_id).cloned().unwrap_or_default();
let mut new_events = old_events.clone();
// Assign a unique channel to this note so bend only affects it
let target_channel = Self::find_or_assign_channel(
&new_events, note_start, note_start + note_duration,
note_pitch, note_channel,
);
// Re-stamp note-on/off for this specific note if channel changed
if target_channel != note_channel {
for ev in &mut new_events {
let msg = ev.status & 0xF0;
let ch = ev.status & 0x0F;
if (msg == 0x90 || msg == 0x80) && ev.data1 == note_pitch && ch == note_channel {
ev.status = (ev.status & 0xF0) | target_channel;
}
}
}
// Sample existing bend (normalised -1..1) at each step, then add the
// new zone contribution additively and write back as combined events.
let num_steps: usize = 128;
let pi = std::f32::consts::PI;
let existing_norm: Vec<f32> = (0..=num_steps).map(|i| {
let t = i as f64 / num_steps as f64;
let ts = note_start + t * note_duration;
let mut bend = 0.0f32;
for ev in &new_events {
if ev.timestamp > ts { break; }
if (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == target_channel {
let raw = ((ev.data2 as i16) << 7) | (ev.data1 as i16);
bend = (raw - 8192) as f32 / 8192.0;
}
}
bend
}).collect();
// Remove old bend events in range before writing combined
new_events.retain(|ev| {
let is_bend = (ev.status & 0xF0) == 0xE0 && (ev.status & 0x0F) == target_channel;
let in_range = ev.timestamp >= note_start - 0.001 && ev.timestamp <= note_start + note_duration + 0.01;
!(is_bend && in_range)
});
let encode_bend = |normalized: f32| -> (u8, u8) {
let v = (normalized * 8191.0 + 8192.0).clamp(0.0, 16383.0) as i16;
((v & 0x7F) as u8, ((v >> 7) & 0x7F) as u8)
};
for i in 0..=num_steps {
let t = i as f32 / num_steps as f32;
let zone_norm = match zone {
PitchBendZone::Start => current_semitones / range * (1.0 + (pi * t).cos()) * 0.5,
PitchBendZone::Middle => current_semitones / range * (pi * t).sin(),
PitchBendZone::End => current_semitones / range * (1.0 - (pi * t).cos()) * 0.5,
};
let combined = (existing_norm[i] + zone_norm).clamp(-1.0, 1.0);
let (lsb, msb) = encode_bend(combined);
let ts = note_start + i as f64 / num_steps as f64 * note_duration;
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: ts, status: 0xE0 | target_channel, data1: lsb, data2: msb });
}
// For End zone: reset just after note ends so it doesn't bleed into next note
if zone == PitchBendZone::End {
let (lsb, msb) = encode_bend(0.0);
new_events.push(daw_backend::audio::midi::MidiEvent { timestamp: note_start + note_duration + 0.005, status: 0xE0 | target_channel, data1: lsb, data2: msb });
}
new_events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap_or(std::cmp::Ordering::Equal));
self.push_events_action("Set pitch bend", clip_id, old_events, new_events.clone(), shared);
shared.midi_event_cache.insert(clip_id, new_events);
}
}
}
None => {}
}
@ -1160,12 +1594,12 @@ impl PianoRollPane {
/// simple operations unlikely to fail, and undo/redo rebuilds cache from the action's
/// stored note data to restore consistency.
fn update_cache_from_resolved(clip_id: u32, resolved: &[ResolvedNote], shared: &mut SharedPaneState) {
let mut events: Vec<(f64, u8, u8, bool)> = Vec::with_capacity(resolved.len() * 2);
let mut events: Vec<daw_backend::audio::midi::MidiEvent> = Vec::with_capacity(resolved.len() * 2);
for n in resolved {
events.push((n.start_time, n.note, n.velocity, true));
events.push((n.start_time + n.duration, n.note, n.velocity, false));
events.push(daw_backend::audio::midi::MidiEvent::note_on(n.start_time, 0, n.note, n.velocity));
events.push(daw_backend::audio::midi::MidiEvent::note_off(n.start_time + n.duration, 0, n.note, 0));
}
events.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
events.sort_by(|a, b| a.timestamp.partial_cmp(&b.timestamp).unwrap());
shared.midi_event_cache.insert(clip_id, events);
}
@ -1185,6 +1619,7 @@ impl PianoRollPane {
resolved.push(ResolvedNote {
note: temp.note,
channel: 0,
start_time: temp.start_time,
duration: temp.duration,
velocity: temp.velocity,
@ -1346,6 +1781,7 @@ impl PianoRollPane {
for &(rel_time, note, velocity, duration) in &notes_to_paste {
resolved.push(ResolvedNote {
note,
channel: 0,
start_time: paste_time + rel_time,
duration,
velocity,
@ -1389,6 +1825,28 @@ impl PianoRollPane {
shared.pending_actions.push(Box::new(action));
}
/// Queue an undoable action that replaces a MIDI clip's full event list.
///
/// Used by edits that rewrite raw events (CC / pitch bend) rather than note
/// on/off pairs. Does nothing when no layer is active, since the action needs
/// a layer to attach to.
fn push_events_action(
    &self,
    description: &str,
    clip_id: u32,
    old_events: Vec<daw_backend::audio::midi::MidiEvent>,
    new_events: Vec<daw_backend::audio::midi::MidiEvent>,
    shared: &mut SharedPaneState,
) {
    let Some(layer_id) = *shared.active_layer_id else {
        return;
    };
    shared.pending_actions.push(Box::new(
        lightningbeam_core::actions::UpdateMidiEventsAction {
            layer_id,
            midi_clip_id: clip_id,
            old_events,
            new_events,
            description_text: description.to_string(),
        },
    ));
}
// ── Note preview ─────────────────────────────────────────────────────
fn preview_note_on(&mut self, note: u8, velocity: u8, duration: Option<f64>, time: f64, shared: &mut SharedPaneState) {

View File

@ -1474,7 +1474,7 @@ impl TimelinePane {
painter: &egui::Painter,
clip_rect: egui::Rect,
rect_min_x: f32, // Timeline panel left edge (for proper viewport-relative positioning)
events: &[(f64, u8, u8, bool)], // (timestamp, note_number, velocity, is_note_on)
events: &[daw_backend::audio::midi::MidiEvent],
trim_start: f64,
visible_duration: f64,
timeline_start: f64,
@ -1497,12 +1497,12 @@ impl TimelinePane {
let mut note_rectangles: Vec<(egui::Rect, u8)> = Vec::new();
// First pass: pair note-ons with note-offs to calculate durations
for &(timestamp, note_number, _velocity, is_note_on) in events {
if is_note_on {
// Store note-on timestamp
for event in events {
if event.is_note_on() {
let (note_number, timestamp) = (event.data1, event.timestamp);
active_notes.insert(note_number, timestamp);
} else {
// Note-off: find matching note-on and calculate duration
} else if event.is_note_off() {
let (note_number, timestamp) = (event.data1, event.timestamp);
if let Some(&note_on_time) = active_notes.get(&note_number) {
let duration = timestamp - note_on_time;
@ -2295,7 +2295,7 @@ impl TimelinePane {
active_layer_id: &Option<uuid::Uuid>,
focus: &lightningbeam_core::selection::FocusSelection,
selection: &lightningbeam_core::selection::Selection,
midi_event_cache: &std::collections::HashMap<u32, Vec<(f64, u8, u8, bool)>>,
midi_event_cache: &std::collections::HashMap<u32, Vec<daw_backend::audio::midi::MidiEvent>>,
raw_audio_cache: &std::collections::HashMap<usize, (std::sync::Arc<Vec<f32>>, u32, u32)>,
waveform_gpu_dirty: &mut std::collections::HashSet<usize>,
target_format: wgpu::TextureFormat,