Add custom layouts, piano pane, midi file import

parent c46c28c9bb
commit 3de1b05fb3
@@ -149,6 +149,12 @@ impl Engine {
         }
     }

+    /// Process live MIDI input from all MIDI tracks
+    fn process_live_midi(&mut self, output: &mut [f32]) {
+        // Process all MIDI tracks to handle live input
+        self.project.process_live_midi(output, self.sample_rate, self.channels);
+    }
+
     /// Process audio callback - called from the audio thread
     pub fn process(&mut self, output: &mut [f32]) {
         // Process all pending commands
@@ -203,8 +209,8 @@ impl Engine {
                 self.frames_since_last_event = 0;
             }
         } else {
-            // Not playing, output silence
-            output.fill(0.0);
+            // Not playing, but process live MIDI input
+            self.process_live_midi(output);
         }

         // Process recording if active (independent of playback state)
@@ -263,9 +269,13 @@ impl Engine {
                 self.playing = false;
                 self.playhead = 0;
                 self.playhead_atomic.store(0, Ordering::Relaxed);
+                // Stop all MIDI notes when stopping playback
+                self.project.stop_all_notes();
             }
             Command::Pause => {
                 self.playing = false;
+                // Stop all MIDI notes when pausing playback
+                self.project.stop_all_notes();
             }
             Command::Seek(seconds) => {
                 let samples = (seconds * self.sample_rate as f64 * self.channels as f64) as u64;
@@ -673,6 +683,16 @@ impl Engine {
                 // Notify UI that reset is complete
                 let _ = self.event_tx.push(AudioEvent::ProjectReset);
             }
+
+            Command::SendMidiNoteOn(track_id, note, velocity) => {
+                // Send a live MIDI note on event to the specified track's instrument
+                self.project.send_midi_note_on(track_id, note, velocity);
+            }
+
+            Command::SendMidiNoteOff(track_id, note) => {
+                // Send a live MIDI note off event to the specified track's instrument
+                self.project.send_midi_note_off(track_id, note);
+            }
         }
     }
@@ -1083,4 +1103,14 @@ impl EngineController {
     pub fn reset(&mut self) {
         let _ = self.command_tx.push(Command::Reset);
     }
+
+    /// Send a live MIDI note on event to a track's instrument
+    pub fn send_midi_note_on(&mut self, track_id: TrackId, note: u8, velocity: u8) {
+        let _ = self.command_tx.push(Command::SendMidiNoteOn(track_id, note, velocity));
+    }
+
+    /// Send a live MIDI note off event to a track's instrument
+    pub fn send_midi_note_off(&mut self, track_id: TrackId, note: u8) {
+        let _ = self.command_tx.push(Command::SendMidiNoteOff(track_id, note));
+    }
 }
@@ -406,6 +406,54 @@ impl Project {
             None => {}
         }
     }
+
+    /// Stop all notes on all MIDI tracks
+    pub fn stop_all_notes(&mut self) {
+        for track in self.tracks.values_mut() {
+            if let TrackNode::Midi(midi_track) = track {
+                midi_track.stop_all_notes();
+            }
+        }
+    }
+
+    /// Process live MIDI input from all MIDI tracks (called even when not playing)
+    pub fn process_live_midi(&mut self, output: &mut [f32], sample_rate: u32, channels: u32) {
+        // Process all MIDI tracks to handle queued live input events
+        for track in self.tracks.values_mut() {
+            if let TrackNode::Midi(midi_track) = track {
+                // Process only queued live events, not clips
+                midi_track.process_live_input(output, sample_rate, channels);
+            }
+        }
+    }
+
+    /// Send a live MIDI note on event to a track's instrument
+    pub fn send_midi_note_on(&mut self, track_id: TrackId, note: u8, velocity: u8) {
+        if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
+            // Create a MIDI event and queue it to the instrument
+            let event = crate::audio::midi::MidiEvent {
+                timestamp: 0, // Immediate playback
+                status: 0x90, // Note on
+                data1: note,
+                data2: velocity,
+            };
+            track.instrument.queue_event(event);
+        }
+    }
+
+    /// Send a live MIDI note off event to a track's instrument
+    pub fn send_midi_note_off(&mut self, track_id: TrackId, note: u8) {
+        if let Some(TrackNode::Midi(track)) = self.tracks.get_mut(&track_id) {
+            // Create a MIDI event and queue it to the instrument
+            let event = crate::audio::midi::MidiEvent {
+                timestamp: 0, // Immediate playback
+                status: 0x80, // Note off
+                data1: note,
+                data2: 0,
+            };
+            track.instrument.queue_event(event);
+        }
+    }
 }

 impl Default for Project {
@@ -388,6 +388,33 @@ impl MidiTrack {
         !self.muted && (!any_solo || self.solo)
     }

+    /// Stop all currently playing notes on this track's instrument
+    pub fn stop_all_notes(&mut self) {
+        self.instrument.all_notes_off();
+    }
+
+    /// Process only live MIDI input (queued events) without rendering clips
+    /// This is used when playback is stopped but we want to hear live input
+    pub fn process_live_input(
+        &mut self,
+        output: &mut [f32],
+        sample_rate: u32,
+        channels: u32,
+    ) {
+        // Generate audio from the instrument (which processes queued events)
+        self.instrument.process(output, channels as usize, sample_rate);
+
+        // Apply effect chain
+        for effect in &mut self.effects {
+            effect.process(output, channels as usize, sample_rate);
+        }
+
+        // Apply track volume (no automation during live input)
+        for sample in output.iter_mut() {
+            *sample *= self.volume;
+        }
+    }
+
     /// Render this MIDI track into the output buffer
     pub fn render(
         &mut self,
@@ -108,6 +108,12 @@ pub enum Command {
     // Project commands
     /// Reset the entire project (remove all tracks, clear audio pool, reset state)
     Reset,
+
+    // Live MIDI input commands
+    /// Send a live MIDI note on event to a track's instrument (track_id, note, velocity)
+    SendMidiNoteOn(TrackId, u8, u8),
+    /// Send a live MIDI note off event to a track's instrument (track_id, note)
+    SendMidiNoteOff(TrackId, u8),
 }

 /// Events sent from audio thread back to UI/control thread
@@ -139,4 +145,8 @@ pub enum AudioEvent {
     RecordingError(String),
     /// Project has been reset
     ProjectReset,
+    /// MIDI note started playing (note, velocity)
+    NoteOn(u8, u8),
+    /// MIDI note stopped playing (note)
+    NoteOff(u8),
 }
@@ -192,6 +192,16 @@ impl SimpleSynth {
         self.pending_events.push(event);
     }

+    /// Stop all currently playing notes immediately (no release envelope)
+    pub fn all_notes_off(&mut self) {
+        for voice in &mut self.voices {
+            voice.active = false;
+            voice.envelope_state = EnvelopeState::Off;
+            voice.envelope_level = 0.0;
+        }
+        self.pending_events.clear();
+    }
+
     /// Process all queued events
     fn process_events(&mut self) {
         // Collect events first to avoid borrowing issues
@@ -1,6 +1,6 @@
 use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
 use std::sync::{Arc, Mutex};
-use tauri::{Emitter, Manager};
+use tauri::{Emitter};

 #[derive(serde::Serialize)]
 pub struct AudioFileMetadata {
@@ -11,6 +11,20 @@ pub struct AudioFileMetadata {
     pub waveform: Vec<WaveformPeak>,
 }

+#[derive(serde::Serialize)]
+pub struct MidiNote {
+    pub note: u8,        // MIDI note number (0-127)
+    pub start_time: f64, // Start time in seconds
+    pub duration: f64,   // Note duration in seconds
+    pub velocity: u8,    // Note velocity (0-127)
+}
+
+#[derive(serde::Serialize)]
+pub struct MidiFileMetadata {
+    pub duration: f64,
+    pub notes: Vec<MidiNote>,
+}
+
 pub struct AudioState {
     controller: Option<EngineController>,
     sample_rate: u32,
@@ -55,6 +69,12 @@ impl EventEmitter for TauriEventEmitter {
             AudioEvent::RecordingError(message) => {
                 SerializedAudioEvent::RecordingError { message }
             }
+            AudioEvent::NoteOn(note, velocity) => {
+                SerializedAudioEvent::NoteOn { note, velocity }
+            }
+            AudioEvent::NoteOff(note) => {
+                SerializedAudioEvent::NoteOff { note }
+            }
             _ => return, // Ignore other event types for now
         };

@@ -191,11 +211,19 @@ pub async fn audio_set_track_parameter(
     }
 }

+#[tauri::command]
+pub async fn audio_get_available_instruments() -> Result<Vec<String>, String> {
+    // Return list of available instruments
+    // For now, only SimpleSynth is available
+    Ok(vec!["SimpleSynth".to_string()])
+}
+
 #[tauri::command]
 pub async fn audio_create_track(
     state: tauri::State<'_, Arc<Mutex<AudioState>>>,
     name: String,
     track_type: String,
+    instrument: Option<String>,
 ) -> Result<u32, String> {
     let mut audio_state = state.lock().unwrap();

@@ -206,7 +234,14 @@ pub async fn audio_create_track(
     if let Some(controller) = &mut audio_state.controller {
         match track_type.as_str() {
             "audio" => controller.create_audio_track(name),
-            "midi" => controller.create_midi_track(name),
+            "midi" => {
+                // Validate instrument for MIDI tracks
+                let inst = instrument.unwrap_or_else(|| "SimpleSynth".to_string());
+                if inst != "SimpleSynth" {
+                    return Err(format!("Unknown instrument: {}", inst));
+                }
+                controller.create_midi_track(name)
+            },
             _ => return Err(format!("Unknown track type: {}", track_type)),
         }
         Ok(track_id)
@@ -348,6 +383,130 @@ pub async fn audio_resume_recording(
     }
 }

+#[tauri::command]
+pub async fn audio_create_midi_clip(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    start_time: f64,
+    duration: f64,
+) -> Result<u32, String> {
+    let mut audio_state = state.lock().unwrap();
+    if let Some(controller) = &mut audio_state.controller {
+        controller.create_midi_clip(track_id, start_time, duration);
+        // Return a clip ID (for now, just use 0 as clips are managed internally)
+        Ok(0)
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
+#[tauri::command]
+pub async fn audio_add_midi_note(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    clip_id: u32,
+    time_offset: f64,
+    note: u8,
+    velocity: u8,
+    duration: f64,
+) -> Result<(), String> {
+    let mut audio_state = state.lock().unwrap();
+    if let Some(controller) = &mut audio_state.controller {
+        controller.add_midi_note(track_id, clip_id, time_offset, note, velocity, duration);
+        Ok(())
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
+#[tauri::command]
+pub async fn audio_send_midi_note_on(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    note: u8,
+    velocity: u8,
+) -> Result<(), String> {
+    let mut audio_state = state.lock().unwrap();
+    if let Some(controller) = &mut audio_state.controller {
+        // For now, send to the first MIDI track (track_id 0)
+        // TODO: Make this configurable to select which track to send to
+        controller.send_midi_note_on(track_id, note, velocity);
+        Ok(())
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
+#[tauri::command]
+pub async fn audio_send_midi_note_off(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    note: u8,
+) -> Result<(), String> {
+    let mut audio_state = state.lock().unwrap();
+    if let Some(controller) = &mut audio_state.controller {
+        controller.send_midi_note_off(track_id, note);
+        Ok(())
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
+#[tauri::command]
+pub async fn audio_load_midi_file(
+    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
+    track_id: u32,
+    path: String,
+    start_time: f64,
+) -> Result<MidiFileMetadata, String> {
+    let mut audio_state = state.lock().unwrap();
+
+    // Extract sample_rate before the mutable borrow
+    let sample_rate = audio_state.sample_rate;
+
+    if let Some(controller) = &mut audio_state.controller {
+        // Load and parse the MIDI file
+        let mut clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;
+
+        // Set the start time
+        clip.start_time = start_time;
+        let duration = clip.duration;
+
+        // Extract note data from MIDI events
+        let mut notes = Vec::new();
+        let mut active_notes: std::collections::HashMap<u8, (f64, u8)> = std::collections::HashMap::new();
+
+        for event in &clip.events {
+            let time_seconds = event.timestamp as f64 / sample_rate as f64;
+
+            if event.is_note_on() {
+                // Store note on event (time and velocity)
+                active_notes.insert(event.data1, (time_seconds, event.data2));
+            } else if event.is_note_off() {
+                // Find matching note on and create a MidiNote
+                if let Some((start, velocity)) = active_notes.remove(&event.data1) {
+                    notes.push(MidiNote {
+                        note: event.data1,
+                        start_time: start,
+                        duration: time_seconds - start,
+                        velocity,
+                    });
+                }
+            }
+        }
+
+        // Add the loaded MIDI clip to the track
+        controller.add_loaded_midi_clip(track_id, clip);
+
+        Ok(MidiFileMetadata {
+            duration,
+            notes,
+        })
+    } else {
+        Err("Audio not initialized".to_string())
+    }
+}
+
 #[derive(serde::Serialize, Clone)]
 #[serde(tag = "type")]
 pub enum SerializedAudioEvent {
@@ -356,6 +515,8 @@ pub enum SerializedAudioEvent {
     RecordingProgress { clip_id: u32, duration: f64 },
     RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> },
     RecordingError { message: String },
+    NoteOn { note: u8, velocity: u8 },
+    NoteOff { note: u8 },
 }

 // audio_get_events command removed - events are now pushed via Tauri event system
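For orientation, a minimal sketch of how the frontend can drive these new commands through Tauri's invoke follows. The command names and argument shapes come from the handlers above; the helper itself, the track name, note 60, and the 500 ms timeout are illustrative, and the camelCase argument keys assume Tauri's default snake_case-to-camelCase conversion as used elsewhere in this commit.

// Hypothetical helper exercising the new MIDI commands from the JS side.
const { invoke } = window.__TAURI__.core;

async function previewMidiFile(filePath) {
  // Pick an instrument (currently only "SimpleSynth" is returned).
  const [instrument] = await invoke('audio_get_available_instruments');

  // Create a MIDI track and load the file into it.
  const trackId = await invoke('audio_create_track', {
    name: 'MIDI preview',
    trackType: 'midi',
    instrument,
  });
  const metadata = await invoke('audio_load_midi_file', {
    trackId,
    path: filePath,
    startTime: 0,
  });
  console.log(`Loaded ${metadata.notes.length} notes, ${metadata.duration}s`);

  // Live input: sound middle C immediately, then release it shortly after.
  await invoke('audio_send_midi_note_on', { trackId, note: 60, velocity: 100 });
  setTimeout(() => invoke('audio_send_midi_note_off', { trackId, note: 60 }), 500);
}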
@@ -199,6 +199,7 @@ pub fn run() {
             audio::audio_seek,
             audio::audio_test_beep,
             audio::audio_set_track_parameter,
+            audio::audio_get_available_instruments,
             audio::audio_create_track,
             audio::audio_load_file,
             audio::audio_add_clip,
@@ -207,6 +208,11 @@ pub fn run() {
             audio::audio_stop_recording,
             audio::audio_pause_recording,
             audio::audio_resume_recording,
+            audio::audio_create_midi_clip,
+            audio::audio_add_midi_note,
+            audio::audio_load_midi_file,
+            audio::audio_send_midi_note_on,
+            audio::audio_send_midi_note_off,
         ])
         // .manage(window_counter)
         .build(tauri::generate_context!())
@@ -432,6 +432,95 @@ export const actions = {
       }
     },
   },
+  addMIDI: {
+    create: (filePath, object, midiname) => {
+      redoStack.length = 0;
+      let action = {
+        filePath: filePath,
+        midiname: midiname,
+        trackuuid: uuidv4(),
+        object: object.idx,
+      };
+      undoStack.push({ name: "addMIDI", action: action });
+      actions.addMIDI.execute(action);
+      updateMenu();
+    },
+    execute: async (action) => {
+      // Create new AudioTrack with type='midi' for MIDI files
+      let newMIDITrack = new AudioTrack(action.trackuuid, action.midiname, 'midi');
+      let object = pointerList[action.object];
+
+      // Get available instruments and use the first one (SimpleSynth)
+      const { invoke } = window.__TAURI__.core;
+      const instruments = await invoke('audio_get_available_instruments');
+      const instrument = instruments.length > 0 ? instruments[0] : 'SimpleSynth';
+      newMIDITrack.instrument = instrument;
+
+      // Add placeholder clip immediately so user sees feedback
+      newMIDITrack.clips.push({
+        clipId: 0,
+        name: 'Loading...',
+        startTime: 0,
+        duration: 10,
+        loading: true
+      });
+
+      // Add track to object immediately
+      object.audioTracks.push(newMIDITrack);
+
+      // Update UI to show placeholder
+      updateLayers();
+      if (context.timelineWidget) {
+        context.timelineWidget.requestRedraw();
+      }
+
+      // Load MIDI file asynchronously and update clip
+      try {
+        // Initialize track in backend
+        await newMIDITrack.initializeTrack();
+
+        // Load MIDI file into the track
+        const metadata = await invoke('audio_load_midi_file', {
+          trackId: newMIDITrack.audioTrackId,
+          path: action.filePath,
+          startTime: 0
+        });
+
+        // Replace placeholder clip with real clip including note data
+        newMIDITrack.clips[0] = {
+          clipId: 0,
+          name: action.midiname,
+          startTime: 0,
+          duration: metadata.duration,
+          notes: metadata.notes, // Store MIDI notes for visualization
+          loading: false
+        };
+
+        // Update UI with real clip data
+        updateLayers();
+        if (context.timelineWidget) {
+          context.timelineWidget.requestRedraw();
+        }
+      } catch (error) {
+        console.error('Failed to load MIDI file:', error);
+        // Update clip to show error
+        newMIDITrack.clips[0].name = 'Error loading';
+        newMIDITrack.clips[0].loading = false;
+        if (context.timelineWidget) {
+          context.timelineWidget.requestRedraw();
+        }
+      }
+    },
+    rollback: (action) => {
+      let object = pointerList[action.object];
+      let track = pointerList[action.trackuuid];
+      object.audioTracks.splice(object.audioTracks.indexOf(track), 1);
+      updateLayers();
+      if (context.timelineWidget) {
+        context.timelineWidget.requestRedraw();
+      }
+    },
+  },
   duplicateObject: {
     create: (items) => {
       redoStack.length = 0;
@@ -0,0 +1,105 @@ (new file: piano.svg icon)
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->

<svg
   width="48"
   height="48"
   viewBox="0 0 12.7 12.7"
   version="1.1"
   id="svg1"
   inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
   sodipodi:docname="piano.svg"
   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
   xmlns="http://www.w3.org/2000/svg"
   xmlns:svg="http://www.w3.org/2000/svg">
  <sodipodi:namedview
     id="namedview1"
     pagecolor="#505050"
     bordercolor="#ffffff"
     borderopacity="1"
     inkscape:showpageshadow="0"
     inkscape:pageopacity="0"
     inkscape:pagecheckerboard="1"
     inkscape:deskcolor="#505050"
     inkscape:document-units="mm"
     inkscape:zoom="16.035511"
     inkscape:cx="22.231908"
     inkscape:cy="24.102756"
     inkscape:window-width="2256"
     inkscape:window-height="1432"
     inkscape:window-x="0"
     inkscape:window-y="0"
     inkscape:window-maximized="1"
     inkscape:current-layer="layer1" />
  <defs
     id="defs1">
    <inkscape:path-effect
       effect="fillet_chamfer"
       id="path-effect1"
       is_visible="true"
       lpeversion="1"
       nodesatellites_param="F,0,0,1,0,0.79375001,0,1 @ F,0,0,1,0,0.79375001,0,1 @ F,0,0,1,0,0.79375001,0,1 @ F,0,0,1,0,0.79375001,0,1"
       radius="3"
       unit="px"
       method="auto"
       mode="F"
       chamfer_steps="1"
       flexible="false"
       use_knot_distance="true"
       apply_no_radius="true"
       apply_with_radius="true"
       only_selected="false"
       hide_knots="false" />
  </defs>
  <g
     inkscape:label="Layer 1"
     inkscape:groupmode="layer"
     id="layer1">
    <path
       style="fill:none;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       id="rect1"
       width="10.427897"
       height="10.427897"
       x="1.1219889"
       y="1.1714885"
       inkscape:path-effect="#path-effect1"
       sodipodi:type="rect"
       d="m 1.9157389,1.1714885 h 8.8403971 a 0.79375001,0.79375001 45 0 1 0.79375,0.79375 v 8.8403975 a 0.79375001,0.79375001 135 0 1 -0.79375,0.79375 H 1.9157389 a 0.79375001,0.79375001 45 0 1 -0.79375,-0.79375 l 0,-8.8403975 a 0.79375001,0.79375001 135 0 1 0.79375,-0.79375 z" />
    <path
       style="fill:none;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       d="M 6.3946773,6.6255579 V 11.361012"
       id="path1" />
    <path
       style="fill:none;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       d="M 3.6389113,6.6255579 V 11.361012"
       id="path1-5" />
    <path
       style="fill:none;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       d="M 9.1504432,6.6255589 V 11.361012"
       id="path1-2" />
    <rect
       style="fill:#ffffff;fill-opacity:1;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       id="rect2"
       width="1.6664836"
       height="5.4449468"
       x="5.537518"
       y="1.2044882" />
    <rect
       style="fill:#ffffff;fill-opacity:1;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       id="rect2-7"
       width="1.6664836"
       height="5.4449468"
       x="8.4957237"
       y="1.245263"
       inkscape:transform-center-x="2.557475"
       inkscape:transform-center-y="1.4189861" />
    <rect
       style="fill:#ffffff;fill-opacity:1;stroke:#ffffff;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round"
       id="rect2-6"
       width="1.6664836"
       height="5.4449468"
       x="2.5793126"
       y="1.2770908" />
  </g>
</svg>
@@ -0,0 +1,227 @@ (new file: layoutmanager.js)
// Layout Manager - Handles layout serialization and loading
import { getLayout, getLayoutByName } from "./layouts.js";
import { config } from "./state.js";

/**
 * Builds a UI layout from a layout definition using the same approach as DOMContentLoaded
 * @param {HTMLElement} rootElement - The root container element
 * @param {Object} layoutDef - Layout definition object
 * @param {Object} panes - Panes registry with pane functions
 * @param {Function} createPane - Function to create a pane element
 * @param {Function} splitPane - Function to create a split pane
 */
export function buildLayout(rootElement, layoutDef, panes, createPane, splitPane) {
  if (!layoutDef || !layoutDef.layout) {
    throw new Error("Invalid layout definition");
  }

  // Start by creating the first pane and adding it to root
  const firstPane = buildLayoutNode(layoutDef.layout, panes, createPane);
  rootElement.appendChild(firstPane);

  // Then recursively split it according to the layout definition
  splitLayoutNode(rootElement, layoutDef.layout, panes, createPane, splitPane);
}

/**
 * Creates a pane element for a leaf node (doesn't split anything, just creates the pane)
 * @private
 */
function buildLayoutNode(node, panes, createPane) {
  if (node.type === "pane") {
    if (!node.name || !panes[node.name]) {
      console.warn(`Pane "${node.name}" not found, using placeholder`);
      return createPlaceholderPane(node.name);
    }
    return createPane(panes[node.name]);
  }

  // For grid nodes, find the leftmost/topmost leaf pane
  if (node.type === "horizontal-grid" || node.type === "vertical-grid") {
    return buildLayoutNode(node.children[0], panes, createPane);
  }

  throw new Error(`Unknown node type: ${node.type}`);
}

/**
 * Recursively splits panes according to the layout definition
 * @private
 */
function splitLayoutNode(container, node, panes, createPane, splitPane) {
  if (node.type === "pane") {
    // Leaf node - nothing to split
    return;
  }

  if (node.type === "horizontal-grid" || node.type === "vertical-grid") {
    const isHorizontal = node.type === "horizontal-grid";
    const percent = node.percent || 50;

    // Build the second child pane
    const child2Pane = buildLayoutNode(node.children[1], panes, createPane);

    // Split the container
    const [container1, container2] = splitPane(container, percent, isHorizontal, child2Pane);

    // Recursively split both children
    splitLayoutNode(container1, node.children[0], panes, createPane, splitPane);
    splitLayoutNode(container2, node.children[1], panes, createPane, splitPane);
  }
}

/**
 * Creates a placeholder pane for missing pane types
 * @private
 */
function createPlaceholderPane(paneName) {
  const div = document.createElement("div");
  div.className = "pane panecontainer";
  div.style.display = "flex";
  div.style.alignItems = "center";
  div.style.justifyContent = "center";
  div.style.flexDirection = "column";
  div.style.color = "#888";
  div.style.fontSize = "14px";

  const title = document.createElement("div");
  title.textContent = paneName || "Unknown Pane";
  title.style.fontSize = "18px";
  title.style.marginBottom = "8px";

  const message = document.createElement("div");
  message.textContent = "Coming Soon";

  div.appendChild(title);
  div.appendChild(message);

  return div;
}

/**
 * Serializes the current layout to a layout definition
 * @param {HTMLElement} rootElement - The root element containing the layout
 * @returns {Object} Layout definition object
 */
export function serializeLayout(rootElement) {
  const layoutNode = serializeLayoutNode(rootElement.firstChild);
  return {
    name: "Custom Layout",
    description: "User-created layout",
    layout: layoutNode
  };
}

/**
 * Recursively serializes a layout node
 * @private
 */
function serializeLayoutNode(element) {
  if (!element) {
    throw new Error("Cannot serialize null element");
  }

  // Check if this is a pane
  if (element.classList.contains("pane") && !element.classList.contains("horizontal-grid") && !element.classList.contains("vertical-grid")) {
    // Extract pane name from the element (stored in data attribute or class)
    const paneName = element.getAttribute("data-pane-name") || "stage";
    return {
      type: "pane",
      name: paneName
    };
  }

  // Check if this is a grid
  if (element.classList.contains("horizontal-grid") || element.classList.contains("vertical-grid")) {
    const isHorizontal = element.classList.contains("horizontal-grid");
    const percent = parseFloat(element.getAttribute("lb-percent")) || 50;

    if (element.children.length !== 2) {
      throw new Error("Grid must have exactly 2 children");
    }

    return {
      type: isHorizontal ? "horizontal-grid" : "vertical-grid",
      percent: percent,
      children: [
        serializeLayoutNode(element.children[0]),
        serializeLayoutNode(element.children[1])
      ]
    };
  }

  // If element has only one child, recurse into it
  if (element.children.length === 1) {
    return serializeLayoutNode(element.children[0]);
  }

  throw new Error(`Cannot serialize element: ${element.className}`);
}

/**
 * Loads a layout by key or name
 * @param {string} keyOrName - Layout key or name
 * @returns {Object|null} Layout definition or null if not found
 */
export function loadLayoutByKeyOrName(keyOrName) {
  // First try as a key
  let layout = getLayout(keyOrName);

  // If not found, try as a name
  if (!layout) {
    layout = getLayoutByName(keyOrName);
  }

  // If still not found, check custom layouts
  if (!layout && config.customLayouts) {
    layout = config.customLayouts.find(l => l.name === keyOrName);
  }

  return layout;
}

/**
 * Saves a custom layout
 * @param {string} name - Name for the custom layout
 * @param {Object} layoutDef - Layout definition
 */
export function saveCustomLayout(name, layoutDef) {
  if (!config.customLayouts) {
    config.customLayouts = [];
  }

  // Check if layout with this name already exists
  const existingIndex = config.customLayouts.findIndex(l => l.name === name);

  const customLayout = {
    ...layoutDef,
    name: name,
    custom: true
  };

  if (existingIndex >= 0) {
    // Update existing
    config.customLayouts[existingIndex] = customLayout;
  } else {
    // Add new
    config.customLayouts.push(customLayout);
  }
}

/**
 * Deletes a custom layout
 * @param {string} name - Name of the layout to delete
 * @returns {boolean} True if deleted, false if not found
 */
export function deleteCustomLayout(name) {
  if (!config.customLayouts) {
    return false;
  }

  const index = config.customLayouts.findIndex(l => l.name === name);
  if (index >= 0) {
    config.customLayouts.splice(index, 1);
    return true;
  }
  return false;
}
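To show how these pieces are intended to fit together, a small usage sketch follows. The panes registry, createPane, and splitPane are assumed to be supplied by the host application, and the preset key and workspace name are illustrative.

// Hypothetical wiring: build a preset into the app root, then persist the
// user's current arrangement as a named custom layout.
import { buildLayout, serializeLayout, saveCustomLayout } from './layoutmanager.js';
import { getLayout } from './layouts.js';

function applyPreset(rootElement, panes, createPane, splitPane) {
  const preset = getLayout('audioDaw');      // built-in "Audio/DAW" preset
  rootElement.replaceChildren();             // assumes the caller clears the old panes first
  buildLayout(rootElement, preset, panes, createPane, splitPane);
}

function rememberCurrentLayout(rootElement) {
  const def = serializeLayout(rootElement);  // walks .pane / *-grid elements
  saveCustomLayout('My workspace', def);     // stored on config.customLayouts
}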
@@ -0,0 +1,259 @@ (new file: layouts.js)
// Layout definitions for Lightningbeam
// Each layout defines a workspace preset optimized for different workflows

/**
 * Layout Schema:
 * {
 *   name: string,
 *   description: string,
 *   layout: {
 *     type: "pane" | "horizontal-grid" | "vertical-grid",
 *     name?: string,      // for type="pane"
 *     percent?: number,   // for grid types (split percentage)
 *     children?: [LayoutNode, LayoutNode] // for grid types
 *   }
 * }
 */

export const defaultLayouts = {
  animation: {
    name: "Animation",
    description: "Drawing tools, timeline, and layers front and center",
    layout: {
      type: "horizontal-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "vertical-grid",
          percent: 70,
          children: [
            {
              type: "vertical-grid",
              percent: 30,
              children: [
                { type: "pane", name: "timelineV2" },
                { type: "pane", name: "stage" }
              ]
            },
            { type: "pane", name: "infopanel" }
          ]
        }
      ]
    }
  },

  videoEditing: {
    name: "Video Editing",
    description: "Clip timeline, source monitor, and effects panel",
    layout: {
      type: "vertical-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "vertical-grid",
          percent: 65,
          children: [
            {
              type: "horizontal-grid",
              percent: 50,
              children: [
                { type: "pane", name: "stage" },
                { type: "pane", name: "infopanel" }
              ]
            },
            { type: "pane", name: "timelineV2" }
          ]
        }
      ]
    }
  },

  audioDaw: {
    name: "Audio/DAW",
    description: "Audio tracks prominent with mixer and node editor",
    layout: {
      type: "horizontal-grid",
      percent: 75,
      children: [
        {
          type: "vertical-grid",
          percent: 70,
          children: [
            { type: "pane", name: "timelineV2" },
            { type: "pane", name: "piano" }
          ]
        },
        { type: "pane", name: "infopanel" }
      ]
    }
  },

  scripting: {
    name: "Scripting",
    description: "Code editor, object hierarchy, and console",
    layout: {
      type: "vertical-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "horizontal-grid",
          percent: 70,
          children: [
            {
              type: "vertical-grid",
              percent: 50,
              children: [
                { type: "pane", name: "stage" },
                { type: "pane", name: "timelineV2" }
              ]
            },
            {
              type: "vertical-grid",
              percent: 50,
              children: [
                { type: "pane", name: "infopanel" },
                { type: "pane", name: "outlineer" }
              ]
            }
          ]
        }
      ]
    }
  },

  rigging: {
    name: "Rigging",
    description: "Viewport focused with bone controls and weight painting",
    layout: {
      type: "vertical-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "horizontal-grid",
          percent: 75,
          children: [
            { type: "pane", name: "stage" },
            {
              type: "vertical-grid",
              percent: 50,
              children: [
                { type: "pane", name: "infopanel" },
                { type: "pane", name: "timelineV2" }
              ]
            }
          ]
        }
      ]
    }
  },

  threeD: {
    name: "3D",
    description: "3D viewport, camera controls, and lighting panel",
    layout: {
      type: "vertical-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "horizontal-grid",
          percent: 70,
          children: [
            {
              type: "vertical-grid",
              percent: 70,
              children: [
                { type: "pane", name: "stage" },
                { type: "pane", name: "timelineV2" }
              ]
            },
            { type: "pane", name: "infopanel" }
          ]
        }
      ]
    }
  },

  drawingPainting: {
    name: "Drawing/Painting",
    description: "Minimal UI - just canvas and drawing tools",
    layout: {
      type: "vertical-grid",
      percent: 8,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "horizontal-grid",
          percent: 85,
          children: [
            { type: "pane", name: "stage" },
            {
              type: "vertical-grid",
              percent: 70,
              children: [
                { type: "pane", name: "infopanel" },
                { type: "pane", name: "timelineV2" }
              ]
            }
          ]
        }
      ]
    }
  },

  shaderEditor: {
    name: "Shader Editor",
    description: "Split between viewport preview and code editor",
    layout: {
      type: "vertical-grid",
      percent: 10,
      children: [
        { type: "pane", name: "toolbar" },
        {
          type: "horizontal-grid",
          percent: 50,
          children: [
            { type: "pane", name: "stage" },
            {
              type: "vertical-grid",
              percent: 60,
              children: [
                { type: "pane", name: "infopanel" },
                { type: "pane", name: "timelineV2" }
              ]
            }
          ]
        }
      ]
    }
  }
};

// Get all layout names
export function getLayoutNames() {
  return Object.keys(defaultLayouts);
}

// Get a layout by key
export function getLayout(key) {
  return defaultLayouts[key];
}

// Get a layout by name
export function getLayoutByName(name) {
  for (const [key, layout] of Object.entries(defaultLayouts)) {
    if (layout.name === name) {
      return layout;
    }
  }
  return null;
}

// Check if a layout exists
export function layoutExists(key) {
  return key in defaultLayouts;
}
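As a worked example of the schema documented at the top of this file, the following sketch defines a hypothetical two-row preset using the new piano pane and queries the built-in presets; the pianoPractice key and its names are made up for illustration.

import { getLayoutByName, layoutExists } from './layouts.js';

// Toolbar strip on top, then stage over the virtual piano.
const pianoPractice = {
  name: 'Piano Practice',
  description: 'Stage on top, virtual piano below',
  layout: {
    type: 'vertical-grid',
    percent: 10,
    children: [
      { type: 'pane', name: 'toolbar' },
      {
        type: 'vertical-grid',
        percent: 70,
        children: [
          { type: 'pane', name: 'stage' },
          { type: 'pane', name: 'piano' },
        ],
      },
    ],
  },
};

console.log(layoutExists('audioDaw'));                      // true - built-in preset
console.log(getLayoutByName('Audio/DAW').layout.percent);   // 75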
src/main.js (502 changed lines)
@@ -61,7 +61,7 @@ import {
   shadow,
 } from "./styles.js";
 import { Icon } from "./icon.js";
-import { AlphaSelectionBar, ColorSelectorWidget, ColorWidget, HueSelectionBar, SaturationValueSelectionGradient, TimelineWindow, TimelineWindowV2, Widget } from "./widgets.js";
+import { AlphaSelectionBar, ColorSelectorWidget, ColorWidget, HueSelectionBar, SaturationValueSelectionGradient, TimelineWindow, TimelineWindowV2, VirtualPiano, Widget } from "./widgets.js";

 // State management
 import {
@@ -102,6 +102,10 @@ import {
 import { createRoot } from "./models/root.js";
 import { actions, initializeActions } from "./actions/index.js";

+// Layout system
+import { defaultLayouts, getLayout, getLayoutNames } from "./layouts.js";
+import { buildLayout, loadLayoutByKeyOrName, saveCustomLayout } from "./layoutmanager.js";
+
 const {
   writeTextFile: writeTextFile,
   readTextFile: readTextFile,
@@ -673,7 +677,44 @@ initializeGraphicsObjectDependencies({

 // ============ ROOT OBJECT INITIALIZATION ============
 // Extracted to: models/root.js
-let root = createRoot();
+let _rootInternal = createRoot();
+console.log('[INIT] Setting root.frameRate to config.framerate:', config.framerate);
+_rootInternal.frameRate = config.framerate;
+console.log('[INIT] root.frameRate is now:', _rootInternal.frameRate);
+
+// Make root a global variable with getter/setter to catch reassignments
+let __root = new Proxy(_rootInternal, {
+  get(target, prop) {
+    return Reflect.get(target, prop);
+  },
+  set(target, prop, value) {
+    return Reflect.set(target, prop, value);
+  }
+});
+
+Object.defineProperty(globalThis, 'root', {
+  get() {
+    return __root;
+  },
+  set(newRoot) {
+    console.error('[ROOT REPLACED] root is being replaced!');
+    console.error('[ROOT REPLACED] Old root idx:', __root?.idx, 'New root idx:', newRoot?.idx);
+    console.trace('[ROOT REPLACED] Stack trace:');
+    __root = newRoot;
+  },
+  configurable: true,
+  enumerable: true
+});
+
+// Set up a watchdog to monitor root.frameRate
+setInterval(() => {
+  if (root && root.frameRate === undefined) {
+    console.error('[WATCHDOG] root.frameRate is undefined!');
+    console.error('[WATCHDOG] root object idx:', root.idx);
+    console.error('[WATCHDOG] Has frameRate property?', 'frameRate' in root);
+    console.trace('[WATCHDOG] Stack trace:');
+  }
+}, 1000);

 async function greet() {
   // Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
@@ -1004,7 +1045,7 @@ function playbackLoop() {

 // Single-step forward by one frame/second
 function advance() {
-  if (context.timelineWidget?.timelineState?.mode === "frames") {
+  if (context.timelineWidget?.timelineState?.timeFormat === "frames") {
     context.activeObject.currentTime += 1 / context.activeObject.frameRate;
   } else {
     context.activeObject.currentTime += 1;
@@ -1026,6 +1067,66 @@ function advance() {
   }
 }

+// Calculate which MIDI notes are currently playing at a given time (efficient binary search)
+function getPlayingNotesAtTime(time) {
+  const playingNotes = [];
+
+  // Check all MIDI tracks
+  for (const track of context.activeObject.audioTracks) {
+    if (track.type !== 'midi') continue;
+
+    // Check all clips in the track
+    for (const clip of track.clips) {
+      if (!clip.notes || clip.notes.length === 0) continue;
+
+      // Check if current time is within the clip's range
+      const clipLocalTime = time - clip.startTime;
+      if (clipLocalTime < 0 || clipLocalTime > clip.duration) {
+        continue;
+      }
+
+      // Binary search to find the first note that might be playing
+      // Notes are sorted by start_time
+      let left = 0;
+      let right = clip.notes.length - 1;
+      let firstCandidate = clip.notes.length;
+
+      while (left <= right) {
+        const mid = Math.floor((left + right) / 2);
+        const note = clip.notes[mid];
+        const noteEndTime = note.start_time + note.duration;
+
+        if (noteEndTime <= clipLocalTime) {
+          // This note ends before current time, search right
+          left = mid + 1;
+        } else {
+          // This note might be playing or starts after current time
+          firstCandidate = mid;
+          right = mid - 1;
+        }
+      }
+
+      // Check notes from firstCandidate onwards until we find one that starts after current time
+      for (let i = firstCandidate; i < clip.notes.length; i++) {
+        const note = clip.notes[i];
+
+        // If note starts after current time, we're done with this clip
+        if (note.start_time > clipLocalTime) {
+          break;
+        }
+
+        // Check if note is currently playing
+        const noteEndTime = note.start_time + note.duration;
+        if (note.start_time <= clipLocalTime && clipLocalTime < noteEndTime) {
+          playingNotes.push(note.note);
+        }
+      }
+    }
+  }
+
+  return playingNotes;
+}
+
 // Handle audio events pushed from Rust via Tauri event system
 async function handleAudioEvent(event) {
   switch (event.type) {
@@ -1033,11 +1134,16 @@ async function handleAudioEvent(event) {
       // Sync frontend time with DAW time
       if (playing) {
         // Quantize time to framerate for animation playback
+        console.log('[PlaybackPosition] context.activeObject:', context.activeObject, 'root:', root, 'same?', context.activeObject === root);
+        console.log('[PlaybackPosition] root.frameRate:', root.frameRate, 'activeObject.frameRate:', context.activeObject.frameRate);
         const framerate = context.activeObject.frameRate;
+        console.log('[PlaybackPosition] framerate:', framerate, 'event.time:', event.time, 'currentTime before:', context.activeObject.currentTime);
         const frameDuration = 1 / framerate;
         const quantizedTime = Math.floor(event.time / frameDuration) * frameDuration;
+        console.log('[PlaybackPosition] frameDuration:', frameDuration, 'quantizedTime:', quantizedTime);

         context.activeObject.currentTime = quantizedTime;
+        console.log('[PlaybackPosition] currentTime after:', context.activeObject.currentTime);
         if (context.timelineWidget?.timelineState) {
           context.timelineWidget.timelineState.currentTime = quantizedTime;
         }
@@ -1045,6 +1151,13 @@ async function handleAudioEvent(event) {
         if (context.updateTimeDisplay) {
           context.updateTimeDisplay();
         }
+
+        // Update piano widget with currently playing notes
+        if (context.pianoWidget && context.pianoRedraw) {
+          const playingNotes = getPlayingNotesAtTime(quantizedTime);
+          context.pianoWidget.setPlayingNotes(playingNotes);
+          context.pianoRedraw();
+        }
       }
       break;

@@ -1156,7 +1269,7 @@ async function finalizeRecording(clipId, poolIndex, waveform) {

 // Single-step backward by one frame/second
 function rewind() {
-  if (context.timelineWidget?.timelineState?.mode === "frames") {
+  if (context.timelineWidget?.timelineState?.timeFormat === "frames") {
     context.activeObject.currentTime -= 1 / context.activeObject.frameRate;
   } else {
     context.activeObject.currentTime -= 1;
@ -1279,21 +1392,67 @@ function newWindow(path) {
|
||||||
invoke("create_window", {app: window.__TAURI__.app, path: path})
|
invoke("create_window", {app: window.__TAURI__.app, path: path})
|
||||||
}
|
}
|
||||||
|
|
||||||
function _newFile(width, height, fps) {
|
function _newFile(width, height, fps, layoutKey) {
|
||||||
|
console.log('[_newFile] REPLACING ROOT - Creating new file with fps:', fps, 'layout:', layoutKey);
|
||||||
|
console.trace('[_newFile] Stack trace for root replacement:');
|
||||||
|
|
||||||
|
const oldRoot = root;
|
||||||
|
console.log('[_newFile] Old root:', oldRoot, 'frameRate:', oldRoot?.frameRate);
|
||||||
|
|
||||||
root = new GraphicsObject("root");
|
root = new GraphicsObject("root");
|
||||||
|
|
||||||
|
// Switch to the selected layout if provided
|
||||||
|
if (layoutKey) {
|
||||||
|
config.currentLayout = layoutKey;
|
||||||
|
config.defaultLayout = layoutKey;
|
||||||
|
console.log('[_newFile] Switching to layout:', layoutKey);
|
||||||
|
switchLayout(layoutKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Define frameRate as a non-configurable property with a backing variable
|
||||||
|
let _frameRate = fps;
|
||||||
|
Object.defineProperty(root, 'frameRate', {
|
||||||
|
get() {
|
||||||
|
return _frameRate;
|
||||||
|
},
|
||||||
|
set(value) {
|
||||||
|
console.log('[frameRate setter] Setting frameRate to:', value, 'from:', _frameRate);
|
||||||
|
console.trace('[frameRate setter] Stack trace:');
|
||||||
|
_frameRate = value;
|
||||||
|
},
|
||||||
|
enumerable: true,
|
||||||
|
configurable: false
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log('[_newFile] Immediately after setting frameRate:', root.frameRate);
|
||||||
|
console.log('[_newFile] Checking if property exists:', 'frameRate' in root);
|
||||||
|
console.log('[_newFile] Property descriptor:', Object.getOwnPropertyDescriptor(root, 'frameRate'));
|
||||||
|
|
||||||
|
console.log('[_newFile] New root:', root, 'frameRate:', root.frameRate);
|
||||||
|
console.log('[_newFile] After setting, root.frameRate:', root.frameRate);
|
||||||
|
console.log('[_newFile] root object:', root);
|
||||||
|
console.log('[_newFile] Before objectStack - root.frameRate:', root.frameRate);
|
||||||
context.objectStack = [root];
|
context.objectStack = [root];
|
||||||
|
console.log('[_newFile] After objectStack - root.frameRate:', root.frameRate);
|
||||||
context.selection = [];
|
context.selection = [];
|
||||||
context.shapeselection = [];
|
context.shapeselection = [];
|
||||||
config.fileWidth = width;
|
config.fileWidth = width;
|
||||||
config.fileHeight = height;
|
config.fileHeight = height;
|
||||||
config.framerate = fps;
|
config.framerate = fps;
|
||||||
filePath = undefined;
|
filePath = undefined;
|
||||||
|
console.log('[_newFile] Before saveConfig - root.frameRate:', root.frameRate);
|
||||||
saveConfig();
|
saveConfig();
|
||||||
|
console.log('[_newFile] After saveConfig - root.frameRate:', root.frameRate);
|
||||||
undoStack.length = 0; // Clear without breaking reference
|
undoStack.length = 0; // Clear without breaking reference
|
||||||
redoStack.length = 0; // Clear without breaking reference
|
redoStack.length = 0; // Clear without breaking reference
|
||||||
|
console.log('[_newFile] Before updateUI - root.frameRate:', root.frameRate);
|
||||||
updateUI();
|
updateUI();
|
||||||
|
console.log('[_newFile] After updateUI - root.frameRate:', root.frameRate);
|
||||||
updateLayers();
|
updateLayers();
|
||||||
|
console.log('[_newFile] After updateLayers - root.frameRate:', root.frameRate);
|
||||||
updateMenu();
|
updateMenu();
|
||||||
|
console.log('[_newFile] After updateMenu - root.frameRate:', root.frameRate);
|
||||||
|
console.log('[_newFile] At end of _newFile, root.frameRate:', root.frameRate);
|
||||||
}
|
}
|
||||||
|
|
||||||
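The frameRate guard added above boils down to a logging accessor over a plain backing variable. A minimal standalone sketch of the same pattern, with generic object and property names rather than the app's own:

    // Illustration only: wrap a property so every write is traced.
    function defineTracedProperty(obj, key, initialValue) {
      let backing = initialValue;
      Object.defineProperty(obj, key, {
        get() { return backing; },
        set(value) {
          console.log(`[${key} setter]`, backing, '->', value);
          backing = value;
        },
        enumerable: true,
        configurable: false, // later attempts to redefine the property will fail
      });
    }

    const doc = {};
    defineTracedProperty(doc, 'frameRate', 24);
    doc.frameRate = 30; // logs "[frameRate setter] 24 -> 30"
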
async function newFile() {
@@ -1570,6 +1729,24 @@ async function _open(path, returnJson = false) {
// undoStack.push(action)
// }
root = GraphicsObject.fromJSON(file.json)

+// Restore frameRate property with getter/setter (same pattern as in _newFile)
+// This is needed because GraphicsObject.fromJSON creates a new object without frameRate
+let _frameRate = config.framerate; // frameRate was set from file.fps in _newFile call above
+Object.defineProperty(root, 'frameRate', {
+get() {
+return _frameRate;
+},
+set(value) {
+console.log('[frameRate setter] Setting frameRate to:', value, 'from:', _frameRate);
+console.trace('[frameRate setter] Stack trace:');
+_frameRate = value;
+},
+enumerable: true,
+configurable: false
+});
+console.log('[openFile] After restoring frameRate property, root.frameRate:', root.frameRate);

context.objectStack = [root]
}

@@ -1647,6 +1824,7 @@ async function importFile() {
// Define supported extensions
const imageExtensions = ["png", "gif", "avif", "jpg", "jpeg"];
const audioExtensions = ["mp3", "wav", "aiff", "ogg", "flac"];
+const midiExtensions = ["mid", "midi"];
const beamExtensions = ["beam"];

// Define filters in consistent order
@@ -1659,6 +1837,10 @@ async function importFile() {
name: "Audio files",
extensions: audioExtensions,
},
+{
+name: "MIDI files",
+extensions: midiExtensions,
+},
{
name: "Lightningbeam files",
extensions: beamExtensions,
@@ -1710,8 +1892,10 @@ async function importFile() {
let usedFilterIndex = 0;
if (audioExtensions.includes(ext)) {
usedFilterIndex = 1; // Audio
+} else if (midiExtensions.includes(ext)) {
+usedFilterIndex = 2; // MIDI
} else if (beamExtensions.includes(ext)) {
-usedFilterIndex = 2; // Lightningbeam
+usedFilterIndex = 3; // Lightningbeam
} else {
usedFilterIndex = 0; // Image (default)
}
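The hard-coded indices above have to track the order of the filter array (0 = Image, 1 = Audio, 2 = MIDI, 3 = Lightningbeam), which is why the Lightningbeam index moves from 2 to 3 in this commit. A sketch of how the same lookup could be derived from the extension lists themselves, so adding another filter cannot silently desynchronise the numbers (the helper and the image-filter name are illustrative, not from the codebase):

    // Illustration only: derive the filter index from the same extension lists.
    const filterOrder = [
      { name: "Image files", extensions: ["png", "gif", "avif", "jpg", "jpeg"] },
      { name: "Audio files", extensions: ["mp3", "wav", "aiff", "ogg", "flac"] },
      { name: "MIDI files", extensions: ["mid", "midi"] },
      { name: "Lightningbeam files", extensions: ["beam"] },
    ];

    function filterIndexForExtension(ext) {
      const idx = filterOrder.findIndex((f) => f.extensions.includes(ext));
      return idx === -1 ? 0 : idx; // fall back to the image filter
    }

    console.log(filterIndexForExtension("mid"));  // 2
    console.log(filterIndexForExtension("beam")); // 3
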
@@ -1778,6 +1962,9 @@ async function importFile() {
} else if (audioExtensions.includes(ext)) {
// Handle audio files - pass file path directly to backend
actions.addAudio.create(path, context.activeObject, filename);
+} else if (midiExtensions.includes(ext)) {
+// Handle MIDI files
+actions.addMIDI.create(path, context.activeObject, filename);
} else {
// Handle image files - convert to data URL
const { dataURL, mimeType } = await convertToDataURL(
@@ -4305,7 +4492,8 @@ function createPane(paneType = undefined, div = undefined) {
}
}

-div.className = "vertical-grid";
+div.className = "vertical-grid pane";
+div.setAttribute("data-pane-name", paneType.name);
header.style.height = "calc( 2 * var(--lineheight))";
content.style.height = "calc( 100% - 2 * var(--lineheight) )";
div.appendChild(header);
@@ -5348,6 +5536,7 @@ function updateMenu() {
}

async function renderMenu() {
+console.log('[renderMenu] START - root.frameRate:', root.frameRate);
let activeFrame;
let activeKeyframe;
let newFrameMenuItem;
@@ -5358,6 +5547,7 @@ async function renderMenu() {

// Move this
updateOutliner();
+console.log('[renderMenu] After updateOutliner - root.frameRate:', root.frameRate);

let recentFilesList = [];
config.recentFiles.forEach((file) => {
@@ -5563,6 +5753,13 @@ async function renderMenu() {
text: "Add Audio Track",
enabled: true,
action: addEmptyAudioTrack,
+accelerator: getShortcut("addAudioTrack")
+},
+{
+text: "Add MIDI Track",
+enabled: true,
+action: addEmptyMIDITrack,
+accelerator: getShortcut("addMIDITrack")
},
{
text: "Delete Layer",
@@ -5652,6 +5849,40 @@ async function renderMenu() {
},
],
});
+// Build layout submenu items
+const layoutMenuItems = [
+{
+text: "Next Layout",
+enabled: true,
+action: nextLayout,
+accelerator: getShortcut("nextLayout"),
+},
+{
+text: "Previous Layout",
+enabled: true,
+action: previousLayout,
+accelerator: getShortcut("previousLayout"),
+},
+];

+// Add separator
+layoutMenuItems.push(await PredefinedMenuItem.new({ item: "Separator" }));

+// Add individual layouts
+for (const layoutKey of getLayoutNames()) {
+const layout = getLayout(layoutKey);
+layoutMenuItems.push({
+text: layout.name,
+enabled: true,
+action: () => switchLayout(layoutKey),
+});
+}

+const layoutSubmenu = await Submenu.new({
+text: "Layout",
+items: layoutMenuItems,
+});

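Each generated layout item closes over its own layoutKey: because the loop variable is declared with const inside for...of, every arrow function captures a distinct binding rather than the last key in the list. A reduced illustration of that capture behaviour (the layout names here are made up):

    // Illustration only: block-scoped loop variables give each closure its own key.
    const keys = ["animation", "audioDaw", "scripting"];
    const actions = [];
    for (const key of keys) {
      actions.push(() => console.log("switching to", key));
    }
    actions[0](); // "switching to animation"
    actions[2](); // "switching to scripting"
    // With a function-scoped `var key`, every callback would log the final key instead.
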
const viewSubmenu = await Submenu.new({
text: "View",
items: [
@@ -5679,6 +5910,7 @@ async function renderMenu() {
action: recenter,
// accelerator: getShortcut("recenter"),
},
+layoutSubmenu,
],
});
const helpSubmenu = await Submenu.new({
@@ -5707,10 +5939,91 @@ async function renderMenu() {
const menu = await Menu.new({
items: items,
});
+console.log('[renderMenu] Before setAsWindowMenu - root.frameRate:', root.frameRate);
await (macOS ? menu.setAsAppMenu() : menu.setAsWindowMenu());
+console.log('[renderMenu] END - root.frameRate:', root.frameRate);
}
updateMenu();

+function piano() {
+let piano_cvs = document.createElement("canvas");
+piano_cvs.className = "piano";

+// Create the virtual piano widget
+piano_cvs.virtualPiano = new VirtualPiano();

+// Variable to store the last time updatePianoCanvasSize was called
+let lastResizeTime = 0;
+const throttleIntervalMs = 20;

+function updatePianoCanvasSize() {
+const canvasStyles = window.getComputedStyle(piano_cvs);
+const width = parseInt(canvasStyles.width);
+const height = parseInt(canvasStyles.height);

+// Set actual size in memory (scaled for retina displays)
+piano_cvs.width = width * window.devicePixelRatio;
+piano_cvs.height = height * window.devicePixelRatio;

+// Normalize coordinate system to use CSS pixels
+const ctx = piano_cvs.getContext("2d");
+ctx.scale(window.devicePixelRatio, window.devicePixelRatio);

+// Render the piano
+piano_cvs.virtualPiano.draw(ctx, width, height);
+}

+// Store references in context for global access
+context.pianoWidget = piano_cvs.virtualPiano;
+context.pianoCanvas = piano_cvs;
+context.pianoRedraw = updatePianoCanvasSize;

+const resizeObserver = new ResizeObserver((entries) => {
+const currentTime = Date.now();
+if (currentTime - lastResizeTime >= throttleIntervalMs) {
+lastResizeTime = currentTime;
+updatePianoCanvasSize();
+}
+});
+resizeObserver.observe(piano_cvs);

+// Mouse event handlers
+piano_cvs.addEventListener("mousedown", (e) => {
+const rect = piano_cvs.getBoundingClientRect();
+const x = e.clientX - rect.left;
+const y = e.clientY - rect.top;
+const width = parseInt(window.getComputedStyle(piano_cvs).width);
+const height = parseInt(window.getComputedStyle(piano_cvs).height);
+piano_cvs.virtualPiano.mousedown(x, y, width, height);
+updatePianoCanvasSize(); // Redraw to show pressed state
+});

+piano_cvs.addEventListener("mousemove", (e) => {
+const rect = piano_cvs.getBoundingClientRect();
+const x = e.clientX - rect.left;
+const y = e.clientY - rect.top;
+const width = parseInt(window.getComputedStyle(piano_cvs).width);
+const height = parseInt(window.getComputedStyle(piano_cvs).height);
+piano_cvs.virtualPiano.mousemove(x, y, width, height);
+updatePianoCanvasSize(); // Redraw to show hover state
+});

+piano_cvs.addEventListener("mouseup", (e) => {
+const rect = piano_cvs.getBoundingClientRect();
+const x = e.clientX - rect.left;
+const y = e.clientY - rect.top;
+const width = parseInt(window.getComputedStyle(piano_cvs).width);
+const height = parseInt(window.getComputedStyle(piano_cvs).height);
+piano_cvs.virtualPiano.mouseup(x, y, width, height);
+updatePianoCanvasSize(); // Redraw to show released state
+});

+// Prevent text selection
+piano_cvs.addEventListener("selectstart", (e) => e.preventDefault());

+return piano_cvs;
+}

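The piano pane above redraws at most once every 20 ms and always draws in CSS pixels while backing the canvas with device pixels. The same sizing idiom in a reduced, self-contained form (the selector and draw callback are placeholders, not the app's names):

    // Illustration only: devicePixelRatio-aware canvas sizing as used by the piano pane.
    function fitCanvasToCss(canvas, render) {
      const styles = window.getComputedStyle(canvas);
      const cssWidth = parseInt(styles.width);
      const cssHeight = parseInt(styles.height);

      // Backing store in device pixels, drawing coordinates in CSS pixels.
      canvas.width = cssWidth * window.devicePixelRatio;
      canvas.height = cssHeight * window.devicePixelRatio;
      const ctx = canvas.getContext("2d");
      ctx.scale(window.devicePixelRatio, window.devicePixelRatio);

      render(ctx, cssWidth, cssHeight);
    }

    // Usage sketch, throttled the same way piano() throttles its ResizeObserver:
    // let last = 0;
    // new ResizeObserver(() => {
    //   const now = Date.now();
    //   if (now - last >= 20) { last = now; fitCanvasToCss(cvs, (ctx, w, h) => { /* draw */ }); }
    // }).observe(cvs);
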
const panes = {
stage: {
name: "stage",
@@ -5736,8 +6049,81 @@ const panes = {
name: "outliner",
func: outliner,
},
+piano: {
+name: "piano",
+func: piano,
+},
};

+/**
+* Switch to a different layout
+* @param {string} layoutKey - The key of the layout to switch to
+*/
+function switchLayout(layoutKey) {
+try {
+console.log(`Switching to layout: ${layoutKey}`);

+// Load the layout definition
+const layoutDef = loadLayoutByKeyOrName(layoutKey);
+if (!layoutDef) {
+console.error(`Layout not found: ${layoutKey}`);
+return;
+}

+// Clear existing layout (except root element)
+while (rootPane.firstChild) {
+rootPane.removeChild(rootPane.firstChild);
+}

+// Clear layoutElements array
+layoutElements.length = 0;

+// Clear canvases array (will be repopulated when stage pane is created)
+canvases.length = 0;

+// Build new layout from definition directly into rootPane
+buildLayout(rootPane, layoutDef, panes, createPane, splitPane);

+// Update config
+config.currentLayout = layoutKey;
+saveConfig();

+// Trigger layout update
+updateAll();
+updateUI();
+updateLayers();

+console.log(`Layout switched to: ${layoutDef.name}`);
+} catch (error) {
+console.error(`Error switching layout:`, error);
+}
+}

+/**
+* Switch to the next layout in the list
+*/
+function nextLayout() {
+const layoutKeys = getLayoutNames();
+const currentIndex = layoutKeys.indexOf(config.currentLayout);
+const nextIndex = (currentIndex + 1) % layoutKeys.length;
+switchLayout(layoutKeys[nextIndex]);
+}

+/**
+* Switch to the previous layout in the list
+*/
+function previousLayout() {
+const layoutKeys = getLayoutNames();
+const currentIndex = layoutKeys.indexOf(config.currentLayout);
+const prevIndex = (currentIndex - 1 + layoutKeys.length) % layoutKeys.length;
+switchLayout(layoutKeys[prevIndex]);
+}

+// Make layout functions available globally for menu actions
+window.switchLayout = switchLayout;
+window.nextLayout = nextLayout;
+window.previousLayout = previousLayout;

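nextLayout and previousLayout wrap around the layout list with modular arithmetic; the extra + layoutKeys.length term keeps the previous-index computation non-negative even when indexOf returns -1 for an unknown current layout. A worked example with a three-entry list (layout names illustrative):

    // Worked example of the wrap-around index math used by nextLayout/previousLayout.
    const layoutKeys = ["animation", "audioDaw", "scripting"];
    const current = layoutKeys.indexOf("scripting");                     // 2
    const next = (current + 1) % layoutKeys.length;                      // 0 -> "animation"
    const prev = (current - 1 + layoutKeys.length) % layoutKeys.length;  // 1 -> "audioDaw"
    console.log(layoutKeys[next], layoutKeys[prev]);
    // If the current layout is missing from the list, indexOf yields -1 and
    // nextLayout lands on index 0 ((-1 + 1) % 3), i.e. the first layout.
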
function _arrayBufferToBase64(buffer) {
var binary = "";
var bytes = new Uint8Array(buffer);
@@ -5857,6 +6243,7 @@ if (window.openedFiles?.length>0) {
}

async function addEmptyAudioTrack() {
+console.log('[addEmptyAudioTrack] BEFORE - root.frameRate:', root.frameRate);
const trackName = `Audio Track ${context.activeObject.audioTracks.length + 1}`;
const trackUuid = uuidv4();

@@ -5867,24 +6254,125 @@ async function addEmptyAudioTrack() {
// Initialize track in backend (creates empty audio track)
await newAudioTrack.initializeTrack();

+console.log('[addEmptyAudioTrack] After initializeTrack - root.frameRate:', root.frameRate);

// Add track to active object
context.activeObject.audioTracks.push(newAudioTrack);

+console.log('[addEmptyAudioTrack] After push - root.frameRate:', root.frameRate);

// Select the newly created track
context.activeObject.activeLayer = newAudioTrack;

+console.log('[addEmptyAudioTrack] After setting activeLayer - root.frameRate:', root.frameRate);

// Update UI
updateLayers();
if (context.timelineWidget) {
context.timelineWidget.requestRedraw();
}

+console.log('[addEmptyAudioTrack] AFTER - root.frameRate:', root.frameRate);
console.log('Empty audio track created:', trackName, 'with ID:', newAudioTrack.audioTrackId);
} catch (error) {
console.error('Failed to create empty audio track:', error);
}
}

+async function addEmptyMIDITrack() {
+console.log('[addEmptyMIDITrack] Creating new MIDI track');
+const trackName = `MIDI Track ${context.activeObject.audioTracks.filter(t => t.type === 'midi').length + 1}`;
+const trackUuid = uuidv4();

+try {
+// Get available instruments
+const instruments = await getAvailableInstruments();

+// Default to SimpleSynth for now (we can add UI selection later)
+const instrument = instruments.length > 0 ? instruments[0] : 'SimpleSynth';

+// Create new AudioTrack with type='midi'
+const newMIDITrack = new AudioTrack(trackUuid, trackName, 'midi');
+newMIDITrack.instrument = instrument;

+// Initialize track in backend (creates MIDI track with instrument)
+await newMIDITrack.initializeTrack();

+console.log('[addEmptyMIDITrack] After initializeTrack - instrument:', instrument);

+// Add track to active object
+context.activeObject.audioTracks.push(newMIDITrack);

+// Select the newly created track
+context.activeObject.activeLayer = newMIDITrack;

+// Update UI
+updateLayers();
+if (context.timelineWidget) {
+context.timelineWidget.requestRedraw();
+}

+console.log('Empty MIDI track created:', trackName, 'with ID:', newMIDITrack.audioTrackId);
+} catch (error) {
+console.error('Failed to create empty MIDI track:', error);
+}
+}

+// MIDI Command Wrappers
+async function getAvailableInstruments() {
+try {
+const instruments = await invoke('audio_get_available_instruments');
+console.log('Available instruments:', instruments);
+return instruments;
+} catch (error) {
+console.error('Failed to get available instruments:', error);
+throw error;
+}
+}

+async function createMIDITrack(name, instrument) {
+try {
+const trackId = await invoke('audio_create_track', { name, trackType: 'midi', instrument });
+console.log('MIDI track created:', name, 'with instrument:', instrument, 'ID:', trackId);
+return trackId;
+} catch (error) {
+console.error('Failed to create MIDI track:', error);
+throw error;
+}
+}

+async function createMIDIClip(trackId, startTime, duration) {
+try {
+const clipId = await invoke('audio_create_midi_clip', { trackId, startTime, duration });
+console.log('MIDI clip created on track', trackId, 'with ID:', clipId);
+return clipId;
+} catch (error) {
+console.error('Failed to create MIDI clip:', error);
+throw error;
+}
+}

+async function addMIDINote(trackId, clipId, timeOffset, note, velocity, duration) {
+try {
+await invoke('audio_add_midi_note', { trackId, clipId, timeOffset, note, velocity, duration });
+console.log('MIDI note added:', note, 'at', timeOffset);
+} catch (error) {
+console.error('Failed to add MIDI note:', error);
+throw error;
+}
+}

+async function loadMIDIFile(trackId, path, startTime) {
+try {
+const duration = await invoke('audio_load_midi_file', { trackId, path, startTime });
+console.log('MIDI file loaded:', path, 'duration:', duration);
+return duration;
+} catch (error) {
+console.error('Failed to load MIDI file:', error);
+throw error;
+}
+}

async function testAudio() {
console.log("Starting rust")
await init();

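Taken together, the MIDI command wrappers added above (getAvailableInstruments, createMIDITrack, createMIDIClip, addMIDINote, loadMIDIFile) give a complete path from an empty project to audible MIDI. A hedged end-to-end sketch; the track name, timings, note numbers, and file path are made up for illustration:

    // Hypothetical usage of the wrapper functions defined above.
    async function demoMidiRoundTrip() {
      const instruments = await getAvailableInstruments();
      const trackId = await createMIDITrack("Demo Track", instruments[0]);

      // A two-second clip starting at the beginning of the timeline.
      const clipId = await createMIDIClip(trackId, 0, 2.0);

      // C major triad: notes 60/64/67, velocity 100, half-second durations.
      await addMIDINote(trackId, clipId, 0.0, 60, 100, 0.5);
      await addMIDINote(trackId, clipId, 0.5, 64, 100, 0.5);
      await addMIDINote(trackId, clipId, 1.0, 67, 100, 0.5);

      // Alternatively, import a whole file onto the same track (path is illustrative).
      // const duration = await loadMIDIFile(trackId, "/tmp/riff.mid", 2.0);
    }
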
@@ -1011,14 +1011,15 @@ class Layer extends Widget {
}

class AudioTrack {
-constructor(uuid, name) {
+constructor(uuid, name, type = 'audio') {
// ID and name
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
-this.name = name || "Audio";
+this.name = name || (type === 'midi' ? "MIDI" : "Audio");
+this.type = type; // 'audio' or 'midi'
this.audible = true;
this.visible = true; // For consistency with Layer (audio tracks are always "visible" in timeline)

@@ -1042,8 +1043,8 @@ class AudioTrack {
// Reference to DAW backend track
this.audioTrackId = null;

-// Audio clips
+// Audio clips (for audio tracks) or MIDI clips (for MIDI tracks)
-this.clips = []; // { clipId, poolIndex, name, startTime, duration, offset }
+this.clips = []; // { clipId, poolIndex, name, startTime, duration, offset } or MIDI clip data

// Timeline display settings (for track hierarchy)
this.collapsed = false
@@ -1093,14 +1094,21 @@ class AudioTrack {
}

try {
-const trackId = await invoke('audio_create_track', {
+const params = {
name: this.name,
-trackType: 'audio'
+trackType: this.type
-});
+};

+// Add instrument parameter for MIDI tracks
+if (this.type === 'midi' && this.instrument) {
+params.instrument = this.instrument;
+}

+const trackId = await invoke('audio_create_track', params);
this.audioTrackId = trackId;
-console.log('Audio track created:', this.name, 'with ID:', trackId);
+console.log(`${this.type === 'midi' ? 'MIDI' : 'Audio'} track created:`, this.name, 'with ID:', trackId);
} catch (error) {
-console.error('Failed to create audio track:', error);
+console.error(`Failed to create ${this.type} track:`, error);
throw error;
}
}

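With the extra constructor argument, the same AudioTrack class now backs both kinds of timeline track. A brief usage sketch (track names are illustrative; initializeTrack is the method shown above):

    // Hypothetical usage of the extended AudioTrack constructor.
    const drums = new AudioTrack(uuidv4(), "Drums");               // defaults to type 'audio'
    const lead  = new AudioTrack(uuidv4(), "Lead Synth", 'midi');  // MIDI-backed track
    lead.instrument = 'SimpleSynth';                               // picked up by initializeTrack()

    await drums.initializeTrack();  // invokes audio_create_track with trackType 'audio'
    await lead.initializeTrack();   // invokes audio_create_track with trackType 'midi' plus instrument
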
@@ -62,6 +62,38 @@ function createNewFileDialog(newFileCallback, openFileCallback, config) {
fpsInput.value = config.framerate;
newFileDialog.appendChild(fpsInput);

+// Create Project Type selector
+const projectTypeLabel = document.createElement('label');
+projectTypeLabel.setAttribute('for', 'projectType');
+projectTypeLabel.classList.add('dialog-label');
+projectTypeLabel.textContent = 'Project Type:';
+newFileDialog.appendChild(projectTypeLabel);

+const projectTypeSelect = document.createElement('select');
+projectTypeSelect.id = 'projectType';
+projectTypeSelect.classList.add('dialog-input');

+const projectTypes = [
+{ value: 'animation', label: '🎬 Animation - Drawing tools and timeline' },
+{ value: 'videoEditing', label: '🎥 Video - Clip timeline and effects' },
+{ value: 'audioDaw', label: '🎵 Music - Audio tracks and mixer' },
+{ value: 'scripting', label: '💻 Scripting - Code editor and console' },
+{ value: 'drawingPainting', label: '🎨 Drawing - Minimal UI for sketching' },
+{ value: 'threeD', label: '🧊 3D - Viewport and camera controls' }
+];

+projectTypes.forEach(type => {
+const option = document.createElement('option');
+option.value = type.value;
+option.textContent = type.label;
+if (type.value === config.defaultLayout) {
+option.selected = true;
+}
+projectTypeSelect.appendChild(option);
+});

+newFileDialog.appendChild(projectTypeSelect);

// Create Create button
const createButton = document.createElement('button');
createButton.textContent = 'Create';
@@ -82,8 +114,9 @@ function createNewFileDialog(newFileCallback, openFileCallback, config) {
const width = parseInt(document.getElementById('width').value);
const height = parseInt(document.getElementById('height').value);
const fps = parseInt(document.getElementById('fps').value);
-console.log(`New file created with width: ${width} and height: ${height}`);
+const projectType = document.getElementById('projectType').value;
-newFileCallback(width, height, fps)
+console.log(`New file created with width: ${width}, height: ${height}, fps: ${fps}, layout: ${projectType}`);
+newFileCallback(width, height, fps, projectType)
closeDialog();
}

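The dialog's Create handler now forwards the selected project type as a fourth argument, which lines up with the new _newFile(width, height, fps, layoutKey) signature earlier in the commit. A sketch of the wiring, assuming _newFile is the callback passed into createNewFileDialog (implied by the surrounding code but not shown in this diff):

    // Assumed wiring: the dialog callback maps straight onto _newFile.
    createNewFileDialog(
      (width, height, fps, projectType) => _newFile(width, height, fps, projectType),
      openFileCallback, // placeholder for the unchanged second callback
      config,
    );
    // Picking "🎵 Music" in the dialog therefore reaches _newFile(width, height, fps, 'audioDaw'),
    // which stores the layout in config and calls switchLayout('audioDaw') right after
    // the new root object is created.
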
src/state.js
@@ -65,11 +65,14 @@ export let config = {
selectNone: "<mod>A",
group: "<mod>g",
addLayer: "<mod>l",
+addAudioTrack: "<mod>t",
addKeyframe: "F6",
addBlankKeyframe: "F7",
zoomIn: "<mod>+",
zoomOut: "<mod>-",
resetZoom: "<mod>0",
+nextLayout: "<mod>Tab",
+previousLayout: "<mod><shift>Tab",
},
fileWidth: 800,
fileHeight: 600,
@@ -78,7 +81,13 @@ export let config = {
scrollSpeed: 1,
debug: false,
reopenLastSession: false,
-lastImportFilterIndex: 0 // Index of last used filter in import dialog (0=Image, 1=Audio, 2=Lightningbeam)
+lastImportFilterIndex: 0, // Index of last used filter in import dialog (0=Image, 1=Audio, 2=Lightningbeam)
+// Layout settings
+currentLayout: "animation", // Current active layout key
+defaultLayout: "animation", // Default layout for new files
+showStartScreen: false, // Show layout picker on startup (disabled for now)
+restoreLayoutFromFile: false, // Restore layout when opening files
+customLayouts: [] // User-saved custom layouts
};

// Object pointer registry
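Two details worth noting: the trailing comment on lastImportFilterIndex still describes the pre-MIDI filter order (MIDI now occupies index 2 and Lightningbeam index 3), and the new accelerator strings follow the existing <mod>/<shift> placeholder convention consumed by getShortcut. A sketch of what that lookup plausibly does; the real getShortcut implementation is not part of this diff, so treat the field name and substitution as assumptions:

    // Assumed shape of getShortcut: look the name up in config and substitute the
    // platform modifier. Not the project's actual implementation.
    function getShortcutSketch(name) {
      const raw = config.keyboardShortcuts?.[name]; // assumed key for the shortcuts object
      if (!raw) return undefined;
      const mod = macOS ? "Cmd" : "Ctrl"; // a macOS flag is used elsewhere in this commit
      return raw.replace("<mod>", `${mod}+`).replace("<shift>", "Shift+");
    }

    getShortcutSketch("nextLayout");     // e.g. "Ctrl+Tab"
    getShortcutSketch("previousLayout"); // e.g. "Ctrl+Shift+Tab"
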
src/widgets.js
@@ -1235,9 +1235,19 @@ class TimelineWindowV2 extends Widget {
const endX = this.timelineState.timeToPixel(clip.startTime + clip.duration)
const clipWidth = endX - startX

-// Draw clip rectangle with audio-specific color
+// Determine clip color based on track type
-// Use gray color for loading clips, blue for loaded clips
+const isMIDI = audioTrack.type === 'midi'
-ctx.fillStyle = clip.loading ? '#666666' : '#4a90e2'
+let clipColor
+if (clip.loading) {
+clipColor = '#666666' // Gray for loading
+} else if (isMIDI) {
+clipColor = '#2d5016' // Dark green background for MIDI clips
+} else {
+clipColor = '#4a90e2' // Blue for audio clips
+}

+// Draw clip rectangle
+ctx.fillStyle = clipColor
ctx.fillRect(
startX,
y + 5,
@@ -1273,8 +1283,74 @@ class TimelineWindowV2 extends Widget {
ctx.restore()
}

-// Draw waveform only for loaded clips
+// Draw MIDI clip visualization (piano roll bars) or audio waveform
-if (!clip.loading && clip.waveform && clip.waveform.length > 0) {
+if (!clip.loading) {
+if (isMIDI && clip.notes && clip.notes.length > 0) {
+// Draw piano roll notes for MIDI clips
+// Divide track height by 12 to represent chromatic notes (C, C#, D, etc.)
+// Leave 2px padding at top and bottom
+const verticalPadding = 2
+const availableHeight = trackHeight - 10 - (verticalPadding * 2)
+const noteHeight = availableHeight / 12

+// Calculate visible time range within the clip
+const clipEndX = startX + clipWidth
+const visibleStartTime = this.timelineState.pixelToTime(Math.max(startX, 0)) - clip.startTime
+const visibleEndTime = this.timelineState.pixelToTime(Math.min(clipEndX, this.width)) - clip.startTime

+// Binary search to find first visible note
+let firstVisibleIdx = 0
+let left = 0
+let right = clip.notes.length - 1
+while (left <= right) {
+const mid = Math.floor((left + right) / 2)
+const noteEndTime = clip.notes[mid].start_time + clip.notes[mid].duration

+if (noteEndTime < visibleStartTime) {
+left = mid + 1
+firstVisibleIdx = left
+} else {
+right = mid - 1
+}
+}

+// Draw visible notes only
+ctx.fillStyle = '#6fdc6f' // Bright green for note bars

+for (let i = firstVisibleIdx; i < clip.notes.length; i++) {
+const note = clip.notes[i]

+// Exit early if note starts after visible range
+if (note.start_time > visibleEndTime) {
+break
+}

+// Calculate note position (pitch mod 12 for chromatic representation)
+const pitchClass = note.note % 12
+// Invert Y so higher pitches appear at top
+const noteY = y + 5 + ((11 - pitchClass) * noteHeight)

+// Calculate note timing on timeline
+const noteStartX = this.timelineState.timeToPixel(clip.startTime + note.start_time)
+const noteEndX = this.timelineState.timeToPixel(clip.startTime + note.start_time + note.duration)

+// Clip to visible bounds
+const visibleStartX = Math.max(noteStartX, startX + 2)
+const visibleEndX = Math.min(noteEndX, startX + clipWidth - 2)
+const visibleWidth = visibleEndX - visibleStartX

+if (visibleWidth > 0) {
+// Draw note rectangle
+ctx.fillRect(
+visibleStartX,
+noteY,
+visibleWidth,
+noteHeight - 1 // Small gap between notes
+)
+}
+}
+} else if (!isMIDI && clip.waveform && clip.waveform.length > 0) {
+// Draw waveform for audio clips
ctx.fillStyle = 'rgba(255, 255, 255, 0.3)'

// Only draw waveform within visible area
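The binary search above finds the first note whose end time reaches into the visible window, so scrolling a long clip never iterates over notes that have already scrolled off to the left. The same predicate in isolation, in a slightly reorganised but equivalent form, assuming the note list is sorted by start time as the widget expects (note objects follow the { start_time, duration } shape used above):

    // Standalone version of the "first visible note" search used above.
    function firstVisibleNoteIndex(notes, visibleStartTime) {
      let result = notes.length; // nothing visible if every note ends too early
      let left = 0;
      let right = notes.length - 1;
      while (left <= right) {
        const mid = Math.floor((left + right) / 2);
        const noteEnd = notes[mid].start_time + notes[mid].duration;
        if (noteEnd < visibleStartTime) {
          left = mid + 1;   // note ends before the window: discard the left half
        } else {
          result = mid;     // candidate: keep searching for an earlier one
          right = mid - 1;
        }
      }
      return result;
    }

    const notes = [
      { start_time: 0.0, duration: 0.5 },
      { start_time: 1.0, duration: 0.5 },
      { start_time: 2.0, duration: 0.5 },
    ];
    console.log(firstVisibleNoteIndex(notes, 1.2)); // 1 (the note ending at 1.5)
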
@@ -1324,6 +1400,7 @@ class TimelineWindowV2 extends Widget {
}
}
}
+}

ctx.restore()
}
@@ -3953,6 +4030,436 @@ class TimelineWindowV2 extends Widget {
}
}

+/**
+* VirtualPiano - Interactive piano keyboard for MIDI input
+* Displays a piano keyboard that users can click/play
+* Can be connected to MIDI tracks in the DAW backend
+*/
+class VirtualPiano extends Widget {
+constructor() {
+super(0, 0);

+// Piano configuration - width scales based on height
+this.whiteKeyAspectRatio = 6.0; // White key height:width ratio (taller keys)
+this.blackKeyWidthRatio = 0.6; // Black key width as ratio of white key width
+this.blackKeyHeightRatio = 0.62; // Black key height as ratio of white key height

+// State
+this.pressedKeys = new Set(); // Currently pressed MIDI note numbers (user input)
+this.playingNotes = new Set(); // Currently playing notes (from MIDI playback)
+this.hoveredKey = null; // Currently hovered key
+this.visibleStartNote = 48; // C3 - will be adjusted based on pane width
+this.visibleEndNote = 72; // C5 - will be adjusted based on pane width

+// MIDI note mapping (white keys in an octave: C, D, E, F, G, A, B)
+this.whiteKeysInOctave = [0, 2, 4, 5, 7, 9, 11]; // Semitones from C
+// Black keys indexed by white key position (after which white key the black key appears)
+// Position 0 (after C), 1 (after D), null (no black after E), 3 (after F), 4 (after G), 5 (after A), null (no black after B)
+this.blackKeysInOctave = [1, 3, null, 6, 8, 10, null]; // Actual semitone values

+// Keyboard bindings matching piano layout
+// Black keys: W E (one group) T Y U (other group)
+// White keys: A S D F G H J K
+this.keyboardMap = {
+'a': 60, // C4
+'w': 61, // C#4
+'s': 62, // D4
+'e': 63, // D#4
+'d': 64, // E4
+'f': 65, // F4
+'t': 66, // F#4
+'g': 67, // G4
+'y': 68, // G#4
+'h': 69, // A4
+'u': 70, // A#4
+'j': 71, // B4
+'k': 72, // C5
+};

+// Reverse mapping for displaying keyboard keys on piano keys
+this.noteToKeyMap = {};
+for (const [key, note] of Object.entries(this.keyboardMap)) {
+this.noteToKeyMap[note] = key.toUpperCase();
+}

+// Setup keyboard event listeners
+this.setupKeyboardListeners();
+}

+/**
+* Setup keyboard event listeners for computer keyboard input
+*/
+setupKeyboardListeners() {
+window.addEventListener('keydown', (e) => {
+if (e.repeat) return; // Ignore key repeats
+const midiNote = this.keyboardMap[e.key.toLowerCase()];
+if (midiNote !== undefined) {
+this.noteOn(midiNote, 100); // Default velocity 100
+e.preventDefault();
+}
+});

+window.addEventListener('keyup', (e) => {
+const midiNote = this.keyboardMap[e.key.toLowerCase()];
+if (midiNote !== undefined) {
+this.noteOff(midiNote);
+e.preventDefault();
+}
+});
+}

+/**
+* Convert MIDI note number to note info
+*/
+getMidiNoteInfo(midiNote) {
+const octave = Math.floor(midiNote / 12) - 1;
+const semitone = midiNote % 12;
+const isBlack = [1, 3, 6, 8, 10].includes(semitone);
+const noteNames = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
+return {
+octave,
+semitone,
+isBlack,
+name: noteNames[semitone] + octave
+};
+}

+/**
+* Calculate key position and dimensions for a given MIDI note
+* @param {number} midiNote - MIDI note number
+* @param {number} whiteKeyHeight - Height of white keys (full pane height)
+* @param {number} whiteKeyWidth - Width of white keys (calculated from height)
+* @param {number} offsetX - Horizontal offset for centering
+*/
+getKeyGeometry(midiNote, whiteKeyHeight, whiteKeyWidth, offsetX = 0) {
+const info = this.getMidiNoteInfo(midiNote);
+const blackKeyWidth = whiteKeyWidth * this.blackKeyWidthRatio;
+const blackKeyHeight = whiteKeyHeight * this.blackKeyHeightRatio;

+// Count how many white keys are between visibleStartNote and this note
+let whiteKeysBefore = 0;
+for (let n = this.visibleStartNote; n < midiNote; n++) {
+const nInfo = this.getMidiNoteInfo(n);
+if (!nInfo.isBlack) {
+whiteKeysBefore++;
+}
+}

+if (info.isBlack) {
+// Black key positioning - place it between the white keys
+// The black key goes after the white key at position whiteKeysBefore
+const x = offsetX + whiteKeysBefore * whiteKeyWidth + whiteKeyWidth - blackKeyWidth / 2;

+return {
+x,
+y: 0,
+width: blackKeyWidth,
+height: blackKeyHeight,
+isBlack: true
+};
+} else {
+// White key positioning - just use the count
+const x = offsetX + whiteKeysBefore * whiteKeyWidth;

+return {
+x,
+y: 0,
+width: whiteKeyWidth,
+height: whiteKeyHeight,
+isBlack: false
+};
+}
+}

+/**
+* Calculate visible range and offset based on pane width and height
+*/
+calculateVisibleRange(width, height) {
+// Calculate white key width based on height to maintain aspect ratio
+const whiteKeyWidth = height / this.whiteKeyAspectRatio;

+// Calculate how many white keys can fit in the pane (ceiling to fill space)
+const whiteKeysFit = Math.ceil(width / whiteKeyWidth);

+// Keyboard-mapped range is C4 (60) to C5 (72)
+// This contains 8 white keys: C, D, E, F, G, A, B, C
+const keyboardCenter = 60; // C4
+const keyboardWhiteKeys = 8;

+if (whiteKeysFit <= keyboardWhiteKeys) {
+// Not enough space to show all keyboard keys, just center what we have
+this.visibleStartNote = 60; // C4
+this.visibleEndNote = 72; // C5
+const totalWhiteKeyWidth = keyboardWhiteKeys * whiteKeyWidth;
+const offsetX = (width - totalWhiteKeyWidth) / 2;
+return { offsetX, whiteKeyWidth };
+}

+// Calculate how many extra white keys we have space for
+const extraWhiteKeys = whiteKeysFit - keyboardWhiteKeys;
+const leftExtra = Math.floor(extraWhiteKeys / 2);
+const rightExtra = extraWhiteKeys - leftExtra;

+// Start from C4 and go back leftExtra white keys
+let startNote = 60; // C4
+let leftCount = 0;
+while (leftCount < leftExtra && startNote > 0) {
+startNote--;
+const info = this.getMidiNoteInfo(startNote);
+if (!info.isBlack) {
+leftCount++;
+}
+}

+// Now count forward exactly whiteKeysFit white keys from startNote
+let endNote = startNote - 1; // Start one before so the first increment includes startNote
+let whiteKeyCount = 0;

+while (whiteKeyCount < whiteKeysFit && endNote < 127) {
+endNote++;
+const info = this.getMidiNoteInfo(endNote);
+if (!info.isBlack) {
+whiteKeyCount++;
+}
+}

+this.visibleStartNote = startNote;
+this.visibleEndNote = endNote;

+// No offset - keys start from left edge and fill to the right
+return { offsetX: 0, whiteKeyWidth };
+}

+/**
+* Find which MIDI note is at the given x, y position
+*/
+findKeyAtPosition(x, y, height, whiteKeyWidth, offsetX) {
+// Check black keys first (they're on top)
+for (let note = this.visibleStartNote; note <= this.visibleEndNote; note++) {
+const info = this.getMidiNoteInfo(note);
+if (!info.isBlack) continue;

+const geom = this.getKeyGeometry(note, height, whiteKeyWidth, offsetX);
+if (x >= geom.x && x < geom.x + geom.width &&
+y >= geom.y && y < geom.y + geom.height) {
+return note;
+}
+}

+// Then check white keys
+for (let note = this.visibleStartNote; note <= this.visibleEndNote; note++) {
+const info = this.getMidiNoteInfo(note);
+if (info.isBlack) continue;

+const geom = this.getKeyGeometry(note, height, whiteKeyWidth, offsetX);
+if (x >= geom.x && x < geom.x + geom.width &&
+y >= geom.y && y < geom.y + geom.height) {
+return note;
+}
+}

+return null;
+}

+/**
+* Set which notes are currently playing (from MIDI playback)
+*/
+setPlayingNotes(notes) {
+this.playingNotes = new Set(notes);
+}

+/**
+* Trigger a note on event
+*/
+noteOn(midiNote, velocity = 100) {
+this.pressedKeys.add(midiNote);

+console.log(`Note ON: ${this.getMidiNoteInfo(midiNote).name} (${midiNote}) velocity: ${velocity}`);

+// Send to backend - use track ID 0 (first MIDI track)
+// TODO: Make this configurable to select which track to send to
+invoke('audio_send_midi_note_on', { trackId: 0, note: midiNote, velocity }).catch(error => {
+console.error('Failed to send MIDI note on:', error);
+});

+// Request redraw to show the pressed key
+if (typeof context !== 'undefined' && context.pianoRedraw) {
+context.pianoRedraw();
+}
+}

+/**
+* Trigger a note off event
+*/
+noteOff(midiNote) {
+this.pressedKeys.delete(midiNote);

+console.log(`Note OFF: ${this.getMidiNoteInfo(midiNote).name} (${midiNote})`);

+// Send to backend - use track ID 0 (first MIDI track)
+invoke('audio_send_midi_note_off', { trackId: 0, note: midiNote }).catch(error => {
+console.error('Failed to send MIDI note off:', error);
+});

+// Request redraw to show the released key
+if (typeof context !== 'undefined' && context.pianoRedraw) {
+context.pianoRedraw();
+}
+}

+hitTest(x, y) {
+// Will be calculated in draw() based on pane width/height
+return true; // Accept all events, let findKeyAtPosition handle precision
+}

+mousedown(x, y, width, height) {
+const { offsetX, whiteKeyWidth } = this.calculateVisibleRange(width, height);
+const key = this.findKeyAtPosition(x, y, height, whiteKeyWidth, offsetX);
+if (key !== null) {
+this.noteOn(key, 100);
+}
+}

+mousemove(x, y, width, height) {
+const { offsetX, whiteKeyWidth } = this.calculateVisibleRange(width, height);
+this.hoveredKey = this.findKeyAtPosition(x, y, height, whiteKeyWidth, offsetX);
+}

+mouseup(x, y, width, height) {
+// Release all pressed keys on mouse up
+for (const key of this.pressedKeys) {
+this.noteOff(key);
+}
+}

+draw(ctx, width, height) {
+ctx.save();

+// Background
+ctx.fillStyle = backgroundColor;
+ctx.fillRect(0, 0, width, height);

+// Calculate visible range and offset
+const { offsetX, whiteKeyWidth } = this.calculateVisibleRange(width, height);

+// Draw white keys first
+for (let note = this.visibleStartNote; note <= this.visibleEndNote; note++) {
+const info = this.getMidiNoteInfo(note);
+if (info.isBlack) continue;

+const geom = this.getKeyGeometry(note, height, whiteKeyWidth, offsetX);

+// Key color
+const isPressed = this.pressedKeys.has(note);
+const isPlaying = this.playingNotes.has(note);
+const isHovered = this.hoveredKey === note;

+if (isPressed) {
+ctx.fillStyle = highlight; // User pressed key
+} else if (isPlaying) {
+ctx.fillStyle = '#c8e6c9'; // Light green for MIDI playback
+} else if (isHovered) {
+ctx.fillStyle = '#f0f0f0';
+} else {
+ctx.fillStyle = '#ffffff';
+}

+// Draw white key with rounded corners at the bottom
+const radius = 3;
+ctx.beginPath();
+ctx.moveTo(geom.x, geom.y);
+ctx.lineTo(geom.x + geom.width, geom.y);
+ctx.lineTo(geom.x + geom.width, geom.y + geom.height - radius);
+ctx.arcTo(geom.x + geom.width, geom.y + geom.height, geom.x + geom.width - radius, geom.y + geom.height, radius);
+ctx.lineTo(geom.x + radius, geom.y + geom.height);
+ctx.arcTo(geom.x, geom.y + geom.height, geom.x, geom.y + geom.height - radius, radius);
+ctx.lineTo(geom.x, geom.y);
+ctx.closePath();
+ctx.fill();

+// Key border
+ctx.strokeStyle = shadow;
+ctx.lineWidth = 1;
+ctx.stroke();

+// Keyboard mapping label (if exists)
+const keyLabel = this.noteToKeyMap[note];
+if (keyLabel) {
+ctx.fillStyle = isPressed ? '#000000' : '#333333';
+ctx.font = 'bold 16px sans-serif';
+ctx.textAlign = 'center';
+ctx.textBaseline = 'middle';
+ctx.fillText(keyLabel, geom.x + geom.width / 2, geom.y + geom.height - 30);
+}

+// Note name at bottom of white keys
+if (info.semitone === 0) { // Only show octave number on C notes
+ctx.fillStyle = labelColor;
+ctx.font = '10px sans-serif';
+ctx.textAlign = 'center';
+ctx.textBaseline = 'bottom';
+ctx.fillText(info.name, geom.x + geom.width / 2, geom.y + geom.height - 5);
+}
+}

+// Draw black keys on top
+for (let note = this.visibleStartNote; note <= this.visibleEndNote; note++) {
+const info = this.getMidiNoteInfo(note);
+if (!info.isBlack) continue;

+const geom = this.getKeyGeometry(note, height, whiteKeyWidth, offsetX);

+// Key color
+const isPressed = this.pressedKeys.has(note);
+const isPlaying = this.playingNotes.has(note);
+const isHovered = this.hoveredKey === note;

+if (isPressed) {
+ctx.fillStyle = '#4a4a4a'; // User pressed black key
+} else if (isPlaying) {
+ctx.fillStyle = '#66bb6a'; // Darker green for MIDI playback on black keys
+} else if (isHovered) {
+ctx.fillStyle = '#2a2a2a';
+} else {
+ctx.fillStyle = '#000000';
+}

+// Draw black key with rounded corners at the bottom
+const blackRadius = 2;
+ctx.beginPath();
+ctx.moveTo(geom.x, geom.y);
+ctx.lineTo(geom.x + geom.width, geom.y);
+ctx.lineTo(geom.x + geom.width, geom.y + geom.height - blackRadius);
+ctx.arcTo(geom.x + geom.width, geom.y + geom.height, geom.x + geom.width - blackRadius, geom.y + geom.height, blackRadius);
+ctx.lineTo(geom.x + blackRadius, geom.y + geom.height);
+ctx.arcTo(geom.x, geom.y + geom.height, geom.x, geom.y + geom.height - blackRadius, blackRadius);
+ctx.lineTo(geom.x, geom.y);
+ctx.closePath();
+ctx.fill();

+// Highlight on top edge
+ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
+ctx.lineWidth = 1;
+ctx.beginPath();
+ctx.moveTo(geom.x, geom.y);
+ctx.lineTo(geom.x + geom.width, geom.y);
+ctx.stroke();

+// Keyboard mapping label (if exists)
+const keyLabel = this.noteToKeyMap[note];
+if (keyLabel) {
+ctx.fillStyle = isPressed ? '#ffffff' : 'rgba(255, 255, 255, 0.7)';
+ctx.font = 'bold 14px sans-serif';
+ctx.textAlign = 'center';
+ctx.textBaseline = 'middle';
+ctx.fillText(keyLabel, geom.x + geom.width / 2, geom.y + geom.height - 20);
+}
+}

+ctx.restore();
+}
+}

export {
SCROLL,
Widget,
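getMidiNoteInfo is the arithmetic core the rest of the widget leans on: octave = floor(note / 12) - 1, semitone = note % 12, with the five sharps picked out by their pitch classes. A couple of worked values in plain JavaScript, mirroring the method above:

    // Worked examples of the note-number arithmetic used by getMidiNoteInfo.
    const noteNames = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
    function describe(note) {
      const octave = Math.floor(note / 12) - 1;
      const semitone = note % 12;
      return {
        name: noteNames[semitone] + octave,
        isBlack: [1, 3, 6, 8, 10].includes(semitone),
      };
    }

    console.log(describe(60)); // { name: 'C4',  isBlack: false } - the 'a' key in keyboardMap
    console.log(describe(61)); // { name: 'C#4', isBlack: true }  - the 'w' key
    console.log(describe(72)); // { name: 'C5',  isBlack: false } - the 'k' key
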
@@ -3965,5 +4472,6 @@ export {
ScrollableWindow,
ScrollableWindowHeaders,
TimelineWindow,
-TimelineWindowV2
+TimelineWindowV2,
+VirtualPiano
};