Record audio tracks

Skyler Lehmkuhl 2025-10-23 01:08:45 -04:00
parent 48ec738027
commit 20c3b820a3
26 changed files with 12673 additions and 5192 deletions

View File

@@ -41,6 +41,7 @@ pub struct Engine {
     // Recording state
     recording_state: Option<RecordingState>,
     input_rx: Option<rtrb::Consumer<f32>>,
+    recording_progress_counter: usize,
 }

 impl Engine {
@@ -74,6 +75,7 @@ impl Engine {
             next_clip_id: 0,
             recording_state: None,
             input_rx: None,
+            recording_progress_counter: 0,
         }
     }
@@ -217,9 +219,8 @@ impl Engine {
                 // Add samples to recording
                 if !samples.is_empty() {
                     match recording.add_samples(&samples) {
-                        Ok(flushed) => {
-                            if flushed {
-                                // A flush occurred, update clip duration and send progress event
+                        Ok(_flushed) => {
+                            // Update clip duration every callback for sample-accurate timing
                             let duration = recording.duration();
                             let clip_id = recording.clip_id;
                             let track_id = recording.track_id;
@@ -231,8 +232,11 @@ impl Engine {
                                 }
                             }

-                            // Send progress event
+                            // Send progress event periodically (every ~0.1 seconds)
+                            self.recording_progress_counter += samples.len();
+                            if self.recording_progress_counter >= (self.sample_rate as usize / 10) {
                                 let _ = self.event_tx.push(AudioEvent::RecordingProgress(clip_id, duration));
+                                self.recording_progress_counter = 0;
+                            }
                         }
                         Err(e) => {
@@ -708,7 +712,7 @@ impl Engine {
         }

         // Create recording state
-        let flush_interval_seconds = 5.0; // Flush every 5 seconds
+        let flush_interval_seconds = 1.0; // Flush every 1 second (safer than 5 seconds)
         let recording_state = RecordingState::new(
             track_id,
             clip_id,
@@ -720,7 +724,23 @@ impl Engine {
             flush_interval_seconds,
         );

+        // Check how many samples are currently in the input buffer and mark them for skipping
+        let samples_in_buffer = if let Some(input_rx) = &self.input_rx {
+            input_rx.slots() // Number of samples currently in the buffer
+        } else {
+            0
+        };
+
         self.recording_state = Some(recording_state);
+        self.recording_progress_counter = 0; // Reset progress counter
+
+        // Set the number of samples to skip on the recording state
+        if let Some(recording) = &mut self.recording_state {
+            recording.samples_to_skip = samples_in_buffer;
+            if samples_in_buffer > 0 {
+                eprintln!("Will skip {} stale samples from input buffer", samples_in_buffer);
+            }
+        }

         // Notify UI that recording has started
         let _ = self.event_tx.push(AudioEvent::RecordingStarted(track_id, clip_id));
@@ -747,11 +767,19 @@ impl Engine {
         let track_id = recording.track_id;

         // Finalize the recording and get temp file path
+        let frames_recorded = recording.frames_written;
         match recording.finalize() {
             Ok(temp_file_path) => {
+                eprintln!("Recording finalized: {} frames written to {:?}", frames_recorded, temp_file_path);
+
                 // Load the recorded audio file
                 match crate::io::AudioFile::load(&temp_file_path) {
                     Ok(audio_file) => {
+                        // Generate waveform for UI
+                        let duration = audio_file.duration();
+                        let target_peaks = ((duration * 300.0) as usize).clamp(1000, 20000);
+                        let waveform = audio_file.generate_waveform_overview(target_peaks);
+
                         // Add to pool
                         let pool_file = crate::audio::pool::AudioFile::new(
                             temp_file_path.clone(),
@@ -772,8 +800,8 @@ impl Engine {
                         // Delete temp file
                         let _ = std::fs::remove_file(&temp_file_path);

-                        // Notify UI that recording has stopped
-                        let _ = self.event_tx.push(AudioEvent::RecordingStopped(clip_id, pool_index));
+                        // Notify UI that recording has stopped (with waveform)
+                        let _ = self.event_tx.push(AudioEvent::RecordingStopped(clip_id, pool_index, waveform));
                     }
                     Err(e) => {
                         // Send error event

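Aside: the progress-event throttling added above is a plain counter over the samples handled in each callback, flushed roughly ten times per second. A minimal standalone sketch of that pattern (the 0.1 s interval and the sample_rate / 10 threshold come from the diff; the struct, names, and test values here are assumptions for illustration):

// Sketch of the progress-throttling logic, assuming a 48 kHz sample rate
// and a tenth-of-a-second reporting interval.
struct ProgressThrottle {
    sample_rate: u32,
    counter: usize,
}

impl ProgressThrottle {
    fn new(sample_rate: u32) -> Self {
        Self { sample_rate, counter: 0 }
    }

    /// Returns true when a progress event should be emitted.
    fn on_samples(&mut self, n: usize) -> bool {
        self.counter += n;
        if self.counter >= (self.sample_rate as usize / 10) {
            self.counter = 0;
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut throttle = ProgressThrottle::new(48_000);
    let mut events = 0;
    // Simulate one second of audio arriving in 512-sample callbacks.
    for _ in 0..(48_000 / 512 + 1) {
        if throttle.on_samples(512) {
            events += 1;
        }
    }
    assert!(events >= 9 && events <= 11); // roughly ten events per second
    println!("emitted {} progress events", events);
}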
View File

@@ -27,6 +27,8 @@ pub struct RecordingState {
     pub flush_interval_frames: usize,
     /// Whether recording is currently paused
     pub paused: bool,
+    /// Number of samples remaining to skip (to discard stale buffer data)
+    pub samples_to_skip: usize,
 }

 impl RecordingState {
@@ -55,6 +57,7 @@ impl RecordingState {
             buffer: Vec::new(),
             flush_interval_frames,
             paused: false,
+            samples_to_skip: 0, // Will be set by engine when it knows buffer size
         }
     }
@@ -65,7 +68,21 @@ impl RecordingState {
             return Ok(false);
         }

+        // Skip stale samples from the buffer
+        if self.samples_to_skip > 0 {
+            let to_skip = self.samples_to_skip.min(samples.len());
+            self.samples_to_skip -= to_skip;
+
+            if to_skip == samples.len() {
+                // Skip entire batch
+                return Ok(false);
+            }
+
+            // Skip partial batch and process the rest
+            self.buffer.extend_from_slice(&samples[to_skip..]);
+        } else {
             self.buffer.extend_from_slice(samples);
+        }

         // Check if we should flush
         let frames_in_buffer = self.buffer.len() / self.channels as usize;
@@ -97,8 +114,11 @@ impl RecordingState {
     }

     /// Get current recording duration in seconds
+    /// Includes both flushed frames and buffered frames
     pub fn duration(&self) -> f64 {
-        self.frames_written as f64 / self.sample_rate as f64
+        let buffered_frames = self.buffer.len() / self.channels as usize;
+        let total_frames = self.frames_written + buffered_frames;
+        total_frames as f64 / self.sample_rate as f64
     }

     /// Finalize the recording and return the temp file path

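Aside: the stale-sample handling in add_samples above can be exercised on its own. A hedged sketch of the same bookkeeping, modelling only the two fields involved (field names mirror the diff; everything else is illustrative):

// Sketch of the skip-then-buffer behaviour from add_samples above.
// Assumes interleaved f32 samples.
struct SkipBuffer {
    samples_to_skip: usize,
    buffer: Vec<f32>,
}

impl SkipBuffer {
    fn add_samples(&mut self, samples: &[f32]) {
        if self.samples_to_skip > 0 {
            let to_skip = self.samples_to_skip.min(samples.len());
            self.samples_to_skip -= to_skip;
            if to_skip == samples.len() {
                return; // entire batch was stale
            }
            self.buffer.extend_from_slice(&samples[to_skip..]);
        } else {
            self.buffer.extend_from_slice(samples);
        }
    }
}

fn main() {
    let mut rec = SkipBuffer { samples_to_skip: 3, buffer: Vec::new() };
    rec.add_samples(&[0.1, 0.2]);      // fully skipped (2 of the 3 stale samples)
    rec.add_samples(&[0.3, 0.4, 0.5]); // skips 0.3, keeps 0.4 and 0.5
    assert_eq!(rec.buffer, vec![0.4, 0.5]);
    println!("buffered {} samples", rec.buffer.len());
}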
View File

@@ -3,6 +3,7 @@ use crate::audio::{
     TrackId,
 };
 use crate::audio::buffer_pool::BufferPoolStats;
+use crate::io::WaveformPeak;

 /// Commands sent from UI/control thread to audio thread
 #[derive(Debug, Clone)]
@@ -132,8 +133,8 @@ pub enum AudioEvent {
     RecordingStarted(TrackId, ClipId),
     /// Recording progress update (clip_id, current_duration)
     RecordingProgress(ClipId, f64),
-    /// Recording stopped (clip_id, pool_index)
-    RecordingStopped(ClipId, usize),
+    /// Recording stopped (clip_id, pool_index, waveform)
+    RecordingStopped(ClipId, usize, Vec<WaveformPeak>),
     /// Recording error (error_message)
     RecordingError(String),
     /// Project has been reset

View File

@@ -64,17 +64,26 @@ impl WavWriter {
         // Calculate total data size
         let data_size = self.frames_written * self.channels as usize * 2; // 2 bytes per sample (16-bit)
-        let file_size = 36 + data_size; // 36 = size of header before data
+
+        // WAV file structure:
+        //   RIFF header (12 bytes): "RIFF" + size + "WAVE"
+        //   fmt chunk (24 bytes): "fmt " + size + format data
+        //   data chunk header (8 bytes): "data" + size
+        //   Total header = 44 bytes
+        // RIFF chunk size = everything after offset 8 = 4 (WAVE) + 24 (fmt) + 8 (data header) + data_size
+        let riff_chunk_size = 36 + data_size; // 36 = size from "WAVE" to end of data chunk header

         // Seek to RIFF chunk size (offset 4)
         self.file.seek(SeekFrom::Start(4))?;
-        self.file.write_all(&((file_size - 8) as u32).to_le_bytes())?;
+        self.file.write_all(&(riff_chunk_size as u32).to_le_bytes())?;

         // Seek to data chunk size (offset 40)
         self.file.seek(SeekFrom::Start(40))?;
         self.file.write_all(&(data_size as u32).to_le_bytes())?;

+        // Flush and sync to ensure all data is written to disk before file is closed
         self.file.flush()?;
+        self.file.sync_all()?;

         Ok(())
     }
@@ -84,11 +93,14 @@ impl WavWriter {
 fn write_wav_header(file: &mut File, sample_rate: u32, channels: u32, frames: usize) -> io::Result<()> {
     let bytes_per_sample = 2u16; // 16-bit PCM
     let data_size = (frames * channels as usize * bytes_per_sample as usize) as u32;
-    let file_size = 36 + data_size;
+
+    // RIFF chunk size = everything after offset 8
+    //   = 4 (WAVE) + 24 (fmt chunk) + 8 (data chunk header) + data_size
+    let riff_chunk_size = 36 + data_size;

     // RIFF header
     file.write_all(b"RIFF")?;
-    file.write_all(&(file_size - 8).to_le_bytes())?;
+    file.write_all(&riff_chunk_size.to_le_bytes())?;
     file.write_all(b"WAVE")?;

     // fmt chunk

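Aside: the size fields corrected here are straightforward arithmetic over the canonical 44-byte WAV header. A small worked example, assuming 16-bit PCM as in the diff (the function name and values are illustrative):

// Worked example of the WAV size fields used above, assuming 16-bit PCM.
// Header layout: "RIFF" + riff_chunk_size + "WAVE" + fmt chunk (24 bytes)
//                + "data" + data_size, for a 44-byte header in total.
fn wav_sizes(frames: usize, channels: usize) -> (u32, u32, u32) {
    let bytes_per_sample = 2; // 16-bit PCM
    let data_size = (frames * channels * bytes_per_sample) as u32;
    let riff_chunk_size = 36 + data_size; // everything after the 8-byte "RIFF"+size prefix
    let total_file_size = 44 + data_size; // header + payload
    (data_size, riff_chunk_size, total_file_size)
}

fn main() {
    // One second of 48 kHz stereo audio.
    let (data, riff, total) = wav_sizes(48_000, 2);
    assert_eq!(data, 192_000);
    assert_eq!(riff, 192_036);
    assert_eq!(total, 192_044);
    println!("data={} riff={} file={}", data, riff, total);
}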
View File

@@ -32,48 +32,115 @@ pub struct AudioSystem {
 }

 impl AudioSystem {
-    /// Initialize the audio system with default device
+    /// Initialize the audio system with default input and output devices
     pub fn new() -> Result<Self, String> {
         let host = cpal::default_host();
-        let device = host
+
+        // Get output device
+        let output_device = host
             .default_output_device()
             .ok_or("No output device available")?;
-        let default_config = device.default_output_config().map_err(|e| e.to_string())?;
-        let sample_rate = default_config.sample_rate().0;
-        let channels = default_config.channels() as u32;
+        let default_output_config = output_device.default_output_config().map_err(|e| e.to_string())?;
+        let sample_rate = default_output_config.sample_rate().0;
+        let channels = default_output_config.channels() as u32;

         // Create queues
         let (command_tx, command_rx) = rtrb::RingBuffer::new(256);
         let (event_tx, event_rx) = rtrb::RingBuffer::new(256);

+        // Create input ringbuffer for recording (large buffer for audio samples)
+        // Buffer size: 10 seconds of audio at 48kHz stereo = 48000 * 2 * 10 = 960000 samples
+        let input_buffer_size = (sample_rate * channels * 10) as usize;
+        let (mut input_tx, input_rx) = rtrb::RingBuffer::new(input_buffer_size);
+
         // Create engine
         let mut engine = Engine::new(sample_rate, channels, command_rx, event_tx);
+        engine.set_input_rx(input_rx);
         let controller = engine.get_controller(command_tx);

-        // Build stream
-        let config: cpal::StreamConfig = default_config.clone().into();
-        let mut buffer = vec![0.0f32; 16384];
-        let stream = device
+        // Build output stream
+        let output_config: cpal::StreamConfig = default_output_config.clone().into();
+        let mut output_buffer = vec![0.0f32; 16384];
+        let output_stream = output_device
             .build_output_stream(
-                &config,
+                &output_config,
                 move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
-                    let buf = &mut buffer[..data.len()];
+                    let buf = &mut output_buffer[..data.len()];
                     buf.fill(0.0);
                     engine.process(buf);
                     data.copy_from_slice(buf);
                 },
-                |err| eprintln!("Stream error: {}", err),
+                |err| eprintln!("Output stream error: {}", err),
                 None,
             )
             .map_err(|e| e.to_string())?;

-        stream.play().map_err(|e| e.to_string())?;
+        // Get input device
+        let input_device = match host.default_input_device() {
+            Some(device) => device,
+            None => {
+                eprintln!("Warning: No input device available, recording will be disabled");
+                // Start output stream and return without input
+                output_stream.play().map_err(|e| e.to_string())?;
+                return Ok(Self {
+                    controller,
+                    stream: output_stream,
+                    event_rx,
+                    sample_rate,
+                    channels,
+                });
+            }
+        };
+
+        // Get input config matching output sample rate and channels if possible
+        let input_config = match input_device.default_input_config() {
+            Ok(config) => {
+                let mut cfg: cpal::StreamConfig = config.into();
+                // Try to match output sample rate and channels
+                cfg.sample_rate = cpal::SampleRate(sample_rate);
+                cfg.channels = channels as u16;
+                cfg
+            }
+            Err(e) => {
+                eprintln!("Warning: Could not get input config: {}, recording will be disabled", e);
+                output_stream.play().map_err(|e| e.to_string())?;
+                return Ok(Self {
+                    controller,
+                    stream: output_stream,
+                    event_rx,
+                    sample_rate,
+                    channels,
+                });
+            }
+        };
+
+        // Build input stream that feeds into the ringbuffer
+        let input_stream = input_device
+            .build_input_stream(
+                &input_config,
+                move |data: &[f32], _: &cpal::InputCallbackInfo| {
+                    // Push input samples to ringbuffer for recording
+                    for &sample in data {
+                        let _ = input_tx.push(sample);
+                    }
+                },
+                |err| eprintln!("Input stream error: {}", err),
+                None,
+            )
+            .map_err(|e| e.to_string())?;
+
+        // Start both streams
+        output_stream.play().map_err(|e| e.to_string())?;
+        input_stream.play().map_err(|e| e.to_string())?;
+
+        // Leak the input stream to keep it alive
+        Box::leak(Box::new(input_stream));

         Ok(Self {
             controller,
-            stream,
+            stream: output_stream,
             event_rx,
             sample_rate,
             channels,

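Aside: the new input path is an rtrb single-producer/single-consumer ring buffer bridging the cpal input callback and the engine. A self-contained sketch of that handoff, using a plain thread in place of the audio callback (the capacity and names here are assumptions; the real buffer is sized for roughly ten seconds of audio):

// Sketch of the producer/consumer handoff used for recording input,
// with a worker thread standing in for the cpal input callback.
use std::thread;

fn main() {
    // Capacity here is arbitrary for the example.
    let (mut input_tx, mut input_rx) = rtrb::RingBuffer::<f32>::new(1024);

    let producer = thread::spawn(move || {
        for i in 0..256 {
            // In the real system this happens inside the input stream callback;
            // pushes that fail because the buffer is full are simply dropped.
            let _ = input_tx.push(i as f32 / 256.0);
        }
    });
    producer.join().unwrap();

    // The engine drains whatever is available on its own thread.
    let available = input_rx.slots(); // number of samples waiting to be read
    let mut drained = Vec::with_capacity(available);
    while let Ok(sample) = input_rx.pop() {
        drained.push(sample);
    }
    assert_eq!(drained.len(), 256);
    println!("drained {} samples", drained.len());
}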
View File

@@ -254,7 +254,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
                 print!("Recording clip {}: {:.2}s", clip_id, duration);
                 io::stdout().flush().ok();
             }
-            AudioEvent::RecordingStopped(clip_id, pool_index) => {
+            AudioEvent::RecordingStopped(clip_id, pool_index, _waveform) => {
                 print!("\r\x1b[K");
                 println!("Recording stopped (clip {}, pool index {})", clip_id, pool_index);
                 print!("> ");

View File

@@ -4,10 +4,18 @@
   "version": "0.1.0",
   "type": "module",
   "scripts": {
-    "tauri": "tauri"
+    "tauri": "tauri",
+    "test": "wdio run wdio.conf.js",
+    "test:watch": "wdio run wdio.conf.js --watch"
   },
   "devDependencies": {
-    "@tauri-apps/cli": "^2"
+    "@tauri-apps/cli": "^2",
+    "@wdio/cli": "^9.20.0",
+    "@wdio/globals": "^9.17.0",
+    "@wdio/local-runner": "8",
+    "@wdio/mocha-framework": "^9.20.0",
+    "@wdio/spec-reporter": "^9.20.0",
+    "webdriverio": "^9.20.0"
   },
   "dependencies": {
     "@ffmpeg/ffmpeg": "^0.12.10",

File diff suppressed because it is too large.

src-tauri/Cargo.lock (generated, 551 changed lines)
View File

@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
[[package]] [[package]]
name = "addr2line" name = "addr2line"
@ -52,6 +52,28 @@ dependencies = [
"alloc-no-stdlib", "alloc-no-stdlib",
] ]
[[package]]
name = "alsa"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed7572b7ba83a31e20d1b48970ee402d2e3e0537dcfe0a3ff4d6eb7508617d43"
dependencies = [
"alsa-sys",
"bitflags 2.8.0",
"cfg-if",
"libc",
]
[[package]]
name = "alsa-sys"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db8fee663d06c4e303404ef5f40488a53e062f89ba8bfed81f42325aafad1527"
dependencies = [
"libc",
"pkg-config",
]
[[package]] [[package]]
name = "android-tzdata" name = "android-tzdata"
version = "0.1.1" version = "0.1.1"
@ -207,6 +229,24 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "bindgen"
version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags 2.8.0",
"cexpr",
"clang-sys",
"itertools",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn 2.0.96",
]
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "1.3.2" version = "1.3.2"
@ -435,6 +475,8 @@ version = "1.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229"
dependencies = [ dependencies = [
"jobserver",
"libc",
"shlex", "shlex",
] ]
@ -444,6 +486,15 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]] [[package]]
name = "cfb" name = "cfb"
version = "0.7.3" version = "0.7.3"
@ -492,6 +543,17 @@ dependencies = [
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading 0.8.6",
]
[[package]] [[package]]
name = "cocoa" name = "cocoa"
version = "0.26.0" version = "0.26.0"
@ -597,6 +659,49 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "coreaudio-rs"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "321077172d79c662f64f5071a03120748d5bb652f5231570141be24cfcd2bace"
dependencies = [
"bitflags 1.3.2",
"core-foundation-sys",
"coreaudio-sys",
]
[[package]]
name = "coreaudio-sys"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ceec7a6067e62d6f931a2baf6f3a751f4a892595bcec1461a3c94ef9949864b6"
dependencies = [
"bindgen",
]
[[package]]
name = "cpal"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "873dab07c8f743075e57f524c583985fbaf745602acbe916a01539364369a779"
dependencies = [
"alsa",
"core-foundation-sys",
"coreaudio-rs",
"dasp_sample",
"jni",
"js-sys",
"libc",
"mach2",
"ndk 0.8.0",
"ndk-context",
"oboe",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows 0.54.0",
]
[[package]] [[package]]
name = "cpufeatures" name = "cpufeatures"
version = "0.2.16" version = "0.2.16"
@ -624,6 +729,25 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]] [[package]]
name = "crossbeam-utils" name = "crossbeam-utils"
version = "0.8.21" version = "0.8.21"
@ -712,6 +836,23 @@ dependencies = [
"syn 2.0.96", "syn 2.0.96",
] ]
[[package]]
name = "dasp_sample"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f"
[[package]]
name = "daw-backend"
version = "0.1.0"
dependencies = [
"cpal",
"midly",
"rtrb",
"serde",
"symphonia",
]
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.3.11" version = "0.3.11"
@ -857,6 +998,12 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
[[package]]
name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]] [[package]]
name = "embed-resource" name = "embed-resource"
version = "2.5.1" version = "2.5.1"
@ -970,6 +1117,12 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "extended"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af9673d8203fcb076b19dfd17e38b3d4ae9f44959416ea532ce72415a6020365"
[[package]] [[package]]
name = "fastrand" name = "fastrand"
version = "2.3.0" version = "2.3.0"
@ -1803,6 +1956,15 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "0.4.8" version = "0.4.8"
@ -1860,6 +2022,15 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "jobserver"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.77" version = "0.3.77"
@ -1987,7 +2158,10 @@ name = "lightningbeam"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"chrono", "chrono",
"cpal",
"daw-backend",
"log", "log",
"rtrb",
"serde", "serde",
"serde_json", "serde_json",
"tauri", "tauri",
@ -2037,6 +2211,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "mach2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "malloc_buf" name = "malloc_buf"
version = "0.0.6" version = "0.0.6"
@ -2090,12 +2273,27 @@ dependencies = [
"autocfg", "autocfg",
] ]
[[package]]
name = "midly"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "207d755f4cb882d20c4da58d707ca9130a0c9bc5061f657a4f299b8e36362b7a"
dependencies = [
"rayon",
]
[[package]] [[package]]
name = "mime" name = "mime"
version = "0.3.17" version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.8.3" version = "0.8.3"
@ -2137,6 +2335,20 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "ndk"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2076a31b7010b17a38c01907c45b945e8f11495ee4dd588309718901b1f7a5b7"
dependencies = [
"bitflags 2.8.0",
"jni-sys",
"log",
"ndk-sys 0.5.0+25.2.9519653",
"num_enum",
"thiserror 1.0.69",
]
[[package]] [[package]]
name = "ndk" name = "ndk"
version = "0.9.0" version = "0.9.0"
@ -2146,7 +2358,7 @@ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"jni-sys", "jni-sys",
"log", "log",
"ndk-sys", "ndk-sys 0.6.0+11769913",
"num_enum", "num_enum",
"raw-window-handle", "raw-window-handle",
"thiserror 1.0.69", "thiserror 1.0.69",
@ -2158,6 +2370,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b"
[[package]]
name = "ndk-sys"
version = "0.5.0+25.2.9519653"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c196769dd60fd4f363e11d948139556a344e79d451aeb2fa2fd040738ef7691"
dependencies = [
"jni-sys",
]
[[package]] [[package]]
name = "ndk-sys" name = "ndk-sys"
version = "0.6.0+11769913" version = "0.6.0+11769913"
@ -2192,6 +2413,16 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]] [[package]]
name = "nu-ansi-term" name = "nu-ansi-term"
version = "0.46.0" version = "0.46.0"
@ -2208,6 +2439,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.19" version = "0.2.19"
@ -2484,6 +2726,29 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "oboe"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8b61bebd49e5d43f5f8cc7ee2891c16e0f41ec7954d36bcb6c14c5e0de867fb"
dependencies = [
"jni",
"ndk 0.8.0",
"ndk-context",
"num-derive",
"num-traits",
"oboe-sys",
]
[[package]]
name = "oboe-sys"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8bb09a4a2b1d668170cfe0a7d5bc103f8999fb316c98099b6a9939c9f2e79d"
dependencies = [
"cc",
]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.20.2" version = "1.20.2"
@ -3006,6 +3271,26 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
[[package]]
name = "rayon"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.5.8" version = "0.5.8"
@ -3171,6 +3456,12 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "rtrb"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad8388ea1a9e0ea807e442e8263a699e7edcb320ecbcd21b4fa8ff859acce3ba"
[[package]] [[package]]
name = "rust_decimal" name = "rust_decimal"
version = "1.36.0" version = "1.36.0"
@ -3193,6 +3484,12 @@ version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]] [[package]]
name = "rustc_version" name = "rustc_version"
version = "0.4.1" version = "0.4.1"
@ -3655,6 +3952,201 @@ dependencies = [
"serde_json", "serde_json",
] ]
[[package]]
name = "symphonia"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5773a4c030a19d9bfaa090f49746ff35c75dfddfa700df7a5939d5e076a57039"
dependencies = [
"lazy_static",
"symphonia-bundle-flac",
"symphonia-bundle-mp3",
"symphonia-codec-aac",
"symphonia-codec-adpcm",
"symphonia-codec-alac",
"symphonia-codec-pcm",
"symphonia-codec-vorbis",
"symphonia-core",
"symphonia-format-caf",
"symphonia-format-isomp4",
"symphonia-format-mkv",
"symphonia-format-ogg",
"symphonia-format-riff",
"symphonia-metadata",
]
[[package]]
name = "symphonia-bundle-flac"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c91565e180aea25d9b80a910c546802526ffd0072d0b8974e3ebe59b686c9976"
dependencies = [
"log",
"symphonia-core",
"symphonia-metadata",
"symphonia-utils-xiph",
]
[[package]]
name = "symphonia-bundle-mp3"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4872dd6bb56bf5eac799e3e957aa1981086c3e613b27e0ac23b176054f7c57ed"
dependencies = [
"lazy_static",
"log",
"symphonia-core",
"symphonia-metadata",
]
[[package]]
name = "symphonia-codec-aac"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c263845aa86881416849c1729a54c7f55164f8b96111dba59de46849e73a790"
dependencies = [
"lazy_static",
"log",
"symphonia-core",
]
[[package]]
name = "symphonia-codec-adpcm"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dddc50e2bbea4cfe027441eece77c46b9f319748605ab8f3443350129ddd07f"
dependencies = [
"log",
"symphonia-core",
]
[[package]]
name = "symphonia-codec-alac"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8413fa754942ac16a73634c9dfd1500ed5c61430956b33728567f667fdd393ab"
dependencies = [
"log",
"symphonia-core",
]
[[package]]
name = "symphonia-codec-pcm"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e89d716c01541ad3ebe7c91ce4c8d38a7cf266a3f7b2f090b108fb0cb031d95"
dependencies = [
"log",
"symphonia-core",
]
[[package]]
name = "symphonia-codec-vorbis"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f025837c309cd69ffef572750b4a2257b59552c5399a5e49707cc5b1b85d1c73"
dependencies = [
"log",
"symphonia-core",
"symphonia-utils-xiph",
]
[[package]]
name = "symphonia-core"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea00cc4f79b7f6bb7ff87eddc065a1066f3a43fe1875979056672c9ef948c2af"
dependencies = [
"arrayvec",
"bitflags 1.3.2",
"bytemuck",
"lazy_static",
"log",
]
[[package]]
name = "symphonia-format-caf"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8faf379316b6b6e6bbc274d00e7a592e0d63ff1a7e182ce8ba25e24edd3d096"
dependencies = [
"log",
"symphonia-core",
"symphonia-metadata",
]
[[package]]
name = "symphonia-format-isomp4"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243739585d11f81daf8dac8d9f3d18cc7898f6c09a259675fc364b382c30e0a5"
dependencies = [
"encoding_rs",
"log",
"symphonia-core",
"symphonia-metadata",
"symphonia-utils-xiph",
]
[[package]]
name = "symphonia-format-mkv"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "122d786d2c43a49beb6f397551b4a050d8229eaa54c7ddf9ee4b98899b8742d0"
dependencies = [
"lazy_static",
"log",
"symphonia-core",
"symphonia-metadata",
"symphonia-utils-xiph",
]
[[package]]
name = "symphonia-format-ogg"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b4955c67c1ed3aa8ae8428d04ca8397fbef6a19b2b051e73b5da8b1435639cb"
dependencies = [
"log",
"symphonia-core",
"symphonia-metadata",
"symphonia-utils-xiph",
]
[[package]]
name = "symphonia-format-riff"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2d7c3df0e7d94efb68401d81906eae73c02b40d5ec1a141962c592d0f11a96f"
dependencies = [
"extended",
"log",
"symphonia-core",
"symphonia-metadata",
]
[[package]]
name = "symphonia-metadata"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36306ff42b9ffe6e5afc99d49e121e0bd62fe79b9db7b9681d48e29fa19e6b16"
dependencies = [
"encoding_rs",
"lazy_static",
"log",
"symphonia-core",
]
[[package]]
name = "symphonia-utils-xiph"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee27c85ab799a338446b68eec77abf42e1a6f1bb490656e121c6e27bfbab9f16"
dependencies = [
"symphonia-core",
"symphonia-metadata",
]
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.109" version = "1.0.109"
@ -3731,9 +4223,9 @@ dependencies = [
"lazy_static", "lazy_static",
"libc", "libc",
"log", "log",
"ndk", "ndk 0.9.0",
"ndk-context", "ndk-context",
"ndk-sys", "ndk-sys 0.6.0+11769913",
"objc", "objc",
"once_cell", "once_cell",
"parking_lot", "parking_lot",
@ -3742,7 +4234,7 @@ dependencies = [
"tao-macros", "tao-macros",
"unicode-segmentation", "unicode-segmentation",
"url", "url",
"windows", "windows 0.58.0",
"windows-core 0.58.0", "windows-core 0.58.0",
"windows-version", "windows-version",
"x11-dl", "x11-dl",
@ -3819,7 +4311,7 @@ dependencies = [
"webkit2gtk", "webkit2gtk",
"webview2-com", "webview2-com",
"window-vibrancy", "window-vibrancy",
"windows", "windows 0.58.0",
] ]
[[package]] [[package]]
@ -4002,7 +4494,7 @@ dependencies = [
"tauri-utils", "tauri-utils",
"thiserror 2.0.11", "thiserror 2.0.11",
"url", "url",
"windows", "windows 0.58.0",
] ]
[[package]] [[package]]
@ -4027,7 +4519,7 @@ dependencies = [
"url", "url",
"webkit2gtk", "webkit2gtk",
"webview2-com", "webview2-com",
"windows", "windows 0.58.0",
"wry", "wry",
] ]
@ -4847,7 +5339,7 @@ checksum = "823e7ebcfaea51e78f72c87fc3b65a1e602c321f407a0b36dbb327d7bb7cd921"
dependencies = [ dependencies = [
"webview2-com-macros", "webview2-com-macros",
"webview2-com-sys", "webview2-com-sys",
"windows", "windows 0.58.0",
"windows-core 0.58.0", "windows-core 0.58.0",
"windows-implement", "windows-implement",
"windows-interface", "windows-interface",
@ -4871,7 +5363,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a82bce72db6e5ee83c68b5de1e2cd6ea195b9fbff91cb37df5884cbe3222df4" checksum = "7a82bce72db6e5ee83c68b5de1e2cd6ea195b9fbff91cb37df5884cbe3222df4"
dependencies = [ dependencies = [
"thiserror 1.0.69", "thiserror 1.0.69",
"windows", "windows 0.58.0",
"windows-core 0.58.0", "windows-core 0.58.0",
] ]
@ -4920,6 +5412,16 @@ dependencies = [
"windows-version", "windows-version",
] ]
[[package]]
name = "windows"
version = "0.54.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49"
dependencies = [
"windows-core 0.54.0",
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows" name = "windows"
version = "0.58.0" version = "0.58.0"
@ -4939,6 +5441,16 @@ dependencies = [
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
[[package]]
name = "windows-core"
version = "0.54.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65"
dependencies = [
"windows-result 0.1.2",
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows-core" name = "windows-core"
version = "0.58.0" version = "0.58.0"
@ -4947,7 +5459,7 @@ checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
dependencies = [ dependencies = [
"windows-implement", "windows-implement",
"windows-interface", "windows-interface",
"windows-result", "windows-result 0.2.0",
"windows-strings", "windows-strings",
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
@ -4980,11 +5492,20 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0"
dependencies = [ dependencies = [
"windows-result", "windows-result 0.2.0",
"windows-strings", "windows-strings",
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
[[package]]
name = "windows-result"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
dependencies = [
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows-result" name = "windows-result"
version = "0.2.0" version = "0.2.0"
@ -5000,7 +5521,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
dependencies = [ dependencies = [
"windows-result", "windows-result 0.2.0",
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
@ -5351,7 +5872,7 @@ dependencies = [
"jni", "jni",
"kuchikiki", "kuchikiki",
"libc", "libc",
"ndk", "ndk 0.9.0",
"objc2", "objc2",
"objc2-app-kit", "objc2-app-kit",
"objc2-foundation", "objc2-foundation",
@ -5368,7 +5889,7 @@ dependencies = [
"webkit2gtk", "webkit2gtk",
"webkit2gtk-sys", "webkit2gtk-sys",
"webview2-com", "webview2-com",
"windows", "windows 0.58.0",
"windows-core 0.58.0", "windows-core 0.58.0",
"windows-version", "windows-version",
"x11-dl", "x11-dl",

View File

@@ -31,3 +31,15 @@ tracing-subscriber = {version = "0.3.19", features = ["env-filter"] }
 log = "0.4"
 chrono = "0.4"
+
+# DAW backend integration
+daw-backend = { path = "../daw-backend" }
+cpal = "0.15"
+rtrb = "0.3"
+
+[profile.dev]
+opt-level = 1 # Enable basic optimizations in debug mode for audio decoding performance
+
+[profile.release]
+opt-level = 3
+lto = true

src-tauri/src/audio.rs (new file, 352 lines)
View File

@@ -0,0 +1,352 @@
use daw_backend::{AudioEvent, AudioSystem, EngineController, WaveformPeak};
use std::sync::{Arc, Mutex};
#[derive(serde::Serialize)]
pub struct AudioFileMetadata {
pub pool_index: usize,
pub duration: f64,
pub sample_rate: u32,
pub channels: u32,
pub waveform: Vec<WaveformPeak>,
}
pub struct AudioState {
controller: Option<EngineController>,
event_rx: Option<rtrb::Consumer<AudioEvent>>,
sample_rate: u32,
channels: u32,
next_track_id: u32,
next_pool_index: usize,
}
impl Default for AudioState {
fn default() -> Self {
Self {
controller: None,
event_rx: None,
sample_rate: 0,
channels: 0,
next_track_id: 0,
next_pool_index: 0,
}
}
}
#[tauri::command]
pub async fn audio_init(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<String, String> {
let mut audio_state = state.lock().unwrap();
// Check if already initialized - if so, reset DAW state (for hot-reload)
if let Some(controller) = &mut audio_state.controller {
controller.reset();
audio_state.next_track_id = 0;
audio_state.next_pool_index = 0;
return Ok(format!(
"Audio already initialized (DAW state reset): {} Hz, {} ch",
audio_state.sample_rate, audio_state.channels
));
}
// AudioSystem handles all cpal initialization internally
let system = AudioSystem::new()?;
let info = format!(
"Audio initialized: {} Hz, {} ch",
system.sample_rate, system.channels
);
// Leak the stream to keep it alive for the lifetime of the app
// This is intentional - we want the audio stream to run until app closes
Box::leak(Box::new(system.stream));
audio_state.controller = Some(system.controller);
audio_state.event_rx = Some(system.event_rx);
audio_state.sample_rate = system.sample_rate;
audio_state.channels = system.channels;
audio_state.next_track_id = 0;
audio_state.next_pool_index = 0;
Ok(info)
}
#[tauri::command]
pub async fn audio_play(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.play();
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_stop(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.stop();
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_test_beep(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
// Create MIDI track
controller.create_midi_track("Test".to_string());
// Note: Track ID will be 0 (first track created)
// Create MIDI clip and add notes for a C major chord arpeggio
controller.create_midi_clip(0, 0.0, 2.0);
controller.add_midi_note(0, 0, 0.0, 60, 100, 0.5); // C
controller.add_midi_note(0, 0, 0.5, 64, 100, 0.5); // E
controller.add_midi_note(0, 0, 1.0, 67, 100, 0.5); // G
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_seek(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
seconds: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.seek(seconds);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_set_track_parameter(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
parameter: String,
value: f32,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
match parameter.as_str() {
"volume" => controller.set_track_volume(track_id, value),
"mute" => controller.set_track_mute(track_id, value > 0.5),
"solo" => controller.set_track_solo(track_id, value > 0.5),
"pan" => {
// Pan effect - would need to add this via effects system
controller.add_pan_effect(track_id, value);
}
"gain_db" => {
controller.add_gain_effect(track_id, value);
}
_ => return Err(format!("Unknown parameter: {}", parameter)),
}
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_create_track(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
name: String,
track_type: String,
) -> Result<u32, String> {
let mut audio_state = state.lock().unwrap();
// Get track ID and increment counter before borrowing controller
let track_id = audio_state.next_track_id;
audio_state.next_track_id += 1;
if let Some(controller) = &mut audio_state.controller {
match track_type.as_str() {
"audio" => controller.create_audio_track(name),
"midi" => controller.create_midi_track(name),
_ => return Err(format!("Unknown track type: {}", track_type)),
}
Ok(track_id)
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_load_file(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
path: String,
) -> Result<AudioFileMetadata, String> {
// Load the audio file from disk
let audio_file = daw_backend::io::AudioFile::load(&path)?;
// Calculate duration
let duration = audio_file.duration();
// Generate adaptive waveform peaks based on duration
// Aim for ~300 peaks per second, with min 1000 and max 20000
let target_peaks = ((duration * 300.0) as usize).clamp(1000, 20000);
let waveform = audio_file.generate_waveform_overview(target_peaks);
let sample_rate = audio_file.sample_rate;
let channels = audio_file.channels;
// Get a lock on the audio state and send the loaded data to the audio thread
let mut audio_state = state.lock().unwrap();
// Get pool index and increment counter before borrowing controller
let pool_index = audio_state.next_pool_index;
audio_state.next_pool_index += 1;
if let Some(controller) = &mut audio_state.controller {
controller.add_audio_file(
path,
audio_file.data,
audio_file.channels,
audio_file.sample_rate,
);
Ok(AudioFileMetadata {
pool_index,
duration,
sample_rate,
channels,
waveform,
})
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_add_clip(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
pool_index: usize,
start_time: f64,
duration: f64,
offset: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.add_audio_clip(track_id, pool_index, start_time, duration, offset);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_move_clip(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
clip_id: u32,
new_start_time: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.move_clip(track_id, clip_id, new_start_time);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_start_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
start_time: f64,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.start_recording(track_id, start_time);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_stop_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.stop_recording();
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_pause_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.pause_recording();
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn audio_resume_recording(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
controller.resume_recording();
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[derive(serde::Serialize)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {
RecordingStarted { track_id: u32, clip_id: u32 },
RecordingProgress { clip_id: u32, duration: f64 },
RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> },
RecordingError { message: String },
}
#[tauri::command]
pub async fn audio_get_events(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<Vec<SerializedAudioEvent>, String> {
let mut audio_state = state.lock().unwrap();
let mut events = Vec::new();
if let Some(event_rx) = &mut audio_state.event_rx {
// Poll all available events
while let Ok(event) = event_rx.pop() {
match event {
AudioEvent::RecordingStarted(track_id, clip_id) => {
events.push(SerializedAudioEvent::RecordingStarted { track_id, clip_id });
}
AudioEvent::RecordingProgress(clip_id, duration) => {
events.push(SerializedAudioEvent::RecordingProgress { clip_id, duration });
}
AudioEvent::RecordingStopped(clip_id, pool_index, waveform) => {
events.push(SerializedAudioEvent::RecordingStopped { clip_id, pool_index, waveform });
}
AudioEvent::RecordingError(message) => {
events.push(SerializedAudioEvent::RecordingError { message });
}
// Ignore other event types for now
_ => {}
}
}
}
Ok(events)
}
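Aside: because SerializedAudioEvent is internally tagged with #[serde(tag = "type")], each polled event reaches the frontend as a flat JSON object carrying a "type" discriminator. A hedged sketch of that serialization using serde_json (a trimmed copy of the enum; the example values are made up):

// Sketch of how the internally tagged enum above serializes.
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "type")]
enum SerializedAudioEvent {
    RecordingProgress { clip_id: u32, duration: f64 },
    RecordingError { message: String },
}

fn main() {
    let events = vec![
        SerializedAudioEvent::RecordingProgress { clip_id: 3, duration: 1.25 },
        SerializedAudioEvent::RecordingError { message: "input overrun".into() },
    ];
    let json = serde_json::to_string(&events).unwrap();
    // e.g. [{"type":"RecordingProgress","clip_id":3,"duration":1.25}, ...]
    println!("{}", json);
    assert!(json.contains("\"type\":\"RecordingProgress\""));
}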

View File

@@ -1,4 +1,4 @@
-use std::{path::PathBuf, sync::Mutex};
+use std::{path::PathBuf, sync::{Arc, Mutex}};
 use tauri_plugin_log::{Target, TargetKind};
 use log::{trace, info, debug, warn, error};
@@ -6,6 +6,8 @@ use tracing_subscriber::EnvFilter;
 use chrono::Local;
 use tauri::{AppHandle, Manager, Url, WebviewUrl, WebviewWindowBuilder};

+mod audio;
+
 #[derive(Default)]
 struct AppState {
@@ -127,6 +129,7 @@ pub fn run() {
     let pkg_name = env!("CARGO_PKG_NAME").to_string();
     tauri::Builder::default()
         .manage(Mutex::new(AppState::default()))
+        .manage(Arc::new(Mutex::new(audio::AudioState::default())))
         .setup(|app| {
             #[cfg(any(windows, target_os = "linux"))] // Windows/Linux needs different handling from macOS
             {
@@ -188,7 +191,24 @@ pub fn run() {
         .plugin(tauri_plugin_dialog::init())
         .plugin(tauri_plugin_fs::init())
         .plugin(tauri_plugin_shell::init())
-        .invoke_handler(tauri::generate_handler![greet, trace, debug, info, warn, error, create_window])
+        .invoke_handler(tauri::generate_handler![
+            greet, trace, debug, info, warn, error, create_window,
+            audio::audio_init,
+            audio::audio_play,
+            audio::audio_stop,
+            audio::audio_seek,
+            audio::audio_test_beep,
+            audio::audio_set_track_parameter,
+            audio::audio_create_track,
+            audio::audio_load_file,
+            audio::audio_add_clip,
+            audio::audio_move_clip,
+            audio::audio_start_recording,
+            audio::audio_stop_recording,
+            audio::audio_pause_recording,
+            audio::audio_resume_recording,
+            audio::audio_get_events,
+        ])
         // .manage(window_counter)
         .build(tauri::generate_context!())
         .expect("error while running tauri application")

View File

@@ -13,7 +13,8 @@
       "title": "Lightningbeam",
       "width": 1500,
       "height": 1024,
-      "dragDropEnabled": false
+      "dragDropEnabled": false,
+      "zoomHotkeysEnabled": false
     }
   ],
   "security": {

src/actions/index.js (new file, 1871 lines)

File diff suppressed because it is too large.

View File

@@ -0,0 +1,166 @@
// Selection actions: selectAll, selectNone, select
import { context, pointerList } from '../state.js';
import { arraysAreEqual } from '../utils.js';
// Forward declarations for injected dependencies
let undoStack = null;
let redoStack = null;
let updateUI = null;
let updateMenu = null;
let actions = null; // Reference to full actions object for self-calls
export function initializeSelectionActions(deps) {
undoStack = deps.undoStack;
redoStack = deps.redoStack;
updateUI = deps.updateUI;
updateMenu = deps.updateMenu;
actions = deps.actions;
}
export const selectionActions = {
selectAll: {
create: () => {
redoStack.length = 0;
let selection = [];
let shapeselection = [];
const currentTime = context.activeObject.currentTime || 0;
const layer = context.activeObject.activeLayer;
for (let child of layer.children) {
let idx = child.idx;
const existsValue = layer.animationData.interpolate(`object.${idx}.exists`, currentTime);
if (existsValue > 0) {
selection.push(child.idx);
}
}
// Use getVisibleShapes instead of currentFrame.shapes
if (layer) {
for (let shape of layer.getVisibleShapes(currentTime)) {
shapeselection.push(shape.idx);
}
}
let action = {
selection: selection,
shapeselection: shapeselection,
};
undoStack.push({ name: "selectAll", action: action });
actions.selectAll.execute(action);
updateMenu();
},
execute: (action) => {
context.selection = [];
context.shapeselection = [];
for (let item of action.selection) {
context.selection.push(pointerList[item]);
}
for (let shape of action.shapeselection) {
context.shapeselection.push(pointerList[shape]);
}
updateUI();
updateMenu();
},
rollback: (action) => {
context.selection = [];
context.shapeselection = [];
updateUI();
updateMenu();
},
},
selectNone: {
create: () => {
redoStack.length = 0;
let selection = [];
let shapeselection = [];
for (let item of context.selection) {
selection.push(item.idx);
}
for (let shape of context.shapeselection) {
shapeselection.push(shape.idx);
}
let action = {
selection: selection,
shapeselection: shapeselection,
};
undoStack.push({ name: "selectNone", action: action });
actions.selectNone.execute(action);
updateMenu();
},
execute: (action) => {
context.selection = [];
context.shapeselection = [];
updateUI();
updateMenu();
},
rollback: (action) => {
context.selection = [];
context.shapeselection = [];
for (let item of action.selection) {
context.selection.push(pointerList[item]);
}
for (let shape of action.shapeselection) {
context.shapeselection.push(pointerList[shape]);
}
updateUI();
updateMenu();
},
},
select: {
create: () => {
redoStack.length = 0;
if (
arraysAreEqual(context.oldselection, context.selection) &&
arraysAreEqual(context.oldshapeselection, context.shapeselection)
)
return;
let oldselection = [];
let oldshapeselection = [];
for (let item of context.oldselection) {
oldselection.push(item.idx);
}
for (let shape of context.oldshapeselection) {
oldshapeselection.push(shape.idx);
}
let selection = [];
let shapeselection = [];
for (let item of context.selection) {
selection.push(item.idx);
}
for (let shape of context.shapeselection) {
shapeselection.push(shape.idx);
}
let action = {
selection: selection,
shapeselection: shapeselection,
oldselection: oldselection,
oldshapeselection: oldshapeselection,
};
undoStack.push({ name: "select", action: action });
actions.select.execute(action);
updateMenu();
},
execute: (action) => {
context.selection = [];
context.shapeselection = [];
for (let item of action.selection) {
context.selection.push(pointerList[item]);
}
for (let shape of action.shapeselection) {
context.shapeselection.push(pointerList[shape]);
}
updateUI();
updateMenu();
},
rollback: (action) => {
context.selection = [];
context.shapeselection = [];
for (let item of action.oldselection) {
context.selection.push(pointerList[item]);
}
for (let shape of action.oldshapeselection) {
context.shapeselection.push(pointerList[shape]);
}
updateUI();
updateMenu();
},
},
};

File diff suppressed because it is too large.

src/models/animation.js (new file, 543 lines)
View File

@@ -0,0 +1,543 @@
// Animation system models: Frame, Keyframe, AnimationCurve, AnimationData
import { context, config, pointerList, startProps } from '../state.js';
// Helper function for UUID generation
function uuidv4() {
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) =>
(
+c ^
(crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (+c / 4)))
).toString(16),
);
}
class Frame {
constructor(frameType = "normal", uuid = undefined) {
this.keys = {};
this.shapes = [];
this.frameType = frameType;
this.keyTypes = new Set()
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
pointerList[this.idx] = this;
}
get exists() {
return true;
}
saveState() {
startProps[this.idx] = structuredClone(this.keys);
}
copy(idx) {
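// Graft the new 8-character prefix onto this frame's uuid suffix so copies get
// deterministic ids (important for undo/redo lookups in pointerList).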
let newFrame = new Frame(
this.frameType,
idx.slice(0, 8) + this.idx.slice(8),
);
newFrame.keys = structuredClone(this.keys);
newFrame.shapes = [];
for (let shape of this.shapes) {
newFrame.shapes.push(shape.copy(idx));
}
return newFrame;
}
static fromJSON(json, Shape = null) {
if (!json) {
return undefined
}
// Shape parameter passed in to avoid circular dependency
// Will be provided by the calling code that has access to both modules
const frame = new Frame(json.frameType, json.idx);
frame.keyTypes = new Set(json.keyTypes)
frame.keys = json.keys;
if (Shape) {
for (let i in json.shapes) {
const shape = json.shapes[i];
frame.shapes.push(Shape.fromJSON(shape));
}
}
return frame;
}
toJSON(randomizeUuid = false) {
const json = {};
json.type = "Frame";
json.frameType = this.frameType;
json.keyTypes = Array.from(this.keyTypes)
if (randomizeUuid) {
json.idx = uuidv4();
} else {
json.idx = this.idx;
}
json.keys = structuredClone(this.keys);
json.shapes = [];
for (let shape of this.shapes) {
json.shapes.push(shape.toJSON(randomizeUuid));
}
return json;
}
addShape(shape, sendToBack) {
if (sendToBack) {
this.shapes.unshift(shape);
} else {
this.shapes.push(shape);
}
}
removeShape(shape) {
let shapeIndex = this.shapes.indexOf(shape);
if (shapeIndex >= 0) {
this.shapes.splice(shapeIndex, 1);
}
}
}
class TempFrame {
constructor() {}
get exists() {
return false;
}
get idx() {
return "tempFrame";
}
get keys() {
return {};
}
get shapes() {
return [];
}
get frameType() {
return "temp";
}
copy() {
return this;
}
addShape() {}
removeShape() {}
}
const tempFrame = new TempFrame();
// Animation system classes
class Keyframe {
constructor(time, value, interpolation = "linear", uuid = undefined) {
this.time = time;
this.value = value;
this.interpolation = interpolation; // 'linear', 'bezier', 'step', 'hold'
// For bezier interpolation
this.easeIn = { x: 0.42, y: 0 }; // Default ease-in control point
this.easeOut = { x: 0.58, y: 1 }; // Default ease-out control point
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
}
static fromJSON(json) {
const keyframe = new Keyframe(json.time, json.value, json.interpolation, json.idx);
if (json.easeIn) keyframe.easeIn = json.easeIn;
if (json.easeOut) keyframe.easeOut = json.easeOut;
return keyframe;
}
toJSON() {
return {
idx: this.idx,
time: this.time,
value: this.value,
interpolation: this.interpolation,
easeIn: this.easeIn,
easeOut: this.easeOut
};
}
}
class AnimationCurve {
constructor(parameter, uuid = undefined, parentAnimationData = null) {
this.parameter = parameter; // e.g., "x", "y", "rotation", "scale_x", "exists"
this.keyframes = []; // Always kept sorted by time
this.parentAnimationData = parentAnimationData; // Reference to parent AnimationData for duration updates
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
}
addKeyframe(keyframe) {
// Time resolution based on framerate - half a frame's duration
// This can be exposed via UI later
const framerate = context.config?.framerate || 24;
const timeResolution = (1 / framerate) / 2;
// Check if there's already a keyframe within the time resolution
const existingKeyframe = this.getKeyframeAtTime(keyframe.time, timeResolution);
if (existingKeyframe) {
// Update the existing keyframe's value instead of adding a new one
existingKeyframe.value = keyframe.value;
existingKeyframe.interpolation = keyframe.interpolation;
if (keyframe.easeIn) existingKeyframe.easeIn = keyframe.easeIn;
if (keyframe.easeOut) existingKeyframe.easeOut = keyframe.easeOut;
} else {
// Add new keyframe
this.keyframes.push(keyframe);
// Keep sorted by time
this.keyframes.sort((a, b) => a.time - b.time);
}
// Update animation duration after adding keyframe
if (this.parentAnimationData) {
this.parentAnimationData.updateDuration();
}
}
removeKeyframe(keyframe) {
const index = this.keyframes.indexOf(keyframe);
if (index >= 0) {
this.keyframes.splice(index, 1);
// Update animation duration after removing keyframe
if (this.parentAnimationData) {
this.parentAnimationData.updateDuration();
}
}
}
getKeyframeAtTime(time, timeResolution = 0) {
if (this.keyframes.length === 0) return null;
// If no tolerance, use exact match with binary search
if (timeResolution === 0) {
let left = 0;
let right = this.keyframes.length - 1;
while (left <= right) {
const mid = Math.floor((left + right) / 2);
if (this.keyframes[mid].time === time) {
return this.keyframes[mid];
} else if (this.keyframes[mid].time < time) {
left = mid + 1;
} else {
right = mid - 1;
}
}
return null;
}
// With tolerance, find the closest keyframe within timeResolution
let left = 0;
let right = this.keyframes.length - 1;
let closest = null;
let closestDist = Infinity;
// Binary search to find the insertion point
while (left <= right) {
const mid = Math.floor((left + right) / 2);
const dist = Math.abs(this.keyframes[mid].time - time);
if (dist < closestDist) {
closestDist = dist;
closest = this.keyframes[mid];
}
if (this.keyframes[mid].time < time) {
left = mid + 1;
} else {
right = mid - 1;
}
}
// Also check adjacent keyframes for closest match
if (left < this.keyframes.length) {
const dist = Math.abs(this.keyframes[left].time - time);
if (dist < closestDist) {
closestDist = dist;
closest = this.keyframes[left];
}
}
if (right >= 0) {
const dist = Math.abs(this.keyframes[right].time - time);
if (dist < closestDist) {
closestDist = dist;
closest = this.keyframes[right];
}
}
return closestDist < timeResolution ? closest : null;
}
// Find the two keyframes that bracket the given time
getBracketingKeyframes(time) {
if (this.keyframes.length === 0) return { prev: null, next: null };
if (this.keyframes.length === 1) return { prev: this.keyframes[0], next: this.keyframes[0] };
// Binary search to find the last keyframe at or before time
let left = 0;
let right = this.keyframes.length - 1;
let prevIndex = -1;
while (left <= right) {
const mid = Math.floor((left + right) / 2);
if (this.keyframes[mid].time <= time) {
prevIndex = mid; // This could be our answer
left = mid + 1; // But check if there's a better one to the right
} else {
right = mid - 1; // Time is too large, search left
}
}
// If time is before all keyframes
if (prevIndex === -1) {
return { prev: this.keyframes[0], next: this.keyframes[0], t: 0 };
}
// If time is after all keyframes
if (prevIndex === this.keyframes.length - 1) {
return { prev: this.keyframes[prevIndex], next: this.keyframes[prevIndex], t: 1 };
}
const prev = this.keyframes[prevIndex];
const next = this.keyframes[prevIndex + 1];
const t = (time - prev.time) / (next.time - prev.time);
return { prev, next, t };
}
interpolate(time) {
if (this.keyframes.length === 0) {
return null;
}
const { prev, next, t } = this.getBracketingKeyframes(time);
if (!prev || !next) {
return null;
}
if (prev === next) {
return prev.value;
}
// Handle different interpolation types
switch (prev.interpolation) {
case "step":
case "hold":
return prev.value;
case "linear":
// Simple linear interpolation
if (typeof prev.value === "number" && typeof next.value === "number") {
return prev.value + (next.value - prev.value) * t;
}
return prev.value;
case "bezier":
// Cubic bezier interpolation using control points
if (typeof prev.value === "number" && typeof next.value === "number") {
// Use ease-in/ease-out control points
const easedT = this.cubicBezierEase(t, prev.easeOut, next.easeIn);
return prev.value + (next.value - prev.value) * easedT;
}
return prev.value;
case "zero":
// Return 0 for the entire interval (used for inactive segments)
return 0;
default:
return prev.value;
}
}
// Cubic bezier easing function
cubicBezierEase(t, easeOut, easeIn) {
// Simplified cubic bezier for 0,0 -> easeOut -> easeIn -> 1,1
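// Evaluates the y-component of that curve at parameter t (Bernstein form) instead of
// solving x(t) = t first, so the easing is a cheap approximation.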
const u = 1 - t;
return 3 * u * u * t * easeOut.y +
3 * u * t * t * easeIn.y +
t * t * t;
}
// Display color for this curve in timeline (based on parameter type) - Phase 4
get displayColor() {
// Auto-determined from parameter name
if (this.parameter.endsWith('.x')) return '#7a00b3' // purple
if (this.parameter.endsWith('.y')) return '#ff00ff' // magenta
if (this.parameter.endsWith('.rotation')) return '#5555ff' // blue
if (this.parameter.endsWith('.scale_x')) return '#ffaa00' // orange
if (this.parameter.endsWith('.scale_y')) return '#ffff55' // yellow
if (this.parameter.endsWith('.exists')) return '#55ff55' // green
if (this.parameter.endsWith('.zOrder')) return '#55ffff' // cyan
if (this.parameter.endsWith('.frameNumber')) return '#ff5555' // red
return '#ffffff' // default white
}
static fromJSON(json) {
const curve = new AnimationCurve(json.parameter, json.idx);
for (let kfJson of json.keyframes || []) {
curve.keyframes.push(Keyframe.fromJSON(kfJson));
}
return curve;
}
toJSON() {
return {
idx: this.idx,
parameter: this.parameter,
keyframes: this.keyframes.map(kf => kf.toJSON())
};
}
}
class AnimationData {
constructor(parentLayer = null, uuid = undefined) {
this.curves = {}; // parameter name -> AnimationCurve
this.duration = 0; // Duration in seconds (max time of all keyframes)
this.parentLayer = parentLayer; // Reference to parent Layer for updating segment keyframes
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
}
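// Usage sketch (hypothetical names): animate a child's x from 0 to 100 over one second.
//   const curve = animationData.getOrCreateCurve(`child.${child.idx}.x`);
//   curve.addKeyframe(new Keyframe(0, 0, "linear"));
//   curve.addKeyframe(new Keyframe(1, 100, "linear"));
//   animationData.interpolate(`child.${child.idx}.x`, 0.5); // -> 50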
getCurve(parameter) {
return this.curves[parameter];
}
getOrCreateCurve(parameter) {
if (!this.curves[parameter]) {
this.curves[parameter] = new AnimationCurve(parameter, undefined, this);
}
return this.curves[parameter];
}
addKeyframe(parameter, keyframe) {
const curve = this.getOrCreateCurve(parameter);
curve.addKeyframe(keyframe);
}
removeKeyframe(parameter, keyframe) {
const curve = this.curves[parameter];
if (curve) {
curve.removeKeyframe(keyframe);
}
}
removeCurve(parameter) {
delete this.curves[parameter];
}
setCurve(parameter, curve) {
// Set parent reference for duration tracking
curve.parentAnimationData = this;
this.curves[parameter] = curve;
// Update duration after adding curve with keyframes
this.updateDuration();
}
interpolate(parameter, time) {
const curve = this.curves[parameter];
if (!curve) return null;
return curve.interpolate(time);
}
// Get all animated values at a given time
getValuesAtTime(time) {
const values = {};
for (let parameter in this.curves) {
values[parameter] = this.curves[parameter].interpolate(time);
}
return values;
}
/**
* Update the duration based on all keyframes
* Called automatically when keyframes are added/removed
*/
updateDuration() {
// Calculate max time from all keyframes in all curves
let maxTime = 0;
for (let parameter in this.curves) {
const curve = this.curves[parameter];
if (curve.keyframes && curve.keyframes.length > 0) {
const lastKeyframe = curve.keyframes[curve.keyframes.length - 1];
maxTime = Math.max(maxTime, lastKeyframe.time);
}
}
// Update this AnimationData's duration
this.duration = maxTime;
// If this layer belongs to a nested group, update the segment keyframes in the parent
if (this.parentLayer && this.parentLayer.parentObject) {
this.updateParentSegmentKeyframes();
}
}
/**
* Update segment keyframes in parent layer when this layer's duration changes
* This ensures that nested group segments automatically resize when internal animation is added
*/
updateParentSegmentKeyframes() {
const parentObject = this.parentLayer.parentObject;
// Get the layer that contains this nested object (parentObject.parentLayer)
if (!parentObject.parentLayer || !parentObject.parentLayer.animationData) {
return;
}
const parentLayer = parentObject.parentLayer;
// Get the frameNumber curve for this nested object using the correct naming convention
const curveName = `child.${parentObject.idx}.frameNumber`;
const frameNumberCurve = parentLayer.animationData.getCurve(curveName);
if (!frameNumberCurve || frameNumberCurve.keyframes.length < 2) {
return;
}
// Update the last keyframe to match the new duration
const lastKeyframe = frameNumberCurve.keyframes[frameNumberCurve.keyframes.length - 1];
const newFrameValue = Math.ceil(this.duration * config.framerate) + 1; // +1 because frameNumber is 1-indexed
const newTime = this.duration;
// Only update if the time or value actually changed
if (lastKeyframe.value !== newFrameValue || lastKeyframe.time !== newTime) {
lastKeyframe.value = newFrameValue;
lastKeyframe.time = newTime;
// Re-sort keyframes in case the time change affects order
frameNumberCurve.keyframes.sort((a, b) => a.time - b.time);
// Don't recursively call updateDuration to avoid infinite loop
}
}
static fromJSON(json, parentLayer = null) {
const animData = new AnimationData(parentLayer, json.idx);
for (let param in json.curves || {}) {
const curve = AnimationCurve.fromJSON(json.curves[param]);
curve.parentAnimationData = animData; // Restore parent reference
animData.curves[param] = curve;
}
// Recalculate duration after loading all curves
animData.updateDuration();
return animData;
}
toJSON() {
const curves = {};
for (let param in this.curves) {
curves[param] = this.curves[param].toJSON();
}
return {
idx: this.idx,
curves: curves
};
}
}
export { Frame, TempFrame, tempFrame, Keyframe, AnimationCurve, AnimationData };

912 src/models/graphics-object.js Normal file

@ -0,0 +1,912 @@
// GraphicsObject model: Main container for layers and animation
import { context, config, pointerList, startProps } from '../state.js';
import { Layer, AudioTrack } from './layer.js';
import { TempShape } from './shapes.js';
import { AnimationCurve, Keyframe } from './animation.js';
import { Widget } from '../widgets.js';
// Helper function for UUID generation
function uuidv4() {
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) =>
(
+c ^
(crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (+c / 4)))
).toString(16),
);
}
// Forward declarations for dependencies that will be injected
let growBoundingBox = null;
let getRotatedBoundingBox = null;
let multiplyMatrices = null;
let uuidToColor = null;
// Initialize function to be called from main.js
export function initializeGraphicsObjectDependencies(deps) {
growBoundingBox = deps.growBoundingBox;
getRotatedBoundingBox = deps.getRotatedBoundingBox;
multiplyMatrices = deps.multiplyMatrices;
uuidToColor = deps.uuidToColor;
}
class GraphicsObject extends Widget {
constructor(uuid) {
super(0, 0)
this.rotation = 0; // in radians
this.scale_x = 1;
this.scale_y = 1;
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
pointerList[this.idx] = this;
this.name = this.idx;
this.currentFrameNum = 0; // LEGACY: kept for backwards compatibility
this.currentTime = 0; // New: continuous time for AnimationData curves
this.currentLayer = 0;
this._activeAudioTrack = null; // Reference to active audio track (if any)
this.children = [new Layer(this.idx + "-L1", this)];
// this.layers = [new Layer(uuid + "-L1")];
this.audioTracks = [];
// this.children = []
this.shapes = [];
// Parent reference for nested objects (set when added to a layer)
this.parentLayer = null
// Timeline display settings (Phase 3)
this.showSegment = true // Show segment bar in timeline
this.curvesMode = 'hidden' // 'hidden' | 'minimized' | 'expanded'
this.curvesHeight = 150 // Height in pixels when curves are expanded
this._globalEvents.add("mousedown")
this._globalEvents.add("mousemove")
this._globalEvents.add("mouseup")
}
static fromJSON(json) {
const graphicsObject = new GraphicsObject(json.idx);
graphicsObject.x = json.x;
graphicsObject.y = json.y;
graphicsObject.rotation = json.rotation;
graphicsObject.scale_x = json.scale_x;
graphicsObject.scale_y = json.scale_y;
graphicsObject.name = json.name;
graphicsObject.currentFrameNum = json.currentFrameNum;
graphicsObject.currentLayer = json.currentLayer;
graphicsObject.children = [];
if (json.parent in pointerList) {
graphicsObject.parent = pointerList[json.parent]
}
for (let layer of json.layers) {
graphicsObject.layers.push(Layer.fromJSON(layer, graphicsObject));
}
// Handle audioTracks (may not exist in older files)
if (json.audioTracks) {
for (let audioTrack of json.audioTracks) {
graphicsObject.audioTracks.push(AudioTrack.fromJSON(audioTrack));
}
}
return graphicsObject;
}
toJSON(randomizeUuid = false) {
const json = {};
json.type = "GraphicsObject";
json.x = this.x;
json.y = this.y;
json.rotation = this.rotation;
json.scale_x = this.scale_x;
json.scale_y = this.scale_y;
if (randomizeUuid) {
json.idx = uuidv4();
json.name = this.name + " copy";
} else {
json.idx = this.idx;
json.name = this.name;
}
json.currentFrameNum = this.currentFrameNum;
json.currentLayer = this.currentLayer;
json.layers = [];
json.parent = this.parent?.idx
for (let layer of this.layers) {
json.layers.push(layer.toJSON(randomizeUuid));
}
json.audioTracks = [];
for (let audioTrack of this.audioTracks) {
json.audioTracks.push(audioTrack.toJSON(randomizeUuid));
}
return json;
}
get activeLayer() {
// If an audio track is active, return it instead of a visual layer
if (this._activeAudioTrack !== null) {
return this._activeAudioTrack;
}
return this.layers[this.currentLayer];
}
set activeLayer(layer) {
// Allow setting activeLayer to an AudioTrack or a regular Layer
if (layer instanceof AudioTrack) {
this._activeAudioTrack = layer;
} else {
// It's a regular layer - find its index and set currentLayer
this._activeAudioTrack = null;
const layerIndex = this.children.indexOf(layer);
if (layerIndex !== -1) {
this.currentLayer = layerIndex;
}
}
}
// get children() {
// return this.activeLayer.children;
// }
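// Visual layers are stored in this.children (the Widget child list); audio tracks live separately in this.audioTracks.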
get layers() {
return this.children
}
/**
* Get the total duration of this GraphicsObject's animation
* Returns the maximum duration across all layers
*/
get duration() {
let maxDuration = 0;
// Check visual layers
for (let layer of this.layers) {
if (layer.animationData && layer.animationData.duration > maxDuration) {
maxDuration = layer.animationData.duration;
}
}
// Check audio tracks
for (let audioTrack of this.audioTracks) {
for (let clip of audioTrack.clips) {
const clipEnd = clip.startTime + clip.duration;
if (clipEnd > maxDuration) {
maxDuration = clipEnd;
}
}
}
return maxDuration;
}
get allLayers() {
return [...this.audioTracks, ...this.layers];
}
get maxFrame() {
return (
Math.max(
...this.layers.map((layer) => {
return (
layer.frames.findLastIndex((frame) => frame !== undefined) || -1
);
}),
) + 1
);
}
get segmentColor() {
return uuidToColor(this.idx);
}
/**
* Set the current playback time in seconds
*/
setTime(time) {
time = Math.max(0, time);
this.currentTime = time;
// Update legacy currentFrameNum for any remaining code that needs it
this.currentFrameNum = Math.floor(time * config.framerate);
// Update layer frameNum for legacy code
for (let layer of this.layers) {
layer.frameNum = this.currentFrameNum;
}
}
advanceFrame() {
const frameDuration = 1 / config.framerate;
this.setTime(this.currentTime + frameDuration);
}
decrementFrame() {
const frameDuration = 1 / config.framerate;
this.setTime(Math.max(0, this.currentTime - frameDuration));
}
bbox() {
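// Axis-aligned box in the parent's coordinates: union of shape boxes that exist at the
// current time plus child boxes, then scaled and offset by this object's transform
// (rotation is not applied here).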
let bbox;
// NEW: Include shapes from AnimationData system
let currentTime = this.currentTime || 0;
for (let layer of this.layers) {
for (let shape of layer.shapes) {
// Check if shape exists at current time
let existsValue = layer.animationData.interpolate(`shape.${shape.shapeId}.exists`, currentTime);
if (existsValue !== null && existsValue > 0) {
if (!bbox) {
bbox = structuredClone(shape.boundingBox);
} else {
growBoundingBox(bbox, shape.boundingBox);
}
}
}
}
// Include children
if (this.children.length > 0) {
if (!bbox) {
bbox = structuredClone(this.children[0].bbox());
}
for (let child of this.children) {
growBoundingBox(bbox, child.bbox());
}
}
if (bbox == undefined) {
bbox = { x: { min: 0, max: 0 }, y: { min: 0, max: 0 } };
}
bbox.x.max *= this.scale_x;
bbox.y.max *= this.scale_y;
bbox.x.min += this.x;
bbox.x.max += this.x;
bbox.y.min += this.y;
bbox.y.max += this.y;
return bbox;
}
draw(context, calculateTransform=false) {
let ctx = context.ctx;
ctx.save();
if (calculateTransform) {
this.transformCanvas(ctx)
} else {
ctx.translate(this.x, this.y);
ctx.rotate(this.rotation);
ctx.scale(this.scale_x, this.scale_y);
}
// if (this.currentFrameNum>=this.maxFrame) {
// this.currentFrameNum = 0;
// }
if (
context.activeAction &&
context.activeAction.selection &&
this.idx in context.activeAction.selection
)
return;
for (let layer of this.layers) {
if (context.activeObject == this && !layer.visible) continue;
// Draw activeShape (shape being drawn in progress) for active layer only
if (layer === context.activeLayer && layer.activeShape) {
let cxt = {...context};
layer.activeShape.draw(cxt);
}
// NEW: Use AnimationData system to draw shapes with shape tweening/morphing
let currentTime = this.currentTime || 0;
// Group shapes by shapeId (multiple Shape objects can share a shapeId for tweening)
const shapesByShapeId = new Map();
for (let shape of layer.shapes) {
if (shape instanceof TempShape) continue;
if (!shapesByShapeId.has(shape.shapeId)) {
shapesByShapeId.set(shape.shapeId, []);
}
shapesByShapeId.get(shape.shapeId).push(shape);
}
// Process each logical shape (shapeId) and determine what to draw
let visibleShapes = [];
for (let [shapeId, shapes] of shapesByShapeId) {
// Check if this logical shape exists at current time
const existsCurveKey = `shape.${shapeId}.exists`;
let existsValue = layer.animationData.interpolate(existsCurveKey, currentTime);
if (existsValue === null || existsValue <= 0) {
console.log(`[Widget.draw] Skipping shape ${shapeId} - not visible`);
continue;
}
// Get z-order
let zOrder = layer.animationData.interpolate(`shape.${shapeId}.zOrder`, currentTime);
// Get shapeIndex curve and surrounding keyframes
const shapeIndexCurve = layer.animationData.getCurve(`shape.${shapeId}.shapeIndex`);
if (!shapeIndexCurve || !shapeIndexCurve.keyframes || shapeIndexCurve.keyframes.length === 0) {
// No shapeIndex curve, just show shape with index 0
const shape = shapes.find(s => s.shapeIndex === 0);
if (shape) {
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
continue;
}
// Find surrounding keyframes using AnimationCurve's built-in method
const { prev: prevKf, next: nextKf, t: interpolationT } = shapeIndexCurve.getBracketingKeyframes(currentTime);
// Get interpolated value
let shapeIndexValue = shapeIndexCurve.interpolate(currentTime);
if (shapeIndexValue === null) shapeIndexValue = 0;
// Sort shape versions by shapeIndex
shapes.sort((a, b) => a.shapeIndex - b.shapeIndex);
// Determine whether to morph based on whether interpolated value equals a keyframe value
const atPrevKeyframe = prevKf && Math.abs(shapeIndexValue - prevKf.value) < 0.001;
const atNextKeyframe = nextKf && Math.abs(shapeIndexValue - nextKf.value) < 0.001;
if (atPrevKeyframe || atNextKeyframe) {
// No morphing - display the shape at the keyframe value
const targetValue = atNextKeyframe ? nextKf.value : prevKf.value;
const shape = shapes.find(s => s.shapeIndex === targetValue);
if (shape) {
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
} else if (prevKf && nextKf && prevKf.value !== nextKf.value) {
// Morph between shapes specified by surrounding keyframes
const shape1 = shapes.find(s => s.shapeIndex === prevKf.value);
const shape2 = shapes.find(s => s.shapeIndex === nextKf.value);
if (shape1 && shape2) {
// Use the interpolated shapeIndexValue to calculate blend factor
// This respects the bezier easing curve
const t = (shapeIndexValue - prevKf.value) / (nextKf.value - prevKf.value);
console.log(`[Widget.draw] Morphing from shape ${prevKf.value} to ${nextKf.value}, shapeIndexValue=${shapeIndexValue}, t=${t}`);
const morphedShape = shape1.lerpShape(shape2, t);
visibleShapes.push({
shape: morphedShape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape1) || context.shapeselection.includes(shape2)
});
} else if (shape1) {
visibleShapes.push({
shape: shape1,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape1)
});
} else if (shape2) {
visibleShapes.push({
shape: shape2,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape2)
});
}
} else if (nextKf) {
// Only next keyframe exists, show that shape
const shape = shapes.find(s => s.shapeIndex === nextKf.value);
if (shape) {
visibleShapes.push({
shape,
zOrder: zOrder || 0,
selected: context.shapeselection.includes(shape)
});
}
}
}
// Sort by zOrder
visibleShapes.sort((a, b) => a.zOrder - b.zOrder);
// Draw sorted shapes
for (let { shape, selected } of visibleShapes) {
let cxt = {...context}
if (selected) {
cxt.selected = true
}
shape.draw(cxt);
}
// Draw child objects using AnimationData curves
for (let child of layer.children) {
if (child == context.activeObject) continue;
let idx = child.idx;
// Use AnimationData to get child's transform
let childX = layer.animationData.interpolate(`child.${idx}.x`, currentTime);
let childY = layer.animationData.interpolate(`child.${idx}.y`, currentTime);
let childRotation = layer.animationData.interpolate(`child.${idx}.rotation`, currentTime);
let childScaleX = layer.animationData.interpolate(`child.${idx}.scale_x`, currentTime);
let childScaleY = layer.animationData.interpolate(`child.${idx}.scale_y`, currentTime);
let childFrameNumber = layer.animationData.interpolate(`child.${idx}.frameNumber`, currentTime);
if (childX !== null && childY !== null) {
child.x = childX;
child.y = childY;
child.rotation = childRotation || 0;
child.scale_x = childScaleX || 1;
child.scale_y = childScaleY || 1;
// Set child's currentTime based on its frameNumber
// frameNumber 1 = time 0, frameNumber 2 = time 1/framerate, etc.
if (childFrameNumber !== null) {
child.currentTime = (childFrameNumber - 1) / config.framerate;
}
ctx.save();
child.draw(context);
ctx.restore();
}
}
}
if (this == context.activeObject) {
// Draw selection rectangles for selected items
if (context.mode == "select") {
for (let item of context.selection) {
if (!item) continue;
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
let bbox = getRotatedBoundingBox(item);
ctx.rect(
bbox.x.min,
bbox.y.min,
bbox.x.max - bbox.x.min,
bbox.y.max - bbox.y.min,
);
ctx.stroke();
ctx.restore();
}
// Draw drag selection rectangle
if (context.selectionRect) {
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
ctx.rect(
context.selectionRect.x1,
context.selectionRect.y1,
context.selectionRect.x2 - context.selectionRect.x1,
context.selectionRect.y2 - context.selectionRect.y1,
);
ctx.stroke();
ctx.restore();
}
} else if (context.mode == "transform") {
let bbox = undefined;
for (let item of context.selection) {
if (bbox == undefined) {
bbox = getRotatedBoundingBox(item);
} else {
growBoundingBox(bbox, getRotatedBoundingBox(item));
}
}
if (bbox != undefined) {
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
let xdiff = bbox.x.max - bbox.x.min;
let ydiff = bbox.y.max - bbox.y.min;
ctx.rect(bbox.x.min, bbox.y.min, xdiff, ydiff);
ctx.stroke();
ctx.fillStyle = "#000000";
let rectRadius = 5;
for (let i of [
[0, 0],
[0.5, 0],
[1, 0],
[1, 0.5],
[1, 1],
[0.5, 1],
[0, 1],
[0, 0.5],
]) {
ctx.beginPath();
ctx.rect(
bbox.x.min + xdiff * i[0] - rectRadius,
bbox.y.min + ydiff * i[1] - rectRadius,
rectRadius * 2,
rectRadius * 2,
);
ctx.fill();
}
ctx.restore();
}
}
if (context.activeCurve) {
ctx.strokeStyle = "magenta";
ctx.beginPath();
ctx.moveTo(
context.activeCurve.current.points[0].x,
context.activeCurve.current.points[0].y,
);
ctx.bezierCurveTo(
context.activeCurve.current.points[1].x,
context.activeCurve.current.points[1].y,
context.activeCurve.current.points[2].x,
context.activeCurve.current.points[2].y,
context.activeCurve.current.points[3].x,
context.activeCurve.current.points[3].y,
);
ctx.stroke();
}
if (context.activeVertex) {
ctx.save();
ctx.strokeStyle = "#00ffff";
let curves = {
...context.activeVertex.current.startCurves,
...context.activeVertex.current.endCurves,
};
// I don't understand why I can't use a for...of loop here
for (let idx in curves) {
let curve = curves[idx];
ctx.beginPath();
ctx.moveTo(curve.points[0].x, curve.points[0].y);
ctx.bezierCurveTo(
curve.points[1].x,
curve.points[1].y,
curve.points[2].x,
curve.points[2].y,
curve.points[3].x,
curve.points[3].y,
);
ctx.stroke();
}
ctx.fillStyle = "#000000aa";
ctx.beginPath();
let vertexSize = 15 / context.zoomLevel;
ctx.rect(
context.activeVertex.current.point.x - vertexSize / 2,
context.activeVertex.current.point.y - vertexSize / 2,
vertexSize,
vertexSize,
);
ctx.fill();
ctx.restore();
}
}
ctx.restore();
}
/*
draw(ctx) {
super.draw(ctx)
if (this==context.activeObject) {
if (context.mode == "select") {
for (let item of context.selection) {
if (!item) continue;
// Check if this is a child object and if it exists at current time
if (item.idx) {
const existsValue = this.activeLayer.animationData.interpolate(
`object.${item.idx}.exists`,
this.currentTime
);
if (existsValue === null || existsValue <= 0) continue;
}
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
let bbox = getRotatedBoundingBox(item);
ctx.rect(
bbox.x.min,
bbox.y.min,
bbox.x.max - bbox.x.min,
bbox.y.max - bbox.y.min,
);
ctx.stroke();
ctx.restore();
}
if (context.selectionRect) {
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
ctx.rect(
context.selectionRect.x1,
context.selectionRect.y1,
context.selectionRect.x2 - context.selectionRect.x1,
context.selectionRect.y2 - context.selectionRect.y1,
);
ctx.stroke();
ctx.restore();
}
} else if (context.mode == "transform") {
let bbox = undefined;
for (let item of context.selection) {
if (bbox == undefined) {
bbox = getRotatedBoundingBox(item);
} else {
growBoundingBox(bbox, getRotatedBoundingBox(item));
}
}
if (bbox != undefined) {
ctx.save();
ctx.strokeStyle = "#00ffff";
ctx.lineWidth = 1;
ctx.beginPath();
let xdiff = bbox.x.max - bbox.x.min;
let ydiff = bbox.y.max - bbox.y.min;
ctx.rect(bbox.x.min, bbox.y.min, xdiff, ydiff);
ctx.stroke();
ctx.fillStyle = "#000000";
let rectRadius = 5;
for (let i of [
[0, 0],
[0.5, 0],
[1, 0],
[1, 0.5],
[1, 1],
[0.5, 1],
[0, 1],
[0, 0.5],
]) {
ctx.beginPath();
ctx.rect(
bbox.x.min + xdiff * i[0] - rectRadius,
bbox.y.min + ydiff * i[1] - rectRadius,
rectRadius * 2,
rectRadius * 2,
);
ctx.fill();
}
ctx.restore();
}
}
}
}
*/
transformCanvas(ctx) {
if (this.parent) {
this.parent.transformCanvas(ctx)
}
ctx.translate(this.x, this.y);
ctx.scale(this.scale_x, this.scale_y);
ctx.rotate(this.rotation);
}
transformMouse(mouse) {
// Apply the transformation matrix to the mouse position
let matrix = this.generateTransformMatrix();
let { x, y } = mouse;
return {
x: matrix[0][0] * x + matrix[0][1] * y + matrix[0][2],
y: matrix[1][0] * x + matrix[1][1] * y + matrix[1][2]
};
}
generateTransformMatrix() {
// Start with the parent's transform matrix if it exists
let parentMatrix = this.parent ? this.parent.generateTransformMatrix() : [[1, 0, 0], [0, 1, 0], [0, 0, 1]];
// Calculate the rotation matrix components
const cos = Math.cos(this.rotation);
const sin = Math.sin(this.rotation);
// Scaling matrix
const scaleMatrix = [
[1/this.scale_x, 0, 0],
[0, 1/this.scale_y, 0],
[0, 0, 1]
];
// Rotation matrix (inverse rotation for transforming back)
const rotationMatrix = [
[cos, -sin, 0],
[sin, cos, 0],
[0, 0, 1]
];
// Translation matrix (inverse translation to adjust for object's position)
const translationMatrix = [
[1, 0, -this.x],
[0, 1, -this.y],
[0, 0, 1]
];
// Multiply translation * rotation * scaling to get the current object's final transformation matrix
let tempMatrix = multiplyMatrices(translationMatrix, rotationMatrix);
let objectMatrix = multiplyMatrices(tempMatrix, scaleMatrix);
// Now combine with the parent's matrix (parent * object)
let finalMatrix = multiplyMatrices(parentMatrix, objectMatrix);
return finalMatrix;
}
handleMouseEvent(eventType, x, y) {
for (let i in this.layers) {
if (i==this.currentLayer) {
this.layers[i]._globalEvents.add("mousedown")
this.layers[i]._globalEvents.add("mousemove")
this.layers[i]._globalEvents.add("mouseup")
} else {
this.layers[i]._globalEvents.delete("mousedown")
this.layers[i]._globalEvents.delete("mousemove")
this.layers[i]._globalEvents.delete("mouseup")
}
}
super.handleMouseEvent(eventType, x, y)
}
addObject(object, x = 0, y = 0, time = undefined, layer=undefined) {
if (time == undefined) {
time = this.currentTime || 0;
}
if (layer==undefined) {
layer = this.activeLayer
}
layer.children.push(object)
object.parent = this;
object.parentLayer = layer;
object.x = x;
object.y = y;
let idx = object.idx;
// Add animation curves for the object's position/transform in the layer
let xCurve = new AnimationCurve(`child.${idx}.x`);
xCurve.addKeyframe(new Keyframe(time, x, 'linear'));
layer.animationData.setCurve(`child.${idx}.x`, xCurve);
let yCurve = new AnimationCurve(`child.${idx}.y`);
yCurve.addKeyframe(new Keyframe(time, y, 'linear'));
layer.animationData.setCurve(`child.${idx}.y`, yCurve);
let rotationCurve = new AnimationCurve(`child.${idx}.rotation`);
rotationCurve.addKeyframe(new Keyframe(time, 0, 'linear'));
layer.animationData.setCurve(`child.${idx}.rotation`, rotationCurve);
let scaleXCurve = new AnimationCurve(`child.${idx}.scale_x`);
scaleXCurve.addKeyframe(new Keyframe(time, 1, 'linear'));
layer.animationData.setCurve(`child.${idx}.scale_x`, scaleXCurve);
let scaleYCurve = new AnimationCurve(`child.${idx}.scale_y`);
scaleYCurve.addKeyframe(new Keyframe(time, 1, 'linear'));
layer.animationData.setCurve(`child.${idx}.scale_y`, scaleYCurve);
// Add exists curve (object visibility)
let existsCurve = new AnimationCurve(`object.${idx}.exists`);
existsCurve.addKeyframe(new Keyframe(time, 1, 'hold'));
layer.animationData.setCurve(`object.${idx}.exists`, existsCurve);
// Initialize frameNumber curve with two keyframes defining the segment
// The segment length is based on the object's internal animation duration
let frameNumberCurve = new AnimationCurve(`child.${idx}.frameNumber`);
// Get the object's animation duration (max time across all its layers)
const objectDuration = object.duration || 0;
const framerate = config.framerate;
// Calculate the last frame number (frameNumber 1 = time 0, so add 1)
const lastFrameNumber = Math.max(1, Math.ceil(objectDuration * framerate) + 1);
// Calculate the end time for the segment (minimum 1 frame duration)
const segmentDuration = Math.max(objectDuration, 1 / framerate);
const endTime = time + segmentDuration;
// Start keyframe: frameNumber 1 at the current time, linear interpolation
frameNumberCurve.addKeyframe(new Keyframe(time, 1, 'linear'));
// End keyframe: last frame at end time, zero interpolation (inactive after this)
frameNumberCurve.addKeyframe(new Keyframe(endTime, lastFrameNumber, 'zero'));
layer.animationData.setCurve(`child.${idx}.frameNumber`, frameNumberCurve);
}
removeChild(childObject) {
let idx = childObject.idx;
for (let layer of this.layers) {
layer.children = layer.children.filter(child => child.idx !== idx);
for (let frame of layer.frames) {
if (frame) {
delete frame[idx];
}
}
}
// this.children.splice(this.children.indexOf(childObject), 1);
}
/**
* Update this object's frameNumber curve in its parent layer based on child content
* This is called when shapes/children are added/modified within this object
*/
updateFrameNumberCurve() {
// Find parent layer that contains this object
if (!this.parentLayer || !this.parentLayer.animationData) return;
const parentLayer = this.parentLayer;
const frameNumberKey = `child.${this.idx}.frameNumber`;
// Collect all keyframe times from this object's content
let allKeyframeTimes = new Set();
// Check all layers in this object
for (let layer of this.layers) {
if (!layer.animationData) continue;
// Get keyframes from all shape curves
for (let shape of layer.shapes) {
const existsKey = `shape.${shape.shapeId}.exists`;
const existsCurve = layer.animationData.curves[existsKey];
if (existsCurve && existsCurve.keyframes) {
for (let kf of existsCurve.keyframes) {
allKeyframeTimes.add(kf.time);
}
}
}
// Get keyframes from all child object curves
for (let child of layer.children) {
const childFrameNumberKey = `child.${child.idx}.frameNumber`;
const childFrameNumberCurve = layer.animationData.curves[childFrameNumberKey];
if (childFrameNumberCurve && childFrameNumberCurve.keyframes) {
for (let kf of childFrameNumberCurve.keyframes) {
allKeyframeTimes.add(kf.time);
}
}
}
}
if (allKeyframeTimes.size === 0) return;
// Sort times
const times = Array.from(allKeyframeTimes).sort((a, b) => a - b);
const firstTime = times[0];
const lastTime = times[times.length - 1];
// Calculate frame numbers (1-based)
const framerate = config.framerate || 24;
const firstFrame = Math.floor(firstTime * framerate) + 1;
const lastFrame = Math.floor(lastTime * framerate) + 1;
// Update or create frameNumber curve in parent layer
let frameNumberCurve = parentLayer.animationData.curves[frameNumberKey];
if (!frameNumberCurve) {
frameNumberCurve = new AnimationCurve(frameNumberKey);
parentLayer.animationData.setCurve(frameNumberKey, frameNumberCurve);
}
// Clear existing keyframes and add new ones
frameNumberCurve.keyframes = [];
frameNumberCurve.addKeyframe(new Keyframe(firstTime, firstFrame, 'hold'));
frameNumberCurve.addKeyframe(new Keyframe(lastTime, lastFrame, 'hold'));
}
addLayer(layer) {
this.children.push(layer);
}
removeLayer(layer) {
this.children.splice(this.children.indexOf(layer), 1);
}
saveState() {
startProps[this.idx] = {
x: this.x,
y: this.y,
rotation: this.rotation,
scale_x: this.scale_x,
scale_y: this.scale_y,
};
}
copy(idx) {
let newGO = new GraphicsObject(idx.slice(0, 8) + this.idx.slice(8));
newGO.x = this.x;
newGO.y = this.y;
newGO.rotation = this.rotation;
newGO.scale_x = this.scale_x;
newGO.scale_y = this.scale_y;
newGO.parent = this.parent;
pointerList[this.idx] = this;
newGO.children = [];
for (let layer of this.layers) {
newGO.layers.push(layer.copy(idx));
}
for (let audioTrack of this.audioTracks) {
newGO.audioTracks.push(audioTrack.copy(idx));
}
return newGO;
}
}
export { GraphicsObject };

1213 src/models/layer.js Normal file

File diff suppressed because it is too large

34 src/models/root.js Normal file

@ -0,0 +1,34 @@
// Root object initialization
// Creates and configures the root GraphicsObject and context properties
import { context } from '../state.js';
import { GraphicsObject } from './graphics-object.js';
/**
* Creates and initializes the root GraphicsObject.
* Sets up context properties for active object and layer access.
*
* @returns {GraphicsObject} The root graphics object
*/
export function createRoot() {
const root = new GraphicsObject("root");
// Define getter for active object (top of stack)
Object.defineProperty(context, "activeObject", {
get: function () {
return this.objectStack.at(-1);
},
});
// Define getter for active layer (active layer of top object)
Object.defineProperty(context, "activeLayer", {
get: function () {
return this.objectStack.at(-1).activeLayer;
}
});
// Initialize object stack with root
context.objectStack = [root];
return root;
}

752 src/models/shapes.js Normal file

@ -0,0 +1,752 @@
// Shape models: BaseShape, TempShape, Shape
import { context, pointerList } from '../state.js';
import { Bezier } from '../bezier.js';
import { Quadtree } from '../quadtree.js';
// Helper function for UUID generation
function uuidv4() {
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) =>
(
+c ^
(crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (+c / 4)))
).toString(16),
);
}
// Forward declarations for dependencies that will be injected
let growBoundingBox = null;
let lerp = null;
let lerpColor = null;
let uuidToColor = null;
let simplifyPolyline = null;
let fitCurve = null;
let createMissingTexturePattern = null;
let debugQuadtree = null;
let d3 = null;
// Initialize function to be called from main.js
export function initializeShapeDependencies(deps) {
growBoundingBox = deps.growBoundingBox;
lerp = deps.lerp;
lerpColor = deps.lerpColor;
uuidToColor = deps.uuidToColor;
simplifyPolyline = deps.simplifyPolyline;
fitCurve = deps.fitCurve;
createMissingTexturePattern = deps.createMissingTexturePattern;
debugQuadtree = deps.debugQuadtree;
d3 = deps.d3;
}
class BaseShape {
constructor(startx, starty) {
this.startx = startx;
this.starty = starty;
this.curves = [];
this.regions = [];
this.boundingBox = {
x: { min: startx, max: startx },
y: { min: starty, max: starty },
};
}
recalculateBoundingBox() {
this.boundingBox = undefined;
for (let curve of this.curves) {
if (!this.boundingBox) {
this.boundingBox = curve.bbox();
}
growBoundingBox(this.boundingBox, curve.bbox());
}
}
draw(context) {
let ctx = context.ctx;
ctx.lineWidth = this.lineWidth;
ctx.lineCap = "round";
// Create a repeating pattern for indicating selected shapes
if (!this.patternCanvas) {
this.patternCanvas = document.createElement('canvas');
this.patternCanvas.width = 2;
this.patternCanvas.height = 2;
let patternCtx = this.patternCanvas.getContext('2d');
// Draw the pattern:
// black, transparent,
// transparent, white
patternCtx.fillStyle = 'black';
patternCtx.fillRect(0, 0, 1, 1);
patternCtx.clearRect(1, 0, 1, 1);
patternCtx.clearRect(0, 1, 1, 1);
patternCtx.fillStyle = 'white';
patternCtx.fillRect(1, 1, 1, 1);
}
let pattern = ctx.createPattern(this.patternCanvas, 'repeat'); // repeat the pattern across the canvas
if (this.filled) {
ctx.beginPath();
if (this.fillImage && this.fillImage instanceof Element) {
let pat;
if (this.fillImage instanceof Element ||
Object.keys(this.fillImage).length !== 0) {
pat = ctx.createPattern(this.fillImage, "no-repeat");
} else {
pat = createMissingTexturePattern(ctx)
}
ctx.fillStyle = pat;
} else {
ctx.fillStyle = this.fillStyle;
}
if (context.debugColor) {
ctx.fillStyle = context.debugColor;
}
if (this.curves.length > 0) {
ctx.moveTo(this.curves[0].points[0].x, this.curves[0].points[0].y);
for (let curve of this.curves) {
ctx.bezierCurveTo(
curve.points[1].x,
curve.points[1].y,
curve.points[2].x,
curve.points[2].y,
curve.points[3].x,
curve.points[3].y,
);
}
}
ctx.fill();
if (context.selected) {
ctx.fillStyle = pattern
ctx.fill()
}
}
function drawCurve(curve, selected) {
ctx.strokeStyle = curve.color;
ctx.beginPath();
ctx.moveTo(curve.points[0].x, curve.points[0].y);
ctx.bezierCurveTo(
curve.points[1].x,
curve.points[1].y,
curve.points[2].x,
curve.points[2].y,
curve.points[3].x,
curve.points[3].y,
);
ctx.stroke();
if (selected) {
ctx.strokeStyle = pattern
ctx.stroke()
}
}
if (this.stroked && !context.debugColor) {
for (let curve of this.curves) {
drawCurve(curve, context.selected)
// // Debug, show curve control points
// ctx.beginPath()
// ctx.arc(curve.points[1].x,curve.points[1].y, 5, 0, 2*Math.PI)
// ctx.arc(curve.points[2].x,curve.points[2].y, 5, 0, 2*Math.PI)
// ctx.arc(curve.points[3].x,curve.points[3].y, 5, 0, 2*Math.PI)
// ctx.fill()
}
}
if (context.activeCurve && this==context.activeCurve.shape) {
drawCurve(context.activeCurve.current, true)
}
if (context.activeVertex && this==context.activeVertex.shape) {
const curves = {
...context.activeVertex.current.startCurves,
...context.activeVertex.current.endCurves
}
for (let i in curves) {
let curve = curves[i]
drawCurve(curve, true)
}
ctx.fillStyle = "#000000aa";
ctx.beginPath();
let vertexSize = 15 / context.zoomLevel;
ctx.rect(
context.activeVertex.current.point.x - vertexSize / 2,
context.activeVertex.current.point.y - vertexSize / 2,
vertexSize,
vertexSize,
);
ctx.fill();
}
// Debug, show quadtree
if (debugQuadtree && this.quadtree && !context.debugColor) {
this.quadtree.draw(ctx);
}
}
lerpShape(shape2, t) {
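// Morph this shape toward shape2 at blend factor t: both curve lists are converted to
// SVG-style path commands, interpolated with d3.interpolatePathCommands, and rebuilt as
// Bezier segments; line width, stroke and fill colors are lerped separately. Returns a
// throwaway TempShape for drawing.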
if (this.curves.length == 0) return this;
let path1 = [
{
type: "M",
x: this.curves[0].points[0].x,
y: this.curves[0].points[0].y,
},
];
for (let curve of this.curves) {
path1.push({
type: "C",
x1: curve.points[1].x,
y1: curve.points[1].y,
x2: curve.points[2].x,
y2: curve.points[2].y,
x: curve.points[3].x,
y: curve.points[3].y,
});
}
let path2 = [];
if (shape2.curves.length > 0) {
path2.push({
type: "M",
x: shape2.curves[0].points[0].x,
y: shape2.curves[0].points[0].y,
});
for (let curve of shape2.curves) {
path2.push({
type: "C",
x1: curve.points[1].x,
y1: curve.points[1].y,
x2: curve.points[2].x,
y2: curve.points[2].y,
x: curve.points[3].x,
y: curve.points[3].y,
});
}
}
const interpolator = d3.interpolatePathCommands(path1, path2);
let current = interpolator(t);
let curves = [];
let start = current.shift();
let { x, y } = start;
let bezier;
for (let curve of current) {
bezier = new Bezier(
x,
y,
curve.x1,
curve.y1,
curve.x2,
curve.y2,
curve.x,
curve.y,
)
bezier.color = lerpColor(this.strokeStyle, shape2.strokeStyle, t)
curves.push(bezier);
x = curve.x;
y = curve.y;
}
let lineWidth = lerp(this.lineWidth, shape2.lineWidth, t);
let strokeStyle = lerpColor(
this.strokeStyle,
shape2.strokeStyle,
t,
);
let fillStyle;
if (!this.fillImage) {
fillStyle = lerpColor(this.fillStyle, shape2.fillStyle, t);
}
return new TempShape(
start.x,
start.y,
curves,
lineWidth,
this.stroked,
this.filled,
strokeStyle,
fillStyle,
)
}
}
class TempShape extends BaseShape {
constructor(
startx,
starty,
curves,
lineWidth,
stroked,
filled,
strokeStyle,
fillStyle,
) {
super(startx, starty);
this.curves = curves;
this.lineWidth = lineWidth;
this.stroked = stroked;
this.filled = filled;
this.strokeStyle = strokeStyle;
this.fillStyle = fillStyle;
this.inProgress = false;
this.recalculateBoundingBox();
}
}
class Shape extends BaseShape {
constructor(startx, starty, context, parent, uuid = undefined, shapeId = undefined) {
super(startx, starty);
this.parent = parent; // Reference to parent Layer (required)
this.vertices = [];
this.triangles = [];
this.fillStyle = context.fillStyle;
this.fillImage = context.fillImage;
this.strokeStyle = context.strokeStyle;
this.lineWidth = context.lineWidth;
this.filled = context.fillShape;
this.stroked = context.strokeShape;
this.quadtree = new Quadtree(
{ x: { min: 0, max: 500 }, y: { min: 0, max: 500 } },
4,
);
if (!uuid) {
this.idx = uuidv4();
} else {
this.idx = uuid;
}
if (!shapeId) {
this.shapeId = uuidv4();
} else {
this.shapeId = shapeId;
}
this.shapeIndex = 0; // Default shape version index for tweening
pointerList[this.idx] = this;
this.regionIdx = 0;
this.inProgress = true;
// Timeline display settings (Phase 3)
this.showSegment = true // Show segment bar in timeline
this.curvesMode = 'hidden' // 'hidden' | 'minimized' | 'expanded'
this.curvesHeight = 150 // Height in pixels when curves are expanded
}
static fromJSON(json, parent) {
let fillImage = undefined;
if (json.fillImage && Object.keys(json.fillImage).length !== 0) {
let img = new Image();
img.src = json.fillImage.src
fillImage = img
} else {
fillImage = {}
}
const shape = new Shape(
json.startx,
json.starty,
{
fillStyle: json.fillStyle,
fillImage: fillImage,
strokeStyle: json.strokeStyle,
lineWidth: json.lineWidth,
fillShape: json.filled,
strokeShape: json.stroked,
},
parent,
json.idx,
json.shapeId,
);
for (let curve of json.curves) {
shape.addCurve(Bezier.fromJSON(curve));
}
for (let region of json.regions) {
const curves = [];
for (let curve of region.curves) {
curves.push(Bezier.fromJSON(curve));
}
shape.regions.push({
idx: region.idx,
curves: curves,
fillStyle: region.fillStyle,
filled: region.filled,
});
}
// Load shapeIndex if present (for shape tweening)
if (json.shapeIndex !== undefined) {
shape.shapeIndex = json.shapeIndex;
}
return shape;
}
toJSON(randomizeUuid = false) {
const json = {};
json.type = "Shape";
json.startx = this.startx;
json.starty = this.starty;
json.fillStyle = this.fillStyle;
if (this.fillImage instanceof Element) {
json.fillImage = {
src: this.fillImage.src
}
}
json.strokeStyle = this.strokeStyle;
json.lineWidth = this.lineWidth;
json.filled = this.filled;
json.stroked = this.stroked;
if (randomizeUuid) {
json.idx = uuidv4();
} else {
json.idx = this.idx;
}
json.shapeId = this.shapeId;
json.shapeIndex = this.shapeIndex; // For shape tweening
json.curves = [];
for (let curve of this.curves) {
json.curves.push(curve.toJSON(randomizeUuid));
}
json.regions = [];
for (let region of this.regions) {
const curves = [];
for (let curve of region.curves) {
curves.push(curve.toJSON(randomizeUuid));
}
json.regions.push({
idx: region.idx,
curves: curves,
fillStyle: region.fillStyle,
filled: region.filled,
});
}
return json;
}
get segmentColor() {
return uuidToColor(this.idx);
}
addCurve(curve) {
if (curve.color == undefined) {
curve.color = context.strokeStyle;
}
this.curves.push(curve);
this.quadtree.insert(curve, this.curves.length - 1);
growBoundingBox(this.boundingBox, curve.bbox());
}
addLine(x, y) {
let lastpoint;
if (this.curves.length) {
lastpoint = this.curves[this.curves.length - 1].points[3];
} else {
lastpoint = { x: this.startx, y: this.starty };
}
let midpoint = { x: (x + lastpoint.x) / 2, y: (y + lastpoint.y) / 2 };
let curve = new Bezier(
lastpoint.x,
lastpoint.y,
midpoint.x,
midpoint.y,
midpoint.x,
midpoint.y,
x,
y,
);
curve.color = context.strokeStyle;
this.quadtree.insert(curve, this.curves.length - 1);
this.curves.push(curve);
}
bbox() {
return this.boundingBox;
}
clear() {
this.curves = [];
this.quadtree.clear();
}
copy(idx) {
let newShape = new Shape(
this.startx,
this.starty,
{},
this.parent,
idx.slice(0, 8) + this.idx.slice(8),
this.shapeId,
);
newShape.startx = this.startx;
newShape.starty = this.starty;
for (let curve of this.curves) {
let newCurve = new Bezier(
curve.points[0].x,
curve.points[0].y,
curve.points[1].x,
curve.points[1].y,
curve.points[2].x,
curve.points[2].y,
curve.points[3].x,
curve.points[3].y,
);
newCurve.color = curve.color;
newShape.addCurve(newCurve);
}
// TODO
// for (let vertex of this.vertices) {
// }
newShape.updateVertices();
newShape.fillStyle = this.fillStyle;
if (this.fillImage instanceof Element) {
newShape.fillImage = this.fillImage.cloneNode(true)
} else {
newShape.fillImage = this.fillImage;
}
newShape.strokeStyle = this.strokeStyle;
newShape.lineWidth = this.lineWidth;
newShape.filled = this.filled;
newShape.stroked = this.stroked;
return newShape;
}
fromPoints(points, error = 30) {
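// Fit cubic Bezier segments to a polyline of [x, y] point pairs via fitCurve; `error`
// is the maximum allowed fitting error (larger values give fewer, smoother curves).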
console.log(error);
this.curves = [];
let curves = fitCurve.fitCurve(points, error);
for (let curve of curves) {
let bezier = new Bezier(
curve[0][0],
curve[0][1],
curve[1][0],
curve[1][1],
curve[2][0],
curve[2][1],
curve[3][0],
curve[3][1],
);
this.curves.push(bezier);
this.quadtree.insert(bezier, this.curves.length - 1);
}
return this;
}
simplify(mode = "corners") {
this.quadtree.clear();
this.inProgress = false;
// Mode can be corners, smooth or auto
if (mode == "corners") {
let points = [{ x: this.startx, y: this.starty }];
for (let curve of this.curves) {
points.push(curve.points[3]);
}
// points = points.concat(this.curves)
let newpoints = simplifyPolyline(points, 10, false);
this.curves = [];
let lastpoint = newpoints.shift();
let midpoint;
for (let point of newpoints) {
midpoint = {
x: (lastpoint.x + point.x) / 2,
y: (lastpoint.y + point.y) / 2,
};
let bezier = new Bezier(
lastpoint.x,
lastpoint.y,
midpoint.x,
midpoint.y,
midpoint.x,
midpoint.y,
point.x,
point.y,
);
this.curves.push(bezier);
this.quadtree.insert(bezier, this.curves.length - 1);
lastpoint = point;
}
} else if (mode == "smooth") {
let error = 30;
let points = [[this.startx, this.starty]];
for (let curve of this.curves) {
points.push([curve.points[3].x, curve.points[3].y]);
}
this.fromPoints(points, error);
} else if (mode == "verbatim") {
// Just keep existing shape
}
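// Split curves at their mutual intersection points so every crossing becomes a vertex:
// collect intersection t-values per curve (neighbours found via the quadtree), drop
// near-duplicate t's, then split each curve from the largest t down, renormalizing t
// against the remaining left-hand segment.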
let epsilon = 0.01;
let newCurves = [];
let intersectMap = {};
for (let i = 0; i < this.curves.length - 1; i++) {
// for (let j=i+1; j<this.curves.length; j++) {
for (let j of this.quadtree.query(this.curves[i].bbox())) {
if (i >= j) continue;
let intersects = this.curves[i].intersects(this.curves[j]);
if (intersects.length) {
intersectMap[i] ||= [];
intersectMap[j] ||= [];
for (let intersect of intersects) {
let [t1, t2] = intersect.split("/");
intersectMap[i].push(parseFloat(t1));
intersectMap[j].push(parseFloat(t2));
}
}
}
}
for (let lst in intersectMap) {
for (let i = 1; i < intersectMap[lst].length; i++) {
if (
Math.abs(intersectMap[lst][i] - intersectMap[lst][i - 1]) < epsilon
) {
intersectMap[lst].splice(i, 1);
i--;
}
}
}
for (let i = this.curves.length - 1; i >= 0; i--) {
if (i in intersectMap) {
intersectMap[i].sort((a, b) => a - b).reverse(); // numeric sort, largest t first
let remainingFraction = 1;
let remainingCurve = this.curves[i];
for (let t of intersectMap[i]) {
let split = remainingCurve.split(t / remainingFraction);
remainingFraction = t;
newCurves.push(split.right);
remainingCurve = split.left;
}
newCurves.push(remainingCurve);
} else {
newCurves.push(this.curves[i]);
}
}
for (let curve of newCurves) {
curve.color = context.strokeStyle;
}
newCurves.reverse();
this.curves = newCurves;
}
update() {
this.recalculateBoundingBox();
this.updateVertices();
if (this.curves.length) {
this.startx = this.curves[0].points[0].x;
this.starty = this.curves[0].points[0].y;
}
return [this];
}
getClockwiseCurves(point, otherPoints) {
// Returns array of {x, y, idx, angle}
let points = [];
for (let point of otherPoints) {
points.push({ ...this.vertices[point].point, idx: point });
}
// Add an angle property to each point using tan(angle) = y/x
const angles = points.map(({ x, y, idx }) => {
return {
x,
y,
idx,
angle: (Math.atan2(y - point.y, x - point.x) * 180) / Math.PI,
};
});
// Sort your points by angle
const pointsSorted = angles.sort((a, b) => a.angle - b.angle);
return pointsSorted;
}
translate(x, y) {
this.quadtree.clear()
let j=0;
for (let curve of this.curves) {
for (let i in curve.points) {
const point = curve.points[i];
curve.points[i] = { x: point.x + x, y: point.y + y };
}
this.quadtree.insert(curve, j)
j++;
}
this.update();
}
updateVertices() {
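// Rebuild vertices and fill regions: merge curve endpoints that lie within epsilon into
// shared vertices, mark the base region filled when the path closes on itself, and split
// off a new region wherever a vertex joins four curves (an intersection).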
this.vertices = [];
let utils = Bezier.getUtils();
let epsilon = 1.5; // big epsilon whoa
let tooClose;
let i = 0;
let region = {
idx: `${this.idx}-r${this.regionIdx++}`,
curves: [],
fillStyle: context.fillStyle,
filled: context.fillShape,
};
pointerList[region.idx] = region;
this.regions = [region];
for (let curve of this.curves) {
this.regions[0].curves.push(curve);
}
if (this.regions[0].curves.length) {
if (
utils.dist(
this.regions[0].curves[0].points[0],
this.regions[0].curves[this.regions[0].curves.length - 1].points[3],
) < epsilon
) {
this.regions[0].filled = true;
}
}
// Generate vertices
for (let curve of this.curves) {
for (let index of [0, 3]) {
tooClose = false;
for (let vertex of this.vertices) {
if (utils.dist(curve.points[index], vertex.point) < epsilon) {
tooClose = true;
vertex[["startCurves", , , "endCurves"][index]][i] = curve;
break;
}
}
if (!tooClose) {
if (index == 0) {
this.vertices.push({
point: curve.points[index],
startCurves: { [i]: curve },
endCurves: {},
});
} else {
this.vertices.push({
point: curve.points[index],
startCurves: {},
endCurves: { [i]: curve },
});
}
}
}
i++;
}
let shapes = [this];
this.vertices.forEach((vertex, i) => {
for (let i = 0; i < Math.min(10, this.regions.length); i++) {
let region = this.regions[i];
let regionVertexCurves = [];
let vertexCurves = { ...vertex.startCurves, ...vertex.endCurves };
if (Object.keys(vertexCurves).length == 1) {
// endpoint
continue;
} else if (Object.keys(vertexCurves).length == 2) {
// path vertex, don't need to do anything
continue;
} else if (Object.keys(vertexCurves).length == 3) {
// T junction. Region doesn't change but might need to update curves?
// Skip for now.
continue;
} else if (Object.keys(vertexCurves).length == 4) {
// Intersection, split region in 2
for (let i in vertexCurves) {
let curve = vertexCurves[i];
if (region.curves.includes(curve)) {
regionVertexCurves.push(curve);
}
}
let start = region.curves.indexOf(regionVertexCurves[1]);
let end = region.curves.indexOf(regionVertexCurves[3]);
if (end > start) {
let newRegion = {
idx: `${this.idx}-r${this.regionIdx++}`, // TODO: generate this deterministically so that undo/redo works
curves: region.curves.splice(start, end - start),
fillStyle: region.fillStyle,
filled: true,
};
pointerList[newRegion.idx] = newRegion;
this.regions.push(newRegion);
}
} else {
// not sure how to handle vertices with more than 4 curves
console.log(
`Unexpected vertex with ${Object.keys(vertexCurves).length} curves!`,
);
}
}
});
}
}
export { BaseShape, TempShape, Shape };


@ -158,7 +158,7 @@ class GraphicsObject {
  this.currentFrameNum = 0;
  this.currentLayer = 0;
  this.layers = []
- this.audioLayers = []
+ this.audioTracks = []
}
static fromJSON(json) {
  const graphicsObject = new GraphicsObject(json.idx)
@ -174,8 +174,8 @@ class GraphicsObject {
  for (let layer of json.layers) {
    graphicsObject.layers.push(Layer.fromJSON(layer))
  }
- for (let audioLayer of json.audioLayers) {
-   graphicsObject.audioLayers.push(AudioLayer.fromJSON(audioLayer))
+ for (let audioLayer of json.audioTracks) {
+   graphicsObject.audioTracks.push(AudioTrack.fromJSON(audioLayer))
  }
  return graphicsObject
}

172
src/state.js Normal file
View File

@ -0,0 +1,172 @@
// Global state management for Lightningbeam
// This module centralizes all global state that was previously scattered in main.js
import { deepMerge } from "./utils.js";
// Core application context
// Contains UI state, selections, tool settings, etc.
export let context = {
mouseDown: false,
mousePos: { x: 0, y: 0 },
swatches: [
"#000000",
"#FFFFFF",
"#FF0000",
"#FFFF00",
"#00FF00",
"#00FFFF",
"#0000FF",
"#FF00FF",
],
lineWidth: 5,
simplifyMode: "smooth",
fillShape: false,
strokeShape: true,
fillGaps: 5,
dropperColor: "Fill color",
dragging: false,
selectionRect: undefined,
selection: [],
shapeselection: [],
oldselection: [],
oldshapeselection: [],
selectedFrames: [],
dragDirection: undefined,
zoomLevel: 1,
timelineWidget: null, // Reference to TimelineWindowV2 widget for zoom controls
config: null, // Reference to config object (set after config is initialized)
mode: "select", // Current tool mode
// Recording state
isRecording: false,
recordingTrackId: null,
recordingClipId: null,
playPauseButton: null, // Reference to play/pause button for updating appearance
};
// Application configuration
// Contains settings, shortcuts, file properties, etc.
export let config = {
shortcuts: {
playAnimation: " ",
undo: "<mod>z",
redo: "<mod>Z",
new: "<mod>n",
newWindow: "<mod>N",
save: "<mod>s",
saveAs: "<mod>S",
open: "<mod>o",
import: "<mod>i",
export: "<mod>e",
quit: "<mod>q",
copy: "<mod>c",
paste: "<mod>v",
delete: "Backspace",
selectAll: "<mod>a",
selectNone: "<mod>A",
group: "<mod>g",
addLayer: "<mod>l",
addKeyframe: "F6",
addBlankKeyframe: "F7",
zoomIn: "<mod>+",
zoomOut: "<mod>-",
resetZoom: "<mod>0",
},
fileWidth: 800,
fileHeight: 600,
framerate: 24,
recentFiles: [],
scrollSpeed: 1,
debug: false,
reopenLastSession: false,
lastImportFilterIndex: 0 // Index of last used filter in import dialog (0=Image, 1=Audio, 2=Lightningbeam)
};
// Object pointer registry
// Maps UUIDs to object instances for quick lookup
export let pointerList = {};
// Undo/redo state tracking
// Stores initial property values when starting an action
export let startProps = {};
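// Illustrative (assumed flow): a tool records the values it is about to change,
//   startProps[obj.idx] = { x: obj.x, y: obj.y };
// so an undo action can later restore or diff against them.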
// Helper function to get keyboard shortcut in platform format
export function getShortcut(shortcut) {
if (!(shortcut in config.shortcuts)) return undefined;
let shortcutValue = config.shortcuts[shortcut].replace("<mod>", "CmdOrCtrl+");
const key = shortcutValue.slice(-1);
// If the last character is an uppercase letter, spell it as an explicit "Shift+" chord
return key === key.toUpperCase() && key !== key.toLowerCase()
? shortcutValue.replace(key, `Shift+${key}`)
: shortcutValue.replace("++", "+Shift+="); // "<mod>+" arrives as "CmdOrCtrl++"; rewrite the trailing "++" since '+' is typed as Shift+'='
}
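// Example (illustrative), tracing the defaults above:
//   getShortcut("saveAs") -> "CmdOrCtrl+Shift+S"  ("<mod>S", uppercase key)
//   getShortcut("zoomIn") -> "CmdOrCtrl+Shift+="  ("<mod>+", '+' is Shift+'=')
//   getShortcut("save")   -> "CmdOrCtrl+s"        (lowercase key, left as-is)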
// Configuration file management
const CONFIG_FILE_PATH = "config.json";
// Load configuration from localStorage
export async function loadConfig() {
try {
const configData = localStorage.getItem("lightningbeamConfig") || "{}";
const loaded = JSON.parse(configData);
// Merge loaded config with defaults
Object.assign(config, deepMerge({ ...config }, loaded));
// Make config accessible to widgets via context
context.config = config;
return config;
} catch (error) {
console.log("Error loading config, using defaults:", error);
context.config = config;
return config;
}
}
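// Illustrative: because of deepMerge, a saved config that only overrides a few keys
// (e.g. { shortcuts: { undo: "<mod>u" } }) still inherits every other default
// shortcut instead of replacing the whole shortcuts object.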
// Save configuration to localStorage
export async function saveConfig() {
try {
localStorage.setItem(
"lightningbeamConfig",
JSON.stringify(config, null, 2),
);
} catch (error) {
console.error("Error saving config:", error);
}
}
// Add a file to recent files list
export async function addRecentFile(filePath) {
config.recentFiles = [
filePath,
...config.recentFiles.filter(file => file !== filePath)
].slice(0, 10);
await saveConfig();
}
// Utility to reset pointer list (useful for testing)
export function clearPointerList() {
pointerList = {};
}
// Utility to reset start props (useful for testing)
export function clearStartProps() {
startProps = {};
}
// Helper to register an object in the pointer list
export function registerObject(uuid, object) {
pointerList[uuid] = object;
}
// Helper to unregister an object from the pointer list
export function unregisterObject(uuid) {
delete pointerList[uuid];
}
// Helper to get an object from the pointer list
export function getObject(uuid) {
return pointerList[uuid];
}
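// Usage sketch (illustrative, names assumed): objects register themselves under
// their idx so UI code can resolve an id back to a live instance:
//   registerObject(shape.idx, shape);
//   const hit = getObject(clickedIdx);   // undefined if nothing is registered
//   unregisterObject(shape.idx);         // on deletion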

View File

@ -119,6 +119,8 @@ button {
background-color: #ccc; background-color: #ccc;
text-align: left; text-align: left;
z-index: 1; z-index: 1;
display: flex;
align-items: center;
} }
.icon { .icon {
@ -730,3 +732,218 @@ button {
filter: invert(1); filter: invert(1);
} }
} }
/* Playback Controls */
.playback-controls-group {
display: inline-flex;
gap: 0;
margin: 5px;
align-items: center;
border-radius: 6px;
overflow: hidden;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
}
.playback-btn {
width: 40px;
height: 36px;
padding: 0;
position: relative;
display: inline-flex;
align-items: center;
justify-content: center;
border-radius: 0;
border-right: 1px solid rgba(0, 0, 0, 0.15);
}
.playback-btn:last-child {
border-right: none;
}
/* Play Button - Triangle */
.playback-btn-play::before {
content: "";
width: 0;
height: 0;
border-style: solid;
border-width: 8px 0 8px 14px;
border-color: transparent transparent transparent #0f0f0f;
margin-left: 2px;
}
/* Pause Button - Two Bars */
.playback-btn-pause::before,
.playback-btn-pause::after {
content: "";
width: 4px;
height: 16px;
background-color: #0f0f0f;
position: absolute;
}
.playback-btn-pause::before {
left: 10px;
}
.playback-btn-pause::after {
right: 10px;
}
/* Rewind Button - Double Left Triangle */
.playback-btn-rewind::before,
.playback-btn-rewind::after {
content: "";
width: 0;
height: 0;
border-style: solid;
border-width: 7px 10px 7px 0;
border-color: transparent #0f0f0f transparent transparent;
position: absolute;
}
.playback-btn-rewind::before {
left: 10px;
}
.playback-btn-rewind::after {
left: 20px;
}
/* Fast Forward Button - Double Right Triangle */
.playback-btn-ff::before,
.playback-btn-ff::after {
content: "";
width: 0;
height: 0;
border-style: solid;
border-width: 7px 0 7px 10px;
border-color: transparent transparent transparent #0f0f0f;
position: absolute;
}
.playback-btn-ff::before {
left: 10px;
}
.playback-btn-ff::after {
left: 20px;
}
/* Go to Start - Bar + Left Triangle */
.playback-btn-start::before,
.playback-btn-start::after {
content: "";
position: absolute;
}
.playback-btn-start::before {
width: 2px;
height: 14px;
background-color: #0f0f0f;
left: 13px;
}
.playback-btn-start::after {
width: 0;
height: 0;
border-style: solid;
border-width: 7px 12px 7px 0;
border-color: transparent #0f0f0f transparent transparent;
left: 15px;
}
/* Go to End - Right Triangle + Bar */
.playback-btn-end::before,
.playback-btn-end::after {
content: "";
position: absolute;
}
.playback-btn-end::before {
width: 0;
height: 0;
border-style: solid;
border-width: 7px 0 7px 12px;
border-color: transparent transparent transparent #0f0f0f;
left: 13px;
}
.playback-btn-end::after {
width: 2px;
height: 14px;
background-color: #0f0f0f;
left: 25px;
}
/* Record Button - Circle */
.playback-btn-record::before {
content: "";
width: 14px;
height: 14px;
border-radius: 50%;
background-color: #cc0000;
}
.playback-btn-record:disabled::before {
background-color: #666;
}
/* Recording animation */
@keyframes pulse {
0%, 100% {
opacity: 1;
}
50% {
opacity: 0.5;
}
}
.playback-btn-record.recording::before {
animation: pulse 1s ease-in-out infinite;
}
/* Dark mode playback button adjustments */
@media (prefers-color-scheme: dark) {
.playback-btn {
border-right: 1px solid rgba(255, 255, 255, 0.15);
}
.playback-btn-play::before {
border-color: transparent transparent transparent #f6f6f6;
}
.playback-btn-pause::before,
.playback-btn-pause::after {
background-color: #f6f6f6;
}
.playback-btn-rewind::before,
.playback-btn-rewind::after {
border-color: transparent #f6f6f6 transparent transparent;
}
.playback-btn-ff::before,
.playback-btn-ff::after {
border-color: transparent transparent transparent #f6f6f6;
}
.playback-btn-start::before {
background-color: #f6f6f6;
}
.playback-btn-start::after {
border-color: transparent #f6f6f6 transparent transparent;
}
.playback-btn-end::before {
border-color: transparent transparent transparent #f6f6f6;
}
.playback-btn-end::after {
background-color: #f6f6f6;
}
.playback-btn-record:disabled::before {
background-color: #444;
}
}

View File

@ -437,6 +437,21 @@ class TrackHierarchy {
} }
} }
} }
// Add audio tracks (after visual layers)
if (graphicsObject.audioTracks) {
for (let audioTrack of graphicsObject.audioTracks) {
const audioTrackItem = {
type: 'audio',
object: audioTrack,
name: audioTrack.name || 'Audio',
indent: 0,
collapsed: audioTrack.collapsed || false,
visible: audioTrack.audible !== false
}
this.tracks.push(audioTrackItem)
}
}
} }
/** /**
@ -514,8 +529,8 @@ class TrackHierarchy {
getTrackHeight(track) { getTrackHeight(track) {
const baseHeight = this.trackHeight const baseHeight = this.trackHeight
// Only objects and shapes can have curves // Only objects, shapes, and audio tracks can have curves
if (track.type !== 'object' && track.type !== 'shape') { if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio') {
return baseHeight return baseHeight
} }

View File

@ -502,7 +502,7 @@ class TimelineWindow extends ScrollableWindow {
} }
} }
} }
// } else if (layer instanceof AudioLayer) { // } else if (layer instanceof AudioTrack) {
} else if (layer.sounds) { } else if (layer.sounds) {
// TODO: split waveform into chunks // TODO: split waveform into chunks
for (let i in layer.sounds) { for (let i in layer.sounds) {
@ -569,6 +569,9 @@ class TimelineWindowV2 extends Widget {
// Phase 6: Keyframe clipboard // Phase 6: Keyframe clipboard
this.keyframeClipboard = null // {keyframes: [{keyframe, curve, relativeTime}], baseTime} this.keyframeClipboard = null // {keyframes: [{keyframe, curve, relativeTime}], baseTime}
// Selected audio track (for recording)
this.selectedTrack = null
} }
draw(ctx) { draw(ctx) {
@ -710,6 +713,10 @@ class TimelineWindowV2 extends Widget {
const buttonSize = 14 const buttonSize = 14
const twoButtonsWidth = (buttonSize * 2) + 4 + 10 // Two buttons + gap + padding const twoButtonsWidth = (buttonSize * 2) + 4 + 10 // Two buttons + gap + padding
maxTextWidth = this.trackHeaderWidth - textStartX - twoButtonsWidth maxTextWidth = this.trackHeaderWidth - textStartX - twoButtonsWidth
} else if (track.type === 'audio') {
const buttonSize = 14
const oneButtonWidth = buttonSize + 10 // One button (curves mode) + padding
maxTextWidth = this.trackHeaderWidth - textStartX - oneButtonWidth
} }
// Truncate text with ellipsis if needed // Truncate text with ellipsis if needed
@ -729,14 +736,18 @@ class TimelineWindowV2 extends Widget {
// Draw type indicator (only if there's space) // Draw type indicator (only if there's space)
ctx.fillStyle = foregroundColor ctx.fillStyle = foregroundColor
ctx.font = '10px sans-serif' ctx.font = '10px sans-serif'
const typeText = track.type === 'layer' ? '[L]' : track.type === 'object' ? '[G]' : '[S]' const typeText = track.type === 'layer' ? '[L]' :
track.type === 'object' ? '[G]' :
track.type === 'audio' ? '[A]' : '[S]'
const typeX = textStartX + ctx.measureText(displayName).width + 8 const typeX = textStartX + ctx.measureText(displayName).width + 8
if (typeX + ctx.measureText(typeText).width < this.trackHeaderWidth - (track.type === 'object' || track.type === 'shape' ? 50 : 10)) { const buttonSpaceNeeded = (track.type === 'object' || track.type === 'shape') ? 50 :
(track.type === 'audio') ? 25 : 10
if (typeX + ctx.measureText(typeText).width < this.trackHeaderWidth - buttonSpaceNeeded) {
ctx.fillText(typeText, typeX, y + this.trackHierarchy.trackHeight / 2) ctx.fillText(typeText, typeX, y + this.trackHierarchy.trackHeight / 2)
} }
// Draw toggle buttons for object/shape tracks (Phase 3) // Draw toggle buttons for object/shape/audio tracks (Phase 3)
if (track.type === 'object' || track.type === 'shape') { if (track.type === 'object' || track.type === 'shape' || track.type === 'audio') {
const buttonSize = 14 const buttonSize = 14
const buttonY = y + (this.trackHierarchy.trackHeight - buttonSize) / 2 // Use base height for button position const buttonY = y + (this.trackHierarchy.trackHeight - buttonSize) / 2 // Use base height for button position
let buttonX = this.trackHeaderWidth - 10 // Start from right edge let buttonX = this.trackHeaderWidth - 10 // Start from right edge
@ -756,7 +767,8 @@ class TimelineWindowV2 extends Widget {
track.object.curvesMode === 'minimized' ? '≈' : '-' track.object.curvesMode === 'minimized' ? '≈' : '-'
ctx.fillText(curveSymbol, buttonX + buttonSize / 2, buttonY + buttonSize / 2) ctx.fillText(curveSymbol, buttonX + buttonSize / 2, buttonY + buttonSize / 2)
// Segment visibility button // Segment visibility button (only for object/shape tracks, not audio)
if (track.type !== 'audio') {
buttonX -= (buttonSize + 4) buttonX -= (buttonSize + 4)
ctx.strokeStyle = foregroundColor ctx.strokeStyle = foregroundColor
ctx.lineWidth = 1 ctx.lineWidth = 1
@ -767,6 +779,7 @@ class TimelineWindowV2 extends Widget {
ctx.fillStyle = foregroundColor ctx.fillStyle = foregroundColor
ctx.fillRect(buttonX + 2, buttonY + 2, buttonSize - 4, buttonSize - 4) ctx.fillRect(buttonX + 2, buttonY + 2, buttonSize - 4, buttonSize - 4)
} }
}
// Draw legend for expanded curves (Phase 6) // Draw legend for expanded curves (Phase 6)
if (track.object.curvesMode === 'expanded') { if (track.object.curvesMode === 'expanded') {
@ -1113,6 +1126,105 @@ class TimelineWindowV2 extends Widget {
} }
} }
} }
} else if (track.type === 'audio') {
// Draw audio clips for AudioTrack
const audioTrack = track.object
const y = this.trackHierarchy.getTrackY(i)
const trackHeight = this.trackHierarchy.trackHeight // Use base height for clips
// Draw each clip
for (let clip of audioTrack.clips) {
const startX = this.timelineState.timeToPixel(clip.startTime)
const endX = this.timelineState.timeToPixel(clip.startTime + clip.duration)
const clipWidth = endX - startX
// Draw clip rectangle with audio-specific color
// Use gray color for loading clips, blue for loaded clips
ctx.fillStyle = clip.loading ? '#666666' : '#4a90e2'
ctx.fillRect(
startX,
y + 5,
clipWidth,
trackHeight - 10
)
// Draw border
ctx.strokeStyle = shadow
ctx.lineWidth = 1
ctx.strokeRect(
startX,
y + 5,
clipWidth,
trackHeight - 10
)
// Draw clip name if there's enough space
const minWidthForLabel = 40
if (clipWidth >= minWidthForLabel) {
ctx.fillStyle = labelColor
ctx.font = '11px sans-serif'
ctx.textAlign = 'left'
ctx.textBaseline = 'middle'
// Clip text to clip bounds
ctx.save()
ctx.beginPath()
ctx.rect(startX + 2, y + 5, clipWidth - 4, trackHeight - 10)
ctx.clip()
ctx.fillText(clip.name, startX + 4, y + trackHeight / 2)
ctx.restore()
}
// Draw waveform only for loaded clips
if (!clip.loading && clip.waveform && clip.waveform.length > 0) {
ctx.fillStyle = 'rgba(255, 255, 255, 0.3)'
// Only draw waveform within visible area
const visibleStart = Math.max(startX + 2, 0)
const visibleEnd = Math.min(startX + clipWidth - 2, this.width - this.trackHeaderWidth)
if (visibleEnd > visibleStart) {
const centerY = y + trackHeight / 2
const waveformHeight = trackHeight - 14 // Leave padding at top/bottom
const waveformData = clip.waveform
// Calculate how many pixels each waveform peak represents
const pixelsPerPeak = clipWidth / waveformData.length
// Calculate the range of visible peaks
const firstVisiblePeak = Math.max(0, Math.floor((visibleStart - startX) / pixelsPerPeak))
const lastVisiblePeak = Math.min(waveformData.length - 1, Math.ceil((visibleEnd - startX) / pixelsPerPeak))
// Draw waveform as a filled path
ctx.beginPath()
// Trace along the max values (left to right)
for (let i = firstVisiblePeak; i <= lastVisiblePeak; i++) {
const peakX = startX + (i * pixelsPerPeak)
const peak = waveformData[i]
const maxY = centerY + (peak.max * waveformHeight * 0.5)
if (i === firstVisiblePeak) {
ctx.moveTo(peakX, maxY)
} else {
ctx.lineTo(peakX, maxY)
}
}
// Trace back along the min values (right to left)
for (let i = lastVisiblePeak; i >= firstVisiblePeak; i--) {
const peakX = startX + (i * pixelsPerPeak)
const peak = waveformData[i]
const minY = centerY + (peak.min * waveformHeight * 0.5)
ctx.lineTo(peakX, minY)
}
ctx.closePath()
ctx.fill()
}
}
}
} }
} }
@ -1141,8 +1253,8 @@ class TimelineWindowV2 extends Widget {
for (let i = 0; i < this.trackHierarchy.tracks.length; i++) { for (let i = 0; i < this.trackHierarchy.tracks.length; i++) {
const track = this.trackHierarchy.tracks[i] const track = this.trackHierarchy.tracks[i]
// Only draw curves for objects and shapes // Only draw curves for objects, shapes, and audio tracks
if (track.type !== 'object' && track.type !== 'shape') continue if (track.type !== 'object' && track.type !== 'shape' && track.type !== 'audio') continue
const obj = track.object const obj = track.object
@ -1153,7 +1265,10 @@ class TimelineWindowV2 extends Widget {
// Find the layer containing this object/shape to get AnimationData // Find the layer containing this object/shape to get AnimationData
let animationData = null let animationData = null
if (track.type === 'object') { if (track.type === 'audio') {
// For audio tracks, animation data is directly on the track object
animationData = obj.animationData
} else if (track.type === 'object') {
// For objects, get curves from parent layer // For objects, get curves from parent layer
for (let layer of this.context.activeObject.allLayers) { for (let layer of this.context.activeObject.allLayers) {
if (layer.children && layer.children.includes(obj)) { if (layer.children && layer.children.includes(obj)) {
@ -1182,13 +1297,16 @@ class TimelineWindowV2 extends Widget {
if (!animationData) continue if (!animationData) continue
// Get all curves for this object/shape // Get all curves for this object/shape/audio
const curves = [] const curves = []
for (let curveName in animationData.curves) { for (let curveName in animationData.curves) {
const curve = animationData.curves[curveName] const curve = animationData.curves[curveName]
// Filter to only curves for this specific object/shape // Filter to only curves for this specific object/shape/audio
if (track.type === 'object' && curveName.startsWith(`child.${obj.idx}.`)) { if (track.type === 'audio') {
// Audio tracks: include all curves (they're prefixed with 'track.' or 'clip.')
curves.push(curve)
} else if (track.type === 'object' && curveName.startsWith(`child.${obj.idx}.`)) {
curves.push(curve) curves.push(curve)
} else if (track.type === 'shape' && curveName.startsWith(`shape.${obj.shapeId}.`)) { } else if (track.type === 'shape' && curveName.startsWith(`shape.${obj.shapeId}.`)) {
curves.push(curve) curves.push(curve)
@ -1736,6 +1854,32 @@ class TimelineWindowV2 extends Widget {
return true return true
} }
// Check if clicking on audio clip to start dragging
const audioClipInfo = this.getAudioClipAtPoint(track, adjustedX, adjustedY)
if (audioClipInfo) {
// Select the track
this.selectTrack(track)
// Start audio clip dragging
const clickTime = this.timelineState.pixelToTime(adjustedX)
this.draggingAudioClip = {
track: track,
clip: audioClipInfo.clip,
clipIndex: audioClipInfo.clipIndex,
audioTrack: audioClipInfo.audioTrack,
initialMouseTime: clickTime,
initialClipStartTime: audioClipInfo.clip.startTime
}
// Enable global mouse events for dragging
this._globalEvents.add("mousemove")
this._globalEvents.add("mouseup")
console.log('Started dragging audio clip at time', audioClipInfo.clip.startTime)
if (this.requestRedraw) this.requestRedraw()
return true
}
// Phase 6: Check if clicking on segment to start dragging // Phase 6: Check if clicking on segment to start dragging
const segmentInfo = this.getSegmentAtPoint(track, adjustedX, adjustedY) const segmentInfo = this.getSegmentAtPoint(track, adjustedX, adjustedY)
if (segmentInfo) { if (segmentInfo) {
@ -1761,6 +1905,12 @@ class TimelineWindowV2 extends Widget {
if (this.requestRedraw) this.requestRedraw() if (this.requestRedraw) this.requestRedraw()
return true return true
} }
// Fallback: clicking anywhere on track in timeline area selects it
// This is especially important for audio tracks that may not have clips yet
this.selectTrack(track)
if (this.requestRedraw) this.requestRedraw()
return true
} }
} }
@ -2216,6 +2366,45 @@ class TimelineWindowV2 extends Widget {
return null return null
} }
/**
* Get audio clip at a point
* Returns {clip, clipIndex, audioTrack} if clicking on an audio clip
*/
getAudioClipAtPoint(track, x, y) {
if (track.type !== 'audio') return null
const trackIndex = this.trackHierarchy.tracks.indexOf(track)
if (trackIndex === -1) return null
const trackY = this.trackHierarchy.getTrackY(trackIndex)
const trackHeight = this.trackHierarchy.trackHeight
const clipTop = trackY + 5
const clipBottom = trackY + trackHeight - 5
// Check if y is within clip bounds
if (y < clipTop || y > clipBottom) return null
const clickTime = this.timelineState.pixelToTime(x)
const audioTrack = track.object
// Check each clip
for (let i = 0; i < audioTrack.clips.length; i++) {
const clip = audioTrack.clips[i]
const clipStart = clip.startTime
const clipEnd = clip.startTime + clip.duration
if (clickTime >= clipStart && clickTime <= clipEnd) {
return {
clip: clip,
clipIndex: i,
audioTrack: audioTrack
}
}
}
return null
}
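// Illustrative: the mousedown handler above calls this as
//   const hit = this.getAudioClipAtPoint(track, adjustedX, adjustedY)
//   if (hit) { /* start dragging hit.clip on hit.audioTrack */ }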
/** /**
* Get segment edge at a point (Phase 6) * Get segment edge at a point (Phase 6)
* Returns {edge: 'left'|'right', startTime, endTime, keyframe, animationData, curveName} if near an edge * Returns {edge: 'left'|'right', startTime, endTime, keyframe, animationData, curveName} if near an edge
@ -2440,11 +2629,14 @@ class TimelineWindowV2 extends Widget {
*/ */
isTrackSelected(track) { isTrackSelected(track) {
if (track.type === 'layer') { if (track.type === 'layer') {
return this.context.activeLayer === track.object return this.context.activeObject.activeLayer === track.object
} else if (track.type === 'shape') { } else if (track.type === 'shape') {
return this.context.shapeselection?.includes(track.object) return this.context.shapeselection?.includes(track.object)
} else if (track.type === 'object') { } else if (track.type === 'object') {
return this.context.selection?.includes(track.object) return this.context.selection?.includes(track.object)
} else if (track.type === 'audio') {
// Audio tracks use activeLayer like regular layers
return this.context.activeObject.activeLayer === track.object
} }
return false return false
} }
@ -2458,11 +2650,8 @@ class TimelineWindowV2 extends Widget {
this.context.oldshapeselection = this.context.shapeselection this.context.oldshapeselection = this.context.shapeselection
if (track.type === 'layer') { if (track.type === 'layer') {
// Find the index of this layer in the activeObject // Set the layer as active (this will clear _activeAudioTrack)
const layerIndex = this.context.activeObject.children.indexOf(track.object) this.context.activeObject.activeLayer = track.object
if (layerIndex !== -1) {
this.context.activeObject.currentLayer = layerIndex
}
// Clear selections when selecting layer // Clear selections when selecting layer
this.context.selection = [] this.context.selection = []
this.context.shapeselection = [] this.context.shapeselection = []
@ -2471,11 +2660,8 @@ class TimelineWindowV2 extends Widget {
for (let i = 0; i < this.context.activeObject.allLayers.length; i++) { for (let i = 0; i < this.context.activeObject.allLayers.length; i++) {
const layer = this.context.activeObject.allLayers[i] const layer = this.context.activeObject.allLayers[i]
if (layer.shapes && layer.shapes.includes(track.object)) { if (layer.shapes && layer.shapes.includes(track.object)) {
// Find index in children array // Set the layer as active (this will clear _activeAudioTrack)
const layerIndex = this.context.activeObject.children.indexOf(layer) this.context.activeObject.activeLayer = layer
if (layerIndex !== -1) {
this.context.activeObject.currentLayer = layerIndex
}
// Set shape selection // Set shape selection
this.context.shapeselection = [track.object] this.context.shapeselection = [track.object]
this.context.selection = [] this.context.selection = []
@ -2486,6 +2672,12 @@ class TimelineWindowV2 extends Widget {
// Select the GraphicsObject // Select the GraphicsObject
this.context.selection = [track.object] this.context.selection = [track.object]
this.context.shapeselection = [] this.context.shapeselection = []
} else if (track.type === 'audio') {
// Audio track selected - set as active layer and clear other selections
// Audio tracks can act as layers (they have animationData, shapes=[], children=[])
this.context.activeObject.activeLayer = track.object
this.context.selection = []
this.context.shapeselection = []
} }
// Update the stage UI to reflect selection changes // Update the stage UI to reflect selection changes
@ -2988,6 +3180,25 @@ class TimelineWindowV2 extends Widget {
return true return true
} }
// Handle audio clip dragging
if (this.draggingAudioClip) {
// Adjust coordinates to timeline area
const adjustedX = x - this.trackHeaderWidth
// Convert mouse position to time
const newTime = this.timelineState.pixelToTime(adjustedX)
// Calculate time delta
const timeDelta = newTime - this.draggingAudioClip.initialMouseTime
// Update clip's start time (ensure it doesn't go negative)
this.draggingAudioClip.clip.startTime = Math.max(0, this.draggingAudioClip.initialClipStartTime + timeDelta)
// Trigger timeline redraw
if (this.requestRedraw) this.requestRedraw()
return true
}
// Phase 6: Handle segment dragging // Phase 6: Handle segment dragging
if (this.draggingSegment) { if (this.draggingSegment) {
// Adjust coordinates to timeline area // Adjust coordinates to timeline area
@ -3102,6 +3313,30 @@ class TimelineWindowV2 extends Widget {
return true return true
} }
// Complete audio clip dragging
if (this.draggingAudioClip) {
console.log('Finished dragging audio clip')
// Update backend with new clip position
const { invoke } = window.__TAURI__.core
invoke('audio_move_clip', {
trackId: this.draggingAudioClip.audioTrack.audioTrackId,
clipId: this.draggingAudioClip.clip.clipId,
newStartTime: this.draggingAudioClip.clip.startTime
}).catch(error => {
console.error('Failed to move clip in backend:', error)
})
// Clean up dragging state
this.draggingAudioClip = null
this._globalEvents.delete("mousemove")
this._globalEvents.delete("mouseup")
// Final redraw
if (this.requestRedraw) this.requestRedraw()
return true
}
// Phase 6: Complete segment dragging // Phase 6: Complete segment dragging
if (this.draggingSegment) { if (this.draggingSegment) {
console.log('Finished dragging segment') console.log('Finished dragging segment')