send playback events from backend to use as time reference

This commit is contained in:
Skyler Lehmkuhl 2025-10-23 04:30:52 -04:00
parent d2fa167179
commit 8be10b8213
3 changed files with 62 additions and 25 deletions

View File

@ -52,7 +52,7 @@ impl Engine {
command_rx: rtrb::Consumer<Command>, command_rx: rtrb::Consumer<Command>,
event_tx: rtrb::Producer<AudioEvent>, event_tx: rtrb::Producer<AudioEvent>,
) -> Self { ) -> Self {
let event_interval_frames = (sample_rate as usize * channels as usize) / 10; // Update 10 times per second let event_interval_frames = (sample_rate as usize * channels as usize) / 60; // Update 60 times per second
// Calculate a reasonable buffer size for the pool (typical audio callback size * channels) // Calculate a reasonable buffer size for the pool (typical audio callback size * channels)
let buffer_size = 512 * channels as usize; let buffer_size = 512 * channels as usize;

View File

@ -40,6 +40,9 @@ impl EventEmitter for TauriEventEmitter {
fn emit(&self, event: AudioEvent) { fn emit(&self, event: AudioEvent) {
// Serialize the event to the format expected by the frontend // Serialize the event to the format expected by the frontend
let serialized_event = match event { let serialized_event = match event {
AudioEvent::PlaybackPosition(time) => {
SerializedAudioEvent::PlaybackPosition { time }
}
AudioEvent::RecordingStarted(track_id, clip_id) => { AudioEvent::RecordingStarted(track_id, clip_id) => {
SerializedAudioEvent::RecordingStarted { track_id, clip_id } SerializedAudioEvent::RecordingStarted { track_id, clip_id }
} }
@ -348,6 +351,7 @@ pub async fn audio_resume_recording(
#[derive(serde::Serialize, Clone)] #[derive(serde::Serialize, Clone)]
#[serde(tag = "type")] #[serde(tag = "type")]
pub enum SerializedAudioEvent { pub enum SerializedAudioEvent {
PlaybackPosition { time: f64 },
RecordingStarted { track_id: u32, clip_id: u32 }, RecordingStarted { track_id: u32, clip_id: u32 },
RecordingProgress { clip_id: u32, duration: f64 }, RecordingProgress { clip_id: u32, duration: f64 },
RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> }, RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> },

View File

@ -809,7 +809,7 @@ window.addEventListener("keydown", (e) => {
break; break;
// TODO: put these in shortcuts // TODO: put these in shortcuts
case "<mod>ArrowRight": case "<mod>ArrowRight":
advanceFrame(); advance();
e.preventDefault(); e.preventDefault();
break; break;
case "ArrowRight": case "ArrowRight":
@ -832,7 +832,7 @@ window.addEventListener("keydown", (e) => {
e.preventDefault(); e.preventDefault();
break; break;
case "<mod>ArrowLeft": case "<mod>ArrowLeft":
decrementFrame(); rewind();
break; break;
case "ArrowLeft": case "ArrowLeft":
if (context.selection.length) { if (context.selection.length) {
@ -919,8 +919,7 @@ async function playPause() {
console.error('Failed to start audio playback:', error); console.error('Failed to start audio playback:', error);
} }
lastFrameTime = performance.now(); playbackLoop();
advanceFrame();
} else { } else {
// Stop recording if active // Stop recording if active
if (context.isRecording) { if (context.isRecording) {
@ -957,20 +956,9 @@ async function playPause() {
} }
} }
function advanceFrame() { // Playback animation loop - redraws UI while playing
// Calculate elapsed time since last frame (in seconds) // Note: Time is synchronized from DAW via PlaybackPosition events
const now = performance.now(); function playbackLoop() {
const elapsedTime = (now - lastFrameTime) / 1000;
lastFrameTime = now;
// Advance currentTime
context.activeObject.currentTime += elapsedTime;
// Sync timeline playhead position
if (context.timelineWidget?.timelineState) {
context.timelineWidget.timelineState.currentTime = context.activeObject.currentTime;
}
// Redraw stage and timeline // Redraw stage and timeline
updateUI(); updateUI();
if (context.timelineWidget?.requestRedraw) { if (context.timelineWidget?.requestRedraw) {
@ -982,13 +970,13 @@ function advanceFrame() {
// Debug logging for recording // Debug logging for recording
if (context.isRecording) { if (context.isRecording) {
console.log('advanceFrame - recording active, currentTime:', context.activeObject.currentTime, 'duration:', duration, 'isRecording:', context.isRecording); console.log('playbackLoop - recording active, currentTime:', context.activeObject.currentTime, 'duration:', duration, 'isRecording:', context.isRecording);
} }
// Check if we've reached the end (but allow infinite playback when recording) // Check if we've reached the end (but allow infinite playback when recording)
if (context.isRecording || (duration > 0 && context.activeObject.currentTime < duration)) { if (context.isRecording || (duration > 0 && context.activeObject.currentTime < duration)) {
// Continue playing // Continue playing
requestAnimationFrame(advanceFrame); requestAnimationFrame(playbackLoop);
} else { } else {
// Animation finished // Animation finished
playing = false; playing = false;
@ -1014,9 +1002,40 @@ function advanceFrame() {
} }
} }
// Single-step the playhead forward: one frame while the timeline is in
// "frames" mode, otherwise one second, then refresh every dependent view.
function advance() {
    const inFrameMode =
        context.timelineWidget?.timelineState?.mode === "frames";
    const step = inFrameMode ? 1 / context.activeObject.frameRate : 1;
    context.activeObject.currentTime += step;
    // Keep the timeline playhead in sync with the object clock.
    const state = context.timelineWidget?.timelineState;
    if (state) {
        state.currentTime = context.activeObject.currentTime;
    }
    updateLayers();
    updateMenu();
    updateUI();
    // Redraw the timeline when the widget exposes a redraw hook.
    context.timelineWidget?.requestRedraw?.();
}
// Handle audio events pushed from Rust via Tauri event system // Handle audio events pushed from Rust via Tauri event system
async function handleAudioEvent(event) { async function handleAudioEvent(event) {
switch (event.type) { switch (event.type) {
case 'PlaybackPosition':
// Sync frontend time with DAW time
if (playing) {
context.activeObject.currentTime = event.time;
if (context.timelineWidget?.timelineState) {
context.timelineWidget.timelineState.currentTime = event.time;
}
}
break;
case 'RecordingStarted': case 'RecordingStarted':
console.log('[FRONTEND] RecordingStarted - track:', event.track_id, 'clip:', event.clip_id); console.log('[FRONTEND] RecordingStarted - track:', event.track_id, 'clip:', event.clip_id);
context.recordingClipId = event.clip_id; context.recordingClipId = event.clip_id;
@ -1123,11 +1142,25 @@ async function finalizeRecording(clipId, poolIndex, waveform) {
console.error('Could not find clip to finalize:', clipId); console.error('Could not find clip to finalize:', clipId);
} }
// Single-step backward by one frame/second.
// Steps the playhead back one frame (frames mode) or one second and
// refreshes the UI. Clamped at 0: the previous version subtracted
// unconditionally, so repeated rewinds at the start of the timeline
// drove currentTime negative (the old decrementFrame() delegated to
// the object, which could guard this; the open-coded math did not).
function rewind() {
    const step = context.timelineWidget?.timelineState?.mode === "frames"
        ? 1 / context.activeObject.frameRate
        : 1;
    context.activeObject.currentTime =
        Math.max(0, context.activeObject.currentTime - step);
    // Sync timeline playhead position with the object clock.
    if (context.timelineWidget?.timelineState) {
        context.timelineWidget.timelineState.currentTime = context.activeObject.currentTime;
    }
    updateLayers();
    updateMenu();
    updateUI();
    if (context.timelineWidget?.requestRedraw) {
        context.timelineWidget.requestRedraw();
    }
}
async function goToStart() { async function goToStart() {
@ -3745,7 +3778,7 @@ function timelineV2() {
const rewindButton = document.createElement("button"); const rewindButton = document.createElement("button");
rewindButton.className = "playback-btn playback-btn-rewind"; rewindButton.className = "playback-btn playback-btn-rewind";
rewindButton.title = "Rewind"; rewindButton.title = "Rewind";
rewindButton.addEventListener("click", decrementFrame); rewindButton.addEventListener("click", rewind);
playbackGroup.appendChild(rewindButton); playbackGroup.appendChild(rewindButton);
// Play/Pause button // Play/Pause button
@ -3763,7 +3796,7 @@ function timelineV2() {
const ffButton = document.createElement("button"); const ffButton = document.createElement("button");
ffButton.className = "playback-btn playback-btn-ff"; ffButton.className = "playback-btn playback-btn-ff";
ffButton.title = "Fast Forward"; ffButton.title = "Fast Forward";
ffButton.addEventListener("click", advanceFrame); ffButton.addEventListener("click", advance);
playbackGroup.appendChild(ffButton); playbackGroup.appendChild(ffButton);
// Go to end button // Go to end button