use native player to speed up playback

Skyler Lehmkuhl 2025-11-06 11:36:56 -05:00
parent 09426e21f4
commit 430ecb0ae6
9 changed files with 1039 additions and 107 deletions

src-tauri/Cargo.lock generated
View File

@@ -136,6 +136,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "ascii"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"
[[package]]
name = "ashpd"
version = "0.10.2"
@@ -594,6 +600,12 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "chunked_transfer"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e4de3bc4ea267985becf712dc6d9eed8b04c953b3fcfb339ebc87acd9804901"
[[package]]
name = "clang-sys"
version = "1.8.1"
@@ -1987,6 +1999,12 @@ version = "1.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "1.5.2"
@@ -2504,6 +2522,7 @@ dependencies = [
"tauri-plugin-fs",
"tauri-plugin-log",
"tauri-plugin-shell",
"tiny_http",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
] ]
@ -5191,6 +5210,18 @@ dependencies = [
"time-core", "time-core",
] ]
[[package]]
name = "tiny_http"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82"
dependencies = [
"ascii",
"chunked_transfer",
"httpdate",
"log",
]
[[package]]
name = "tinystr"
version = "0.7.6"

View File

@@ -41,6 +41,9 @@ ffmpeg-next = "7.0"
lru = "0.12"
image = { version = "0.24", default-features = false, features = ["jpeg"] }
# HTTP server for video streaming
tiny_http = "0.12"
[profile.dev]
opt-level = 1 # Enable basic optimizations in debug mode for audio decoding performance

View File

@@ -8,6 +8,7 @@ use tauri::{AppHandle, Manager, Url, WebviewUrl, WebviewWindowBuilder};
mod audio;
mod video;
mod video_server;
#[derive(Default)]
@@ -128,10 +129,16 @@ fn handle_file_associations(app: AppHandle, files: Vec<PathBuf>) {
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
let pkg_name = env!("CARGO_PKG_NAME").to_string();
// Initialize video HTTP server
let video_server = video_server::VideoServer::new()
.expect("Failed to start video server");
eprintln!("[App] Video server started on port {}", video_server.port());
tauri::Builder::default()
.manage(Mutex::new(AppState::default()))
.manage(Arc::new(Mutex::new(audio::AudioState::default())))
.manage(Arc::new(Mutex::new(video::VideoState::default())))
.manage(Arc::new(Mutex::new(video_server)))
.setup(|app| {
#[cfg(any(windows, target_os = "linux"))] // Windows/Linux needs different handling from macOS
{
@@ -255,8 +262,12 @@ pub fn run() {
audio::audio_load_track_graph,
video::video_load_file,
video::video_get_frame,
video::video_get_frames_batch,
video::video_set_cache_size,
video::video_get_pool_info,
video::video_ipc_benchmark,
video::video_get_transcode_status,
video::video_allow_asset,
])
// .manage(window_counter)
.build(tauri::generate_context!())

View File

@@ -1,10 +1,10 @@
use std::sync::{Arc, Mutex};
use std::num::NonZeroUsize;
-use std::io::Cursor;
use ffmpeg_next as ffmpeg;
use lru::LruCache;
use daw_backend::WaveformPeak;
-use image::{RgbaImage, ImageEncoder};
+use image::RgbaImage;
use tauri::Manager;
#[derive(serde::Serialize, Clone)]
pub struct VideoFileMetadata {
@@ -19,6 +19,10 @@ pub struct VideoFileMetadata {
pub audio_sample_rate: Option<u32>,
pub audio_channels: Option<u32>,
pub audio_waveform: Option<Vec<WaveformPeak>>,
pub codec_name: String,
pub is_browser_compatible: bool,
pub http_url: Option<String>, // HTTP URL to stream video (if compatible or transcode complete)
pub transcoding: bool, // True if currently transcoding
}
struct VideoDecoder {
@@ -119,7 +123,7 @@ impl VideoDecoder {
return Ok(cached_frame.clone());
}
-let t_after_cache = Instant::now();
+let _t_after_cache = Instant::now();
// Determine if we need to seek
// Seek if: no decoder open, going backwards, or jumping forward more than 2 seconds
@@ -240,10 +244,24 @@
}
}
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Clone)]
pub struct TranscodeJob {
pub pool_index: usize,
pub input_path: String,
pub output_path: String,
pub http_url: Option<String>, // HTTP URL when transcode completes
pub progress: f32, // 0.0 to 1.0
pub completed: bool,
}
pub struct VideoState {
pool: Vec<Arc<Mutex<VideoDecoder>>>,
next_pool_index: usize,
cache_size: usize,
transcode_jobs: Arc<Mutex<HashMap<usize, TranscodeJob>>>, // pool_index -> job
}
impl Default for VideoState {
@@ -252,6 +270,7 @@ impl Default for VideoState {
pool: Vec::new(),
next_pool_index: 0,
cache_size: 20, // Default cache size
transcode_jobs: Arc::new(Mutex::new(HashMap::new())),
}
}
}
@@ -260,8 +279,11 @@ impl Default for VideoState {
pub async fn video_load_file(
video_state: tauri::State<'_, Arc<Mutex<VideoState>>>,
audio_state: tauri::State<'_, Arc<Mutex<crate::audio::AudioState>>>,
video_server: tauri::State<'_, Arc<Mutex<crate::video_server::VideoServer>>>,
path: String,
) -> Result<VideoFileMetadata, String> {
eprintln!("[Video] Loading file: {}", path);
ffmpeg::init().map_err(|e| e.to_string())?;
// Open input to check for audio stream
@@ -386,13 +408,45 @@ pub async fn video_load_file(
(None, None, None, None, None)
};
// Detect video codec
let video_stream = input.streams()
.best(ffmpeg::media::Type::Video)
.ok_or("No video stream found")?;
let codec_id = video_stream.parameters().id();
let codec_name = ffmpeg::codec::Id::name(&codec_id).to_string();
// Check if codec is browser-compatible (can play directly)
// Browsers support: H.264/AVC, VP8, VP9, AV1 (limited)
let is_browser_compatible = matches!(
codec_id,
ffmpeg::codec::Id::H264 |
ffmpeg::codec::Id::VP8 |
ffmpeg::codec::Id::VP9 |
ffmpeg::codec::Id::AV1
);
eprintln!("[Video Codec] {} - Browser compatible: {}", codec_name, is_browser_compatible);
// Create video decoder with max dimensions for playback (800x600)
// This scales down high-res videos to reduce data transfer
let mut video_state_guard = video_state.lock().unwrap();
let pool_index = video_state_guard.next_pool_index;
video_state_guard.next_pool_index += 1;
-let decoder = VideoDecoder::new(path, video_state_guard.cache_size, Some(800), Some(600))?;
+let decoder = VideoDecoder::new(path.clone(), video_state_guard.cache_size, Some(800), Some(600))?;
// Add file to HTTP server if browser-compatible
let http_url = if is_browser_compatible {
let server = video_server.lock().unwrap();
let url_path = format!("/video/{}", pool_index);
server.add_file(url_path.clone(), PathBuf::from(&path));
let http_url = server.get_url(&url_path);
eprintln!("[Video] Browser-compatible, serving at: {}", http_url);
Some(http_url)
} else {
None
};
let metadata = VideoFileMetadata {
pool_index,
@@ -406,13 +460,152 @@ pub async fn video_load_file(
audio_sample_rate,
audio_channels,
audio_waveform,
codec_name,
is_browser_compatible,
http_url,
transcoding: !is_browser_compatible,
};
video_state_guard.pool.push(Arc::new(Mutex::new(decoder)));
// Start background transcoding if not browser-compatible
if !is_browser_compatible {
eprintln!("[Video Transcode] Starting background transcode for pool_index {}", pool_index);
let jobs = video_state_guard.transcode_jobs.clone();
let input_path = path.clone();
let pool_idx = pool_index;
let server = video_server.inner().clone();
tauri::async_runtime::spawn(async move {
if let Err(e) = start_transcode(jobs, pool_idx, input_path, server).await {
eprintln!("[Video Transcode] Failed: {}", e);
}
});
}
Ok(metadata)
}
// Background transcode to WebM/VP9 for browser compatibility
async fn start_transcode(
jobs: Arc<Mutex<HashMap<usize, TranscodeJob>>>,
pool_index: usize,
input_path: String,
video_server: Arc<Mutex<crate::video_server::VideoServer>>,
) -> Result<(), String> {
use std::process::Command;
// Generate output path in system cache directory
let cache_dir = std::env::temp_dir().join("lightningbeam_transcoded");
std::fs::create_dir_all(&cache_dir).map_err(|e| e.to_string())?;
let input_file = PathBuf::from(&input_path);
let file_stem = input_file.file_stem()
.ok_or("Invalid input path")?
.to_string_lossy();
let output_path = cache_dir.join(format!("{}_{}.webm", file_stem, pool_index));
// Create job entry
{
let mut jobs_guard = jobs.lock().unwrap();
jobs_guard.insert(pool_index, TranscodeJob {
pool_index,
input_path: input_path.clone(),
output_path: output_path.to_string_lossy().to_string(),
http_url: None,
progress: 0.0,
completed: false,
});
}
eprintln!("[Video Transcode] Output: {}", output_path.display());
// Run FFmpeg transcode command
// Using VP9 codec with CRF 30 (good quality/size balance) and fast encoding
let output = Command::new("ffmpeg")
.args(&[
"-i", &input_path,
"-c:v", "libvpx-vp9", // VP9 video codec
"-crf", "30", // Quality (lower = better, 23-32 recommended)
"-b:v", "0", // Use CRF mode
"-threads", "4", // Use 4 threads
"-row-mt", "1", // Enable row-based multithreading
"-speed", "4", // Encoding speed (0=slowest/best, 4=good balance)
"-c:a", "libopus", // Opus audio codec (best for WebM)
"-b:a", "128k", // Audio bitrate
"-y", // Overwrite output
output_path.to_str().ok_or("Invalid output path")?,
])
.output()
.map_err(|e| format!("Failed to spawn ffmpeg: {}", e))?;
if output.status.success() {
eprintln!("[Video Transcode] Completed: {}", output_path.display());
// Add transcoded file to HTTP server
let server = video_server.lock().unwrap();
let url_path = format!("/video/{}", pool_index);
server.add_file(url_path.clone(), output_path.clone());
let http_url = server.get_url(&url_path);
eprintln!("[Video Transcode] Serving at: {}", http_url);
drop(server);
// Mark as completed and store HTTP URL
let mut jobs_guard = jobs.lock().unwrap();
if let Some(job) = jobs_guard.get_mut(&pool_index) {
job.progress = 1.0;
job.completed = true;
job.http_url = Some(http_url);
}
eprintln!("[Video Transcode] Job completed for pool_index {}", pool_index);
Ok(())
} else {
let stderr = String::from_utf8_lossy(&output.stderr);
eprintln!("[Video Transcode] FFmpeg error: {}", stderr);
Err(format!("FFmpeg failed: {}", stderr))
}
}
// Get transcode status for a pool index
#[tauri::command]
pub async fn video_get_transcode_status(
video_state: tauri::State<'_, Arc<Mutex<VideoState>>>,
pool_index: usize,
) -> Result<Option<(String, f32, bool, Option<String>)>, String> {
let state = video_state.lock().unwrap();
let jobs = state.transcode_jobs.lock().unwrap();
if let Some(job) = jobs.get(&pool_index) {
Ok(Some((job.output_path.clone(), job.progress, job.completed, job.http_url.clone())))
} else {
Ok(None)
}
}
// Add a video file to asset protocol scope so browser can access it
#[tauri::command]
pub async fn video_allow_asset(
app: tauri::AppHandle,
path: String,
) -> Result<(), String> {
use tauri_plugin_fs::FsExt;
let file_path = PathBuf::from(&path);
// Add to FS scope
let fs_scope = app.fs_scope();
fs_scope.allow_file(&file_path)
.map_err(|e| format!("Failed to allow file in fs scope: {}", e))?;
// Add to asset protocol scope
let asset_scope = app.asset_protocol_scope();
asset_scope.allow_file(&file_path)
.map_err(|e| format!("Failed to allow file in asset scope: {}", e))?;
eprintln!("[Video] Added to asset scope: {}", path);
Ok(())
}
fn generate_waveform(audio_data: &[f32], channels: u32, target_peaks: usize) -> Vec<WaveformPeak> {
let total_samples = audio_data.len();
let samples_per_channel = total_samples / channels as usize;
@@ -465,6 +658,9 @@ pub async fn video_get_frame(
) -> Result<(), String> {
use std::time::Instant;
let t_total_start = Instant::now();
let t_lock_start = Instant::now();
let video_state = state.lock().unwrap();
let decoder = video_state.pool.get(pool_index)
@@ -474,11 +670,14 @@ pub async fn video_get_frame(
drop(video_state);
let mut decoder = decoder.lock().unwrap();
let t_lock_end = Instant::now();
let t_decode_start = Instant::now();
let frame_data = decoder.get_frame(timestamp)?;
let t_decode_end = Instant::now();
let t_compress_start = Instant::now();
let data_to_send = if use_jpeg {
-let t_compress_start = Instant::now();
// Get frame dimensions from decoder
let width = decoder.output_width;
let height = decoder.output_height;
@@ -500,23 +699,36 @@ pub async fn video_get_frame(
image::ColorType::Rgb8
).map_err(|e| format!("JPEG encoding failed: {}", e))?;
-let compress_time = t_compress_start.elapsed().as_millis();
-let original_size = width as usize * height as usize * 4;
-let compressed_size = jpeg_data.len();
-let ratio = original_size as f32 / compressed_size as f32;
-eprintln!("[Video JPEG] Compressed {}KB -> {}KB ({}x) in {}ms",
-original_size / 1024, compressed_size / 1024, ratio, compress_time);
jpeg_data
} else {
frame_data
};
let t_compress_end = Instant::now();
// Drop decoder lock before sending to avoid blocking
drop(decoder);
let t_send_start = Instant::now();
// Send binary data through channel (bypasses JSON serialization)
// InvokeResponseBody::Raw sends raw binary data without JSON encoding
-channel.send(tauri::ipc::InvokeResponseBody::Raw(data_to_send))
+channel.send(tauri::ipc::InvokeResponseBody::Raw(data_to_send.clone()))
.map_err(|e| format!("Channel send error: {}", e))?;
let t_send_end = Instant::now();
let t_total_end = Instant::now();
// Detailed profiling
let lock_time = t_lock_end.duration_since(t_lock_start).as_micros();
let decode_time = t_decode_end.duration_since(t_decode_start).as_micros();
let compress_time = t_compress_end.duration_since(t_compress_start).as_micros();
let send_time = t_send_end.duration_since(t_send_start).as_micros();
let total_time = t_total_end.duration_since(t_total_start).as_micros();
let size_kb = data_to_send.len() / 1024;
let mode = if use_jpeg { "JPEG" } else { "RAW" };
eprintln!("[Video Profile {}] Size: {}KB | Lock: {}μs | Decode: {}μs | Compress: {}μs | Send: {}μs | Total: {}μs",
mode, size_kb, lock_time, decode_time, compress_time, send_time, total_time);
Ok(())
}
@@ -547,3 +759,120 @@ pub async fn video_get_pool_info(
decoder.fps
))
}
// Benchmark command to test IPC performance with various payload sizes
#[tauri::command]
pub async fn video_ipc_benchmark(
size_bytes: usize,
channel: tauri::ipc::Channel,
) -> Result<(), String> {
use std::time::Instant;
let t_start = Instant::now();
// Create dummy data of requested size
let data = vec![0u8; size_bytes];
let t_after_alloc = Instant::now();
// Send through channel
channel.send(tauri::ipc::InvokeResponseBody::Raw(data))
.map_err(|e| format!("Channel send error: {}", e))?;
let t_after_send = Instant::now();
let alloc_time = t_after_alloc.duration_since(t_start).as_micros();
let send_time = t_after_send.duration_since(t_after_alloc).as_micros();
let total_time = t_after_send.duration_since(t_start).as_micros();
eprintln!("[IPC Benchmark Rust] Size: {}KB | Alloc: {}μs | Send: {}μs | Total: {}μs",
size_bytes / 1024, alloc_time, send_time, total_time);
Ok(())
}
// Batch frame request - get multiple frames in one IPC call
#[tauri::command]
pub async fn video_get_frames_batch(
state: tauri::State<'_, Arc<Mutex<VideoState>>>,
pool_index: usize,
timestamps: Vec<f64>,
use_jpeg: bool,
channel: tauri::ipc::Channel,
) -> Result<(), String> {
use std::time::Instant;
let t_total_start = Instant::now();
let video_state = state.lock().unwrap();
let decoder = video_state.pool.get(pool_index)
.ok_or("Invalid pool index")?
.clone();
drop(video_state);
let mut decoder = decoder.lock().unwrap();
// Decode all frames
let mut all_frames = Vec::new();
let mut total_decode_time = 0u128;
let mut total_compress_time = 0u128;
for timestamp in &timestamps {
let t_decode_start = Instant::now();
let frame_data = decoder.get_frame(*timestamp)?;
let t_decode_end = Instant::now();
total_decode_time += t_decode_end.duration_since(t_decode_start).as_micros();
let t_compress_start = Instant::now();
let data = if use_jpeg {
let width = decoder.output_width;
let height = decoder.output_height;
let img = RgbaImage::from_raw(width, height, frame_data)
.ok_or("Failed to create image from frame data")?;
let rgb_img = image::DynamicImage::ImageRgba8(img).to_rgb8();
let mut jpeg_data = Vec::new();
let mut encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut jpeg_data, 85);
encoder.encode(
rgb_img.as_raw(),
rgb_img.width(),
rgb_img.height(),
image::ColorType::Rgb8
).map_err(|e| format!("JPEG encoding failed: {}", e))?;
jpeg_data
} else {
frame_data
};
let t_compress_end = Instant::now();
total_compress_time += t_compress_end.duration_since(t_compress_start).as_micros();
all_frames.push(data);
}
drop(decoder);
// Pack all frames into one buffer with metadata
// Format: [frame_count: u32][frame1_size: u32][frame1_data...][frame2_size: u32][frame2_data...]
let mut packed_data = Vec::new();
packed_data.extend_from_slice(&(all_frames.len() as u32).to_le_bytes());
for frame in &all_frames {
packed_data.extend_from_slice(&(frame.len() as u32).to_le_bytes());
packed_data.extend_from_slice(frame);
}
let total_size_kb = packed_data.len() / 1024;
let t_send_start = Instant::now();
channel.send(tauri::ipc::InvokeResponseBody::Raw(packed_data))
.map_err(|e| format!("Channel send error: {}", e))?;
let t_send_end = Instant::now();
let send_time = t_send_end.duration_since(t_send_start).as_micros();
let total_time = t_send_end.duration_since(t_total_start).as_micros();
let mode = if use_jpeg { "JPEG" } else { "RAW" };
eprintln!("[Video Batch {}] Frames: {} | Size: {}KB | Decode: {}μs | Compress: {}μs | Send: {}μs | Total: {}μs",
mode, timestamps.len(), total_size_kb, total_decode_time, total_compress_time, send_time, total_time);
Ok(())
}
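The packed response above is plain length-prefixed framing: [frame_count: u32 LE], then [size: u32 LE][bytes] per frame. A self-contained round-trip sketch of that layout (helper names are illustrative, not part of this commit):

// Pack frames as [count: u32 LE] then [len: u32 LE][bytes] per frame,
// mirroring what video_get_frames_batch sends over the channel.
fn pack(frames: &[Vec<u8>]) -> Vec<u8> {
    let mut out = (frames.len() as u32).to_le_bytes().to_vec();
    for f in frames {
        out.extend_from_slice(&(f.len() as u32).to_le_bytes());
        out.extend_from_slice(f);
    }
    out
}

// Inverse of pack(); the JS side does the same walk with a DataView.
fn unpack(buf: &[u8]) -> Vec<Vec<u8>> {
    let n = u32::from_le_bytes(buf[0..4].try_into().unwrap()) as usize;
    let mut off = 4;
    let mut frames = Vec::with_capacity(n);
    for _ in 0..n {
        let len = u32::from_le_bytes(buf[off..off + 4].try_into().unwrap()) as usize;
        off += 4;
        frames.push(buf[off..off + len].to_vec());
        off += len;
    }
    frames
}

fn main() {
    let frames = vec![vec![1u8, 2, 3], vec![0u8; 5]];
    assert_eq!(unpack(&pack(&frames)), frames);
    println!("framing round-trips");
}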

View File

@ -0,0 +1,203 @@
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::PathBuf;
use tiny_http::{Server, Response, Request, Header, StatusCode};
pub struct VideoServer {
port: u16,
allowed_files: Arc<Mutex<HashMap<String, PathBuf>>>, // URL path -> file path
}
impl VideoServer {
pub fn new() -> Result<Self, String> {
// Bind to localhost on a random available port
let server = Server::http("127.0.0.1:0")
.map_err(|e| format!("Failed to create HTTP server: {}", e))?;
let port = match server.server_addr() {
tiny_http::ListenAddr::IP(addr) => addr.port(),
_ => return Err("Unexpected server address type".to_string()),
};
let allowed_files = Arc::new(Mutex::new(HashMap::new()));
eprintln!("[Video Server] Started on port {}", port);
// Spawn server thread
let files = allowed_files.clone();
std::thread::spawn(move || {
for request in server.incoming_requests() {
handle_request(request, &files);
}
});
Ok(Self {
port,
allowed_files,
})
}
pub fn port(&self) -> u16 {
self.port
}
pub fn add_file(&self, url_path: String, file_path: PathBuf) {
eprintln!("[Video Server] Adding file: {} -> {:?}", url_path, file_path);
let mut files = self.allowed_files.lock().unwrap();
files.insert(url_path, file_path);
}
pub fn get_url(&self, url_path: &str) -> String {
format!("http://127.0.0.1:{}{}", self.port, url_path)
}
}
fn handle_request(request: Request, allowed_files: &Arc<Mutex<HashMap<String, PathBuf>>>) {
let url = request.url();
let method = request.method();
eprintln!("[Video Server] {} {}", method, url);
// Get file path
let files = allowed_files.lock().unwrap();
let file_path = match files.get(url) {
Some(path) => path.clone(),
None => {
eprintln!("[Video Server] File not found: {}", url);
let response = Response::from_string("Not Found")
.with_status_code(StatusCode(404));
let _ = request.respond(response);
return;
}
};
drop(files);
// Open file
let mut file = match File::open(&file_path) {
Ok(f) => f,
Err(e) => {
eprintln!("[Video Server] Failed to open file: {}", e);
let response = Response::from_string("Internal Server Error")
.with_status_code(StatusCode(500));
let _ = request.respond(response);
return;
}
};
// Get file size
let file_size = match file.metadata() {
Ok(meta) => meta.len(),
Err(e) => {
eprintln!("[Video Server] Failed to get file metadata: {}", e);
let response = Response::from_string("Internal Server Error")
.with_status_code(StatusCode(500));
let _ = request.respond(response);
return;
}
};
// Check for Range header - convert to owned String to avoid borrow issues
let range_header = request.headers().iter()
.find(|h| h.field.equiv("Range"))
.map(|h| h.value.as_str().to_string());
match range_header {
Some(range) if range.starts_with("bytes=") => {
// Parse range request
eprintln!("[Video Server] Range request: {}", range);
handle_range_request(request, file, file_size, &range, &file_path);
}
_ => {
// Serve entire file
eprintln!("[Video Server] Full file request");
handle_full_request(request, file, file_size, &file_path);
}
}
}
fn handle_range_request(
request: Request,
mut file: File,
file_size: u64,
range: &str,
file_path: &PathBuf,
) {
// Parse "bytes=start-end"
let range = range.trim_start_matches("bytes=");
let parts: Vec<&str> = range.split('-').collect();
let start = parts[0].parse::<u64>().unwrap_or(0);
let end = if parts.len() > 1 && !parts[1].is_empty() {
parts[1].parse::<u64>().unwrap_or(file_size - 1)
} else {
file_size - 1
};
let length = end - start + 1;
// Seek to start position
if let Err(e) = file.seek(SeekFrom::Start(start)) {
eprintln!("[Video Server] Failed to seek: {}", e);
let response = Response::from_string("Internal Server Error")
.with_status_code(StatusCode(500));
let _ = request.respond(response);
return;
}
// Read the requested range
let mut buffer = vec![0u8; length as usize];
if let Err(e) = file.read_exact(&mut buffer) {
eprintln!("[Video Server] Failed to read range: {}", e);
let response = Response::from_string("Internal Server Error")
.with_status_code(StatusCode(500));
let _ = request.respond(response);
return;
}
// Determine content type
let content_type = get_content_type(file_path);
// Send 206 Partial Content response
let content_range = format!("bytes {}-{}/{}", start, end, file_size);
let response = Response::from_data(buffer)
.with_status_code(StatusCode(206))
.with_header(Header::from_bytes(&b"Content-Type"[..], content_type.as_bytes()).unwrap())
.with_header(Header::from_bytes(&b"Content-Length"[..], length.to_string().as_bytes()).unwrap())
.with_header(Header::from_bytes(&b"Content-Range"[..], content_range.as_bytes()).unwrap())
.with_header(Header::from_bytes(&b"Accept-Ranges"[..], &b"bytes"[..]).unwrap())
.with_header(Header::from_bytes(&b"Access-Control-Allow-Origin"[..], &b"*"[..]).unwrap());
let _ = request.respond(response);
}
fn handle_full_request(
request: Request,
file: File,
file_size: u64,
file_path: &PathBuf,
) {
// Determine content type
let content_type = get_content_type(file_path);
// Send 200 OK response using from_file to avoid chunked encoding
let response = Response::from_file(file)
.with_status_code(StatusCode(200))
.with_chunked_threshold(usize::MAX) // Force Content-Length instead of chunked
.with_header(Header::from_bytes(&b"Content-Type"[..], content_type.as_bytes()).unwrap())
.with_header(Header::from_bytes(&b"Accept-Ranges"[..], &b"bytes"[..]).unwrap())
.with_header(Header::from_bytes(&b"Access-Control-Allow-Origin"[..], &b"*"[..]).unwrap());
let _ = request.respond(response);
}
fn get_content_type(path: &PathBuf) -> String {
match path.extension().and_then(|s| s.to_str()) {
Some("webm") => "video/webm".to_string(),
Some("mp4") => "video/mp4".to_string(),
Some("mkv") => "video/x-matroska".to_string(),
Some("avi") => "video/x-msvideo".to_string(),
Some("mov") => "video/quicktime".to_string(),
_ => "application/octet-stream".to_string(),
}
}
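A quick way to sanity-check the range handling is a raw request against the running server. A minimal sketch, assuming the server happened to come up on port 43210 (the real port is random) and /video/0 has been registered via add_file:

use std::io::{Read, Write};
use std::net::TcpStream;

fn main() -> std::io::Result<()> {
    // Hypothetical port; read the actual one from the "[Video Server] Started on port" log line.
    let mut stream = TcpStream::connect("127.0.0.1:43210")?;
    // Ask for the first KiB; handle_range_request should answer 206 Partial Content
    // with Content-Range: bytes 0-1023/<file_size> and Accept-Ranges: bytes.
    write!(stream, "GET /video/0 HTTP/1.1\r\nHost: 127.0.0.1\r\nRange: bytes=0-1023\r\nConnection: close\r\n\r\n")?;
    let mut response = Vec::new();
    stream.read_to_end(&mut response)?;
    // Print just the status line and headers.
    let head_end = response.windows(4).position(|w| w == b"\r\n\r\n").unwrap_or(response.len());
    println!("{}", String::from_utf8_lossy(&response[..head_end]));
    Ok(())
}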

View File

@@ -629,7 +629,8 @@ export const actions = {
metadata.duration,
0, // offset
action.videoname,
-metadata.duration // sourceDuration
+metadata.duration, // sourceDuration
metadata // Pass full metadata for browser playback support
);
// If video has audio, create linked AudioTrack // If video has audio, create linked AudioTrack

View File

@@ -220,7 +220,6 @@ let fileExportPath = undefined;
let state = "normal";
-let playing = false;
let lastFrameTime;
let uiDirty = false;
@@ -347,6 +346,65 @@ window.actions = actions;
window.addKeyframeAtPlayhead = addKeyframeAtPlayhead;
window.updateVideoFrames = null; // Will be set after function is defined
// IPC Benchmark function - run from console: testIPCBenchmark()
window.testIPCBenchmark = async function() {
const { invoke, Channel } = window.__TAURI__.core;
// Test sizes: 1KB, 10KB, 50KB, 100KB, 500KB, 1MB, 2MB, 5MB
const testSizes = [
1024, // 1 KB
10 * 1024, // 10 KB
50 * 1024, // 50 KB
100 * 1024, // 100 KB
500 * 1024, // 500 KB
1024 * 1024, // 1 MB
2 * 1024 * 1024, // 2 MB
5 * 1024 * 1024 // 5 MB
];
console.log('\n=== IPC Benchmark Starting ===\n');
console.log('Size (KB)\tJS Total (ms)\tJS IPC (ms)\tJS Recv (ms)\tThroughput (MB/s)');
console.log('─'.repeat(80));
for (const sizeBytes of testSizes) {
const t_start = performance.now();
let receivedData = null;
const dataPromise = new Promise((resolve, reject) => {
const channel = new Channel();
channel.onmessage = (data) => {
const t_recv_start = performance.now();
receivedData = data;
const t_recv_end = performance.now();
resolve(t_recv_end - t_recv_start);
};
invoke('video_ipc_benchmark', {
sizeBytes: sizeBytes,
channel: channel
}).catch(reject);
});
const recv_time = await dataPromise;
const t_after_ipc = performance.now();
const total_time = t_after_ipc - t_start;
const ipc_time = total_time - recv_time;
const size_kb = sizeBytes / 1024;
const size_mb = sizeBytes / (1024 * 1024);
const throughput = size_mb / (total_time / 1000);
console.log(`${size_kb.toFixed(0)}\t\t${total_time.toFixed(2)}\t\t${ipc_time.toFixed(2)}\t\t${recv_time.toFixed(2)}\t\t${throughput.toFixed(2)}`);
// Small delay between tests
await new Promise(resolve => setTimeout(resolve, 100));
}
console.log('\n=== IPC Benchmark Complete ===\n');
console.log('Run again with: testIPCBenchmark()');
};
function uuidv4() {
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) =>
(
@@ -907,8 +965,8 @@ window.addEventListener("keydown", (e) => {
});
async function playPause() {
-playing = !playing;
+context.playing = !context.playing;
-if (playing) {
+if (context.playing) {
// Reset to start if we're at the end
const duration = context.activeObject.duration;
if (duration > 0 && context.activeObject.currentTime >= duration) {
@@ -966,8 +1024,8 @@ async function playPause() {
// Update play/pause button appearance if it exists
if (context.playPauseButton) {
-context.playPauseButton.className = playing ? "playback-btn playback-btn-pause" : "playback-btn playback-btn-play";
+context.playPauseButton.className = context.playing ? "playback-btn playback-btn-pause" : "playback-btn playback-btn-play";
-context.playPauseButton.title = playing ? "Pause" : "Play";
+context.playPauseButton.title = context.playing ? "Pause" : "Play";
}
}
@@ -980,7 +1038,7 @@ function playbackLoop() {
context.timelineWidget.requestRedraw();
}
-if (playing) {
+if (context.playing) {
const duration = context.activeObject.duration;
// Check if we've reached the end (but allow infinite playback when recording)
@@ -989,7 +1047,7 @@ function playbackLoop() {
requestAnimationFrame(playbackLoop);
} else {
// Animation finished
-playing = false;
+context.playing = false;
// Stop DAW backend audio playback
invoke('audio_stop').catch(error => {
@@ -1136,7 +1194,7 @@ async function handleAudioEvent(event) {
switch (event.type) {
case 'PlaybackPosition':
// Sync frontend time with DAW time
-if (playing) {
+if (context.playing) {
// Quantize time to framerate for animation playback
const framerate = context.activeObject.frameRate;
const frameDuration = 1 / framerate;
@@ -1565,7 +1623,7 @@ async function toggleRecording() {
console.log('[FRONTEND] MIDI recording started successfully');
// Start playback so the timeline moves (if not already playing)
-if (!playing) {
+if (!context.playing) {
await playPause();
}
} catch (error) {
@@ -1585,7 +1643,7 @@ async function toggleRecording() {
console.log('[FRONTEND] Audio recording started successfully, waiting for RecordingStarted event');
// Start playback so the timeline moves (if not already playing)
-if (!playing) {
+if (!context.playing) {
await playPause();
}
} catch (error) {
@@ -4528,8 +4586,8 @@ function timelineV2() {
// Play/Pause button
const playPauseButton = document.createElement("button");
-playPauseButton.className = playing ? "playback-btn playback-btn-pause" : "playback-btn playback-btn-play";
+playPauseButton.className = context.playing ? "playback-btn playback-btn-pause" : "playback-btn playback-btn-play";
-playPauseButton.title = playing ? "Pause" : "Play";
+playPauseButton.title = context.playing ? "Pause" : "Play";
playPauseButton.addEventListener("click", playPause);
// Store reference so playPause() can update it
@@ -6709,7 +6767,7 @@ async function renderMenu() {
},
{
text: "Play",
-enabled: !playing,
+enabled: !context.playing,
action: playPause,
accelerator: getShortcut("playAnimation"),
},
@@ -10916,7 +10974,7 @@ async function renderAll() {
renderInProgress = false;
// FPS logging (only when playing)
-if (playing) {
+if (context.playing) {
frameCount++;
const now = performance.now();
const renderTime = now - renderStartTime;

View File

@@ -1281,7 +1281,8 @@ class VideoLayer extends Widget {
this.linkedAudioTrack = null; // Reference to AudioTrack
// Performance settings
-this.useJpegCompression = true; // Enable JPEG compression for faster transfer (default: true)
+this.useJpegCompression = false; // JPEG compression adds more overhead than it saves (default: false)
this.prefetchCount = 3; // Number of frames to prefetch ahead of playhead
// Timeline display
this.collapsed = false;
@@ -1291,12 +1292,12 @@ class VideoLayer extends Widget {
pointerList[this.idx] = this;
}
-async addClip(poolIndex, startTime, duration, offset = 0.0, name = '', sourceDuration = null) {
+async addClip(poolIndex, startTime, duration, offset = 0.0, name = '', sourceDuration = null, metadata = null) {
const poolInfo = await invoke('video_get_pool_info', { poolIndex });
// poolInfo is [width, height, fps] tuple from Rust
const [width, height, fps] = poolInfo;
-this.clips.push({
+const clip = {
clipId: this.clips.length,
poolIndex,
name: name || `Video ${this.clips.length + 1}`,
@@ -1305,10 +1306,124 @@ class VideoLayer extends Widget {
offset,
width,
height,
-sourceDuration: sourceDuration || duration // Store original file duration
+sourceDuration: sourceDuration || duration, // Store original file duration
httpUrl: metadata?.http_url || null,
isBrowserCompatible: metadata?.is_browser_compatible || false,
transcoding: metadata?.transcoding || false,
videoElement: null, // Will hold HTML5 video element if using browser playback
useBrowserVideo: false, // Switch to true when video element is ready
isPlaying: false, // Track if video element is actively playing
};
this.clips.push(clip);
console.log(`Video clip added: ${name}, ${width}x${height}, duration: ${duration}s, browser-compatible: ${clip.isBrowserCompatible}, http_url: ${clip.httpUrl}`);
// If HTTP URL is available, create video element immediately
if (clip.httpUrl) {
await this._createVideoElement(clip);
clip.useBrowserVideo = true;
}
// If transcoding is in progress, start polling
else if (clip.transcoding) {
console.log(`[Video] Starting transcode polling for ${clip.name}`);
this._pollTranscodeStatus(clip);
}
}
async _createVideoElement(clip) {
// Create hidden video element for hardware-accelerated decoding
const video = document.createElement('video');
// DEBUG: Make video visible on top of everything
video.style.position = 'fixed';
video.style.top = '10px';
video.style.right = '10px';
video.style.width = '400px';
video.style.height = '225px';
video.style.zIndex = '99999';
video.style.border = '3px solid red';
video.controls = true; // DEBUG: Add controls
video.preload = 'auto';
video.muted = true; // Mute video element (audio plays separately)
video.playsInline = true;
video.autoplay = false;
video.crossOrigin = 'anonymous'; // Required for canvas drawing - prevent CORS taint
// Add event listeners for debugging
video.addEventListener('loadedmetadata', () => {
console.log(`[Video] Loaded metadata for ${clip.name}: ${video.videoWidth}x${video.videoHeight}, duration: ${video.duration}s`);
-});
-console.log(`Video clip added: ${name}, ${width}x${height}, duration: ${duration}s`);
});
video.addEventListener('loadeddata', () => {
console.log(`[Video] Loaded data for ${clip.name}, readyState: ${video.readyState}`);
});
video.addEventListener('canplay', () => {
console.log(`[Video] Can play ${clip.name}, duration: ${video.duration}s`);
// Mark video as ready for seeking once we can play AND have valid duration
if (video.duration > 0 && !isNaN(video.duration) && video.duration !== Infinity) {
clip.videoReady = true;
console.log(`[Video] Video is ready for seeking`);
}
});
// When seek completes, trigger UI redraw to show the new frame
video.addEventListener('seeked', () => {
if (updateUI) {
updateUI();
}
});
video.addEventListener('error', (e) => {
const error = video.error;
const errorMessages = {
1: 'MEDIA_ERR_ABORTED - Fetching aborted',
2: 'MEDIA_ERR_NETWORK - Network error',
3: 'MEDIA_ERR_DECODE - Decoding error',
4: 'MEDIA_ERR_SRC_NOT_SUPPORTED - Format not supported or file not accessible'
};
const errorMsg = errorMessages[error?.code] || 'Unknown error';
console.error(`[Video] Error loading ${clip.name}: ${errorMsg}`, error?.message);
});
// Use HTTP URL from local server (supports range requests for seeking)
video.src = clip.httpUrl;
// Try to load the video
video.load();
document.body.appendChild(video);
clip.videoElement = video;
console.log(`[Video] Created video element for clip ${clip.name}: ${clip.httpUrl}`);
}
async _pollTranscodeStatus(clip) {
// Poll transcode status every 2 seconds
const pollInterval = setInterval(async () => {
try {
const status = await invoke('video_get_transcode_status', { poolIndex: clip.poolIndex });
if (status && status[2]) { // [path, progress, completed, httpUrl]
// Transcode complete!
clearInterval(pollInterval);
const [outputPath, progress, completed, httpUrl] = status;
clip.transcodedPath = outputPath;
clip.httpUrl = httpUrl;
clip.transcoding = false;
clip.useBrowserVideo = true;
console.log(`[Video] Transcode complete for ${clip.name}, switching to browser playback: ${httpUrl}`);
// Create video element for browser playback
await this._createVideoElement(clip);
}
} catch (error) {
console.error('Failed to poll transcode status:', error);
clearInterval(pollInterval);
}
}, 2000);
} }
// Pre-fetch frames for current time (call before draw)
@@ -1325,126 +1440,224 @@ class VideoLayer extends Widget {
if (currentTime < clip.startTime ||
currentTime >= clip.startTime + clip.duration) {
clip.currentFrame = null;
// Pause video element if we left its time range
if (clip.videoElement && clip.isPlaying) {
clip.videoElement.pause();
clip.isPlaying = false;
}
continue;
}
-// Calculate video timestamp from clip time
-const clipTime = currentTime - clip.startTime;
-const videoTimestamp = clip.offset + clipTime;
-// Only fetch if timestamp changed
-if (clip.lastFetchedTimestamp === videoTimestamp && clip.currentFrame) {
-continue;
-}
-// Skip if already fetching this frame
-if (clip.fetchInProgress === videoTimestamp) {
-continue;
-}
-clip.fetchInProgress = videoTimestamp;
-clip.lastFetchedTimestamp = videoTimestamp;
-try {
-// Request frame from Rust backend using IPC Channel for efficient binary transfer
-const t_start = performance.now();
-// Create a promise that resolves when channel receives data
-const frameDataPromise = new Promise((resolve, reject) => {
-const channel = new Channel();
-channel.onmessage = (data) => {
-resolve(data);
-};
-// Invoke command with channel
-invoke('video_get_frame', {
-poolIndex: clip.poolIndex,
-timestamp: videoTimestamp,
-useJpeg: this.useJpegCompression,
-channel: channel
-}).catch(reject);
-});
-// Wait for the frame data
-let frameData = await frameDataPromise;
-const t_after_ipc = performance.now();
-// Ensure data is Uint8Array
-if (!(frameData instanceof Uint8Array)) {
-frameData = new Uint8Array(frameData);
-}
-let imageData;
-const t_before_conversion = performance.now();
-if (this.useJpegCompression) {
-// Decode JPEG data
-const blob = new Blob([frameData], { type: 'image/jpeg' });
-const imageUrl = URL.createObjectURL(blob);
-// Load and decode JPEG
-const img = new Image();
-await new Promise((resolve, reject) => {
-img.onload = resolve;
-img.onerror = reject;
-img.src = imageUrl;
-});
-// Create temporary canvas to extract ImageData
-const tempCanvas = document.createElement('canvas');
-tempCanvas.width = clip.width;
-tempCanvas.height = clip.height;
-const tempCtx = tempCanvas.getContext('2d');
-tempCtx.drawImage(img, 0, 0);
-imageData = tempCtx.getImageData(0, 0, clip.width, clip.height);
-// Cleanup
-URL.revokeObjectURL(imageUrl);
-} else {
-// Raw RGBA data
-const expectedSize = clip.width * clip.height * 4; // RGBA = 4 bytes per pixel
-if (frameData.length !== expectedSize) {
-throw new Error(`Invalid frame data size: got ${frameData.length}, expected ${expectedSize}`);
-}
-imageData = new ImageData(
-new Uint8ClampedArray(frameData),
-clip.width,
-clip.height
-);
-}
-const t_after_conversion = performance.now();
-// Create or reuse temp canvas
-if (!clip.frameCanvas) {
-clip.frameCanvas = document.createElement('canvas');
-clip.frameCanvas.width = clip.width;
-clip.frameCanvas.height = clip.height;
-}
-const tempCtx = clip.frameCanvas.getContext('2d');
-const t_before_putimage = performance.now();
-tempCtx.putImageData(imageData, 0, 0);
-const t_after_putimage = performance.now();
-clip.currentFrame = clip.frameCanvas;
-// Log detailed timing breakdown
-const total_time = t_after_putimage - t_start;
-const ipc_time = t_after_ipc - t_start;
-const conversion_time = t_after_conversion - t_before_conversion;
-const putimage_time = t_after_putimage - t_before_putimage;
-const compression_mode = this.useJpegCompression ? 'JPEG' : 'RAW';
-console.log(`[JS Video Timing ${compression_mode}] ts=${videoTimestamp.toFixed(3)}s | Total: ${total_time.toFixed(1)}ms | IPC: ${ipc_time.toFixed(1)}ms (${(ipc_time/total_time*100).toFixed(0)}%) | Convert: ${conversion_time.toFixed(1)}ms | PutImage: ${putimage_time.toFixed(1)}ms | Size: ${(frameData.length/1024/1024).toFixed(2)}MB`);
-} catch (error) {
-console.error('Failed to get video frame:', error);
-clip.currentFrame = null;
-} finally {
-clip.fetchInProgress = null;
-}
// If using browser video element
if (clip.useBrowserVideo && clip.videoElement) {
const videoTime = clip.offset + (currentTime - clip.startTime);
// Don't do anything until video is fully ready
if (!clip.videoReady) {
if (!clip._notReadyWarned) {
console.warn(`[Video updateFrame] Video not ready yet (duration=${clip.videoElement.duration})`);
clip._notReadyWarned = true;
}
continue;
}
// During playback: let video play naturally
if (context.playing) {
// Check if we just entered this clip (need to start playing)
if (!clip.isPlaying) {
// Start playing one frame ahead to compensate for canvas drawing lag
const frameDuration = 1 / (clip.fps || 30); // Use clip's actual framerate
const startTime = videoTime + frameDuration;
console.log(`[Video updateFrame] Starting playback at ${startTime.toFixed(3)}s (compensated by ${frameDuration.toFixed(3)}s for ${clip.fps}fps)`);
clip.videoElement.currentTime = startTime;
clip.videoElement.play().catch(e => console.error('Failed to play video:', e));
clip.isPlaying = true;
}
// Otherwise, let it play naturally - don't seek!
}
// When scrubbing (not playing): seek to exact position and pause
else {
if (clip.isPlaying) {
clip.videoElement.pause();
clip.isPlaying = false;
}
// Only seek if the time is actually different
if (!clip.videoElement.seeking) {
const timeDiff = Math.abs(clip.videoElement.currentTime - videoTime);
if (timeDiff > 0.016) { // ~1 frame tolerance at 60fps
clip.videoElement.currentTime = videoTime;
}
}
}
continue; // Skip frame fetching
}
// Use frame batching for frame-based playback
// Initialize frame cache if needed
if (!clip.frameCache) {
clip.frameCache = new Map();
}
// Check if current frame is already cached
if (clip.frameCache.has(currentVideoTimestamp)) {
clip.currentFrame = clip.frameCache.get(currentVideoTimestamp);
clip.lastFetchedTimestamp = currentVideoTimestamp;
continue;
}
// Skip if already fetching
if (clip.fetchInProgress) {
continue;
}
clip.fetchInProgress = true;
try {
// Calculate timestamps to prefetch (current + next N frames)
const frameDuration = 1 / 30; // Assume 30fps for now, could get from clip metadata
const timestamps = [];
for (let i = 0; i < this.prefetchCount; i++) {
const ts = currentVideoTimestamp + (i * frameDuration);
// Don't exceed clip duration
if (ts <= clip.offset + clip.sourceDuration) {
timestamps.push(ts);
}
}
if (timestamps.length === 0) {
continue;
}
const t_start = performance.now();
// Request batch of frames using IPC Channel
const batchDataPromise = new Promise((resolve, reject) => {
const channel = new Channel();
channel.onmessage = (data) => {
resolve(data);
};
invoke('video_get_frames_batch', {
poolIndex: clip.poolIndex,
timestamps: timestamps,
useJpeg: this.useJpegCompression,
channel: channel
}).catch(reject);
});
let batchData = await batchDataPromise;
const t_after_ipc = performance.now();
// Ensure data is Uint8Array
if (!(batchData instanceof Uint8Array)) {
batchData = new Uint8Array(batchData);
}
// Unpack the batch format: [frame_count: u32][frame1_size: u32][frame1_data...][frame2_size: u32][frame2_data...]...
const view = new DataView(batchData.buffer, batchData.byteOffset, batchData.byteLength);
let offset = 0;
// Read frame count
const frameCount = view.getUint32(offset, true); // little-endian
offset += 4;
if (frameCount !== timestamps.length) {
console.warn(`Expected ${timestamps.length} frames, got ${frameCount}`);
}
const t_before_conversion = performance.now();
// Process each frame
for (let i = 0; i < frameCount; i++) {
// Read frame size
const frameSize = view.getUint32(offset, true);
offset += 4;
// Extract frame data
const frameData = new Uint8Array(batchData.buffer, batchData.byteOffset + offset, frameSize);
offset += frameSize;
let imageData;
if (this.useJpegCompression) {
// Decode JPEG using createImageBitmap
const blob = new Blob([frameData], { type: 'image/jpeg' });
const imageBitmap = await createImageBitmap(blob);
// Create temporary canvas to extract ImageData
const tempCanvas = document.createElement('canvas');
tempCanvas.width = clip.width;
tempCanvas.height = clip.height;
const tempCtx = tempCanvas.getContext('2d');
tempCtx.drawImage(imageBitmap, 0, 0);
imageData = tempCtx.getImageData(0, 0, clip.width, clip.height);
imageBitmap.close();
} else {
// Raw RGBA data
const expectedSize = clip.width * clip.height * 4;
if (frameData.length !== expectedSize) {
console.error(`Invalid frame ${i} data size: got ${frameData.length}, expected ${expectedSize}`);
continue;
}
imageData = new ImageData(
new Uint8ClampedArray(frameData),
clip.width,
clip.height
);
}
// Create canvas for this frame
const frameCanvas = document.createElement('canvas');
frameCanvas.width = clip.width;
frameCanvas.height = clip.height;
const frameCtx = frameCanvas.getContext('2d');
frameCtx.putImageData(imageData, 0, 0);
// Cache the frame
clip.frameCache.set(timestamps[i], frameCanvas);
// Set as current frame if it's the first one
if (i === 0) {
clip.currentFrame = frameCanvas;
clip.lastFetchedTimestamp = timestamps[i];
}
}
const t_after_conversion = performance.now();
// Limit cache size to avoid memory issues
const maxCacheSize = this.prefetchCount * 2;
if (clip.frameCache.size > maxCacheSize) {
// Remove oldest entries (simple LRU by keeping only recent timestamps)
const sortedKeys = Array.from(clip.frameCache.keys()).sort((a, b) => a - b);
const toRemove = sortedKeys.slice(0, sortedKeys.length - maxCacheSize);
for (let key of toRemove) {
clip.frameCache.delete(key);
}
}
// Log timing breakdown
const total_time = t_after_conversion - t_start;
const ipc_time = t_after_ipc - t_start;
const conversion_time = t_after_conversion - t_before_conversion;
const compression_mode = this.useJpegCompression ? 'JPEG' : 'RAW';
const avg_per_frame = total_time / frameCount;
console.log(`[JS Video Batch ${compression_mode}] Fetched ${frameCount} frames | Total: ${total_time.toFixed(1)}ms | IPC: ${ipc_time.toFixed(1)}ms (${(ipc_time/total_time*100).toFixed(0)}%) | Convert: ${conversion_time.toFixed(1)}ms | Avg/frame: ${avg_per_frame.toFixed(1)}ms | Size: ${(batchData.length/1024/1024).toFixed(2)}MB`);
} catch (error) {
console.error('Failed to get video frames batch:', error);
clip.currentFrame = null;
} finally {
clip.fetchInProgress = false;
}
}
} finally {
@@ -1472,8 +1685,89 @@ class VideoLayer extends Widget {
continue;
}
-// Draw cached frame if available
-if (clip.currentFrame) {
// Debug: log what path we're taking
if (!clip._drawPathLogged) {
console.log(`[Video Draw] useBrowserVideo=${clip.useBrowserVideo}, videoElement=${!!clip.videoElement}, currentFrame=${!!clip.currentFrame}`);
clip._drawPathLogged = true;
}
// Prefer browser video element if available
if (clip.useBrowserVideo && clip.videoElement) {
// Debug: log readyState issues
if (clip.videoElement.readyState < 2) {
if (!clip._readyStateWarned) {
console.warn(`[Video] Video not ready: readyState=${clip.videoElement.readyState}, src=${clip.videoElement.src}`);
clip._readyStateWarned = true;
}
}
// Draw if video is ready (shows last frame while seeking, updates when seek completes)
if (clip.videoElement.readyState >= 2) {
try {
// Calculate expected video time
const expectedVideoTime = clip.offset + (currentTime - clip.startTime);
const actualVideoTime = clip.videoElement.currentTime;
const timeDiff = Math.abs(expectedVideoTime - actualVideoTime);
// Debug: log if time is significantly different
if (timeDiff > 0.1 && (!clip._lastTimeDiffWarning || Date.now() - clip._lastTimeDiffWarning > 1000)) {
console.warn(`[Video Draw] Time mismatch: expected ${expectedVideoTime.toFixed(2)}s, actual ${actualVideoTime.toFixed(2)}s, diff=${timeDiff.toFixed(2)}s`);
clip._lastTimeDiffWarning = Date.now();
}
// Debug: log successful draw periodically
if (!clip._lastDrawLog || Date.now() - clip._lastDrawLog > 1000) {
console.log(`[Video Draw] Drawing at currentTime=${actualVideoTime.toFixed(2)}s (expected ${expectedVideoTime.toFixed(2)}s)`);
clip._lastDrawLog = Date.now();
}
// Scale to fit canvas while maintaining aspect ratio
const canvasWidth = config.fileWidth;
const canvasHeight = config.fileHeight;
const scale = Math.min(
canvasWidth / clip.videoElement.videoWidth,
canvasHeight / clip.videoElement.videoHeight
);
const scaledWidth = clip.videoElement.videoWidth * scale;
const scaledHeight = clip.videoElement.videoHeight * scale;
const x = (canvasWidth - scaledWidth) / 2;
const y = (canvasHeight - scaledHeight) / 2;
// Debug: draw a test rectangle to verify canvas is working
if (!clip._canvasTestDone) {
ctx.save();
ctx.fillStyle = 'red';
ctx.fillRect(10, 10, 100, 100);
ctx.restore();
console.log(`[Video Draw] Drew test rectangle at (10, 10, 100, 100)`);
console.log(`[Video Draw] Canvas dimensions: ${canvasWidth}x${canvasHeight}`);
console.log(`[Video Draw] Scaled video dimensions: ${scaledWidth}x${scaledHeight} at (${x}, ${y})`);
clip._canvasTestDone = true;
}
// Debug: Check if video element has dimensions
if (!clip._videoDimensionsLogged) {
console.log(`[Video Draw] Video element dimensions: videoWidth=${clip.videoElement.videoWidth}, videoHeight=${clip.videoElement.videoHeight}, naturalWidth=${clip.videoElement.videoWidth}, naturalHeight=${clip.videoElement.videoHeight}`);
console.log(`[Video Draw] Video element state: paused=${clip.videoElement.paused}, ended=${clip.videoElement.ended}, seeking=${clip.videoElement.seeking}, readyState=${clip.videoElement.readyState}`);
clip._videoDimensionsLogged = true;
}
ctx.drawImage(clip.videoElement, x, y, scaledWidth, scaledHeight);
// Debug: Sample a pixel to see if video is actually drawing
if (!clip._pixelTestDone) {
const imageData = ctx.getImageData(canvasWidth / 2, canvasHeight / 2, 1, 1);
const pixel = imageData.data;
console.log(`[Video Draw] Center pixel after drawImage: R=${pixel[0]}, G=${pixel[1]}, B=${pixel[2]}, A=${pixel[3]}`);
clip._pixelTestDone = true;
}
} catch (error) {
console.error('Failed to draw video element:', error);
}
}
}
// Fall back to cached frame if available
else if (clip.currentFrame) {
try {
// Scale to fit canvas while maintaining aspect ratio
const canvasWidth = config.fileWidth;
@@ -1500,7 +1794,8 @@ class VideoLayer extends Widget {
ctx.font = '24px sans-serif';
ctx.textAlign = 'center';
ctx.textBaseline = 'middle';
-ctx.fillText('Loading...', config.fileWidth / 2, config.fileHeight / 2);
const msg = clip.transcoding ? 'Transcoding...' : 'Loading...';
ctx.fillText(msg, config.fileWidth / 2, config.fileHeight / 2);
ctx.restore();
}
}

View File

@@ -36,7 +36,8 @@ export let context = {
timelineWidget: null, // Reference to TimelineWindowV2 widget for zoom controls
config: null, // Reference to config object (set after config is initialized)
mode: "select", // Current tool mode
-// Recording state
+// Playback and recording state
playing: false,
isRecording: false,
recordingTrackId: null,
recordingClipId: null,