remove slower methods

This commit is contained in:
Skyler Lehmkuhl 2025-11-07 03:23:59 -05:00
parent 336b9952e4
commit f28791c2c9
7 changed files with 3 additions and 680 deletions

57
src-tauri/Cargo.lock generated
View File

@ -158,12 +158,6 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "ascii"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"
[[package]] [[package]]
name = "ashpd" name = "ashpd"
version = "0.10.2" version = "0.10.2"
@ -552,12 +546,6 @@ dependencies = [
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
[[package]]
name = "chunked_transfer"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e4de3bc4ea267985becf712dc6d9eed8b04c953b3fcfb339ebc87acd9804901"
[[package]] [[package]]
name = "clang-sys" name = "clang-sys"
version = "1.8.1" version = "1.8.1"
@ -599,12 +587,6 @@ dependencies = [
"objc", "objc",
] ]
[[package]]
name = "color_quant"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.4" version = "1.0.4"
@ -1969,12 +1951,6 @@ version = "1.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "1.5.2" version = "1.5.2"
@ -2191,19 +2167,6 @@ dependencies = [
"icu_properties", "icu_properties",
] ]
[[package]]
name = "image"
version = "0.24.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d"
dependencies = [
"bytemuck",
"byteorder",
"color_quant",
"jpeg-decoder",
"num-traits",
]
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "1.9.3" version = "1.9.3"
@ -2374,12 +2337,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "jpeg-decoder"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07"
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.77" version = "0.3.77"
@ -2510,7 +2467,6 @@ dependencies = [
"daw-backend", "daw-backend",
"env_logger", "env_logger",
"ffmpeg-next", "ffmpeg-next",
"image",
"log", "log",
"lru", "lru",
"rtrb", "rtrb",
@ -2521,7 +2477,6 @@ dependencies = [
"tauri-plugin-dialog", "tauri-plugin-dialog",
"tauri-plugin-fs", "tauri-plugin-fs",
"tauri-plugin-shell", "tauri-plugin-shell",
"tiny_http",
"tungstenite", "tungstenite",
] ]
@ -5047,18 +5002,6 @@ dependencies = [
"time-core", "time-core",
] ]
[[package]]
name = "tiny_http"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82"
dependencies = [
"ascii",
"chunked_transfer",
"httpdate",
"log",
]
[[package]] [[package]]
name = "tinystr" name = "tinystr"
version = "0.7.6" version = "0.7.6"

View File

@ -35,10 +35,6 @@ rtrb = "0.3"
# Video decoding # Video decoding
ffmpeg-next = "7.0" ffmpeg-next = "7.0"
lru = "0.12" lru = "0.12"
image = { version = "0.24", default-features = false, features = ["jpeg"] }
# HTTP server for video streaming
tiny_http = "0.12"
# WebSocket for frame streaming (disable default features to remove tracing, but keep handshake) # WebSocket for frame streaming (disable default features to remove tracing, but keep handshake)
tungstenite = { version = "0.20", default-features = false, features = ["handshake"] } tungstenite = { version = "0.20", default-features = false, features = ["handshake"] }

View File

@ -62,10 +62,6 @@ impl FrameStreamer {
pub fn send_frame(&self, pool_index: usize, timestamp: f64, width: u32, height: u32, rgba_data: &[u8]) { pub fn send_frame(&self, pool_index: usize, timestamp: f64, width: u32, height: u32, rgba_data: &[u8]) {
let mut clients = self.clients.lock().unwrap(); let mut clients = self.clients.lock().unwrap();
// Debug: Log input dimensions and first few RGBA bytes
eprintln!("[Frame Streamer SEND] pool={}, {}x{} pixels, RGBA input len={}, first 20 RGBA bytes: {:?}",
pool_index, width, height, rgba_data.len(), &rgba_data[..20.min(rgba_data.len())]);
// Build frame message (rgba_data is already in RGBA format from decoder) // Build frame message (rgba_data is already in RGBA format from decoder)
let mut frame_msg = Vec::with_capacity(16 + rgba_data.len()); let mut frame_msg = Vec::with_capacity(16 + rgba_data.len());
frame_msg.extend_from_slice(&(pool_index as u32).to_le_bytes()); frame_msg.extend_from_slice(&(pool_index as u32).to_le_bytes());

View File

@ -5,7 +5,6 @@ use tauri::{AppHandle, Manager, Url, WebviewUrl, WebviewWindowBuilder};
mod audio; mod audio;
mod video; mod video;
mod video_server;
mod frame_streamer; mod frame_streamer;
@ -139,11 +138,6 @@ pub fn run() {
.filter_level(log::LevelFilter::Error) .filter_level(log::LevelFilter::Error)
.init(); .init();
// Initialize video HTTP server
let video_server = video_server::VideoServer::new()
.expect("Failed to start video server");
eprintln!("[App] Video server started on port {}", video_server.port());
// Initialize WebSocket frame streamer // Initialize WebSocket frame streamer
let frame_streamer = frame_streamer::FrameStreamer::new() let frame_streamer = frame_streamer::FrameStreamer::new()
.expect("Failed to start frame streamer"); .expect("Failed to start frame streamer");
@ -153,7 +147,6 @@ pub fn run() {
.manage(Mutex::new(AppState::default())) .manage(Mutex::new(AppState::default()))
.manage(Arc::new(Mutex::new(audio::AudioState::default()))) .manage(Arc::new(Mutex::new(audio::AudioState::default())))
.manage(Arc::new(Mutex::new(video::VideoState::default()))) .manage(Arc::new(Mutex::new(video::VideoState::default())))
.manage(Arc::new(Mutex::new(video_server)))
.manage(Arc::new(Mutex::new(frame_streamer))) .manage(Arc::new(Mutex::new(frame_streamer)))
.setup(|app| { .setup(|app| {
#[cfg(any(windows, target_os = "linux"))] // Windows/Linux needs different handling from macOS #[cfg(any(windows, target_os = "linux"))] // Windows/Linux needs different handling from macOS
@ -281,14 +274,9 @@ pub fn run() {
audio::audio_serialize_track_graph, audio::audio_serialize_track_graph,
audio::audio_load_track_graph, audio::audio_load_track_graph,
video::video_load_file, video::video_load_file,
video::video_get_frame,
video::video_get_frames_batch,
video::video_stream_frame, video::video_stream_frame,
video::video_set_cache_size, video::video_set_cache_size,
video::video_get_pool_info, video::video_get_pool_info,
video::video_ipc_benchmark,
video::video_get_transcode_status,
video::video_allow_asset,
]) ])
// .manage(window_counter) // .manage(window_counter)
.build(tauri::generate_context!()) .build(tauri::generate_context!())

View File

@ -19,10 +19,6 @@ pub struct VideoFileMetadata {
pub audio_sample_rate: Option<u32>, pub audio_sample_rate: Option<u32>,
pub audio_channels: Option<u32>, pub audio_channels: Option<u32>,
pub audio_waveform: Option<Vec<WaveformPeak>>, pub audio_waveform: Option<Vec<WaveformPeak>>,
pub codec_name: String,
pub is_browser_compatible: bool,
pub http_url: Option<String>, // HTTP URL to stream video (if compatible or transcode complete)
pub transcoding: bool, // True if currently transcoding
} }
struct VideoDecoder { struct VideoDecoder {
@ -244,24 +240,10 @@ impl VideoDecoder {
} }
} }
use std::collections::HashMap;
use std::path::PathBuf;
/// State of one background FFmpeg transcode for a loaded video file.
/// Stored in `VideoState::transcode_jobs`, keyed by pool index, and read
/// back by `video_get_transcode_status`.
#[derive(Clone)]
pub struct TranscodeJob {
    // Index of the decoder in the VideoState pool this job belongs to.
    pub pool_index: usize,
    // Original (non-browser-compatible) source video path.
    pub input_path: String,
    // Destination .webm path inside the temp cache directory.
    pub output_path: String,
    pub http_url: Option<String>, // HTTP URL when transcode completes
    pub progress: f32, // 0.0 to 1.0
    pub completed: bool,
}
pub struct VideoState { pub struct VideoState {
pool: Vec<Arc<Mutex<VideoDecoder>>>, pool: Vec<Arc<Mutex<VideoDecoder>>>,
next_pool_index: usize, next_pool_index: usize,
cache_size: usize, cache_size: usize,
transcode_jobs: Arc<Mutex<HashMap<usize, TranscodeJob>>>, // pool_index -> job
} }
impl Default for VideoState { impl Default for VideoState {
@ -270,7 +252,6 @@ impl Default for VideoState {
pool: Vec::new(), pool: Vec::new(),
next_pool_index: 0, next_pool_index: 0,
cache_size: 20, // Default cache size cache_size: 20, // Default cache size
transcode_jobs: Arc::new(Mutex::new(HashMap::new())),
} }
} }
} }
@ -279,7 +260,6 @@ impl Default for VideoState {
pub async fn video_load_file( pub async fn video_load_file(
video_state: tauri::State<'_, Arc<Mutex<VideoState>>>, video_state: tauri::State<'_, Arc<Mutex<VideoState>>>,
audio_state: tauri::State<'_, Arc<Mutex<crate::audio::AudioState>>>, audio_state: tauri::State<'_, Arc<Mutex<crate::audio::AudioState>>>,
video_server: tauri::State<'_, Arc<Mutex<crate::video_server::VideoServer>>>,
path: String, path: String,
) -> Result<VideoFileMetadata, String> { ) -> Result<VideoFileMetadata, String> {
eprintln!("[Video] Loading file: {}", path); eprintln!("[Video] Loading file: {}", path);
@ -408,45 +388,13 @@ pub async fn video_load_file(
(None, None, None, None, None) (None, None, None, None, None)
}; };
// Detect video codec // Create video decoder with max dimensions for playback (1920x1080)
let video_stream = input.streams() // This scales videos to reduce data transfer over WebSocket
.best(ffmpeg::media::Type::Video)
.ok_or("No video stream found")?;
let codec_id = video_stream.parameters().id();
let codec_name = ffmpeg::codec::Id::name(&codec_id).to_string();
// Check if codec is browser-compatible (can play directly)
// Browsers support: H.264/AVC, VP8, VP9, AV1 (limited)
let is_browser_compatible = matches!(
codec_id,
ffmpeg::codec::Id::H264 |
ffmpeg::codec::Id::VP8 |
ffmpeg::codec::Id::VP9 |
ffmpeg::codec::Id::AV1
);
eprintln!("[Video Codec] {} - Browser compatible: {}", codec_name, is_browser_compatible);
// Create video decoder with max dimensions for playback (800x600)
// This scales down high-res videos to reduce data transfer
let mut video_state_guard = video_state.lock().unwrap(); let mut video_state_guard = video_state.lock().unwrap();
let pool_index = video_state_guard.next_pool_index; let pool_index = video_state_guard.next_pool_index;
video_state_guard.next_pool_index += 1; video_state_guard.next_pool_index += 1;
let decoder = VideoDecoder::new(path.clone(), video_state_guard.cache_size, Some(800), Some(600))?; let decoder = VideoDecoder::new(path.clone(), video_state_guard.cache_size, Some(1920), Some(1080))?;
// Add file to HTTP server if browser-compatible
let http_url = if is_browser_compatible {
let server = video_server.lock().unwrap();
let url_path = format!("/video/{}", pool_index);
server.add_file(url_path.clone(), PathBuf::from(&path));
let http_url = server.get_url(&url_path);
eprintln!("[Video] Browser-compatible, serving at: {}", http_url);
Some(http_url)
} else {
None
};
let metadata = VideoFileMetadata { let metadata = VideoFileMetadata {
pool_index, pool_index,
@ -460,152 +408,13 @@ pub async fn video_load_file(
audio_sample_rate, audio_sample_rate,
audio_channels, audio_channels,
audio_waveform, audio_waveform,
codec_name,
is_browser_compatible,
http_url,
transcoding: !is_browser_compatible,
}; };
video_state_guard.pool.push(Arc::new(Mutex::new(decoder))); video_state_guard.pool.push(Arc::new(Mutex::new(decoder)));
// Start background transcoding if not browser-compatible
if !is_browser_compatible {
eprintln!("[Video Transcode] Starting background transcode for pool_index {}", pool_index);
let jobs = video_state_guard.transcode_jobs.clone();
let input_path = path.clone();
let pool_idx = pool_index;
let server = video_server.inner().clone();
tauri::async_runtime::spawn(async move {
if let Err(e) = start_transcode(jobs, pool_idx, input_path, server).await {
eprintln!("[Video Transcode] Failed: {}", e);
}
});
}
Ok(metadata) Ok(metadata)
} }
// Background transcode to WebM/VP9 for browser compatibility
/// Transcode `input_path` to WebM/VP9 in the background so the browser can
/// play codecs it does not support natively.
///
/// Side-effect order matters: a `TranscodeJob` entry (progress 0.0) is
/// inserted *before* FFmpeg runs so status polls see the job immediately;
/// on success the output file is registered with the HTTP server and the
/// job is marked completed with its URL.
///
/// NOTE(review): `Command::output()` blocks until FFmpeg exits, inside an
/// `async fn` — this ties up an async-runtime worker for the whole encode.
/// Also, `progress` jumps straight from 0.0 to 1.0; no intermediate
/// progress is ever reported.
async fn start_transcode(
    jobs: Arc<Mutex<HashMap<usize, TranscodeJob>>>,
    pool_index: usize,
    input_path: String,
    video_server: Arc<Mutex<crate::video_server::VideoServer>>,
) -> Result<(), String> {
    use std::process::Command;
    // Generate output path in system cache directory
    let cache_dir = std::env::temp_dir().join("lightningbeam_transcoded");
    std::fs::create_dir_all(&cache_dir).map_err(|e| e.to_string())?;
    let input_file = PathBuf::from(&input_path);
    let file_stem = input_file.file_stem()
        .ok_or("Invalid input path")?
        .to_string_lossy();
    // Suffix with pool_index so two loads of the same file don't collide.
    let output_path = cache_dir.join(format!("{}_{}.webm", file_stem, pool_index));
    // Create job entry (scoped block so the lock is released before encoding)
    {
        let mut jobs_guard = jobs.lock().unwrap();
        jobs_guard.insert(pool_index, TranscodeJob {
            pool_index,
            input_path: input_path.clone(),
            output_path: output_path.to_string_lossy().to_string(),
            http_url: None,
            progress: 0.0,
            completed: false,
        });
    }
    eprintln!("[Video Transcode] Output: {}", output_path.display());
    // Run FFmpeg transcode command
    // Using VP9 codec with CRF 30 (good quality/size balance) and fast encoding
    let output = Command::new("ffmpeg")
        .args(&[
            "-i", &input_path,
            "-c:v", "libvpx-vp9",  // VP9 video codec
            "-crf", "30",          // Quality (lower = better, 23-32 recommended)
            "-b:v", "0",           // Use CRF mode
            "-threads", "4",       // Use 4 threads
            "-row-mt", "1",        // Enable row-based multithreading
            "-speed", "4",         // Encoding speed (0=slowest/best, 4=good balance)
            "-c:a", "libopus",     // Opus audio codec (best for WebM)
            "-b:a", "128k",        // Audio bitrate
            "-y",                  // Overwrite output
            output_path.to_str().ok_or("Invalid output path")?,
        ])
        .output()
        .map_err(|e| format!("Failed to spawn ffmpeg: {}", e))?;
    if output.status.success() {
        eprintln!("[Video Transcode] Completed: {}", output_path.display());
        // Add transcoded file to HTTP server so the webview can stream it
        let server = video_server.lock().unwrap();
        let url_path = format!("/video/{}", pool_index);
        server.add_file(url_path.clone(), output_path.clone());
        let http_url = server.get_url(&url_path);
        eprintln!("[Video Transcode] Serving at: {}", http_url);
        drop(server); // release server lock before taking the jobs lock
        // Mark as completed and store HTTP URL
        let mut jobs_guard = jobs.lock().unwrap();
        if let Some(job) = jobs_guard.get_mut(&pool_index) {
            job.progress = 1.0;
            job.completed = true;
            job.http_url = Some(http_url);
        }
        eprintln!("[Video Transcode] Job completed for pool_index {}", pool_index);
        Ok(())
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        eprintln!("[Video Transcode] FFmpeg error: {}", stderr);
        Err(format!("FFmpeg failed: {}", stderr))
    }
}
// Get transcode status for a pool index
/// Query the background transcode job for `pool_index`.
///
/// Returns `Ok(None)` when no job exists for that index, otherwise
/// `(output_path, progress, completed, http_url)` — the HTTP URL is only
/// populated once the transcode has finished.
#[tauri::command]
pub async fn video_get_transcode_status(
    video_state: tauri::State<'_, Arc<Mutex<VideoState>>>,
    pool_index: usize,
) -> Result<Option<(String, f32, bool, Option<String>)>, String> {
    let state_guard = video_state.lock().unwrap();
    let jobs_guard = state_guard.transcode_jobs.lock().unwrap();
    Ok(jobs_guard.get(&pool_index).map(|job| {
        (
            job.output_path.clone(),
            job.progress,
            job.completed,
            job.http_url.clone(),
        )
    }))
}
// Add a video file to asset protocol scope so browser can access it
/// Whitelist a video file for the webview by adding it to both the Tauri
/// FS scope and the asset protocol scope.
///
/// Both scopes must allow the file: the asset protocol serves the bytes,
/// while the FS scope gates plugin file access. Returns an error string if
/// either scope rejects the path.
#[tauri::command]
pub async fn video_allow_asset(
    app: tauri::AppHandle,
    path: String,
) -> Result<(), String> {
    use tauri_plugin_fs::FsExt;
    let file_path = PathBuf::from(&path);
    // Add to FS scope
    let fs_scope = app.fs_scope();
    fs_scope.allow_file(&file_path)
        .map_err(|e| format!("Failed to allow file in fs scope: {}", e))?;
    // Add to asset protocol scope
    let asset_scope = app.asset_protocol_scope();
    asset_scope.allow_file(&file_path)
        .map_err(|e| format!("Failed to allow file in asset scope: {}", e))?;
    eprintln!("[Video] Added to asset scope: {}", path);
    Ok(())
}
fn generate_waveform(audio_data: &[f32], channels: u32, target_peaks: usize) -> Vec<WaveformPeak> { fn generate_waveform(audio_data: &[f32], channels: u32, target_peaks: usize) -> Vec<WaveformPeak> {
let total_samples = audio_data.len(); let total_samples = audio_data.len();
let samples_per_channel = total_samples / channels as usize; let samples_per_channel = total_samples / channels as usize;
@ -648,91 +457,6 @@ fn generate_waveform(audio_data: &[f32], channels: u32, target_peaks: usize) ->
waveform waveform
} }
/// Decode one video frame at `timestamp` from the pool decoder `pool_index`
/// and send it to the frontend over the IPC channel as raw bytes.
///
/// When `use_jpeg` is true the RGBA frame is re-encoded as JPEG (quality 85)
/// to shrink the payload; otherwise raw RGBA bytes are sent. Every phase
/// (lock acquisition, decode, compress, send) is timed and logged to stderr.
///
/// NOTE(review): `data_to_send.clone()` below copies the whole frame buffer
/// solely so its length can be logged after the send — computing `len()`
/// before the send would avoid the copy.
#[tauri::command]
pub async fn video_get_frame(
    state: tauri::State<'_, Arc<Mutex<VideoState>>>,
    pool_index: usize,
    timestamp: f64,
    use_jpeg: bool,
    channel: tauri::ipc::Channel,
) -> Result<(), String> {
    use std::time::Instant;
    let t_total_start = Instant::now();
    let t_lock_start = Instant::now();
    // Clone the Arc and drop the pool lock quickly so other commands
    // aren't blocked while this frame decodes.
    let video_state = state.lock().unwrap();
    let decoder = video_state.pool.get(pool_index)
        .ok_or("Invalid pool index")?
        .clone();
    drop(video_state);
    let mut decoder = decoder.lock().unwrap();
    let t_lock_end = Instant::now();
    let t_decode_start = Instant::now();
    let frame_data = decoder.get_frame(timestamp)?;
    let t_decode_end = Instant::now();
    let t_compress_start = Instant::now();
    let data_to_send = if use_jpeg {
        // Get frame dimensions from decoder
        let width = decoder.output_width;
        let height = decoder.output_height;
        // Create image from raw RGBA data
        let img = RgbaImage::from_raw(width, height, frame_data)
            .ok_or("Failed to create image from frame data")?;
        // Convert RGBA to RGB (JPEG doesn't support alpha)
        let rgb_img = image::DynamicImage::ImageRgba8(img).to_rgb8();
        // Encode to JPEG with quality 85 (good balance of size/quality)
        let mut jpeg_data = Vec::new();
        let mut encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut jpeg_data, 85);
        encoder.encode(
            rgb_img.as_raw(),
            rgb_img.width(),
            rgb_img.height(),
            image::ColorType::Rgb8
        ).map_err(|e| format!("JPEG encoding failed: {}", e))?;
        jpeg_data
    } else {
        frame_data
    };
    let t_compress_end = Instant::now();
    // Drop decoder lock before sending to avoid blocking
    drop(decoder);
    let t_send_start = Instant::now();
    // Send binary data through channel (bypasses JSON serialization)
    // InvokeResponseBody::Raw sends raw binary data without JSON encoding
    channel.send(tauri::ipc::InvokeResponseBody::Raw(data_to_send.clone()))
        .map_err(|e| format!("Channel send error: {}", e))?;
    let t_send_end = Instant::now();
    let t_total_end = Instant::now();
    // Detailed profiling
    let lock_time = t_lock_end.duration_since(t_lock_start).as_micros();
    let decode_time = t_decode_end.duration_since(t_decode_start).as_micros();
    let compress_time = t_compress_end.duration_since(t_compress_start).as_micros();
    let send_time = t_send_end.duration_since(t_send_start).as_micros();
    let total_time = t_total_end.duration_since(t_total_start).as_micros();
    let size_kb = data_to_send.len() / 1024;
    let mode = if use_jpeg { "JPEG" } else { "RAW" };
    eprintln!("[Video Profile {}] Size: {}KB | Lock: {}μs | Decode: {}μs | Compress: {}μs | Send: {}μs | Total: {}μs",
        mode, size_kb, lock_time, decode_time, compress_time, send_time, total_time);
    Ok(())
}
#[tauri::command] #[tauri::command]
pub async fn video_set_cache_size( pub async fn video_set_cache_size(
state: tauri::State<'_, Arc<Mutex<VideoState>>>, state: tauri::State<'_, Arc<Mutex<VideoState>>>,
@ -760,123 +484,6 @@ pub async fn video_get_pool_info(
)) ))
} }
// Benchmark command to test IPC performance with various payload sizes
/// IPC throughput probe: allocate `size_bytes` of zeroed data, push it
/// through the Tauri channel as a raw binary body, and log allocation /
/// send timings to stderr. Used to characterize IPC cost vs payload size.
#[tauri::command]
pub async fn video_ipc_benchmark(
    size_bytes: usize,
    channel: tauri::ipc::Channel,
) -> Result<(), String> {
    use std::time::Instant;
    let started = Instant::now();
    // Dummy zero-filled payload of the requested size.
    let payload = vec![0u8; size_bytes];
    let alloc_done = Instant::now();
    // Raw binary body — bypasses JSON serialization entirely.
    channel
        .send(tauri::ipc::InvokeResponseBody::Raw(payload))
        .map_err(|e| format!("Channel send error: {}", e))?;
    let send_done = Instant::now();
    let alloc_time = alloc_done.duration_since(started).as_micros();
    let send_time = send_done.duration_since(alloc_done).as_micros();
    let total_time = send_done.duration_since(started).as_micros();
    eprintln!("[IPC Benchmark Rust] Size: {}KB | Alloc: {}μs | Send: {}μs | Total: {}μs",
        size_bytes / 1024, alloc_time, send_time, total_time);
    Ok(())
}
// Batch frame request - get multiple frames in one IPC call
/// Decode several frames in one IPC round trip and send them as a single
/// packed binary message.
///
/// Wire format (all integers little-endian u32):
/// `[frame_count][frame1_size][frame1_data...][frame2_size][frame2_data...]`
///
/// Frames are JPEG-compressed (quality 85) when `use_jpeg` is true,
/// otherwise raw RGBA. Per-phase timings are accumulated and logged.
///
/// NOTE(review): the decoder lock is held across *all* decodes in the
/// batch, so a long timestamp list blocks other frame requests on the
/// same pool entry until the batch finishes.
#[tauri::command]
pub async fn video_get_frames_batch(
    state: tauri::State<'_, Arc<Mutex<VideoState>>>,
    pool_index: usize,
    timestamps: Vec<f64>,
    use_jpeg: bool,
    channel: tauri::ipc::Channel,
) -> Result<(), String> {
    use std::time::Instant;
    let t_total_start = Instant::now();
    // Clone the decoder Arc and release the pool lock immediately.
    let video_state = state.lock().unwrap();
    let decoder = video_state.pool.get(pool_index)
        .ok_or("Invalid pool index")?
        .clone();
    drop(video_state);
    let mut decoder = decoder.lock().unwrap();
    // Decode all frames
    let mut all_frames = Vec::new();
    let mut total_decode_time = 0u128;
    let mut total_compress_time = 0u128;
    for timestamp in &timestamps {
        let t_decode_start = Instant::now();
        let frame_data = decoder.get_frame(*timestamp)?;
        let t_decode_end = Instant::now();
        total_decode_time += t_decode_end.duration_since(t_decode_start).as_micros();
        let t_compress_start = Instant::now();
        let data = if use_jpeg {
            // Re-encode RGBA -> RGB -> JPEG to shrink the payload.
            let width = decoder.output_width;
            let height = decoder.output_height;
            let img = RgbaImage::from_raw(width, height, frame_data)
                .ok_or("Failed to create image from frame data")?;
            let rgb_img = image::DynamicImage::ImageRgba8(img).to_rgb8();
            let mut jpeg_data = Vec::new();
            let mut encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut jpeg_data, 85);
            encoder.encode(
                rgb_img.as_raw(),
                rgb_img.width(),
                rgb_img.height(),
                image::ColorType::Rgb8
            ).map_err(|e| format!("JPEG encoding failed: {}", e))?;
            jpeg_data
        } else {
            frame_data
        };
        let t_compress_end = Instant::now();
        total_compress_time += t_compress_end.duration_since(t_compress_start).as_micros();
        all_frames.push(data);
    }
    drop(decoder);
    // Pack all frames into one buffer with metadata
    // Format: [frame_count: u32][frame1_size: u32][frame1_data...][frame2_size: u32][frame2_data...]
    let mut packed_data = Vec::new();
    packed_data.extend_from_slice(&(all_frames.len() as u32).to_le_bytes());
    for frame in &all_frames {
        packed_data.extend_from_slice(&(frame.len() as u32).to_le_bytes());
        packed_data.extend_from_slice(frame);
    }
    let total_size_kb = packed_data.len() / 1024;
    let t_send_start = Instant::now();
    channel.send(tauri::ipc::InvokeResponseBody::Raw(packed_data))
        .map_err(|e| format!("Channel send error: {}", e))?;
    let t_send_end = Instant::now();
    let send_time = t_send_end.duration_since(t_send_start).as_micros();
    let total_time = t_send_end.duration_since(t_total_start).as_micros();
    let mode = if use_jpeg { "JPEG" } else { "RAW" };
    eprintln!("[Video Batch {}] Frames: {} | Size: {}KB | Decode: {}μs | Compress: {}μs | Send: {}μs | Total: {}μs",
        mode, timestamps.len(), total_size_kb, total_decode_time, total_compress_time, send_time, total_time);
    Ok(())
}
/// Stream a decoded video frame over WebSocket (zero-copy performance testing) /// Stream a decoded video frame over WebSocket (zero-copy performance testing)
#[tauri::command] #[tauri::command]
pub async fn video_stream_frame( pub async fn video_stream_frame(

View File

@ -1,203 +0,0 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

use tiny_http::{Server, Response, Request, Header, StatusCode};
/// Minimal localhost HTTP server that streams whitelisted video files to
/// the webview. Only paths explicitly registered via `add_file` are served;
/// everything else 404s.
pub struct VideoServer {
    // Port the listener bound to (OS-assigned, see VideoServer::new).
    port: u16,
    allowed_files: Arc<Mutex<HashMap<String, PathBuf>>>, // URL path -> file path
}
impl VideoServer {
    /// Bind to an OS-assigned port on 127.0.0.1 and spawn a background
    /// thread that serves requests for the lifetime of the process.
    ///
    /// The accept loop owns the `Server`; the whitelist map is shared with
    /// this handle through the cloned `Arc`. NOTE(review): the thread is
    /// never joined or signalled to stop — it runs until process exit.
    pub fn new() -> Result<Self, String> {
        // Bind to localhost on a random available port
        let server = Server::http("127.0.0.1:0")
            .map_err(|e| format!("Failed to create HTTP server: {}", e))?;
        let port = match server.server_addr() {
            tiny_http::ListenAddr::IP(addr) => addr.port(),
            _ => return Err("Unexpected server address type".to_string()),
        };
        let allowed_files = Arc::new(Mutex::new(HashMap::new()));
        eprintln!("[Video Server] Started on port {}", port);
        // Spawn server thread
        let files = allowed_files.clone();
        std::thread::spawn(move || {
            for request in server.incoming_requests() {
                handle_request(request, &files);
            }
        });
        Ok(Self {
            port,
            allowed_files,
        })
    }
    /// Port the server is listening on.
    pub fn port(&self) -> u16 {
        self.port
    }
    /// Whitelist `file_path` to be served at `url_path` (e.g. "/video/0").
    /// Re-registering the same URL path replaces the previous mapping.
    pub fn add_file(&self, url_path: String, file_path: PathBuf) {
        eprintln!("[Video Server] Adding file: {} -> {:?}", url_path, file_path);
        let mut files = self.allowed_files.lock().unwrap();
        files.insert(url_path, file_path);
    }
    /// Absolute http://127.0.0.1:<port><url_path> URL for a registered path.
    pub fn get_url(&self, url_path: &str) -> String {
        format!("http://127.0.0.1:{}{}", self.port, url_path)
    }
}
/// Serve one HTTP request from the accept loop.
///
/// Looks the URL up in the whitelist (404 if absent), opens the file
/// (500 on I/O failure), then dispatches to a Range or full-file handler
/// depending on whether the client sent a `Range: bytes=...` header —
/// browsers use Range requests to seek inside <video> elements.
fn handle_request(request: Request, allowed_files: &Arc<Mutex<HashMap<String, PathBuf>>>) {
    let url = request.url();
    let method = request.method();
    eprintln!("[Video Server] {} {}", method, url);
    // Get file path (clone it so the whitelist lock can be dropped early)
    let files = allowed_files.lock().unwrap();
    let file_path = match files.get(url) {
        Some(path) => path.clone(),
        None => {
            eprintln!("[Video Server] File not found: {}", url);
            let response = Response::from_string("Not Found")
                .with_status_code(StatusCode(404));
            let _ = request.respond(response);
            return;
        }
    };
    drop(files);
    // Open file
    let mut file = match File::open(&file_path) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("[Video Server] Failed to open file: {}", e);
            let response = Response::from_string("Internal Server Error")
                .with_status_code(StatusCode(500));
            let _ = request.respond(response);
            return;
        }
    };
    // Get file size
    let file_size = match file.metadata() {
        Ok(meta) => meta.len(),
        Err(e) => {
            eprintln!("[Video Server] Failed to get file metadata: {}", e);
            let response = Response::from_string("Internal Server Error")
                .with_status_code(StatusCode(500));
            let _ = request.respond(response);
            return;
        }
    };
    // Check for Range header - convert to owned String to avoid borrow issues
    let range_header = request.headers().iter()
        .find(|h| h.field.equiv("Range"))
        .map(|h| h.value.as_str().to_string());
    match range_header {
        Some(range) if range.starts_with("bytes=") => {
            // Parse range request
            eprintln!("[Video Server] Range request: {}", range);
            handle_range_request(request, file, file_size, &range, &file_path);
        }
        _ => {
            // Serve entire file
            eprintln!("[Video Server] Full file request");
            handle_full_request(request, file, file_size, &file_path);
        }
    }
}
/// Serve an HTTP byte-range request (`Range: bytes=start-end`) for `file`,
/// responding with 206 Partial Content plus `Content-Range`.
///
/// The end bound is optional (`bytes=start-` means "to EOF") and is clamped
/// to the last byte of the file. Unsatisfiable ranges — start past EOF,
/// inverted bounds, or an empty file — get a 416 response with
/// `Content-Range: bytes */<size>` (RFC 7233 §4.4). The original code
/// computed `end - start + 1` and `file_size - 1` unchecked, which
/// underflows (panics in debug builds) on inverted ranges or empty files,
/// and `read_exact` failed whenever `end` pointed past EOF.
fn handle_range_request(
    request: Request,
    mut file: File,
    file_size: u64,
    range: &str,
    file_path: &PathBuf,
) {
    // Parse "bytes=start-end"; a missing/empty end means "through EOF".
    let spec = range.trim_start_matches("bytes=");
    let mut parts = spec.splitn(2, '-');
    let start = parts
        .next()
        .and_then(|s| s.parse::<u64>().ok())
        .unwrap_or(0);
    let last_byte = file_size.saturating_sub(1);
    let end = parts
        .next()
        .filter(|s| !s.is_empty())
        .and_then(|s| s.parse::<u64>().ok())
        .map_or(last_byte, |e| e.min(last_byte)); // clamp end to EOF

    // Reject unsatisfiable ranges instead of underflowing below.
    if file_size == 0 || start > end {
        eprintln!("[Video Server] Unsatisfiable range: {} (size {})", range, file_size);
        let content_range = format!("bytes */{}", file_size);
        let response = Response::from_string("Range Not Satisfiable")
            .with_status_code(StatusCode(416))
            .with_header(Header::from_bytes(&b"Content-Range"[..], content_range.as_bytes()).unwrap());
        let _ = request.respond(response);
        return;
    }
    let length = end - start + 1; // safe: start <= end checked above
    // Seek to start position
    if let Err(e) = file.seek(SeekFrom::Start(start)) {
        eprintln!("[Video Server] Failed to seek: {}", e);
        let response = Response::from_string("Internal Server Error")
            .with_status_code(StatusCode(500));
        let _ = request.respond(response);
        return;
    }
    // Read the requested range
    let mut buffer = vec![0u8; length as usize];
    if let Err(e) = file.read_exact(&mut buffer) {
        eprintln!("[Video Server] Failed to read range: {}", e);
        let response = Response::from_string("Internal Server Error")
            .with_status_code(StatusCode(500));
        let _ = request.respond(response);
        return;
    }
    // Determine content type from the file extension
    let content_type = get_content_type(file_path);
    // Send 206 Partial Content response
    let content_range = format!("bytes {}-{}/{}", start, end, file_size);
    let response = Response::from_data(buffer)
        .with_status_code(StatusCode(206))
        .with_header(Header::from_bytes(&b"Content-Type"[..], content_type.as_bytes()).unwrap())
        .with_header(Header::from_bytes(&b"Content-Length"[..], length.to_string().as_bytes()).unwrap())
        .with_header(Header::from_bytes(&b"Content-Range"[..], content_range.as_bytes()).unwrap())
        .with_header(Header::from_bytes(&b"Accept-Ranges"[..], &b"bytes"[..]).unwrap())
        .with_header(Header::from_bytes(&b"Access-Control-Allow-Origin"[..], &b"*"[..]).unwrap());
    let _ = request.respond(response);
}
/// Serve the whole file with a 200 response and Content-Length framing.
///
/// NOTE(review): `file_size` is accepted but never used — tiny_http derives
/// Content-Length from the `File` itself in `Response::from_file`.
fn handle_full_request(
    request: Request,
    file: File,
    file_size: u64,
    file_path: &PathBuf,
) {
    // Determine content type from the file extension
    let content_type = get_content_type(file_path);
    // Send 200 OK response using from_file to avoid chunked encoding
    let response = Response::from_file(file)
        .with_status_code(StatusCode(200))
        .with_chunked_threshold(usize::MAX) // Force Content-Length instead of chunked
        .with_header(Header::from_bytes(&b"Content-Type"[..], content_type.as_bytes()).unwrap())
        .with_header(Header::from_bytes(&b"Accept-Ranges"[..], &b"bytes"[..]).unwrap())
        .with_header(Header::from_bytes(&b"Access-Control-Allow-Origin"[..], &b"*"[..]).unwrap());
    let _ = request.respond(response);
}
/// Map a file extension to the MIME type used in the Content-Type header.
///
/// Matching is case-sensitive on the extension; unknown or missing
/// extensions fall back to "application/octet-stream".
///
/// Takes `&Path` instead of the original `&PathBuf` (clippy `ptr_arg`):
/// strictly more general, and every `&PathBuf` call site coerces for free.
fn get_content_type(path: &Path) -> String {
    match path.extension().and_then(|s| s.to_str()) {
        Some("webm") => "video/webm".to_string(),
        Some("mp4") => "video/mp4".to_string(),
        Some("mkv") => "video/x-matroska".to_string(),
        Some("avi") => "video/x-msvideo".to_string(),
        Some("mov") => "video/quicktime".to_string(),
        _ => "application/octet-stream".to_string(),
    }
}

View File

@ -66,14 +66,10 @@ export class FrameReceiver {
const dataLength = width * height * 4; const dataLength = width * height * 4;
const rgbaData = new Uint8ClampedArray(arrayBuffer, 16, dataLength); const rgbaData = new Uint8ClampedArray(arrayBuffer, 16, dataLength);
// Debug: Log received data
console.log(`[FrameReceiver RECV] pool=${poolIndex}, ${width}x${height}, total buffer len=${arrayBuffer.byteLength}, data len=${dataLength}, first 20 RGBA bytes:`, Array.from(rgbaData.slice(0, 20)));
// Create ImageData directly from the view (zero-copy!) // Create ImageData directly from the view (zero-copy!)
const imageData = new ImageData(rgbaData, width, height); const imageData = new ImageData(rgbaData, width, height);
// Debug: Log ImageData properties
console.log(`[FrameReceiver RECV] ImageData: ${imageData.width}x${imageData.height}, data len=${imageData.data.length}, first 20 bytes:`, Array.from(imageData.data.slice(0, 20)));
// Call subscriber with frame data // Call subscriber with frame data
const timestamp = timestampMs / 1000.0; const timestamp = timestampMs / 1000.0;