Compare commits

..

4 Commits

Author SHA1 Message Date
Skyler Lehmkuhl f9b62bb090 Add frames timeline mode 2026-03-10 15:54:54 -04:00
Skyler Lehmkuhl 4118c75b86 Performance tweaks 2026-03-10 03:24:03 -04:00
Skyler Lehmkuhl ac2b4ff8ab Improve idle performance 2026-03-10 02:41:44 -04:00
Skyler Lehmkuhl 26f06da5bf Add gradient support to vector graphics 2026-03-10 00:57:47 -04:00
12 changed files with 665 additions and 254 deletions

View File

@ -41,6 +41,11 @@ pub struct AudioSystem {
pub event_rx: Option<rtrb::Consumer<AudioEvent>>,
/// Consumer for recording audio mirror (streams recorded samples to UI for live waveform)
recording_mirror_rx: Option<rtrb::Consumer<f32>>,
/// Producer end of the input ring-buffer. Taken into the closure when the
/// input stream is opened; `None` after `open_input_stream()` has been called.
input_tx: Option<rtrb::Producer<f32>>,
/// The live microphone/line-in stream. `None` until `open_input_stream()` is called.
input_stream: Option<cpal::Stream>,
}
impl AudioSystem {
@ -138,137 +143,8 @@ impl AudioSystem {
)
.map_err(|e| format!("Failed to build output stream: {e:?}"))?;
// Get input device
let input_device = match host.default_input_device() {
Some(device) => device,
None => {
eprintln!("Warning: No input device available, recording will be disabled");
// Start output stream and return without input
output_stream.play().map_err(|e| e.to_string())?;
// Spawn emitter thread if provided
if let Some(emitter) = event_emitter {
Self::spawn_emitter_thread(event_rx, emitter);
}
return Ok(Self {
controller,
stream: output_stream,
sample_rate,
channels,
event_rx: None, // No event receiver when audio device unavailable
recording_mirror_rx: None,
});
}
};
// Get input config using the device's default (most compatible)
let input_config = match input_device.default_input_config() {
Ok(config) => {
let cfg: cpal::StreamConfig = config.into();
cfg
}
Err(e) => {
eprintln!("Warning: Could not get input config: {}, recording will be disabled", e);
output_stream.play().map_err(|e| e.to_string())?;
if let Some(emitter) = event_emitter {
Self::spawn_emitter_thread(event_rx, emitter);
}
return Ok(Self {
controller,
stream: output_stream,
sample_rate,
channels,
event_rx: None,
recording_mirror_rx: None,
});
}
};
let input_sample_rate = input_config.sample_rate;
let input_channels = input_config.channels as u32;
let output_sample_rate = sample_rate;
let output_channels = channels;
let needs_resample = input_sample_rate != output_sample_rate || input_channels != output_channels;
if needs_resample {
eprintln!("[AUDIO] Input device: {}Hz {}ch -> resampling to {}Hz {}ch",
input_sample_rate, input_channels, output_sample_rate, output_channels);
}
// Build input stream with resampling if needed
let input_stream = match input_device
.build_input_stream(
&input_config,
move |data: &[f32], _: &cpal::InputCallbackInfo| {
if !needs_resample {
for &sample in data {
let _ = input_tx.push(sample);
}
} else {
// Resample: linear interpolation from input rate to output rate
let in_ch = input_channels as usize;
let out_ch = output_channels as usize;
let ratio = output_sample_rate as f64 / input_sample_rate as f64;
let in_frames = data.len() / in_ch;
let out_frames = (in_frames as f64 * ratio) as usize;
for i in 0..out_frames {
let src_pos = i as f64 / ratio;
let src_idx = src_pos as usize;
let frac = (src_pos - src_idx as f64) as f32;
for ch in 0..out_ch {
// Map output channel to input channel
let in_ch_idx = ch.min(in_ch - 1);
let s0 = if src_idx < in_frames {
data[src_idx * in_ch + in_ch_idx]
} else {
0.0
};
let s1 = if src_idx + 1 < in_frames {
data[(src_idx + 1) * in_ch + in_ch_idx]
} else {
s0
};
let _ = input_tx.push(s0 + frac * (s1 - s0));
}
}
}
},
|err| eprintln!("Input stream error: {}", err),
None,
) {
Ok(stream) => stream,
Err(e) => {
eprintln!("Warning: Could not build input stream: {}, recording will be disabled", e);
output_stream.play().map_err(|e| e.to_string())?;
if let Some(emitter) = event_emitter {
Self::spawn_emitter_thread(event_rx, emitter);
}
return Ok(Self {
controller,
stream: output_stream,
sample_rate,
channels,
event_rx: None,
recording_mirror_rx: None,
});
}
};
// Start both streams
// Start output stream
output_stream.play().map_err(|e| e.to_string())?;
input_stream.play().map_err(|e| e.to_string())?;
// Leak the input stream to keep it alive
Box::leak(Box::new(input_stream));
// Spawn emitter thread if provided, or store event_rx for manual polling
let event_rx_option = if let Some(emitter) = event_emitter {
@ -278,6 +154,8 @@ impl AudioSystem {
Some(event_rx)
};
// Input stream is NOT opened here — call open_input_stream() when an
// audio input track is actually selected, to avoid constant ALSA wakeups.
Ok(Self {
controller,
stream: output_stream,
@ -285,6 +163,8 @@ impl AudioSystem {
channels,
event_rx: event_rx_option,
recording_mirror_rx: Some(mirror_rx),
input_tx: Some(input_tx),
input_stream: None,
})
}
@ -293,6 +173,99 @@ impl AudioSystem {
self.recording_mirror_rx.take()
}
/// Open the microphone/line-in input stream.
///
/// Call this as soon as an audio input track is selected so the stream is
/// ready before recording starts. The stream is opened with the same fixed
/// buffer size as the output stream to avoid ALSA spinning at high callback
/// rates with its tiny default buffer.
///
/// No-ops if the stream is already open.
///
/// # Errors
/// Returns `Err` when no input device is available, its default config
/// cannot be read, the stream cannot be built or started, or the input
/// ring-buffer producer has already been consumed (e.g. via
/// `take_input_opener`).
pub fn open_input_stream(&mut self, buffer_size: u32) -> Result<(), String> {
    if self.input_stream.is_some() {
        return Ok(()); // Already open — nothing to do.
    }
    // Probe the device and config BEFORE taking `input_tx`, so a missing
    // device or unreadable config leaves the producer in place and a later
    // retry (e.g. after the user plugs in a microphone) can still succeed.
    // Previously the producer was taken first, so any early failure made
    // every subsequent call fail with "Input ring-buffer already consumed".
    if self.input_tx.is_none() {
        return Err("Input ring-buffer already consumed".into());
    }
    let host = cpal::default_host();
    let input_device = host.default_input_device()
        .ok_or("No input device available")?;
    let default_cfg = input_device.default_input_config()
        .map_err(|e| e.to_string())?;
    let mut input_config: cpal::StreamConfig = default_cfg.into();
    // Match the output buffer size so ALSA wakes up at the same rate as
    // the output thread — prevents the ~750 wakeups/sec that the default
    // 64-frame buffer causes. (Fixed sizes are skipped on Windows —
    // presumably the backend rejects them; TODO confirm against cpal.)
    if !cfg!(target_os = "windows") {
        input_config.buffer_size = cpal::BufferSize::Fixed(buffer_size);
    }
    let input_sample_rate = input_config.sample_rate;
    let input_channels = input_config.channels as u32;
    let output_sample_rate = self.sample_rate;
    let output_channels = self.channels;
    let needs_resample = input_sample_rate != output_sample_rate
        || input_channels != output_channels;
    if needs_resample {
        eprintln!("[AUDIO] Input: {}Hz {}ch → resampling to {}Hz {}ch",
            input_sample_rate, input_channels, output_sample_rate, output_channels);
    }
    // Take the producer only now that all recoverable probing is done. If
    // `build_input_stream` itself fails, the closure (and the producer it
    // owns) is dropped and cannot be recovered — that path is unchanged.
    let mut input_tx = self.input_tx.take()
        .expect("input_tx checked Some above");
    let stream = input_device.build_input_stream(
        &input_config,
        move |data: &[f32], _: &cpal::InputCallbackInfo| {
            if !needs_resample {
                // Fast path: rates/layouts match — push samples verbatim.
                // `push` errors (ring buffer full) are deliberately ignored:
                // dropping samples beats blocking the real-time thread.
                for &s in data { let _ = input_tx.push(s); }
            } else {
                // Linear-interpolation resample from the input rate/layout
                // to the engine's output rate/layout.
                let in_ch = input_channels as usize;
                let out_ch = output_channels as usize;
                let ratio = output_sample_rate as f64 / input_sample_rate as f64;
                let in_frames = data.len() / in_ch;
                let out_frames = (in_frames as f64 * ratio) as usize;
                for i in 0..out_frames {
                    let src_pos = i as f64 / ratio;
                    let src_idx = src_pos as usize;
                    let frac = (src_pos - src_idx as f64) as f32;
                    for ch in 0..out_ch {
                        // Extra output channels map onto the last input channel.
                        let ic = ch.min(in_ch - 1);
                        let s0 = data.get(src_idx * in_ch + ic).copied().unwrap_or(0.0);
                        let s1 = data.get((src_idx + 1) * in_ch + ic).copied().unwrap_or(s0);
                        let _ = input_tx.push(s0 + frac * (s1 - s0));
                    }
                }
            }
        },
        |err| eprintln!("Input stream error: {err}"),
        None,
    ).map_err(|e| format!("Failed to build input stream: {e}"))?;
    stream.play().map_err(|e| e.to_string())?;
    self.input_stream = Some(stream);
    Ok(())
}
/// Close the input stream (e.g. when the last audio input track is removed).
///
/// NOTE(review): the ring-buffer producer (`input_tx`) was moved into the
/// stream's callback when the stream was opened, so after closing, a later
/// `open_input_stream()` call will fail with "Input ring-buffer already
/// consumed" — confirm whether reopen-after-close is a supported flow.
pub fn close_input_stream(&mut self) {
    self.input_stream = None; // Drop stops the stream
}
/// Extract an [`InputStreamOpener`] that can be stored independently and
/// used to open the microphone/line-in stream on demand.
/// Returns `None` if called a second time.
pub fn take_input_opener(&mut self) -> Option<InputStreamOpener> {
    // Take the producer out of `self`; once gone, later calls yield `None`.
    let producer = self.input_tx.take()?;
    Some(InputStreamOpener {
        input_tx: producer,
        sample_rate: self.sample_rate,
        channels: self.channels,
    })
}
/// Spawn a background thread to emit events from the ringbuffer
fn spawn_emitter_thread(mut event_rx: rtrb::Consumer<AudioEvent>, emitter: std::sync::Arc<dyn EventEmitter>) {
std::thread::spawn(move || {
@ -308,3 +281,77 @@ impl AudioSystem {
});
}
}
/// Self-contained handle for opening the microphone/line-in stream on demand.
///
/// Obtained via [`AudioSystem::take_input_opener`]. Call [`open`](Self::open)
/// when the user selects an audio input track; store the returned
/// `cpal::Stream` to keep it alive (dropping it stops the stream).
pub struct InputStreamOpener {
    /// Producer end of the input ring-buffer; moved into the stream callback by `open`.
    input_tx: rtrb::Producer<f32>,
    /// Engine output sample rate that captured audio is resampled to if needed.
    sample_rate: u32,
    /// Engine output channel count that captured audio is mapped to if needed.
    channels: u32,
}
impl InputStreamOpener {
    /// Open and start the input stream with the given buffer size.
    ///
    /// Uses the same `buffer_size` as the output stream so ALSA wakes up at
    /// the same rate (~187/s at 256 frames) rather than the ~750/s it defaults
    /// to with 64-frame buffers.
    pub fn open(mut self, buffer_size: u32) -> Result<cpal::Stream, String> {
        let device = cpal::default_host()
            .default_input_device()
            .ok_or("No input device available")?;
        let mut config: cpal::StreamConfig = device
            .default_input_config()
            .map_err(|e| e.to_string())?
            .into();
        // Pin the buffer size to match the output stream (skipped on Windows).
        if !cfg!(target_os = "windows") {
            config.buffer_size = cpal::BufferSize::Fixed(buffer_size);
        }
        let src_rate = config.sample_rate;
        let src_channels = config.channels as u32;
        let dst_rate = self.sample_rate;
        let dst_channels = self.channels;
        let must_resample = src_rate != dst_rate || src_channels != dst_channels;
        if must_resample {
            eprintln!("[AUDIO] Input: {}Hz {}ch → resampling to {}Hz {}ch",
                src_rate, src_channels, dst_rate, dst_channels);
        }
        // Real-time capture callback; owns the ring-buffer producer. Failed
        // pushes (ring full) are intentionally dropped rather than blocking.
        let data_cb = move |data: &[f32], _: &cpal::InputCallbackInfo| {
            if must_resample {
                // Linear interpolation from source rate/layout to destination.
                let sc = src_channels as usize;
                let dc = dst_channels as usize;
                let step = dst_rate as f64 / src_rate as f64;
                let src_frames = data.len() / sc;
                let dst_frames = (src_frames as f64 * step) as usize;
                for frame in 0..dst_frames {
                    let pos = frame as f64 / step;
                    let idx = pos as usize;
                    let t = (pos - idx as f64) as f32;
                    for out_ch in 0..dc {
                        // Surplus output channels reuse the last input channel.
                        let src_ch = out_ch.min(sc - 1);
                        let a = data.get(idx * sc + src_ch).copied().unwrap_or(0.0);
                        let b = data.get((idx + 1) * sc + src_ch).copied().unwrap_or(a);
                        let _ = self.input_tx.push(a + t * (b - a));
                    }
                }
            } else {
                // Rates and layouts already match: forward samples verbatim.
                for &sample in data {
                    let _ = self.input_tx.push(sample);
                }
            }
        };
        let stream = device
            .build_input_stream(
                &config,
                data_cb,
                |e| eprintln!("Input stream error: {e}"),
                None,
            )
            .map_err(|e| format!("Failed to build input stream: {e}"))?;
        stream.play().map_err(|e| e.to_string())?;
        Ok(stream)
    }
}

View File

@ -3468,7 +3468,7 @@ dependencies = [
[[package]]
name = "lightningbeam-editor"
version = "1.0.1-alpha"
version = "1.0.2-alpha"
dependencies = [
"beamdsp",
"bytemuck",

View File

@ -66,6 +66,18 @@ pub struct ShapeGradient {
/// Ignored for Radial.
pub angle: f32,
pub extend: GradientExtend,
/// Explicit world-space start point set by the gradient drag tool.
/// For Linear: the start of the gradient axis.
/// For Radial: the center of the gradient circle.
/// When `None`, the renderer falls back to bbox-based computation.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub start_world: Option<(f64, f64)>,
/// Explicit world-space end point set by the gradient drag tool.
/// For Linear: the end of the gradient axis.
/// For Radial: a point on the edge of the gradient circle (defines radius).
/// When `None`, the renderer falls back to bbox-based computation.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub end_world: Option<(f64, f64)>,
}
impl Default for ShapeGradient {
@ -73,11 +85,13 @@ impl Default for ShapeGradient {
Self {
kind: GradientType::Linear,
stops: vec![
GradientStop { position: 0.0, color: ShapeColor::rgba(0, 0, 0, 255) },
GradientStop { position: 1.0, color: ShapeColor::rgba(0, 0, 0, 0) },
GradientStop { position: 0.0, color: ShapeColor::rgba(255, 255, 255, 255) },
GradientStop { position: 1.0, color: ShapeColor::rgba(0, 0, 0, 255) },
],
angle: 0.0,
extend: GradientExtend::Pad,
start_world: None,
end_world: None,
}
}
}

View File

@ -94,7 +94,7 @@ pub fn hit_test_layer(
if face.deleted || i == 0 {
continue; // skip unbounded face
}
if face.fill_color.is_none() && face.image_fill.is_none() {
if face.fill_color.is_none() && face.image_fill.is_none() && face.gradient_fill.is_none() {
continue;
}
if face.outer_half_edge.is_none() {
@ -472,7 +472,7 @@ pub fn hit_test_vector_editing(
if face.deleted || i == 0 {
continue;
}
if face.fill_color.is_none() && face.image_fill.is_none() {
if face.fill_color.is_none() && face.image_fill.is_none() && face.gradient_fill.is_none() {
continue;
}
if face.outer_half_edge.is_none() {

View File

@ -1092,8 +1092,24 @@ pub fn render_dcel(
if !filled {
if let Some(ref grad) = face.gradient_fill {
use kurbo::Rect;
use crate::gradient::GradientType;
let bbox: Rect = vello::kurbo::Shape::bounding_box(&path);
let (start, end) = gradient_bbox_endpoints(grad.angle, bbox);
let (start, end) = match (grad.start_world, grad.end_world) {
(Some((sx, sy)), Some((ex, ey))) => match grad.kind {
GradientType::Linear => {
(kurbo::Point::new(sx, sy), kurbo::Point::new(ex, ey))
}
GradientType::Radial => {
// start_world = center, end_world = edge point.
// to_peniko_brush uses midpoint(start, end) as center,
// so reflect the edge through the center to get the
// opposing diameter endpoint.
let opp = kurbo::Point::new(2.0 * sx - ex, 2.0 * sy - ey);
(opp, kurbo::Point::new(ex, ey))
}
},
_ => gradient_bbox_endpoints(grad.angle, bbox),
};
let brush = grad.to_peniko_brush(start, end, opacity_f32);
scene.fill(fill_rule, base_transform, &brush, None, &path);
filled = true;

View File

@ -366,6 +366,7 @@ impl Tool {
Tool::Rectangle,
Tool::Ellipse,
Tool::PaintBucket,
Tool::Gradient,
Tool::Eyedropper,
Tool::Line,
Tool::Polygon,

View File

@ -796,8 +796,15 @@ struct EditorApp {
#[allow(dead_code)] // Must be kept alive to maintain audio output
audio_stream: Option<cpal::Stream>,
audio_controller: Option<std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
/// Holds `input_tx` and device info needed to open the microphone stream on
/// demand (when the user selects an audio input track).
audio_input: Option<daw_backend::InputStreamOpener>,
/// Active microphone/line-in stream; kept alive while an audio input track is selected.
#[allow(dead_code)]
audio_input_stream: Option<cpal::Stream>,
audio_buffer_size: u32,
audio_event_rx: Option<rtrb::Consumer<daw_backend::AudioEvent>>,
audio_events_pending: std::sync::Arc<std::sync::atomic::AtomicBool>,
last_input_monitoring: bool,
/// Count of in-flight graph preset loads — keeps the repaint loop alive
/// until the audio thread sends GraphPresetLoaded events for all of them
pending_graph_loads: std::sync::Arc<std::sync::atomic::AtomicU32>,
@ -1004,13 +1011,16 @@ impl EditorApp {
let action_executor = lightningbeam_core::action::ActionExecutor::new(document);
// Initialize audio system and destructure it for sharing
let (audio_stream, audio_controller, audio_event_rx, audio_sample_rate, audio_channels, file_command_tx, recording_mirror_rx) =
let (audio_stream, audio_controller, audio_event_rx, audio_sample_rate, audio_channels, file_command_tx, recording_mirror_rx, audio_input) =
match daw_backend::AudioSystem::new(None, config.audio_buffer_size) {
Ok(mut audio_system) => {
println!("✅ Audio engine initialized successfully");
// Extract components
let mirror_rx = audio_system.take_recording_mirror_rx();
// take_input_opener pulls out input_tx + sample_rate/channels into
// a self-contained struct that can open the stream on demand.
let input_opener = audio_system.take_input_opener();
let stream = audio_system.stream;
let sample_rate = audio_system.sample_rate;
let channels = audio_system.channels;
@ -1022,7 +1032,7 @@ impl EditorApp {
// Spawn file operations worker
let file_command_tx = FileOperationsWorker::spawn(controller.clone());
(Some(stream), Some(controller), event_rx, sample_rate, channels, file_command_tx, mirror_rx)
(Some(stream), Some(controller), event_rx, sample_rate, channels, file_command_tx, mirror_rx, input_opener)
}
Err(e) => {
eprintln!("❌ Failed to initialize audio engine: {}", e);
@ -1030,7 +1040,7 @@ impl EditorApp {
// Create a dummy channel for file operations (won't be used)
let (tx, _rx) = std::sync::mpsc::channel();
(None, None, None, 48000, 2, tx, None)
(None, None, None, 48000, 2, tx, None, None)
}
};
@ -1078,7 +1088,10 @@ impl EditorApp {
audio_stream,
audio_controller,
audio_event_rx,
audio_events_pending: std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)),
last_input_monitoring: false,
audio_input,
audio_input_stream: None,
audio_buffer_size: config.audio_buffer_size,
pending_graph_loads: std::sync::Arc::new(std::sync::atomic::AtomicU32::new(0)),
commit_raster_floating_if_any: false,
pending_node_group: false,
@ -4805,10 +4818,6 @@ impl eframe::App for EditorApp {
}
let _pre_events_ms = _frame_start.elapsed().as_secs_f64() * 1000.0;
// Check if audio events are pending and request repaint if needed
if self.audio_events_pending.load(std::sync::atomic::Ordering::Relaxed) {
ctx.request_repaint();
}
// Keep repainting while waiting for graph preset loads to complete
if self.pending_graph_loads.load(std::sync::atomic::Ordering::Relaxed) > 0 {
ctx.request_repaint();
@ -4834,9 +4843,7 @@ impl eframe::App for EditorApp {
// Poll audio events from the audio engine
if let Some(event_rx) = &mut self.audio_event_rx {
let mut polled_events = false;
while let Ok(event) = event_rx.pop() {
polled_events = true;
use daw_backend::AudioEvent;
match event {
AudioEvent::PlaybackPosition(time) => {
@ -5249,19 +5256,11 @@ impl eframe::App for EditorApp {
}
}
// If we polled events, set the flag to trigger another update
// (in case more events arrive before the next frame)
if polled_events {
self.audio_events_pending.store(true, std::sync::atomic::Ordering::Relaxed);
} else {
// No events this frame, clear the flag
self.audio_events_pending.store(false, std::sync::atomic::Ordering::Relaxed);
}
}
// Update input monitoring based on active layer
if let Some(controller) = &self.audio_controller {
let should_monitor = self.active_layer_id.map_or(false, |layer_id| {
// Update input monitoring based on active layer (only send command when changed)
{
let should_monitor = self.audio_controller.is_some() && self.active_layer_id.map_or(false, |layer_id| {
let doc = self.action_executor.document();
if let Some(layer) = doc.get_layer(&layer_id) {
matches!(layer, lightningbeam_core::layer::AnyLayer::Audio(a) if a.audio_layer_type == lightningbeam_core::layer::AudioLayerType::Sampled)
@ -5269,8 +5268,13 @@ impl eframe::App for EditorApp {
false
}
});
if let Ok(mut ctrl) = controller.try_lock() {
ctrl.set_input_monitoring(should_monitor);
if should_monitor != self.last_input_monitoring {
self.last_input_monitoring = should_monitor;
if let Some(controller) = &self.audio_controller {
if let Ok(mut ctrl) = controller.try_lock() {
ctrl.set_input_monitoring(should_monitor);
}
}
}
}
@ -5679,6 +5683,9 @@ impl eframe::App for EditorApp {
schneider_max_error: &mut self.schneider_max_error,
raster_settings: &mut self.raster_settings,
audio_controller: self.audio_controller.as_ref(),
audio_input_opener: &mut self.audio_input,
audio_input_stream: &mut self.audio_input_stream,
audio_buffer_size: self.audio_buffer_size,
video_manager: &self.video_manager,
playback_time: &mut self.playback_time,
is_playing: &mut self.is_playing,

View File

@ -48,10 +48,14 @@ pub fn gradient_stop_editor(
});
// ── Gradient bar + handles ────────────────────────────────────────────
let bar_height = 22.0_f32;
let handle_h = 14.0_f32;
let bar_height = 22.0_f32;
let peak_h = 7.0_f32; // triangular roof height
let body_h = 12.0_f32; // rectangular body height
let handle_h = peak_h + body_h;
let body_half_w = 6.0_f32;
let right_pad = 10.0_f32; // keep rightmost stop clear of infopanel scrollbar
let total_height = bar_height + handle_h + 4.0;
let avail_w = ui.available_width();
let avail_w = ui.available_width() - right_pad;
let (bar_rect, bar_resp) = ui.allocate_exact_size(
Vec2::new(avail_w, total_height),
@ -68,21 +72,34 @@ pub fn gradient_stop_editor(
// Draw checkerboard background (transparent indicator).
draw_checker(&painter, bar);
// Draw gradient bar as N segments.
let seg = 128_usize;
for i in 0..seg {
let t0 = i as f32 / seg as f32;
let t1 = (i + 1) as f32 / seg as f32;
let t = (t0 + t1) * 0.5;
let [r, g, b, a] = gradient.eval(t);
let col = Color32::from_rgba_unmultiplied(r, g, b, a);
let x0 = bar.min.x + t0 * bar.width();
let x1 = bar.min.x + t1 * bar.width();
let seg_rect = Rect::from_min_max(
egui::pos2(x0, bar.min.y),
egui::pos2(x1, bar.max.y),
);
painter.rect_filled(seg_rect, 0.0, col);
// Draw gradient bar as a mesh: one quad per stop-pair with vertex colours
// so the GPU interpolates linearly — no segmentation artefacts.
{
use egui::epaint::{Mesh, Vertex};
let mut mesh = Mesh::default();
let stops = &gradient.stops;
let color_at = |t: f32| -> Color32 {
let [r, g, b, a] = gradient.eval(t);
Color32::from_rgba_unmultiplied(r, g, b, a)
};
// One quad for each consecutive stop pair.
for pair in stops.windows(2) {
let t0 = pair[0].position;
let t1 = pair[1].position;
let c0 = color_at(t0);
let c1 = color_at(t1);
let x0 = bar.min.x + t0 * bar.width();
let x1 = bar.min.x + t1 * bar.width();
let base = mesh.vertices.len() as u32;
mesh.vertices.extend_from_slice(&[
Vertex { pos: egui::pos2(x0, bar.min.y), uv: egui::Pos2::ZERO, color: c0 },
Vertex { pos: egui::pos2(x1, bar.min.y), uv: egui::Pos2::ZERO, color: c1 },
Vertex { pos: egui::pos2(x1, bar.max.y), uv: egui::Pos2::ZERO, color: c1 },
Vertex { pos: egui::pos2(x0, bar.max.y), uv: egui::Pos2::ZERO, color: c0 },
]);
mesh.indices.extend_from_slice(&[base, base+1, base+2, base, base+2, base+3]);
}
painter.add(egui::Shape::mesh(mesh));
}
// Outline.
painter.rect_stroke(bar, 2.0, Stroke::new(1.0, Color32::from_gray(60)), eframe::egui::StrokeKind::Middle);
@ -98,104 +115,159 @@ pub fn gradient_stop_editor(
color: ShapeColor::rgba(r, g, b, a),
});
gradient.stops.sort_by(|a, b| a.position.partial_cmp(&b.position).unwrap());
*selected_stop = gradient.stops.iter().position(|s| s.position == t);
*selected_stop = gradient.stops.iter().position(|s| (s.position - t).abs() < 1e-5);
changed = true;
}
}
}
// Draw stop handles.
// We need to detect drags per-handle, so allocate individual rects with the
// regular egui input model. To avoid borrow conflicts we collect interactions
// before mutating.
let handle_w = 10.0_f32;
let n_stops = gradient.stops.len();
// ── Stop handles: interact + popup ───────────────────────────────────
let n_stops = gradient.stops.len();
let mut drag_idx: Option<usize> = None;
let mut drag_delta: f32 = 0.0;
let mut click_idx: Option<usize> = None;
// To render handles after collecting, remember their rects.
// Top-anchored hit rects (peak touches track.min.y).
let handle_rects: Vec<Rect> = (0..n_stops).map(|i| {
let cx = track.min.x + gradient.stops[i].position * track.width();
Rect::from_center_size(
egui::pos2(cx, track.center().y),
Vec2::new(handle_w, handle_h),
Rect::from_min_size(
egui::pos2(cx - body_half_w, track.min.y),
Vec2::new(body_half_w * 2.0, handle_h),
)
}).collect();
let mut drag_delta : f32 = 0.0;
let mut drag_active: bool = false;
let mut drag_ended : bool = false;
let mut delete_idx : Option<usize> = None;
for (i, &h_rect) in handle_rects.iter().enumerate() {
let resp = ui.interact(h_rect, ui.id().with(("grad_handle", i)), Sense::click_and_drag());
// Anchor the dragged stop at drag-start time, before any sort can change indices.
if resp.drag_started() {
*selected_stop = Some(i);
}
if resp.dragged() {
drag_idx = Some(i);
drag_delta = resp.drag_delta().x / track.width();
drag_active = true;
}
if resp.drag_stopped() {
drag_ended = true;
}
if resp.clicked() {
click_idx = Some(i);
*selected_stop = Some(i);
}
// Right-click on an interior stop (not the first or last) deletes it.
if resp.secondary_clicked() && i > 0 && i < n_stops - 1 {
delete_idx = Some(i);
}
// Color picker popup — opens on click, closes on click-outside.
egui::containers::Popup::from_toggle_button_response(&resp)
.show(|ui| {
ui.spacing_mut().slider_width = 200.0;
let stop = &mut gradient.stops[i];
let mut c32 = Color32::from_rgba_unmultiplied(
stop.color.r, stop.color.g, stop.color.b, stop.color.a,
);
if egui::color_picker::color_picker_color32(
ui, &mut c32, egui::color_picker::Alpha::OnlyBlend,
) {
// Color32 stores premultiplied RGB; unmultiply before storing
// as straight-alpha ShapeColor to avoid darkening on round-trip.
let [pr, pg, pb, a] = c32.to_array();
let unpm = |c: u8| -> u8 {
if a == 0 { 0 } else { ((c as u32 * 255 + a as u32 / 2) / a as u32).min(255) as u8 }
};
stop.color = ShapeColor::rgba(unpm(pr), unpm(pg), unpm(pb), a);
changed = true;
}
});
}
// Apply drag.
if let (Some(i), delta) = (drag_idx, drag_delta) {
if delta != 0.0 {
let new_pos = (gradient.stops[i].position + delta).clamp(0.0, 1.0);
gradient.stops[i].position = new_pos;
// Re-sort and track the moved stop.
gradient.stops.sort_by(|a, b| a.position.partial_cmp(&b.position).unwrap());
// Find new index of the moved stop (closest position match).
if let Some(ref mut sel) = *selected_stop {
// Re-find by position proximity.
*sel = gradient.stops.iter().enumerate()
.min_by(|(_, a), (_, b)| {
let pa = (a.position - (gradient.stops.get(i).map_or(0.0, |s| s.position))).abs();
let pb = (b.position - (gradient.stops.get(i).map_or(0.0, |s| s.position))).abs();
pa.partial_cmp(&pb).unwrap()
})
.map(|(idx, _)| idx)
.unwrap_or(0);
// Apply drag to whichever stop selected_stop points at.
// Using selected_stop (anchored at drag_started) instead of the widget index
// means sorting never causes a different stop to be dragged when the dragged
// stop passes over a neighbour.
if drag_active {
if let Some(cur) = *selected_stop {
if drag_delta != 0.0 {
let new_pos = (gradient.stops[cur].position + drag_delta).clamp(0.0, 1.0);
gradient.stops[cur].position = new_pos;
gradient.stops.sort_by(|a, b| a.position.partial_cmp(&b.position).unwrap());
// Re-find the moved stop by its new position so selected_stop stays correct.
*selected_stop = gradient.stops.iter()
.position(|s| (s.position - new_pos).abs() < 1e-5);
changed = true;
}
changed = true;
}
}
if let Some(i) = click_idx {
*selected_stop = Some(i);
// Merge-on-drop: if the dragged stop was released within one handle-width of
// another stop, delete that other stop (provided ≥ 3 stops remain).
if drag_ended {
if let Some(cur) = *selected_stop {
if gradient.stops.len() > 2 {
let my_pos = gradient.stops[cur].position;
let merge_thresh = body_half_w / track.width();
if let Some(victim) = gradient.stops.iter().enumerate()
.find(|&(j, s)| j != cur && (s.position - my_pos).abs() < merge_thresh)
.map(|(j, _)| j)
{
gradient.stops.remove(victim);
if victim < cur {
*selected_stop = Some(cur - 1);
}
changed = true;
}
}
}
}
// Paint handles on top (after interaction so they visually react).
for (i, h_rect) in handle_rects.iter().enumerate() {
// Apply right-click delete (after loop to avoid borrow conflicts).
if let Some(i) = delete_idx {
gradient.stops.remove(i);
if *selected_stop == Some(i) {
*selected_stop = None;
} else if let Some(sel) = *selected_stop {
if sel > i {
*selected_stop = Some(sel - 1);
}
}
changed = true;
}
// ── Paint handles ─────────────────────────────────────────────────────
// handle_rects was built before any deletions this frame; guard against OOB.
for (i, h_rect) in handle_rects.iter().enumerate().take(gradient.stops.len()) {
let col = ShapeColor_to_Color32(gradient.stops[i].color);
let is_selected = *selected_stop == Some(i);
// Draw a downward-pointing triangle.
let cx = h_rect.center().x;
let top = h_rect.min.y;
let bot = h_rect.max.y;
let hw = h_rect.width() * 0.5;
let tri = vec![
egui::pos2(cx, bot),
egui::pos2(cx - hw, top),
egui::pos2(cx + hw, top),
];
let stroke = Stroke::new(
if is_selected { 2.0 } else { 1.0 },
if is_selected { Color32::WHITE } else { Color32::from_gray(80) },
);
let cx = h_rect.center().x;
let apex = egui::pos2(cx, track.min.y);
let shoulder_y = track.min.y + peak_h;
let bottom_y = track.min.y + handle_h;
// Convex pentagon: apex → upper-right → lower-right → lower-left → upper-left
painter.add(egui::Shape::convex_polygon(
tri,
vec![
apex,
egui::pos2(cx + body_half_w, shoulder_y),
egui::pos2(cx + body_half_w, bottom_y),
egui::pos2(cx - body_half_w, bottom_y),
egui::pos2(cx - body_half_w, shoulder_y),
],
col,
Stroke::new(if is_selected { 2.0 } else { 1.0 },
if is_selected { Color32::WHITE } else { Color32::from_gray(100) }),
stroke,
));
}
// ── Selected stop detail ──────────────────────────────────────────────
// ── Selected stop detail (position + remove) ──────────────────────────
if let Some(i) = *selected_stop {
if i < gradient.stops.len() {
ui.separator();
ui.horizontal(|ui| {
let stop = &mut gradient.stops[i];
let mut rgba = [stop.color.r, stop.color.g, stop.color.b, stop.color.a];
if ui.color_edit_button_srgba_unmultiplied(&mut rgba).changed() {
stop.color = ShapeColor::rgba(rgba[0], rgba[1], rgba[2], rgba[3]);
changed = true;
}
ui.label("Position:");
if ui.add(
DragValue::new(&mut stop.position)

View File

@ -540,15 +540,17 @@ impl InfopanelPane {
});
}
Tool::Gradient if active_is_raster => {
ui.horizontal(|ui| {
ui.label("Opacity:");
ui.add(egui::Slider::new(
&mut shared.raster_settings.gradient_opacity,
0.0_f32..=1.0,
).custom_formatter(|v, _| format!("{:.0}%", v * 100.0)));
});
ui.add_space(4.0);
Tool::Gradient => {
if active_is_raster {
ui.horizontal(|ui| {
ui.label("Opacity:");
ui.add(egui::Slider::new(
&mut shared.raster_settings.gradient_opacity,
0.0_f32..=1.0,
).custom_formatter(|v, _| format!("{:.0}%", v * 100.0)));
});
ui.add_space(4.0);
}
gradient_stop_editor(
ui,
&mut shared.raster_settings.gradient,

View File

@ -192,6 +192,12 @@ pub struct SharedPaneState<'a> {
pub raster_settings: &'a mut crate::tools::RasterToolSettings,
/// Audio engine controller for playback control (wrapped in Arc<Mutex<>> for thread safety)
pub audio_controller: Option<&'a std::sync::Arc<std::sync::Mutex<daw_backend::EngineController>>>,
/// Opener for the microphone/line-in stream — consumed on first use.
pub audio_input_opener: &'a mut Option<daw_backend::InputStreamOpener>,
/// Live input stream handle; kept alive while recording is active.
pub audio_input_stream: &'a mut Option<cpal::Stream>,
/// Buffer size (frames) used for the output stream, passed to the input stream opener.
pub audio_buffer_size: u32,
/// Video manager for video decoding and frame caching
pub video_manager: &'a std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>,
/// Mapping from Document layer UUIDs to daw-backend TrackIds

View File

@ -1773,7 +1773,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
// Stipple faces with visible fill
for (i, face) in sel_dcel.faces.iter().enumerate() {
if face.deleted || i == 0 { continue; }
if face.fill_color.is_none() && face.image_fill.is_none() { continue; }
if face.fill_color.is_none() && face.image_fill.is_none() && face.gradient_fill.is_none() { continue; }
let face_id = DcelFaceId(i as u32);
let path = sel_dcel.face_to_bezpath_with_holes(face_id);
scene.fill(
@ -2748,8 +2748,10 @@ pub struct StagePane {
warp_state: Option<WarpState>,
/// Live state for the Liquify tool.
liquify_state: Option<LiquifyState>,
/// Live state for the Gradient fill tool.
/// Live state for the Gradient fill tool (raster layers).
gradient_state: Option<GradientState>,
/// Live state for the Gradient fill tool (vector layers).
vector_gradient_state: Option<VectorGradientState>,
/// GPU gradient fill dispatch to run next prepare() frame.
pending_gradient_op: Option<PendingGradientOp>,
/// GPU ops for Warp/Liquify to dispatch in prepare().
@ -2873,6 +2875,15 @@ struct GradientState {
float_offset: Option<(f32, f32)>,
}
/// Live state for an ongoing vector-layer Gradient fill drag.
struct VectorGradientState {
    // UUID of the vector layer whose face(s) receive the gradient on release.
    layer_id: uuid::Uuid,
    // Playback time captured when the drag started; passed to the fill action
    // so the edit targets the keyframe active at drag begin.
    time: f64,
    // Faces the gradient is applied to: the clicked face, or the entire face
    // selection when the clicked face was already part of it.
    face_ids: Vec<lightningbeam_core::dcel2::FaceId>,
    start: egui::Vec2, // World-space drag start
    end: egui::Vec2, // World-space drag end
}
/// GPU ops queued by the Warp/Liquify handlers for `prepare()`.
enum PendingWarpOp {
/// Upload control-point grid displacements and run warp-apply shader.
@ -3162,6 +3173,7 @@ impl StagePane {
warp_state: None,
liquify_state: None,
gradient_state: None,
vector_gradient_state: None,
pending_gradient_op: None,
pending_warp_ops: Vec::new(),
active_raster_tool: None,
@ -8747,6 +8759,11 @@ impl StagePane {
None => return,
};
// Delegate to the vector handler when the active layer is a vector layer.
if let Some(AnyLayer::Vector(_)) = shared.action_executor.document().get_layer(&active_layer_id) {
return self.handle_vector_gradient_tool(ui, response, world_pos, shared, response.rect);
}
let drag_started = response.drag_started();
let dragged = response.dragged();
let drag_stopped = response.drag_stopped();
@ -9075,7 +9092,100 @@ impl StagePane {
out
}
/// Compute gradient pixels and queue upload to the preview GPU canvas for next prepare().
/// Handle the Gradient tool when the active layer is a vector layer.
///
/// A drag from start→end across a face defines the gradient axis. On release
/// the current gradient settings (stops, kind, extend) are committed via
/// `SetFillPaintAction`, which records an undo entry.
fn handle_vector_gradient_tool(
    &mut self,
    ui: &mut egui::Ui,
    response: &egui::Response,
    world_pos: egui::Vec2,
    shared: &mut SharedPaneState,
    rect: egui::Rect,
) {
    use lightningbeam_core::dcel2::FaceId;
    use lightningbeam_core::layer::AnyLayer;

    let Some(layer_id) = *shared.active_layer_id else { return };

    // ── Drag began: resolve the target face(s) under the press origin ────
    if response.drag_started() {
        // Convert the press origin from screen to world space; fall back to
        // the current pointer world position when no press origin is known.
        let press_world = match ui.input(|i| i.pointer.press_origin()) {
            Some(p) => {
                let rel = p - rect.min - self.pan_offset;
                egui::Vec2::new(rel.x / self.zoom, rel.y / self.zoom)
            }
            None => world_pos,
        };

        let doc = shared.action_executor.document();
        let Some(AnyLayer::Vector(vector_layer)) = doc.get_layer(&layer_id) else { return };
        let Some(keyframe) = vector_layer.keyframe_at(*shared.playback_time) else { return };

        let hit = keyframe.dcel.find_face_containing_point(
            vello::kurbo::Point::new(press_world.x as f64, press_world.y as f64),
        );
        // Face 0 is the unbounded background face — nothing to fill.
        if hit == FaceId(0) || keyframe.dcel.face(hit).deleted {
            return;
        }

        // Clicking inside the current selection applies the gradient to every
        // selected face; otherwise only to the face that was hit.
        let targets: Vec<FaceId> = if shared.selection.selected_faces().contains(&hit) {
            shared.selection.selected_faces().iter().cloned().collect()
        } else {
            vec![hit]
        };

        self.vector_gradient_state = Some(VectorGradientState {
            layer_id,
            time: *shared.playback_time,
            face_ids: targets,
            start: press_world,
            end: press_world,
        });
    }

    // ── Drag in progress: track the live end point ────────────────────────
    if response.dragged() {
        if let Some(drag) = self.vector_gradient_state.as_mut() {
            drag.end = world_pos;
        }
    }

    // ── Drag released: build the gradient and commit it as an action ─────
    if !response.drag_stopped() {
        return;
    }
    let Some(drag) = self.vector_gradient_state.take() else { return };

    let dx = drag.end.x - drag.start.x;
    let dy = drag.end.y - drag.start.y;
    // A tiny / zero-length drag keeps the angle stored in the current
    // gradient settings instead of deriving one from the drag vector.
    let angle = if dx.abs() < 0.5 && dy.abs() < 0.5 {
        shared.raster_settings.gradient.angle
    } else {
        dy.atan2(dx).to_degrees()
    };

    let gradient = lightningbeam_core::gradient::ShapeGradient {
        kind: shared.raster_settings.gradient.kind,
        stops: shared.raster_settings.gradient.stops.clone(),
        angle,
        extend: shared.raster_settings.gradient.extend,
        start_world: Some((drag.start.x as f64, drag.start.y as f64)),
        end_world: Some((drag.end.x as f64, drag.end.y as f64)),
    };

    use lightningbeam_core::actions::SetFillPaintAction;
    let action = SetFillPaintAction::gradient(
        drag.layer_id, drag.time, drag.face_ids, Some(gradient),
    );
    if let Err(e) = shared.action_executor.execute(Box::new(action)) {
        eprintln!("Vector gradient fill: {e}");
    }
}
fn handle_transform_tool(
&mut self,
ui: &mut egui::Ui,
@ -11705,6 +11815,25 @@ impl PaneRenderer for StagePane {
ui.painter().add(cb);
// Gradient direction arrow overlay for vector gradient drags.
if matches!(*shared.selected_tool, lightningbeam_core::tool::Tool::Gradient) {
if let Some(ref gs) = self.vector_gradient_state {
let mut painter = ui.ctx().layer_painter(egui::LayerId::new(
egui::Order::Foreground,
egui::Id::new("vgrad_arrow"),
));
painter.set_clip_rect(rect);
let w2s = |w: egui::Vec2| -> egui::Pos2 {
rect.min + self.pan_offset + w * self.zoom
};
let p0 = w2s(gs.start);
let p1 = w2s(gs.end);
painter.line_segment([p0, p1], egui::Stroke::new(2.0, egui::Color32::WHITE));
painter.circle_stroke(p0, 5.0, egui::Stroke::new(1.5, egui::Color32::WHITE));
painter.circle_filled(p1, 4.0, egui::Color32::WHITE);
}
}
// Show camera info overlay
let info_color = shared.theme.text_color(&["#stage", ".text-secondary"], ui.ctx(), egui::Color32::from_gray(200));
ui.painter().text(

View File

@ -137,6 +137,7 @@ enum ClipDragType {
enum TimeDisplayFormat {
    /// Ruler ticks and readout in seconds.
    Seconds,
    /// Musical measures derived from the document BPM and time signature.
    Measures,
    /// Frame numbers derived from the document framerate (shown 1-based).
    Frames,
}
/// State for an in-progress layer header drag-to-reorder operation.
@ -657,6 +658,15 @@ impl TimelinePane {
}
RecordCandidate::AudioSampled => {
if let Some(&track_id) = shared.layer_to_track_map.get(&layer_id) {
// Open the input stream now if it hasn't been opened yet.
if shared.audio_input_stream.is_none() {
if let Some(opener) = shared.audio_input_opener.take() {
match opener.open(shared.audio_buffer_size) {
Ok(stream) => *shared.audio_input_stream = Some(stream),
Err(e) => eprintln!("⚠️ Could not open input stream: {e}"),
}
}
}
if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.start_recording(track_id, start_time);
@ -1016,9 +1026,21 @@ impl TimelinePane {
.unwrap_or(1.0)
}
/// Calculate appropriate interval for frames ruler based on zoom level
fn calculate_ruler_interval_frames(&self, framerate: f64) -> i64 {
let target_px = 75.0;
let px_per_frame = self.pixels_per_second / framerate as f32;
let target_frames = (target_px / px_per_frame).round() as i64;
let intervals = [1i64, 2, 5, 10, 20, 50, 100, 200, 500, 1000];
intervals.iter()
.min_by_key(|&&i| (i - target_frames).abs())
.copied()
.unwrap_or(1)
}
/// Render the time ruler at the top
fn render_ruler(&self, ui: &mut egui::Ui, rect: egui::Rect, theme: &crate::theme::Theme,
bpm: f64, time_sig: &lightningbeam_core::document::TimeSignature) {
bpm: f64, time_sig: &lightningbeam_core::document::TimeSignature, framerate: f64) {
let painter = ui.painter();
// Background
@ -1116,6 +1138,44 @@ impl TimelinePane {
}
}
}
TimeDisplayFormat::Frames => {
let interval = self.calculate_ruler_interval_frames(framerate);
let start_frame = (self.viewport_start_time.max(0.0) * framerate).floor() as i64;
let end_frame = (self.x_to_time(rect.width()) * framerate).ceil() as i64;
// Align so labels fall on display multiples of interval (5, 10, 15...)
let start_frame = ((start_frame + interval) / interval) * interval - 1;
let mut frame = start_frame;
while frame <= end_frame {
let x = self.time_to_x(frame as f64 / framerate);
if x >= 0.0 && x <= rect.width() {
painter.line_segment(
[rect.min + egui::vec2(x, rect.height() - 10.0),
rect.min + egui::vec2(x, rect.height())],
egui::Stroke::new(1.0, theme.text_color(&["#timeline", ".ruler-tick"], ui.ctx(), egui::Color32::from_gray(100))),
);
painter.text(
rect.min + egui::vec2(x + 2.0, 5.0), egui::Align2::LEFT_TOP,
format!("{}", frame + 1),
egui::FontId::proportional(12.0), text_color,
);
}
let sub = interval / 5;
if sub >= 1 {
for i in 1..5i64 {
let minor_x = self.time_to_x((frame + sub * i) as f64 / framerate);
if minor_x >= 0.0 && minor_x <= rect.width() {
painter.line_segment(
[rect.min + egui::vec2(minor_x, rect.height() - 5.0),
rect.min + egui::vec2(minor_x, rect.height())],
egui::Stroke::new(1.0, theme.text_color(&["#timeline", ".ruler-tick-minor"], ui.ctx(), egui::Color32::from_gray(60))),
);
}
}
}
frame += interval;
}
}
}
}
@ -2038,6 +2098,54 @@ impl TimelinePane {
);
}
}
TimeDisplayFormat::Frames => {
let framerate = document.framerate;
let px_per_frame = self.pixels_per_second / framerate as f32;
// Per-frame column shading when frames are wide enough to see
if px_per_frame >= 3.0 {
let shade_color = egui::Color32::from_rgba_unmultiplied(255, 255, 255, 8);
let start_frame = (self.viewport_start_time.max(0.0) * framerate).floor() as i64;
let end_frame = (self.x_to_time(rect.width()) * framerate).ceil() as i64;
for frame in start_frame..=end_frame {
if (frame + 1) % 5 == 0 {
let x0 = self.time_to_x(frame as f64 / framerate);
let x1 = self.time_to_x((frame + 1) as f64 / framerate);
if x1 >= 0.0 && x0 <= rect.width() {
let x0 = x0.max(0.0);
let x1 = x1.min(rect.width());
painter.rect_filled(
egui::Rect::from_min_max(
egui::pos2(rect.min.x + x0, y),
egui::pos2(rect.min.x + x1, y + LAYER_HEIGHT),
),
0.0,
shade_color,
);
}
}
}
}
// Grid lines at ruler interval
let interval = self.calculate_ruler_interval_frames(framerate);
let start_frame = (self.viewport_start_time.max(0.0) * framerate).floor() as i64;
let end_frame = (self.x_to_time(rect.width()) * framerate).ceil() as i64;
// Align so grid lines fall on display multiples of interval (5, 10, 15...)
let start_frame = ((start_frame + interval) / interval) * interval - 1;
let mut frame = start_frame;
while frame <= end_frame {
let x = self.time_to_x(frame as f64 / framerate);
if x >= 0.0 && x <= rect.width() {
painter.line_segment(
[egui::pos2(rect.min.x + x, y),
egui::pos2(rect.min.x + x, y + LAYER_HEIGHT)],
egui::Stroke::new(1.0, theme.border_color(&["#timeline", ".grid-line"], ui.ctx(), egui::Color32::from_gray(30))),
);
}
frame += interval;
}
}
}
// For collapsed groups, render merged clip spans and skip normal clip rendering
@ -4156,9 +4264,9 @@ impl PaneRenderer for TimelinePane {
// Time display (format-dependent)
{
let (bpm, time_sig_num, time_sig_den) = {
let (bpm, time_sig_num, time_sig_den, framerate) = {
let doc = shared.action_executor.document();
(doc.bpm, doc.time_signature.numerator, doc.time_signature.denominator)
(doc.bpm, doc.time_signature.numerator, doc.time_signature.denominator, doc.framerate)
};
match self.time_display_format {
@ -4176,6 +4284,13 @@ impl PaneRenderer for TimelinePane {
time_sig_num, time_sig_den,
));
}
TimeDisplayFormat::Frames => {
let current_frame = (*shared.playback_time * framerate).floor() as i64 + 1;
let total_frames = (self.duration * framerate).ceil() as i64;
ui.colored_label(text_color, format!(
"Frame: {} / {} | {:.0} FPS", current_frame, total_frames, framerate
));
}
}
ui.separator();
@ -4190,11 +4305,13 @@ impl PaneRenderer for TimelinePane {
.selected_text(match self.time_display_format {
TimeDisplayFormat::Seconds => "Seconds",
TimeDisplayFormat::Measures => "Measures",
TimeDisplayFormat::Frames => "Frames",
})
.width(80.0)
.show_ui(ui, |ui| {
ui.selectable_value(&mut self.time_display_format, TimeDisplayFormat::Seconds, "Seconds");
ui.selectable_value(&mut self.time_display_format, TimeDisplayFormat::Measures, "Measures");
ui.selectable_value(&mut self.time_display_format, TimeDisplayFormat::Frames, "Frames");
});
ui.separator();
@ -4378,7 +4495,7 @@ impl PaneRenderer for TimelinePane {
// Render time ruler (clip to ruler rect)
ui.set_clip_rect(ruler_rect.intersect(original_clip_rect));
self.render_ruler(ui, ruler_rect, shared.theme, document.bpm, &document.time_signature);
self.render_ruler(ui, ruler_rect, shared.theme, document.bpm, &document.time_signature, document.framerate);
// Render layer rows with clipping
ui.set_clip_rect(content_rect.intersect(original_clip_rect));