Compare commits

...

2 Commits

Author SHA1 Message Date
Skyler Lehmkuhl 65fa8a3918 Add preset pane 2026-02-16 06:06:03 -05:00
Skyler Lehmkuhl 0ff651f4a5 Use forked egui to fix wayland/ibus bug 2026-02-16 04:05:59 -05:00
15 changed files with 991 additions and 217 deletions

View File

@ -1970,6 +1970,18 @@ impl Engine {
))),
}
}
Query::GetVoiceOscilloscopeData(track_id, va_node_id, inner_node_id, sample_count) => {
match self.project.get_voice_oscilloscope_data(track_id, va_node_id, inner_node_id, sample_count) {
Some((audio, cv)) => {
use crate::command::OscilloscopeData;
QueryResponse::OscilloscopeData(Ok(OscilloscopeData { audio, cv }))
}
None => QueryResponse::OscilloscopeData(Err(format!(
"Failed to get voice oscilloscope data from track {} VA {} node {}",
track_id, va_node_id, inner_node_id
))),
}
}
Query::GetMidiClip(_track_id, clip_id) => {
// Get MIDI clip data from the pool
if let Some(clip) = self.project.midi_clip_pool.get_clip(clip_id) {
@ -3215,6 +3227,25 @@ impl EngineController {
Err("Query timeout".to_string())
}
/// Query oscilloscope data from a node inside a VoiceAllocator's best voice.
///
/// Sends a `GetVoiceOscilloscopeData` query to the audio thread and polls the
/// response queue for up to 100 ms.
///
/// Returns the oscilloscope data on success, or an error string when the
/// query queue is full, the engine reports a failure, or the wait times out.
pub fn query_voice_oscilloscope_data(&mut self, track_id: TrackId, va_node_id: u32, inner_node_id: u32, sample_count: usize) -> Result<crate::command::OscilloscopeData, String> {
    if self.query_tx.push(Query::GetVoiceOscilloscopeData(track_id, va_node_id, inner_node_id, sample_count)).is_err() {
        return Err("Failed to send query - queue full".to_string());
    }
    // The audio thread answers asynchronously; poll with a short sleep so we
    // don't burn a core while waiting.
    let start = std::time::Instant::now();
    let timeout = std::time::Duration::from_millis(100);
    while start.elapsed() < timeout {
        // NOTE(review): any popped response that is not OscilloscopeData is
        // silently dropped here — this mirrors the sibling query_* methods,
        // but confirm interleaved queries cannot occur.
        if let Ok(QueryResponse::OscilloscopeData(result)) = self.query_response_rx.pop() {
            return result;
        }
        std::thread::sleep(std::time::Duration::from_micros(50));
    }
    Err("Query timeout".to_string())
}
/// Query automation keyframes from an AutomationInput node
pub fn query_automation_keyframes(&mut self, track_id: TrackId, node_id: u32) -> Result<Vec<crate::command::types::AutomationKeyframeData>, String> {
// Send query

View File

@ -29,6 +29,8 @@ pub struct FilterNode {
resonance: f32,
filter_type: FilterType,
sample_rate: u32,
/// Last cutoff frequency applied to filter coefficients (for change detection with CV modulation)
last_applied_cutoff: f32,
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
parameters: Vec<Parameter>,
@ -62,6 +64,7 @@ impl FilterNode {
resonance: 0.707,
filter_type: FilterType::Lowpass,
sample_rate: 44100,
last_applied_cutoff: 1000.0,
inputs,
outputs,
parameters,
@ -150,11 +153,20 @@ impl AudioNode for FilterNode {
output[..len].copy_from_slice(&input[..len]);
// Check for CV modulation (modulates cutoff)
// CV input (0..1) scales the cutoff: 0 = 20 Hz, 1 = base cutoff * 2
// Sample CV at the start of the buffer - per-sample would be too expensive
let cutoff_cv = cv_input_or_default(inputs, 1, 0, self.cutoff);
if (cutoff_cv - self.cutoff).abs() > 0.01 {
// CV changed significantly, update filter
let new_cutoff = cutoff_cv.clamp(20.0, 20000.0);
let cutoff_cv_raw = cv_input_or_default(inputs, 1, 0, f32::NAN);
let effective_cutoff = if cutoff_cv_raw.is_nan() {
self.cutoff
} else {
// Map CV (0..1) to frequency range around the base cutoff
// 0.5 = base cutoff, 0 = cutoff / 4, 1 = cutoff * 4 (two octaves each way)
let octave_shift = (cutoff_cv_raw.clamp(0.0, 1.0) - 0.5) * 4.0;
self.cutoff * 2.0_f32.powf(octave_shift)
};
if (effective_cutoff - self.last_applied_cutoff).abs() > 0.01 {
let new_cutoff = effective_cutoff.clamp(20.0, 20000.0);
self.last_applied_cutoff = new_cutoff;
match self.filter_type {
FilterType::Lowpass => {
self.filter.set_lowpass(new_cutoff, self.resonance, self.sample_rate as f32);
@ -202,6 +214,7 @@ impl AudioNode for FilterNode {
resonance: self.resonance,
filter_type: self.filter_type,
sample_rate: self.sample_rate,
last_applied_cutoff: self.cutoff,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),

View File

@ -176,6 +176,35 @@ impl VoiceAllocatorNode {
.unwrap_or(0)
}
/// Get oscilloscope data from the most relevant voice's subgraph.
/// Priority: first active voice → first releasing voice → first voice.
pub fn get_voice_oscilloscope_data(&self, node_id: u32, sample_count: usize) -> Option<(Vec<f32>, Vec<f32>)> {
    // Observe whichever voice is currently most relevant (see best_voice_index).
    let graph = &self.voice_instances[self.best_voice_index()];
    let node_idx = petgraph::stable_graph::NodeIndex::new(node_id as usize);
    // Audio history is mandatory (None propagates); CV history is optional
    // and falls back to an empty trace.
    graph.get_oscilloscope_data(node_idx, sample_count).map(|audio| {
        let cv = graph.get_oscilloscope_cv_data(node_idx, sample_count).unwrap_or_default();
        (audio, cv)
    })
}
/// Find the best voice index to observe: first active → first releasing → 0
fn best_voice_index(&self) -> usize {
    let voices = &self.voices[..self.voice_count];
    // Prefer a held (active, non-releasing) voice, then one still in its
    // release tail, and fall back to voice 0 when nothing is sounding.
    voices
        .iter()
        .position(|v| v.active && !v.releasing)
        .or_else(|| voices.iter().position(|v| v.active && v.releasing))
        .unwrap_or(0)
}
/// Find all voices playing a specific note (held, not yet releasing)
fn find_voices_for_note_off(&self, note: u8) -> Vec<usize> {
self.voices[..self.voice_count]

View File

@ -228,6 +228,18 @@ impl Project {
None
}
/// Get oscilloscope data from a node inside a VoiceAllocator's best voice
pub fn get_voice_oscilloscope_data(&self, track_id: TrackId, va_node_id: u32, inner_node_id: u32, sample_count: usize) -> Option<(Vec<f32>, Vec<f32>)> {
    // Only MIDI tracks carry an instrument graph that can host a VoiceAllocator.
    let Some(TrackNode::Midi(track)) = self.tracks.get(&track_id) else {
        return None;
    };
    let va_idx = petgraph::stable_graph::NodeIndex::new(va_node_id as usize);
    // The downcast yields None when the node at va_idx is not a VoiceAllocator.
    track
        .instrument_graph
        .get_node(va_idx)?
        .as_any()
        .downcast_ref::<crate::audio::node_graph::nodes::VoiceAllocatorNode>()?
        .get_voice_oscilloscope_data(inner_node_id, sample_count)
}
/// Get all root-level track IDs
pub fn root_tracks(&self) -> &[TrackId] {
&self.root_tracks

View File

@ -319,6 +319,9 @@ pub enum Query {
GetTemplateState(TrackId, u32),
/// Get oscilloscope data from a node (track_id, node_id, sample_count)
GetOscilloscopeData(TrackId, u32, usize),
/// Get oscilloscope data from a node inside a VoiceAllocator's best voice
/// (track_id, va_node_id, inner_node_id, sample_count)
GetVoiceOscilloscopeData(TrackId, u32, u32, usize),
/// Get MIDI clip data (track_id, clip_id)
GetMidiClip(TrackId, MidiClipId),
/// Get keyframes from an AutomationInput node (track_id, node_id)

View File

@ -1798,8 +1798,6 @@ dependencies = [
[[package]]
name = "ecolor"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71ddb8ac7643d1dba1bb02110e804406dd459a838efcb14011ced10556711a8e"
dependencies = [
"bytemuck",
"emath",
@ -1809,8 +1807,6 @@ dependencies = [
[[package]]
name = "eframe"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "457481173e6db5ca9fa2be93a58df8f4c7be639587aeb4853b526c6cf87db4e6"
dependencies = [
"ahash 0.8.12",
"bytemuck",
@ -1846,8 +1842,6 @@ dependencies = [
[[package]]
name = "egui"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a9b567d356674e9a5121ed3fedfb0a7c31e059fe71f6972b691bcd0bfc284e3"
dependencies = [
"accesskit",
"ahash 0.8.12",
@ -1866,8 +1860,6 @@ dependencies = [
[[package]]
name = "egui-wgpu"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e4d209971c84b2352a06174abdba701af1e552ce56b144d96f2bd50a3c91236"
dependencies = [
"ahash 0.8.12",
"bytemuck",
@ -1886,8 +1878,6 @@ dependencies = [
[[package]]
name = "egui-winit"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec6687e5bb551702f4ad10ac428bab12acf9d53047ebb1082d4a0ed8c6251a29"
dependencies = [
"accesskit_winit",
"arboard",
@ -1917,8 +1907,6 @@ dependencies = [
[[package]]
name = "egui_extras"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d01d34e845f01c62e3fded726961092e70417d66570c499b9817ab24674ca4ed"
dependencies = [
"ahash 0.8.12",
"egui",
@ -1934,8 +1922,6 @@ dependencies = [
[[package]]
name = "egui_glow"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6420863ea1d90e750f75075231a260030ad8a9f30a7cef82cdc966492dc4c4eb"
dependencies = [
"bytemuck",
"egui",
@ -1968,8 +1954,6 @@ checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "emath"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "491bdf728bf25ddd9ad60d4cf1c48588fa82c013a2440b91aa7fc43e34a07c32"
dependencies = [
"bytemuck",
"serde",
@ -2045,8 +2029,6 @@ dependencies = [
[[package]]
name = "epaint"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "009d0dd3c2163823a0abdb899451ecbc78798dec545ee91b43aff1fa790bab62"
dependencies = [
"ab_glyph",
"ahash 0.8.12",
@ -2064,8 +2046,6 @@ dependencies = [
[[package]]
name = "epaint_default_fonts"
version = "0.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c4fbe202b6578d3d56428fa185cdf114a05e49da05f477b3c7f0fbb221f1862"
[[package]]
name = "equator"

View File

@ -69,3 +69,14 @@ opt-level = 2
opt-level = 2
[profile.dev.package.cpal]
opt-level = 2
# Use local egui fork with ibus/Wayland text input fix
[patch.crates-io]
egui = { path = "../../egui-fork/crates/egui" }
eframe = { path = "../../egui-fork/crates/eframe" }
egui_extras = { path = "../../egui-fork/crates/egui_extras" }
egui-wgpu = { path = "../../egui-fork/crates/egui-wgpu" }
egui-winit = { path = "../../egui-fork/crates/egui-winit" }
epaint = { path = "../../egui-fork/crates/epaint" }
ecolor = { path = "../../egui-fork/crates/ecolor" }
emath = { path = "../../egui-fork/crates/emath" }

View File

@ -49,7 +49,7 @@ impl PaneType {
PaneType::PianoRoll => "Piano Roll",
PaneType::VirtualPiano => "Virtual Piano",
PaneType::NodeEditor => "Node Editor",
PaneType::PresetBrowser => "Preset Browser",
PaneType::PresetBrowser => "Instrument Browser",
PaneType::AssetLibrary => "Asset Library",
PaneType::ShaderEditor => "Shader Editor",
}

View File

@ -4428,7 +4428,7 @@ impl eframe::App for EditorApp {
pending_menu_actions: &mut pending_menu_actions,
clipboard_manager: &mut self.clipboard_manager,
waveform_stereo: self.config.waveform_stereo,
project_generation: self.project_generation,
project_generation: &mut self.project_generation,
};
render_layout_node(
@ -4704,7 +4704,7 @@ struct RenderContext<'a> {
/// Whether to show waveforms as stacked stereo
waveform_stereo: bool,
/// Project generation counter (incremented on load)
project_generation: u64,
project_generation: &'a mut u64,
}
/// Recursively render a layout node with drag support

View File

@ -218,7 +218,7 @@ pub struct SharedPaneState<'a> {
/// Whether to show waveforms as stacked stereo (true) or combined mono (false)
pub waveform_stereo: bool,
/// Generation counter - incremented on project load to force reloads
pub project_generation: u64,
pub project_generation: &'a mut u64,
}
/// Trait for pane rendering

View File

@ -5,6 +5,7 @@
use eframe::egui;
use egui_node_graph2::*;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Signal types for audio node graph
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
@ -136,10 +137,18 @@ pub struct NodeData {
pub template: NodeTemplate,
}
/// Cached oscilloscope waveform data for rendering in node body
pub struct OscilloscopeCache {
    // Audio samples; the renderer clamps to -1..1 when drawing
    pub audio: Vec<f32>,
    // CV samples; may be empty when the node has no CV history
    pub cv: Vec<f32>,
}
/// Custom graph state - can track selected nodes, etc.
#[derive(Default)]
pub struct GraphState {
    // Node currently marked active in the editor, if any
    pub active_node: Option<NodeId>,
    /// Oscilloscope data cached per node, populated before draw_graph_editor()
    pub oscilloscope_data: HashMap<NodeId, OscilloscopeCache>,
}
/// User response type (empty for now)
@ -782,15 +791,52 @@ impl NodeDataTrait for NodeData {
fn bottom_ui(
&self,
ui: &mut egui::Ui,
_node_id: NodeId,
node_id: NodeId,
_graph: &Graph<NodeData, DataType, ValueType>,
_user_state: &mut Self::UserState,
user_state: &mut Self::UserState,
) -> Vec<NodeResponse<Self::Response, NodeData>>
where
Self::Response: UserResponseTrait,
{
// No custom UI for now
if self.template == NodeTemplate::Oscilloscope {
let size = egui::vec2(200.0, 80.0);
let (rect, _) = ui.allocate_exact_size(size, egui::Sense::hover());
let painter = ui.painter_at(rect);
// Background
painter.rect_filled(rect, 2.0, egui::Color32::from_rgb(0x1a, 0x1a, 0x1a));
// Center line
let center_y = rect.center().y;
painter.line_segment(
[egui::pos2(rect.left(), center_y), egui::pos2(rect.right(), center_y)],
egui::Stroke::new(1.0, egui::Color32::from_rgb(0x2a, 0x2a, 0x2a)),
);
if let Some(cache) = user_state.oscilloscope_data.get(&node_id) {
// Draw audio waveform (green)
if cache.audio.len() >= 2 {
let points: Vec<egui::Pos2> = cache.audio.iter().enumerate().map(|(i, &sample)| {
let x = rect.left() + (i as f32 / (cache.audio.len() - 1) as f32) * rect.width();
let y = center_y - sample.clamp(-1.0, 1.0) * (rect.height() / 2.0);
egui::pos2(x, y)
}).collect();
painter.add(egui::Shape::line(points, egui::Stroke::new(1.5, egui::Color32::from_rgb(0x4C, 0xAF, 0x50))));
}
// Draw CV waveform (orange) if present
if cache.cv.len() >= 2 {
let points: Vec<egui::Pos2> = cache.cv.iter().enumerate().map(|(i, &sample)| {
let x = rect.left() + (i as f32 / (cache.cv.len() - 1) as f32) * rect.width();
let y = center_y - sample.clamp(-1.0, 1.0) * (rect.height() / 2.0);
egui::pos2(x, y)
}).collect();
painter.add(egui::Shape::line(points, egui::Stroke::new(1.5, egui::Color32::from_rgb(0xFF, 0x98, 0x00))));
}
}
} else {
ui.label("");
}
vec![]
}
}
@ -801,6 +847,22 @@ pub struct AllNodeTemplates;
/// Iterator for subgraph node templates (includes TemplateInput/Output)
pub struct SubgraphNodeTemplates;
/// Node templates available inside a VoiceAllocator subgraph (no nested VA)
pub struct VoiceAllocatorNodeTemplates;

impl NodeTemplateIter for VoiceAllocatorNodeTemplates {
    type Item = NodeTemplate;

    fn all_kinds(&self) -> Vec<Self::Item> {
        // Start from the full palette, drop VoiceAllocator (signals inside a
        // VA are monophonic, so nesting one is meaningless), then add the
        // subgraph boundary nodes.
        let mut kinds: Vec<NodeTemplate> = AllNodeTemplates
            .all_kinds()
            .into_iter()
            .filter(|t| *t != NodeTemplate::VoiceAllocator)
            .collect();
        kinds.extend([NodeTemplate::TemplateInput, NodeTemplate::TemplateOutput]);
        kinds
    }
}
impl NodeTemplateIter for SubgraphNodeTemplates {
type Item = NodeTemplate;

View File

@ -9,7 +9,7 @@ pub mod graph_data;
pub mod node_types;
use backend::{BackendNodeId, GraphBackend};
use graph_data::{AllNodeTemplates, SubgraphNodeTemplates, DataType, GraphState, NodeData, NodeTemplate, ValueType};
use graph_data::{AllNodeTemplates, SubgraphNodeTemplates, VoiceAllocatorNodeTemplates, DataType, GraphState, NodeData, NodeTemplate, ValueType};
use super::NodePath;
use eframe::egui;
use egui_node_graph2::*;
@ -136,6 +136,11 @@ pub struct NodeGraphPane {
node_context_menu: Option<(NodeId, egui::Pos2)>,
/// Cached node screen rects from last frame (for hit-testing)
last_node_rects: std::collections::HashMap<NodeId, egui::Rect>,
/// Last time we polled oscilloscope data (~20 FPS)
last_oscilloscope_poll: std::time::Instant,
/// Backend track ID (u32) for oscilloscope queries
backend_track_id: Option<u32>,
}
impl NodeGraphPane {
@ -162,6 +167,8 @@ impl NodeGraphPane {
renaming_group: None,
node_context_menu: None,
last_node_rects: HashMap::new(),
last_oscilloscope_poll: std::time::Instant::now(),
backend_track_id: None,
}
}
@ -196,6 +203,8 @@ impl NodeGraphPane {
renaming_group: None,
node_context_menu: None,
last_node_rects: HashMap::new(),
last_oscilloscope_poll: std::time::Instant::now(),
backend_track_id: Some(backend_track_id),
};
// Load existing graph from backend
@ -1227,6 +1236,13 @@ impl NodeGraphPane {
!self.subgraph_stack.is_empty()
}
/// True if any frame in the subgraph stack is a VoiceAllocator
fn inside_voice_allocator(&self) -> bool {
    for frame in &self.subgraph_stack {
        if let SubgraphContext::VoiceAllocator { .. } = &frame.context {
            return true;
        }
    }
    false
}
/// Get the GroupId of the current group scope (if inside a group), for filtering sub-groups.
fn current_group_scope(&self) -> Option<GroupId> {
self.subgraph_stack.last().and_then(|frame| {
@ -1926,9 +1942,9 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
) {
// Check if we need to reload for a different track or project reload
let current_track = *shared.active_layer_id;
let generation_changed = shared.project_generation != self.last_project_generation;
let generation_changed = *shared.project_generation != self.last_project_generation;
if generation_changed {
self.last_project_generation = shared.project_generation;
self.last_project_generation = *shared.project_generation;
}
// If selected track changed or project was reloaded, reload the graph
@ -1954,6 +1970,7 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
self.track_id = Some(new_track_id);
// Recreate backend
self.backend_track_id = Some(backend_track_id);
self.backend = Some(Box::new(audio_backend::AudioGraphBackend::new(
backend_track_id,
(*audio_controller).clone(),
@ -1987,6 +2004,68 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
painter.galley(text_pos, galley, text_color);
return;
}
// Poll oscilloscope data at ~20 FPS
let has_oscilloscopes;
if self.last_oscilloscope_poll.elapsed() >= std::time::Duration::from_millis(50) {
self.last_oscilloscope_poll = std::time::Instant::now();
// Find all Oscilloscope nodes in the current graph
let oscilloscope_nodes: Vec<(NodeId, u32)> = self.state.graph.iter_nodes()
.filter(|&node_id| {
self.state.graph.nodes.get(node_id)
.map(|n| n.user_data.template == NodeTemplate::Oscilloscope)
.unwrap_or(false)
})
.filter_map(|node_id| {
self.node_id_map.get(&node_id).and_then(|backend_id| {
match backend_id {
BackendNodeId::Audio(idx) => Some((node_id, idx.index() as u32)),
}
})
})
.collect();
has_oscilloscopes = !oscilloscope_nodes.is_empty();
if has_oscilloscopes {
if let (Some(backend_track_id), Some(audio_controller)) = (self.backend_track_id, &shared.audio_controller) {
// Check if we're inside a VoiceAllocator subgraph
let va_backend_id = self.subgraph_stack.iter().rev().find_map(|frame| {
if let SubgraphContext::VoiceAllocator { backend_id } = &frame.context {
match backend_id {
BackendNodeId::Audio(idx) => Some(idx.index() as u32),
}
} else {
None
}
});
let mut controller = audio_controller.lock().unwrap();
for (node_id, backend_node_id) in oscilloscope_nodes {
let result = if let Some(va_id) = va_backend_id {
controller.query_voice_oscilloscope_data(backend_track_id, va_id, backend_node_id, 4800)
} else {
controller.query_oscilloscope_data(backend_track_id, backend_node_id, 4800)
};
if let Ok(data) = result {
self.user_state.oscilloscope_data.insert(node_id, graph_data::OscilloscopeCache {
audio: data.audio,
cv: data.cv,
});
}
}
}
}
} else {
// Between polls, check if we have cached oscilloscope data
has_oscilloscopes = !self.user_state.oscilloscope_data.is_empty();
}
// Continuously repaint when oscilloscopes are present
if has_oscilloscopes {
ui.ctx().request_repaint();
}
// Get colors from theme
let bg_style = shared.theme.style(".node-graph-background", ui.ctx());
let grid_style = shared.theme.style(".node-graph-grid", ui.ctx());
@ -2098,7 +2177,14 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
Self::draw_dot_grid_background(ui, graph_rect, bg_color, grid_color, pan_zoom);
// Draw the graph editor with context-aware node templates
let graph_response = if self.in_subgraph() {
let graph_response = if self.inside_voice_allocator() {
self.state.draw_graph_editor(
ui,
VoiceAllocatorNodeTemplates,
&mut self.user_state,
Vec::default(),
)
} else if self.in_subgraph() {
self.state.draw_graph_editor(
ui,
SubgraphNodeTemplates,

View File

@ -1,45 +1,512 @@
/// Preset Browser pane - asset and preset library
/// Instrument Browser pane — browse, search, load, and save instrument presets
///
/// This will eventually show a file browser for presets.
/// For now, it's a placeholder.
/// Scans factory presets from `src/assets/instruments/` organized by category.
/// Presets are loaded into the currently selected track's audio graph.
use eframe::egui;
use std::path::PathBuf;
use super::{NodePath, PaneRenderer, SharedPaneState};
pub struct PresetBrowserPane {}
/// Metadata extracted from a preset file
struct PresetInfo {
    // Display name from the preset's metadata block
    name: String,
    // Path of the preset .json on disk (used for load/delete)
    path: PathBuf,
    // First directory component under the scan root (e.g. "bass", "lead")
    category: String,
    // Free-form description from the preset metadata
    description: String,
    // Author from the preset metadata; may be empty
    author: String,
    // Search/display tags from the preset metadata
    tags: Vec<String>,
    // Factory presets are shipped with the app and get no Delete button
    is_factory: bool,
}
impl PresetBrowserPane {
pub fn new() -> Self {
Self {}
/// State for the save-preset dialog.
///
/// All fields start empty; the hand-written `Default` impl was pure
/// boilerplate, so it is replaced with `#[derive(Default)]`.
#[derive(Default)]
struct SaveDialogState {
    // Preset name typed by the user; must be non-empty (after trim) to save
    name: String,
    // Free-form description text
    description: String,
    // Raw comma-separated tag string, split/trimmed on save
    tags_str: String,
}
// Instrument Browser pane state: the scanned preset list plus UI selection,
// filtering, and save-dialog state.
pub struct PresetBrowserPane {
    // All discovered presets, sorted by (category, name) after scanning
    presets: Vec<PresetInfo>,
    // Current search-box text; matched against name, description, and tags
    search_query: String,
    /// Index into `self.presets` of the currently selected preset
    selected_index: Option<usize>,
    // Active category filter chip; None means "All"
    selected_category: Option<String>,
    // When true, preset directories are rescanned on the next render
    needs_reload: bool,
    // Some(..) while the save dialog is open; it replaces the list view
    save_dialog: Option<SaveDialogState>,
    /// Sorted unique category names extracted from presets
    categories: Vec<String>,
}
impl PresetBrowserPane {
pub fn new() -> Self {
Self {
presets: Vec::new(),
search_query: String::new(),
selected_index: None,
selected_category: None,
needs_reload: true,
save_dialog: None,
categories: Vec::new(),
}
}
/// Scan preset directories and populate the preset list
fn scan_presets(&mut self) {
    self.presets.clear();
    self.categories.clear();
    // Factory presets live in the repo tree, resolved from this crate's
    // manifest dir at compile time.
    // NOTE(review): this assumes the app runs from a source checkout —
    // confirm the path still resolves for installed/packaged builds.
    let factory_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../src/assets/instruments");
    if let Ok(root) = factory_root.canonicalize() {
        self.scan_directory(&root, &root, true);
    }
    // Order by category, then alphabetically by name within each category.
    self.presets.sort_by(|lhs, rhs| {
        lhs.category.cmp(&rhs.category).then_with(|| lhs.name.cmp(&rhs.name))
    });
    // Collect the distinct category names for the filter chips.
    let mut unique: Vec<String> = self.presets.iter().map(|p| p.category.clone()).collect();
    unique.sort();
    unique.dedup();
    self.categories = unique;
    self.needs_reload = false;
}
/// Recursively scan a directory for .json preset files
fn scan_directory(&mut self, dir: &std::path::Path, base_dir: &std::path::Path, is_factory: bool) {
    // Unreadable directories are skipped silently (best-effort scan).
    let Ok(entries) = std::fs::read_dir(dir) else { return };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            self.scan_directory(&path, base_dir, is_factory);
            continue;
        }
        let is_json = path.extension().is_some_and(|ext| ext == "json");
        if is_json {
            // Files that fail to parse as presets are ignored.
            if let Some(info) = self.load_preset_info(&path, base_dir, is_factory) {
                self.presets.push(info);
            }
        }
    }
}
/// Load metadata from a preset JSON file
fn load_preset_info(&self, path: &std::path::Path, base_dir: &std::path::Path, is_factory: bool) -> Option<PresetInfo> {
    // Read and deserialize; any I/O or parse failure yields None.
    let raw = std::fs::read_to_string(path).ok()?;
    let preset: daw_backend::audio::node_graph::GraphPreset = serde_json::from_str(&raw).ok()?;
    // The category is the first path component below the scan root.
    let category = path
        .strip_prefix(base_dir)
        .ok()?
        .components()
        .next()
        .and_then(|c| c.as_os_str().to_str())
        .unwrap_or("other")
        .to_string();
    Some(PresetInfo {
        name: preset.metadata.name,
        path: path.to_path_buf(),
        category,
        description: preset.metadata.description,
        author: preset.metadata.author,
        tags: preset.metadata.tags,
        is_factory,
    })
}
/// Get indices of presets matching the current search query and category filter
fn filtered_indices(&self) -> Vec<usize> {
    let needle = self.search_query.to_lowercase();
    let mut matching = Vec::new();
    for (idx, preset) in self.presets.iter().enumerate() {
        // Category chip filter (None = "All").
        if let Some(cat) = &self.selected_category {
            if &preset.category != cat {
                continue;
            }
        }
        // Case-insensitive match over name, description, and tags.
        if !needle.is_empty() {
            let hit = preset.name.to_lowercase().contains(&needle)
                || preset.description.to_lowercase().contains(&needle)
                || preset.tags.iter().any(|t| t.to_lowercase().contains(&needle));
            if !hit {
                continue;
            }
        }
        matching.push(idx);
    }
    matching
}
/// Load the selected preset into the current track
fn load_preset(&self, preset_index: usize, shared: &mut SharedPaneState) {
    let preset = &self.presets[preset_index];
    // Resolve the backend track for the active layer; nothing to do without one.
    let Some(&track_id) = shared
        .active_layer_id
        .and_then(|lid| shared.layer_to_track_map.get(&lid))
    else {
        return;
    };
    if let Some(audio_controller) = &shared.audio_controller {
        audio_controller
            .lock()
            .unwrap()
            .graph_load_preset(track_id, preset.path.to_string_lossy().to_string());
    }
    // Bump the generation counter so open panes reload the new graph.
    *shared.project_generation += 1;
}
/// Render the save preset dialog
///
/// Draws the name/description/tags fields plus Save/Cancel buttons. Button
/// actions are recorded in local flags and executed only after the mutable
/// borrow of `self.save_dialog` ends.
fn render_save_dialog(&mut self, ui: &mut egui::Ui, shared: &mut SharedPaneState) {
    // Mutable borrow of the dialog state for the duration of the widget calls.
    let dialog = match &mut self.save_dialog {
        Some(d) => d,
        None => return,
    };
    ui.add_space(8.0);
    ui.heading("Save Preset");
    ui.add_space(4.0);
    ui.horizontal(|ui| {
        ui.label("Name:");
        ui.text_edit_singleline(&mut dialog.name);
    });
    ui.add_space(4.0);
    ui.label("Description:");
    ui.add(egui::TextEdit::multiline(&mut dialog.description)
        .desired_rows(3)
        .desired_width(f32::INFINITY));
    ui.add_space(4.0);
    ui.horizontal(|ui| {
        ui.label("Tags:");
        ui.text_edit_singleline(&mut dialog.tags_str);
    });
    ui.label(egui::RichText::new("Comma-separated, e.g. bass, synth, warm")
        .small()
        .color(ui.visuals().weak_text_color()));
    ui.add_space(8.0);
    // Saving requires a non-empty (post-trim) name.
    let name_valid = !dialog.name.trim().is_empty();
    // Defer the actions: `dialog` mutably borrows `self.save_dialog`, so
    // do_save_preset()/the assignment below can only run once it is released.
    let mut do_save = false;
    let mut do_cancel = false;
    ui.horizontal(|ui| {
        if ui.add_enabled(name_valid, egui::Button::new("Save")).clicked() {
            do_save = true;
        }
        if ui.button("Cancel").clicked() {
            do_cancel = true;
        }
    });
    // Act after dialog borrow is released
    if do_save {
        self.do_save_preset(shared);
    } else if do_cancel {
        self.save_dialog = None;
    }
}
/// Execute the save action
fn do_save_preset(&mut self, shared: &mut SharedPaneState) {
    // Take ownership of the dialog state; this also closes the dialog.
    let Some(dialog) = self.save_dialog.take() else { return };
    // A selected track is required as the source of the graph to save.
    let Some(&track_id) = shared
        .active_layer_id
        .and_then(|lid| shared.layer_to_track_map.get(&lid))
    else {
        return;
    };
    let name = dialog.name.trim().to_string();
    let description = dialog.description.trim().to_string();
    // Split the raw tag string on commas, dropping empty entries.
    let tags: Vec<String> = dialog
        .tags_str
        .split(',')
        .filter_map(|raw| {
            let tag = raw.trim();
            (!tag.is_empty()).then(|| tag.to_string())
        })
        .collect();
    // Save to user presets directory
    let save_dir = user_presets_dir();
    if let Err(e) = std::fs::create_dir_all(&save_dir) {
        eprintln!("Failed to create presets directory: {}", e);
        return;
    }
    let save_path = save_dir.join(format!("{}.json", sanitize_filename(&name)));
    if let Some(audio_controller) = &shared.audio_controller {
        let mut controller = audio_controller.lock().unwrap();
        controller.graph_save_preset(
            track_id,
            save_path.to_string_lossy().to_string(),
            name,
            description,
            tags,
        );
    }
    // Newly saved preset should appear in the list on next render.
    self.needs_reload = true;
}
}
/// Get the user presets directory ($XDG_DATA_HOME/lightningbeam/presets or ~/.local/share/lightningbeam/presets)
///
/// Per the XDG Base Directory specification, an *empty* `XDG_DATA_HOME` must
/// be treated the same as unset, so empty env values fall through to the next
/// option instead of producing paths rooted at "".
fn user_presets_dir() -> PathBuf {
    match std::env::var("XDG_DATA_HOME") {
        Ok(xdg) if !xdg.is_empty() => PathBuf::from(xdg).join("lightningbeam").join("presets"),
        _ => match std::env::var("HOME") {
            Ok(home) if !home.is_empty() => {
                PathBuf::from(home).join(".local/share/lightningbeam/presets")
            }
            // Last resort: a relative "presets" dir under the working directory.
            _ => PathBuf::from("presets"),
        },
    }
}
/// Sanitize a string for use as a filename
///
/// Alphanumerics, '-', '_', and spaces pass through; every other character
/// becomes '_'. Leading/trailing whitespace is then trimmed off.
fn sanitize_filename(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    for ch in name.chars() {
        if ch.is_alphanumeric() || matches!(ch, '-' | '_' | ' ') {
            out.push(ch);
        } else {
            out.push('_');
        }
    }
    out.trim().to_string()
}
impl PaneRenderer for PresetBrowserPane {
// Pane header: a right-aligned "Save" button that opens the save dialog.
// Returns true so the default header chrome is still drawn.
fn render_header(&mut self, ui: &mut egui::Ui, shared: &mut SharedPaneState) -> bool {
    ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
        // Saving only makes sense when a track is selected to capture from.
        let track_selected = shared
            .active_layer_id
            .and_then(|lid| shared.layer_to_track_map.get(&lid))
            .is_some();
        if ui.add_enabled(track_selected, egui::Button::new("Save")).clicked() {
            self.save_dialog = Some(SaveDialogState::default());
        }
    });
    true
}
fn render_content(
&mut self,
ui: &mut egui::Ui,
rect: egui::Rect,
_path: &NodePath,
_shared: &mut SharedPaneState,
shared: &mut SharedPaneState,
) {
// Placeholder rendering
ui.painter().rect_filled(
rect,
0.0,
egui::Color32::from_rgb(50, 45, 30),
if self.needs_reload {
self.scan_presets();
}
// Background
let bg_style = shared.theme.style(".pane-content", ui.ctx());
let bg_color = bg_style.background_color.unwrap_or(egui::Color32::from_rgb(47, 47, 47));
ui.painter().rect_filled(rect, 0.0, bg_color);
let text_color = shared.theme.style(".text-primary", ui.ctx())
.text_color.unwrap_or(egui::Color32::from_gray(246));
let text_secondary = shared.theme.style(".text-secondary", ui.ctx())
.text_color.unwrap_or(egui::Color32::from_gray(170));
let content_rect = rect.shrink(4.0);
let mut content_ui = ui.new_child(
egui::UiBuilder::new()
.max_rect(content_rect)
.layout(egui::Layout::top_down(egui::Align::LEFT)),
);
let ui = &mut content_ui;
// Save dialog takes over the content area
if self.save_dialog.is_some() {
self.render_save_dialog(ui, shared);
return;
}
// Search bar
ui.horizontal(|ui| {
ui.label("Search:");
ui.text_edit_singleline(&mut self.search_query);
});
ui.add_space(4.0);
// Category chips
ui.horizontal_wrapped(|ui| {
let all_selected = self.selected_category.is_none();
if ui.selectable_label(all_selected, "All").clicked() {
self.selected_category = None;
self.selected_index = None;
}
for cat in &self.categories.clone() {
let is_selected = self.selected_category.as_ref() == Some(cat);
let display = capitalize_first(cat);
if ui.selectable_label(is_selected, &display).clicked() {
if is_selected {
self.selected_category = None;
} else {
self.selected_category = Some(cat.clone());
}
self.selected_index = None;
}
}
});
ui.separator();
// Preset list
let filtered = self.filtered_indices();
if filtered.is_empty() {
ui.centered_and_justified(|ui| {
ui.label(egui::RichText::new("No presets found")
.color(text_secondary));
});
return;
}
let mut load_index = None;
let mut delete_path = None;
egui::ScrollArea::vertical().auto_shrink([false, false]).show(ui, |ui| {
let mut new_selection = self.selected_index;
for &idx in &filtered {
let preset = &self.presets[idx];
let is_selected = self.selected_index == Some(idx);
let response = ui.push_id(idx, |ui| {
let frame = egui::Frame::NONE
.inner_margin(egui::Margin::same(6))
.corner_radius(4.0);
let mut button_clicked = false;
let frame_response = frame.show(ui, |ui| {
ui.set_min_width(ui.available_width());
ui.label(
egui::RichText::new(&preset.name).strong().color(text_color)
);
let text = "Preset Browser\n(TODO: Implement file browser)";
ui.painter().text(
rect.center(),
egui::Align2::CENTER_CENTER,
text,
egui::FontId::proportional(16.0),
egui::Color32::from_gray(150),
if is_selected {
if !preset.description.is_empty() {
ui.label(egui::RichText::new(&preset.description)
.color(text_secondary)
.small());
}
if !preset.tags.is_empty() {
ui.horizontal_wrapped(|ui| {
for tag in &preset.tags {
let tag_frame = egui::Frame::NONE
.inner_margin(egui::Margin::symmetric(6, 2))
.corner_radius(8.0)
.fill(ui.visuals().selection.bg_fill.linear_multiply(0.3));
tag_frame.show(ui, |ui| {
ui.label(egui::RichText::new(tag).small().color(text_color));
});
}
});
}
ui.horizontal(|ui| {
if !preset.author.is_empty() {
ui.label(egui::RichText::new(format!("by {}", preset.author))
.small()
.color(text_secondary));
}
ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
if !preset.is_factory {
if ui.small_button("Delete").clicked() {
delete_path = Some(preset.path.clone());
button_clicked = true;
}
}
let has_track = shared.active_layer_id
.and_then(|lid| shared.layer_to_track_map.get(&lid))
.is_some();
if ui.add_enabled(has_track, egui::Button::new("Load")).clicked() {
load_index = Some(idx);
button_clicked = true;
}
});
});
}
});
// Hover highlight and click-to-select (no ui.interact overlay)
let frame_rect = frame_response.response.rect;
let is_hovered = ui.rect_contains_pointer(frame_rect);
let fill = if is_selected {
ui.visuals().selection.bg_fill.linear_multiply(0.3)
} else if is_hovered {
ui.visuals().widgets.hovered.bg_fill.linear_multiply(0.3)
} else {
egui::Color32::TRANSPARENT
};
if fill != egui::Color32::TRANSPARENT {
ui.painter().rect_filled(frame_rect, 4.0, fill);
}
if is_hovered {
ui.ctx().set_cursor_icon(egui::CursorIcon::PointingHand);
}
if is_hovered && !button_clicked && ui.input(|i| i.pointer.any_released()) {
new_selection = if is_selected { None } else { Some(idx) };
}
});
let rect = response.response.rect;
ui.painter().line_segment(
[rect.left_bottom(), rect.right_bottom()],
egui::Stroke::new(0.5, ui.visuals().widgets.noninteractive.bg_stroke.color),
);
}
self.selected_index = new_selection;
});
// Deferred actions after ScrollArea borrow is released
if let Some(idx) = load_index {
self.load_preset(idx, shared);
}
if let Some(path) = delete_path {
if let Err(e) = std::fs::remove_file(&path) {
eprintln!("Failed to delete preset: {e}");
}
self.needs_reload = true;
}
}
fn name(&self) -> &str {
"Preset Browser"
"Instrument Browser"
}
}
/// Returns `s` with its first character uppercased and the remainder
/// unchanged. An empty input yields an empty `String`.
///
/// Note: `char::to_uppercase` may expand to multiple characters
/// (e.g. 'ß' → "SS"), which this preserves.
fn capitalize_first(s: &str) -> String {
    let mut it = s.chars();
    it.next()
        .map(|first| format!("{}{}", first.to_uppercase(), it.as_str()))
        .unwrap_or_default()
}

View File

@ -1,27 +1,53 @@
{
"metadata": {
"name": "Bright Lead",
"description": "Piercing lead synth with filter modulation",
"description": "Piercing lead synth with filter modulation (polyphonic)",
"author": "Lightningbeam",
"version": 1,
"version": 2,
"tags": ["lead", "synth", "solo"]
},
"midi_targets": [0],
"output_node": 7,
"output_node": 2,
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"name": "MIDI In",
"parameters": {},
"position": [100.0, 100.0]
"position": [100.0, 150.0]
},
{
"id": 1,
"node_type": "VoiceAllocator",
"name": "Voice Allocator",
"parameters": {
"0": 8.0
},
"position": [400.0, 150.0],
"template_graph": {
"metadata": {
"name": "Voice Template",
"description": "Per-voice lead synth patch",
"author": "Lightningbeam",
"version": 1,
"tags": []
},
"midi_targets": [0],
"output_node": 7,
"nodes": [
{
"id": 0,
"node_type": "TemplateInput",
"name": "Template Input",
"parameters": {},
"position": [-200.0, 0.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"name": "MIDI→CV",
"parameters": {},
"position": [400.0, 100.0]
"position": [100.0, 0.0]
},
{
"id": 2,
@ -32,7 +58,7 @@
"1": 0.6,
"2": 2.0
},
"position": [700.0, -100.0]
"position": [400.0, -100.0]
},
{
"id": 3,
@ -44,7 +70,7 @@
"2": 0.0,
"3": 0.0
},
"position": [700.0, 200.0]
"position": [400.0, 200.0]
},
{
"id": 4,
@ -55,7 +81,7 @@
"1": 2.0,
"2": 0.0
},
"position": [1000.0, -80.0]
"position": [700.0, -80.0]
},
{
"id": 5,
@ -67,7 +93,7 @@
"2": 0.6,
"3": 0.2
},
"position": [1000.0, 240.0]
"position": [700.0, 200.0]
},
{
"id": 6,
@ -76,14 +102,14 @@
"parameters": {
"0": 1.0
},
"position": [1300.0, 150.0]
"position": [1000.0, 50.0]
},
{
"id": 7,
"node_type": "AudioOutput",
"name": "Out",
"node_type": "TemplateOutput",
"name": "Template Output",
"parameters": {},
"position": [1600.0, 150.0]
"position": [1200.0, 50.0]
}
],
"connections": [
@ -96,4 +122,18 @@
{ "from_node": 5, "from_port": 0, "to_node": 6, "to_port": 1 },
{ "from_node": 6, "from_port": 0, "to_node": 7, "to_port": 0 }
]
}
},
{
"id": 2,
"node_type": "AudioOutput",
"name": "Out",
"parameters": {},
"position": [700.0, 150.0]
}
],
"connections": [
{ "from_node": 0, "from_port": 0, "to_node": 1, "to_port": 0 },
{ "from_node": 1, "from_port": 0, "to_node": 2, "to_port": 0 }
]
}

View File

@ -1,13 +1,13 @@
{
"metadata": {
"name": "Lush Pad",
"description": "Ambient pad with reverb and chorus",
"description": "Ambient pad with reverb and chorus (polyphonic)",
"author": "Lightningbeam",
"version": 1,
"version": 2,
"tags": ["pad", "ambient", "synth"]
},
"midi_targets": [0],
"output_node": 10,
"output_node": 4,
"nodes": [
{
"id": 0,
@ -16,12 +16,38 @@
"parameters": {},
"position": [100.0, 150.0]
},
{
"id": 1,
"node_type": "VoiceAllocator",
"name": "Voice Allocator",
"parameters": {
"0": 8.0
},
"position": [400.0, 150.0],
"template_graph": {
"metadata": {
"name": "Voice Template",
"description": "Per-voice pad patch",
"author": "Lightningbeam",
"version": 1,
"tags": []
},
"midi_targets": [0],
"output_node": 8,
"nodes": [
{
"id": 0,
"node_type": "TemplateInput",
"name": "Template Input",
"parameters": {},
"position": [-200.0, 0.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"name": "MIDI→CV",
"parameters": {},
"position": [400.0, 150.0]
"position": [100.0, 0.0]
},
{
"id": 2,
@ -32,7 +58,7 @@
"1": 0.4,
"2": 0.0
},
"position": [700.0, -100.0]
"position": [400.0, -100.0]
},
{
"id": 3,
@ -43,7 +69,7 @@
"1": 0.4,
"2": 0.0
},
"position": [700.0, 200.0]
"position": [400.0, 200.0]
},
{
"id": 4,
@ -55,7 +81,7 @@
"2": 0.0,
"3": 0.0
},
"position": [1000.0, 150.0]
"position": [700.0, 50.0]
},
{
"id": 5,
@ -66,7 +92,7 @@
"1": 0.707,
"2": 0.0
},
"position": [1300.0, -50.0]
"position": [900.0, -50.0]
},
{
"id": 6,
@ -78,7 +104,7 @@
"2": 0.7,
"3": 1.0
},
"position": [1300.0, 280.0]
"position": [900.0, 200.0]
},
{
"id": 7,
@ -87,36 +113,14 @@
"parameters": {
"0": 1.0
},
"position": [1600.0, 200.0]
"position": [1100.0, 50.0]
},
{
"id": 8,
"node_type": "Chorus",
"name": "Chorus",
"parameters": {
"0": 0.5,
"1": 0.6,
"2": 0.4
},
"position": [1900.0, 200.0]
},
{
"id": 9,
"node_type": "Reverb",
"name": "Reverb",
"parameters": {
"0": 0.7,
"1": 0.5,
"2": 0.5
},
"position": [2200.0, 200.0]
},
{
"id": 10,
"node_type": "AudioOutput",
"name": "Out",
"node_type": "TemplateOutput",
"name": "Template Output",
"parameters": {},
"position": [2500.0, 200.0]
"position": [1300.0, 50.0]
}
],
"connections": [
@ -129,8 +133,44 @@
{ "from_node": 4, "from_port": 0, "to_node": 5, "to_port": 0 },
{ "from_node": 5, "from_port": 0, "to_node": 7, "to_port": 0 },
{ "from_node": 6, "from_port": 0, "to_node": 7, "to_port": 1 },
{ "from_node": 7, "from_port": 0, "to_node": 8, "to_port": 0 },
{ "from_node": 8, "from_port": 0, "to_node": 9, "to_port": 0 },
{ "from_node": 9, "from_port": 0, "to_node": 10, "to_port": 0 }
{ "from_node": 7, "from_port": 0, "to_node": 8, "to_port": 0 }
]
}
},
{
"id": 2,
"node_type": "Chorus",
"name": "Chorus",
"parameters": {
"0": 0.5,
"1": 0.6,
"2": 0.4
},
"position": [700.0, 150.0]
},
{
"id": 3,
"node_type": "Reverb",
"name": "Reverb",
"parameters": {
"0": 0.7,
"1": 0.5,
"2": 0.5
},
"position": [1000.0, 150.0]
},
{
"id": 4,
"node_type": "AudioOutput",
"name": "Out",
"parameters": {},
"position": [1300.0, 150.0]
}
],
"connections": [
{ "from_node": 0, "from_port": 0, "to_node": 1, "to_port": 0 },
{ "from_node": 1, "from_port": 0, "to_node": 2, "to_port": 0 },
{ "from_node": 2, "from_port": 0, "to_node": 3, "to_port": 0 },
{ "from_node": 3, "from_port": 0, "to_node": 4, "to_port": 0 }
]
}