Add presets and make graph follow selected layer/track

Skyler Lehmkuhl 2025-10-25 05:31:18 -04:00
parent 16f4a2a359
commit 139946fb75
19 changed files with 2118 additions and 71 deletions

View File

@ -722,7 +722,7 @@ impl Engine {
}
// Node graph commands
Command::GraphAddNode(track_id, node_type, x, y) => {
// Get MIDI track (graphs are only for MIDI tracks currently)
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
// Create graph if it doesn't exist
@ -760,6 +760,9 @@ impl Engine {
let node_idx = graph.add_node(node);
let node_id = node_idx.index() as u32;
// Save position
graph.set_node_position(node_idx, x, y);
// Automatically set MIDI-receiving nodes as MIDI targets
if node_type == "MidiInput" || node_type == "VoiceAllocator" {
graph.set_midi_target(node_idx, true);
@ -907,6 +910,72 @@ impl Engine {
}
}
}
Command::GraphSavePreset(track_id, preset_path, preset_name, description, tags) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(ref graph) = track.instrument_graph {
// Serialize the graph to a preset
let mut preset = graph.to_preset(&preset_name);
preset.metadata.description = description;
preset.metadata.tags = tags;
preset.metadata.author = String::from("User");
// Write to file
if let Ok(json) = preset.to_json() {
if let Err(e) = std::fs::write(&preset_path, json) {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to save preset: {}", e)
));
}
} else {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
"Failed to serialize preset".to_string()
));
}
}
}
}
Command::GraphLoadPreset(track_id, preset_path) => {
// Read and deserialize the preset
match std::fs::read_to_string(&preset_path) {
Ok(json) => {
match crate::audio::node_graph::preset::GraphPreset::from_json(&json) {
Ok(preset) => {
match InstrumentGraph::from_preset(&preset, self.sample_rate, 8192) {
Ok(graph) => {
// Replace the track's graph
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
track.instrument_graph = Some(graph);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to create graph from preset: {}", e)
));
}
}
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to parse preset: {}", e)
));
}
}
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to read preset file: {}", e)
));
}
}
}
}
}
@ -1377,4 +1446,14 @@ impl EngineController {
pub fn graph_set_output_node(&mut self, track_id: TrackId, node_id: u32) {
let _ = self.command_tx.push(Command::GraphSetOutputNode(track_id, node_id));
}
/// Save the current graph as a preset
pub fn graph_save_preset(&mut self, track_id: TrackId, preset_path: String, preset_name: String, description: String, tags: Vec<String>) {
let _ = self.command_tx.push(Command::GraphSavePreset(track_id, preset_path, preset_name, description, tags));
}
/// Load a preset into a track's graph
pub fn graph_load_preset(&mut self, track_id: TrackId, preset_path: String) {
let _ = self.command_tx.push(Command::GraphLoadPreset(track_id, preset_path));
}
}
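Taken together, a control-thread caller can round-trip a track's instrument through these two methods. A minimal sketch, assuming a `controller: EngineController` and a valid `track_id: TrackId` obtained from the host application (the path and metadata here are illustrative):

// Sketch only: `controller` and `track_id` come from the host application.
controller.graph_save_preset(
    track_id,
    "/tmp/My_Synth.json".to_string(),
    "My Synth".to_string(),
    "Bright lead with a short release".to_string(),
    vec!["lead".to_string()],
);

// Later, restore it; the engine swaps in the rebuilt instrument graph
// and emits GraphStateChanged for the track.
controller.graph_load_preset(track_id, "/tmp/My_Synth.json".to_string());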

View File

@ -78,6 +78,9 @@ pub struct InstrumentGraph {
/// Temporary buffers for node MIDI inputs during processing
midi_input_buffers: Vec<Vec<MidiEvent>>,
/// UI positions for nodes (node_index -> (x, y))
node_positions: std::collections::HashMap<u32, (f32, f32)>,
}
impl InstrumentGraph {
@ -94,6 +97,7 @@ impl InstrumentGraph {
input_buffers: vec![vec![0.0; buffer_size * 2]; 16],
// Pre-allocate MIDI input buffers (max 128 events per port)
midi_input_buffers: (0..16).map(|_| Vec::with_capacity(128)).collect(),
node_positions: std::collections::HashMap::new(),
}
}
@ -103,6 +107,16 @@ impl InstrumentGraph {
self.graph.add_node(graph_node)
}
/// Set the UI position for a node
pub fn set_node_position(&mut self, node: NodeIndex, x: f32, y: f32) {
self.node_positions.insert(node.index() as u32, (x, y));
}
/// Get the UI position for a node
pub fn get_node_position(&self, node: NodeIndex) -> Option<(f32, f32)> {
self.node_positions.get(&(node.index() as u32)).copied()
}
/// Connect two nodes with type checking
pub fn connect(
&mut self,
@ -543,4 +557,154 @@ impl InstrumentGraph {
new_graph
}
/// Serialize the graph to a preset
pub fn to_preset(&self, name: impl Into<String>) -> crate::audio::node_graph::preset::GraphPreset {
use crate::audio::node_graph::preset::{GraphPreset, SerializedConnection, SerializedNode};
use crate::audio::node_graph::nodes::VoiceAllocatorNode;
let mut preset = GraphPreset::new(name);
// Serialize all nodes
for node_idx in self.graph.node_indices() {
if let Some(graph_node) = self.graph.node_weight(node_idx) {
let node = &graph_node.node;
let node_id = node_idx.index() as u32;
let mut serialized = SerializedNode::new(node_id, node.node_type());
// Get all parameters
for param in node.parameters() {
let value = node.get_parameter(param.id);
serialized.set_parameter(param.id, value);
}
// For VoiceAllocator nodes, serialize the template graph
// We need to downcast to access template_graph()
// This is safe because we know the node type
if node.node_type() == "VoiceAllocator" {
// Cast the trait-object pointer to the concrete VoiceAllocatorNode type
let node_ptr = &**node as *const dyn crate::audio::node_graph::AudioNode;
let node_ptr = node_ptr as *const VoiceAllocatorNode;
unsafe {
let va_node = &*node_ptr;
let template_preset = va_node.template_graph().to_preset("template");
serialized.template_graph = Some(Box::new(template_preset));
}
}
// Save position if available
if let Some(pos) = self.get_node_position(node_idx) {
serialized.set_position(pos.0, pos.1);
}
preset.add_node(serialized);
}
}
// Serialize connections
for edge in self.graph.edge_references() {
let source = edge.source();
let target = edge.target();
let conn = edge.weight();
preset.add_connection(SerializedConnection {
from_node: source.index() as u32,
from_port: conn.from_port,
to_node: target.index() as u32,
to_port: conn.to_port,
});
}
// MIDI targets
preset.midi_targets = self.midi_targets.iter().map(|idx| idx.index() as u32).collect();
// Output node
preset.output_node = self.output_node.map(|idx| idx.index() as u32);
preset
}
/// Deserialize a preset into the graph
pub fn from_preset(preset: &crate::audio::node_graph::preset::GraphPreset, sample_rate: u32, buffer_size: usize) -> Result<Self, String> {
use crate::audio::node_graph::nodes::*;
use petgraph::stable_graph::NodeIndex;
use std::collections::HashMap;
let mut graph = Self::new(sample_rate, buffer_size);
let mut index_map: HashMap<u32, NodeIndex> = HashMap::new();
// Create all nodes
for serialized_node in &preset.nodes {
// Create the node based on type
let node: Box<dyn crate::audio::node_graph::AudioNode> = match serialized_node.node_type.as_str() {
"Oscillator" => Box::new(OscillatorNode::new("Oscillator")),
"Gain" => Box::new(GainNode::new("Gain")),
"Mixer" => Box::new(MixerNode::new("Mixer")),
"Filter" => Box::new(FilterNode::new("Filter")),
"ADSR" => Box::new(ADSRNode::new("ADSR")),
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input")),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV")),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV")),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope")),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input")),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output")),
"VoiceAllocator" => {
let mut va = VoiceAllocatorNode::new("VoiceAllocator", sample_rate, buffer_size);
// If there's a template graph, deserialize and set it
if let Some(ref template_preset) = serialized_node.template_graph {
let template_graph = Self::from_preset(template_preset, sample_rate, buffer_size)?;
// Set the template graph via the VoiceAllocator accessor and rebuild its voices
*va.template_graph_mut() = template_graph;
va.rebuild_voices();
}
Box::new(va)
}
"AudioOutput" => Box::new(AudioOutputNode::new("Output")),
_ => return Err(format!("Unknown node type: {}", serialized_node.node_type)),
};
let node_idx = graph.add_node(node);
index_map.insert(serialized_node.id, node_idx);
// Set parameters
for (&param_id, &value) in &serialized_node.parameters {
if let Some(graph_node) = graph.graph.node_weight_mut(node_idx) {
graph_node.node.set_parameter(param_id, value);
}
}
// Restore position
graph.set_node_position(node_idx, serialized_node.position.0, serialized_node.position.1);
}
// Create connections
for conn in &preset.connections {
let from_idx = index_map.get(&conn.from_node)
.ok_or_else(|| format!("Connection from unknown node {}", conn.from_node))?;
let to_idx = index_map.get(&conn.to_node)
.ok_or_else(|| format!("Connection to unknown node {}", conn.to_node))?;
graph.connect(*from_idx, conn.from_port, *to_idx, conn.to_port)
.map_err(|e| format!("Failed to connect nodes: {:?}", e))?;
}
// Set MIDI targets
for &target_id in &preset.midi_targets {
if let Some(&target_idx) = index_map.get(&target_id) {
graph.set_midi_target(target_idx, true);
}
}
// Set output node
if let Some(output_id) = preset.output_node {
if let Some(&output_idx) = index_map.get(&output_id) {
graph.output_node = Some(output_idx);
}
}
Ok(graph)
}
}
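As a rough round-trip sketch (assuming a populated `InstrumentGraph` named `graph`, the engine's sample rate and buffer size, and the preset types imported from the crate root), saving and restoring goes through JSON:

// Sketch: serialize an existing graph and rebuild an equivalent one.
let preset = graph.to_preset("Round Trip");
let json = preset.to_json().expect("preset should serialize");

let restored_preset = GraphPreset::from_json(&json).expect("valid preset JSON");
let restored = InstrumentGraph::from_preset(&restored_preset, 48_000, 8192)
    .expect("preset should rebuild");
assert_eq!(restored.to_preset("Round Trip").nodes.len(), preset.nodes.len());

Note that the rebuilt graph assigns fresh node indices; the preset's `id` fields are only used to resolve connections through the internal `index_map`.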

View File

@ -2,7 +2,9 @@ mod graph;
mod node_trait;
mod types;
pub mod nodes;
pub mod preset;
pub use graph::{Connection, GraphNode, InstrumentGraph};
pub use node_trait::AudioNode;
pub use preset::{GraphPreset, PresetMetadata, SerializedConnection, SerializedNode};
pub use types::{ConnectionError, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};

View File

@ -102,7 +102,7 @@ impl OscilloscopeNode {
];
let parameters = vec![
Parameter::new(PARAM_TIME_SCALE, "Time Scale", 10.0, 1000.0, 100.0, ParameterUnit::Time),
Parameter::new(PARAM_TRIGGER_MODE, "Trigger", 0.0, 2.0, 0.0, ParameterUnit::Generic),
Parameter::new(PARAM_TRIGGER_LEVEL, "Trigger Level", -1.0, 1.0, 0.0, ParameterUnit::Generic),
];

View File

@ -0,0 +1,147 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Serializable representation of a node graph preset
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GraphPreset {
/// Preset metadata
pub metadata: PresetMetadata,
/// Nodes in the graph
pub nodes: Vec<SerializedNode>,
/// Connections between nodes
pub connections: Vec<SerializedConnection>,
/// Which node indices are MIDI targets
pub midi_targets: Vec<u32>,
/// Which node index is the audio output (None if not set)
pub output_node: Option<u32>,
}
/// Metadata about the preset
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PresetMetadata {
/// Preset name
pub name: String,
/// Description of what the preset sounds like
#[serde(default)]
pub description: String,
/// Preset author
#[serde(default)]
pub author: String,
/// Preset version (for compatibility)
#[serde(default = "default_version")]
pub version: u32,
/// Tags for categorization (e.g., "bass", "lead", "pad")
#[serde(default)]
pub tags: Vec<String>,
}
fn default_version() -> u32 {
1
}
/// Serialized node representation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedNode {
/// Unique ID (node index in the graph)
pub id: u32,
/// Node type (e.g., "Oscillator", "Filter", "ADSR")
pub node_type: String,
/// Parameter values (param_id -> value)
pub parameters: HashMap<u32, f32>,
/// UI position (for visual editor)
#[serde(default)]
pub position: (f32, f32),
/// For VoiceAllocator nodes: the nested template graph
#[serde(skip_serializing_if = "Option::is_none")]
pub template_graph: Option<Box<GraphPreset>>,
}
/// Serialized connection between nodes
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedConnection {
/// Source node ID
pub from_node: u32,
/// Source port index
pub from_port: usize,
/// Destination node ID
pub to_node: u32,
/// Destination port index
pub to_port: usize,
}
impl GraphPreset {
/// Create a new preset with the given name
pub fn new(name: impl Into<String>) -> Self {
Self {
metadata: PresetMetadata {
name: name.into(),
description: String::new(),
author: String::new(),
version: 1,
tags: Vec::new(),
},
nodes: Vec::new(),
connections: Vec::new(),
midi_targets: Vec::new(),
output_node: None,
}
}
/// Serialize to JSON string
pub fn to_json(&self) -> Result<String, serde_json::Error> {
serde_json::to_string_pretty(self)
}
/// Deserialize from JSON string
pub fn from_json(json: &str) -> Result<Self, serde_json::Error> {
serde_json::from_str(json)
}
/// Add a node to the preset
pub fn add_node(&mut self, node: SerializedNode) {
self.nodes.push(node);
}
/// Add a connection to the preset
pub fn add_connection(&mut self, connection: SerializedConnection) {
self.connections.push(connection);
}
}
impl SerializedNode {
/// Create a new serialized node
pub fn new(id: u32, node_type: impl Into<String>) -> Self {
Self {
id,
node_type: node_type.into(),
parameters: HashMap::new(),
position: (0.0, 0.0),
template_graph: None,
}
}
/// Set a parameter value
pub fn set_parameter(&mut self, param_id: u32, value: f32) {
self.parameters.insert(param_id, value);
}
/// Set UI position
pub fn set_position(&mut self, x: f32, y: f32) {
self.position = (x, y);
}
}
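For reference, a preset can also be assembled directly through this API rather than from a live graph. A minimal hand-built example; the parameter ids and values below are illustrative only, not tied to any particular node definition:

// Sketch: build a two-node preset by hand and serialize it.
let mut preset = GraphPreset::new("Tiny Patch");
preset.metadata.tags = vec!["example".to_string()];

let mut osc = SerializedNode::new(0, "Oscillator");
osc.set_parameter(0, 440.0); // illustrative param id/value
osc.set_position(100.0, 100.0);
preset.add_node(osc);

let mut out = SerializedNode::new(1, "AudioOutput");
out.set_position(300.0, 100.0);
preset.add_node(out);

preset.add_connection(SerializedConnection {
    from_node: 0,
    from_port: 0,
    to_node: 1,
    to_port: 0,
});
preset.output_node = Some(1);

let json = preset.to_json().expect("serializes to JSON");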

View File

@ -137,6 +137,11 @@ pub enum Command {
GraphSetMidiTarget(TrackId, u32, bool),
/// Set which node is the audio output (track_id, node_index)
GraphSetOutputNode(TrackId, u32),
/// Save current graph as a preset (track_id, preset_path, preset_name, description, tags)
GraphSavePreset(TrackId, String, String, String, Vec<String>),
/// Load a preset into a track's graph (track_id, preset_path)
GraphLoadPreset(TrackId, String),
}
/// Events sent from audio thread back to UI/control thread

View File

@ -17,6 +17,7 @@ pub use audio::{
Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
TrackNode,
};
pub use audio::node_graph::{GraphPreset, InstrumentGraph, PresetMetadata, SerializedConnection, SerializedNode};
pub use command::{AudioEvent, Command};
pub use effects::{Effect, GainEffect, PanEffect, SimpleEQ, SimpleSynth};
pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter};

View File

@ -0,0 +1,98 @@
{
"metadata": {
"name": "Basic Sine",
"description": "Simple sine wave synthesizer with ADSR envelope. Great for learning the basics of subtractive synthesis.",
"author": "Lightningbeam",
"version": 1,
"tags": ["basic", "lead", "mono"]
},
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"parameters": {},
"position": [100.0, 200.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"parameters": {},
"position": [300.0, 200.0]
},
{
"id": 2,
"node_type": "Oscillator",
"parameters": {
"0": 440.0,
"1": 0.7,
"2": 0.0
},
"position": [500.0, 150.0]
},
{
"id": 3,
"node_type": "ADSR",
"parameters": {
"0": 0.01,
"1": 0.1,
"2": 0.7,
"3": 0.3
},
"position": [500.0, 300.0]
},
{
"id": 4,
"node_type": "Gain",
"parameters": {
"0": 1.0
},
"position": [700.0, 200.0]
},
{
"id": 5,
"node_type": "AudioOutput",
"parameters": {},
"position": [900.0, 200.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 3,
"to_port": 0
},
{
"from_node": 2,
"from_port": 0,
"to_node": 4,
"to_port": 0
},
{
"from_node": 3,
"from_port": 0,
"to_node": 4,
"to_port": 1
},
{
"from_node": 4,
"from_port": 0,
"to_node": 5,
"to_port": 0
}
],
"midi_targets": [0],
"output_node": 5
}
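Reading one of these bundled factory files back is just a matter of deserializing it with the preset type above; a minimal sketch, assuming the file is available at this relative path:

// Sketch: load a bundled factory preset and inspect its metadata.
let json = std::fs::read_to_string("assets/factory_presets/Basic_Sine.json")
    .expect("factory preset file exists");
let preset = GraphPreset::from_json(&json).expect("valid preset JSON");

println!("{} ({} nodes, tags: {:?})",
    preset.metadata.name,
    preset.nodes.len(),
    preset.metadata.tags);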

View File

@ -0,0 +1,137 @@
{
"metadata": {
"name": "Pluck",
"description": "Percussive pluck sound with fast attack and decay. Great for arpeggios, melodies, and rhythmic patterns.",
"author": "Lightningbeam",
"version": 1,
"tags": ["pluck", "lead", "percussive", "arpeggio"]
},
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"parameters": {},
"position": [100.0, 250.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"parameters": {},
"position": [300.0, 250.0]
},
{
"id": 2,
"node_type": "Oscillator",
"parameters": {
"0": 440.0,
"1": 0.6,
"2": 2.0
},
"position": [500.0, 150.0]
},
{
"id": 3,
"node_type": "Filter",
"parameters": {
"0": 2000.0,
"1": 0.8,
"2": 0.0
},
"position": [700.0, 150.0]
},
{
"id": 4,
"node_type": "ADSR",
"parameters": {
"0": 0.001,
"1": 0.3,
"2": 0.0,
"3": 0.05
},
"position": [500.0, 350.0]
},
{
"id": 5,
"node_type": "ADSR",
"parameters": {
"0": 0.001,
"1": 0.4,
"2": 0.0,
"3": 0.1
},
"position": [700.0, 350.0]
},
{
"id": 6,
"node_type": "Gain",
"parameters": {
"0": 1.0
},
"position": [900.0, 200.0]
},
{
"id": 7,
"node_type": "AudioOutput",
"parameters": {},
"position": [1100.0, 200.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 4,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 5,
"to_port": 0
},
{
"from_node": 2,
"from_port": 0,
"to_node": 3,
"to_port": 0
},
{
"from_node": 4,
"from_port": 0,
"to_node": 3,
"to_port": 1
},
{
"from_node": 3,
"from_port": 0,
"to_node": 6,
"to_port": 0
},
{
"from_node": 5,
"from_port": 0,
"to_node": 6,
"to_port": 1
},
{
"from_node": 6,
"from_port": 0,
"to_node": 7,
"to_port": 0
}
],
"midi_targets": [0],
"output_node": 7
}

View File

@ -0,0 +1,145 @@
{
"metadata": {
"name": "Poly Synth",
"description": "8-voice polyphonic synthesizer with sawtooth oscillator and ADSR envelope. Perfect for chords and complex harmonies.",
"author": "Lightningbeam",
"version": 1,
"tags": ["poly", "polyphonic", "synth", "chords"]
},
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"parameters": {},
"position": [100.0, 200.0]
},
{
"id": 1,
"node_type": "VoiceAllocator",
"parameters": {
"0": 8.0
},
"position": [400.0, 200.0],
"template_graph": {
"metadata": {
"name": "template",
"description": "",
"author": "",
"version": 1,
"tags": []
},
"nodes": [
{
"id": 0,
"node_type": "TemplateInput",
"parameters": {},
"position": [100.0, 200.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"parameters": {},
"position": [300.0, 200.0]
},
{
"id": 2,
"node_type": "Oscillator",
"parameters": {
"0": 440.0,
"1": 0.7,
"2": 1.0
},
"position": [500.0, 150.0]
},
{
"id": 3,
"node_type": "ADSR",
"parameters": {
"0": 0.01,
"1": 0.2,
"2": 0.6,
"3": 0.3
},
"position": [500.0, 300.0]
},
{
"id": 4,
"node_type": "Gain",
"parameters": {
"0": 1.0
},
"position": [700.0, 200.0]
},
{
"id": 5,
"node_type": "TemplateOutput",
"parameters": {},
"position": [900.0, 200.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 3,
"to_port": 0
},
{
"from_node": 2,
"from_port": 0,
"to_node": 4,
"to_port": 0
},
{
"from_node": 3,
"from_port": 0,
"to_node": 4,
"to_port": 1
},
{
"from_node": 4,
"from_port": 0,
"to_node": 5,
"to_port": 0
}
],
"midi_targets": [],
"output_node": 5
}
},
{
"id": 2,
"node_type": "AudioOutput",
"parameters": {},
"position": [700.0, 200.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
}
],
"midi_targets": [0],
"output_node": 2
}

View File

@ -0,0 +1,137 @@
{
"metadata": {
"name": "Sawtooth Bass",
"description": "Classic analog-style bass synth with sawtooth oscillator and resonant lowpass filter. Perfect for electronic music basslines.",
"author": "Lightningbeam",
"version": 1,
"tags": ["bass", "analog", "electronic", "mono"]
},
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"parameters": {},
"position": [100.0, 250.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"parameters": {},
"position": [300.0, 250.0]
},
{
"id": 2,
"node_type": "Oscillator",
"parameters": {
"0": 110.0,
"1": 0.8,
"2": 1.0
},
"position": [500.0, 150.0]
},
{
"id": 3,
"node_type": "Filter",
"parameters": {
"0": 800.0,
"1": 2.5,
"2": 0.0
},
"position": [700.0, 150.0]
},
{
"id": 4,
"node_type": "ADSR",
"parameters": {
"0": 0.005,
"1": 0.2,
"2": 0.3,
"3": 0.1
},
"position": [500.0, 300.0]
},
{
"id": 5,
"node_type": "ADSR",
"parameters": {
"0": 0.005,
"1": 0.15,
"2": 0.6,
"3": 0.2
},
"position": [700.0, 350.0]
},
{
"id": 6,
"node_type": "Gain",
"parameters": {
"0": 1.2
},
"position": [900.0, 200.0]
},
{
"id": 7,
"node_type": "AudioOutput",
"parameters": {},
"position": [1100.0, 200.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 4,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 5,
"to_port": 0
},
{
"from_node": 2,
"from_port": 0,
"to_node": 3,
"to_port": 0
},
{
"from_node": 4,
"from_port": 0,
"to_node": 3,
"to_port": 1
},
{
"from_node": 3,
"from_port": 0,
"to_node": 6,
"to_port": 0
},
{
"from_node": 5,
"from_port": 0,
"to_node": 6,
"to_port": 1
},
{
"from_node": 6,
"from_port": 0,
"to_node": 7,
"to_port": 0
}
],
"midi_targets": [0],
"output_node": 7
}

View File

@ -0,0 +1,176 @@
{
"metadata": {
"name": "Warm Pad",
"description": "Lush pad sound combining sawtooth and triangle waves with slow filter sweep and gentle attack. Ideal for ambient and cinematic music.",
"author": "Lightningbeam",
"version": 1,
"tags": ["pad", "ambient", "warm", "cinematic"]
},
"nodes": [
{
"id": 0,
"node_type": "MidiInput",
"parameters": {},
"position": [100.0, 300.0]
},
{
"id": 1,
"node_type": "MidiToCV",
"parameters": {},
"position": [300.0, 300.0]
},
{
"id": 2,
"node_type": "Oscillator",
"parameters": {
"0": 440.0,
"1": 0.5,
"2": 1.0
},
"position": [500.0, 150.0]
},
{
"id": 3,
"node_type": "Oscillator",
"parameters": {
"0": 440.0,
"1": 0.4,
"2": 3.0
},
"position": [500.0, 250.0]
},
{
"id": 4,
"node_type": "Mixer",
"parameters": {
"0": 0.5,
"1": 0.5,
"2": 0.0,
"3": 0.0
},
"position": [700.0, 200.0]
},
{
"id": 5,
"node_type": "Filter",
"parameters": {
"0": 1200.0,
"1": 1.0,
"2": 0.0
},
"position": [900.0, 200.0]
},
{
"id": 6,
"node_type": "ADSR",
"parameters": {
"0": 0.8,
"1": 1.0,
"2": 0.6,
"3": 1.5
},
"position": [700.0, 400.0]
},
{
"id": 7,
"node_type": "ADSR",
"parameters": {
"0": 0.5,
"1": 0.5,
"2": 0.8,
"3": 1.0
},
"position": [900.0, 400.0]
},
{
"id": 8,
"node_type": "Gain",
"parameters": {
"0": 0.8
},
"position": [1100.0, 250.0]
},
{
"id": 9,
"node_type": "AudioOutput",
"parameters": {},
"position": [1300.0, 250.0]
}
],
"connections": [
{
"from_node": 0,
"from_port": 0,
"to_node": 1,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 2,
"to_port": 0
},
{
"from_node": 1,
"from_port": 0,
"to_node": 3,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 6,
"to_port": 0
},
{
"from_node": 1,
"from_port": 1,
"to_node": 7,
"to_port": 0
},
{
"from_node": 2,
"from_port": 0,
"to_node": 4,
"to_port": 0
},
{
"from_node": 3,
"from_port": 0,
"to_node": 4,
"to_port": 1
},
{
"from_node": 4,
"from_port": 0,
"to_node": 5,
"to_port": 0
},
{
"from_node": 6,
"from_port": 0,
"to_node": 5,
"to_port": 1
},
{
"from_node": 5,
"from_port": 0,
"to_node": 8,
"to_port": 0
},
{
"from_node": 7,
"from_port": 0,
"to_node": 8,
"to_port": 1
},
{
"from_node": 8,
"from_port": 0,
"to_node": 9,
"to_port": 0
}
],
"midi_targets": [0],
"output_node": 9
}

View File

@ -1,7 +1,7 @@
use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use tauri::{Emitter, Manager};
#[derive(serde::Serialize)]
pub struct AudioFileMetadata {
@ -693,6 +693,204 @@ pub async fn graph_set_output_node(
}
}
// Preset management commands
#[tauri::command]
pub async fn graph_save_preset(
app_handle: tauri::AppHandle,
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
preset_name: String,
description: String,
tags: Vec<String>,
) -> Result<String, String> {
use std::fs;
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
// Get user presets directory
let app_data_dir = app_handle.path().app_data_dir()
.map_err(|e| format!("Failed to get app data directory: {}", e))?;
let presets_dir = app_data_dir.join("presets");
// Create presets directory if it doesn't exist
fs::create_dir_all(&presets_dir)
.map_err(|e| format!("Failed to create presets directory: {}", e))?;
// Create preset path
let filename = format!("{}.json", preset_name.replace(" ", "_"));
let preset_path = presets_dir.join(&filename);
let preset_path_str = preset_path.to_string_lossy().to_string();
// Send command to save preset
controller.graph_save_preset(
track_id,
preset_path_str.clone(),
preset_name,
description,
tags
);
Ok(preset_path_str)
} else {
Err("Audio not initialized".to_string())
}
}
#[tauri::command]
pub async fn graph_load_preset(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
preset_path: String,
) -> Result<(), String> {
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
// Send command to load preset
controller.graph_load_preset(track_id, preset_path);
Ok(())
} else {
Err("Audio not initialized".to_string())
}
}
#[derive(serde::Serialize)]
pub struct PresetInfo {
pub name: String,
pub path: String,
pub description: String,
pub author: String,
pub tags: Vec<String>,
pub is_factory: bool,
}
#[tauri::command]
pub async fn graph_list_presets(
app_handle: tauri::AppHandle,
) -> Result<Vec<PresetInfo>, String> {
use daw_backend::GraphPreset;
use std::fs;
let mut presets = Vec::new();
// Load factory presets from bundled assets
let factory_presets = [
"Basic_Sine.json",
"Sawtooth_Bass.json",
"Warm_Pad.json",
"Pluck.json",
"Poly_Synth.json",
];
for preset_file in &factory_presets {
// Try to load from resource directory
if let Ok(resource_dir) = app_handle.path().resource_dir() {
let factory_path = resource_dir.join("assets/factory_presets").join(preset_file);
if let Ok(json) = fs::read_to_string(&factory_path) {
if let Ok(preset) = GraphPreset::from_json(&json) {
presets.push(PresetInfo {
name: preset.metadata.name,
path: factory_path.to_string_lossy().to_string(),
description: preset.metadata.description,
author: preset.metadata.author,
tags: preset.metadata.tags,
is_factory: true,
});
}
}
}
}
// Load user presets
if let Ok(app_data_dir) = app_handle.path().app_data_dir() {
let user_presets_dir = app_data_dir.join("presets");
if user_presets_dir.exists() {
if let Ok(entries) = fs::read_dir(user_presets_dir) {
for entry in entries.flatten() {
if let Ok(path) = entry.path().canonicalize() {
if path.extension().and_then(|s| s.to_str()) == Some("json") {
if let Ok(json) = fs::read_to_string(&path) {
if let Ok(preset) = GraphPreset::from_json(&json) {
presets.push(PresetInfo {
name: preset.metadata.name,
path: path.to_string_lossy().to_string(),
description: preset.metadata.description,
author: preset.metadata.author,
tags: preset.metadata.tags,
is_factory: false,
});
}
}
}
}
}
}
}
}
Ok(presets)
}
#[tauri::command]
pub async fn graph_delete_preset(
preset_path: String,
) -> Result<(), String> {
use std::fs;
// Only allow deleting user presets (not factory presets)
if preset_path.contains("factory") || preset_path.contains("assets") {
return Err("Cannot delete factory presets".to_string());
}
fs::remove_file(&preset_path)
.map_err(|e| format!("Failed to delete preset: {}", e))?;
Ok(())
}
#[tauri::command]
pub async fn graph_get_state(
state: tauri::State<'_, Arc<Mutex<AudioState>>>,
track_id: u32,
) -> Result<String, String> {
use daw_backend::GraphPreset;
let mut audio_state = state.lock().unwrap();
if let Some(controller) = &mut audio_state.controller {
// Send a command to get the graph state
// For now, we'll use the preset serialization to get the graph
let temp_path = std::env::temp_dir().join(format!("temp_graph_state_{}.json", track_id));
let temp_path_str = temp_path.to_string_lossy().to_string();
controller.graph_save_preset(
track_id,
temp_path_str.clone(),
"temp".to_string(),
"".to_string(),
vec![]
);
// Give the audio thread time to process
std::thread::sleep(std::time::Duration::from_millis(50));
// Read the temp file
let json = match std::fs::read_to_string(&temp_path) {
Ok(json) => json,
Err(_) => {
// If file doesn't exist, graph is likely empty - return empty preset
let empty_preset = GraphPreset::new("empty");
empty_preset.to_json().unwrap_or_else(|_| "{}".to_string())
}
};
// Clean up temp file
let _ = std::fs::remove_file(&temp_path);
Ok(json)
} else {
Err("Audio not initialized".to_string())
}
}
#[derive(serde::Serialize, Clone)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {

View File

@ -222,6 +222,11 @@ pub fn run() {
audio::graph_disconnect,
audio::graph_set_parameter,
audio::graph_set_output_node,
audio::graph_save_preset,
audio::graph_load_preset,
audio::graph_list_presets,
audio::graph_delete_preset,
audio::graph_get_state,
])
// .manage(window_counter)
.build(tauri::generate_context!())

View File

@ -34,6 +34,9 @@
"icons/icon.icns", "icons/icon.icns",
"icons/icon.ico" "icons/icon.ico"
], ],
"resources": [
"assets/factory_presets/*"
],
"linux": { "linux": {
"appimage": { "appimage": {
"bundleMediaFramework": true, "bundleMediaFramework": true,

View File

@ -72,7 +72,7 @@ export const defaultLayouts = {
audioDaw: {
name: "Audio/DAW",
description: "Audio tracks prominent with mixer, node editor, and preset browser",
layout: {
type: "horizontal-grid",
percent: 75,
@ -85,7 +85,7 @@ export const defaultLayouts = {
{ type: "pane", name: "nodeEditor"}
]
},
{ type: "pane", name: "presetBrowser" }
]
}
},

View File

@ -6042,6 +6042,18 @@ async function renderMenu() {
}
updateMenu();
// Helper function to get the current MIDI track
function getCurrentMidiTrack() {
const activeLayer = context.activeObject?.activeLayer;
if (!activeLayer || !(activeLayer instanceof AudioTrack) || activeLayer.type !== 'midi') {
return null;
}
if (activeLayer.audioTrackId === null) {
return null;
}
return activeLayer.audioTrackId;
}
function nodeEditor() {
// Create container for the node editor
const container = document.createElement("div");
@ -6291,17 +6303,25 @@ function nodeEditor() {
// Send command to backend
// If parent node exists, add to VoiceAllocator template; otherwise add to main graph
const trackId = getCurrentMidiTrack();
if (trackId === null) {
console.error('No MIDI track selected');
showNodeEditorError(container, 'Please select a MIDI track first');
editor.removeNodeId(`node-${drawflowNodeId}`);
return;
}
const commandName = parentNodeId ? "graph_add_node_to_template" : "graph_add_node";
const commandArgs = parentNodeId
? {
trackId: trackId,
voiceAllocatorId: editor.getNodeFromId(parentNodeId).data.backendId,
nodeType: nodeType,
x: x,
y: y
}
: {
trackId: trackId,
nodeType: nodeType,
x: x,
y: y
@ -6318,14 +6338,17 @@ function nodeEditor() {
// If this is an AudioOutput node, automatically set it as the graph output
if (nodeType === "AudioOutput") {
console.log(`Setting node ${backendNodeId} as graph output`);
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_set_output_node", {
trackId: currentTrackId,
nodeId: backendNodeId
}).then(() => {
console.log("Output node set successfully");
}).catch(err => {
console.error("Failed to set output node:", err);
});
}
}
// If this is a VoiceAllocator, automatically create template I/O nodes inside it
@ -6477,14 +6500,17 @@ function nodeEditor() {
// Send to backend
if (nodeData.data.backendId !== null) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_set_parameter", {
trackId: currentTrackId,
nodeId: nodeData.data.backendId,
paramId: paramId,
value: value
}).catch(err => {
console.error("Failed to set parameter:", err);
});
}
}
}
});
@ -6632,48 +6658,54 @@ function nodeEditor() {
// Both nodes are inside the same VoiceAllocator - connect in template
const parentNode = editor.getNodeFromId(outputParent);
console.log(`Connecting in VoiceAllocator template ${parentNode.data.backendId}: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_connect_in_template", {
trackId: currentTrackId,
voiceAllocatorId: parentNode.data.backendId,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).then(() => {
console.log("Template connection successful");
}).catch(err => {
console.error("Failed to connect nodes in template:", err);
showError("Template connection failed: " + err);
// Remove the connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
});
}
} else {
// Normal connection in main graph
console.log(`Connecting: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_connect", {
trackId: currentTrackId,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).then(() => {
console.log("Connection successful");
}).catch(err => {
console.error("Failed to connect nodes:", err);
showError("Connection failed: " + err);
// Remove the connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
});
}
}
}
@ -6695,15 +6727,18 @@ function nodeEditor() {
// Send to backend
if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_disconnect", {
trackId: currentTrackId,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).catch(err => {
console.error("Failed to disconnect nodes:", err);
});
}
}
}
@ -6719,6 +6754,132 @@ function nodeEditor() {
}, 3000);
}
// Function to reload graph from backend
async function reloadGraph() {
if (!editor) return;
const trackId = getCurrentMidiTrack();
// Clear editor first
editor.clearModuleSelected();
editor.clear();
// If no MIDI track selected, just leave it cleared
if (trackId === null) {
console.log('No MIDI track selected, editor cleared');
return;
}
try {
const graphJson = await invoke('graph_get_state', { trackId });
const preset = JSON.parse(graphJson);
// If graph is empty (no nodes), just leave cleared
if (!preset.nodes || preset.nodes.length === 0) {
console.log('Graph is empty, editor cleared');
return;
}
// Rebuild from preset
const nodeMap = new Map(); // Maps backend node ID to Drawflow node ID
// Add all nodes
for (const serializedNode of preset.nodes) {
const nodeType = serializedNode.node_type;
const nodeDef = nodeTypes[nodeType];
if (!nodeDef) continue;
// Create node HTML
let html = `<div class="node-content"><div class="node-title">${nodeDef.name}</div>`;
for (const param of nodeDef.parameters) {
const value = serializedNode.parameters[param.id] || param.default;
html += `<div class="node-parameter">
<label>${param.name}</label>
<input type="range" data-param-id="${param.id}" min="${param.min}" max="${param.max}" step="${param.step || 0.01}" value="${value}" />
<span class="param-value">${value.toFixed(2)}</span>
</div>`;
}
html += `</div>`;
// Add node to Drawflow
const drawflowId = editor.addNode(
nodeType,
nodeDef.inputs.length,
nodeDef.outputs.length,
serializedNode.position[0],
serializedNode.position[1],
nodeType,
{ nodeType, backendId: serializedNode.id, parentNodeId: null },
html,
false
);
nodeMap.set(serializedNode.id, drawflowId);
// Style ports
setTimeout(() => styleNodePorts(drawflowId, nodeDef), 10);
// Wire up parameter controls
setTimeout(() => {
const nodeElement = container.querySelector(`#node-${drawflowId}`);
if (!nodeElement) return;
nodeElement.querySelectorAll('input[type="range"]').forEach(slider => {
const paramId = parseInt(slider.dataset.paramId);
const displaySpan = slider.nextElementSibling;
slider.addEventListener('input', (e) => {
const value = parseFloat(e.target.value);
if (displaySpan) {
const param = nodeDef.parameters.find(p => p.id === paramId);
displaySpan.textContent = value.toFixed(param?.unit === 'Hz' ? 0 : 2);
}
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId !== null) {
invoke("graph_set_parameter", {
trackId: currentTrackId,
nodeId: serializedNode.id,
paramId: paramId,
value: value
}).catch(err => {
console.error("Failed to set parameter:", err);
});
}
});
});
}, 100);
}
// Add all connections
for (const conn of preset.connections) {
const outputDrawflowId = nodeMap.get(conn.from_node);
const inputDrawflowId = nodeMap.get(conn.to_node);
if (outputDrawflowId && inputDrawflowId) {
// Drawflow uses 1-based port indexing
editor.addConnection(
outputDrawflowId,
inputDrawflowId,
`output_${conn.from_port + 1}`,
`input_${conn.to_port + 1}`
);
}
}
console.log('Graph reloaded from backend');
} catch (error) {
console.error('Failed to reload graph:', error);
showError(`Failed to reload graph: ${error}`);
}
}
// Store reload function in context so it can be called from preset browser
context.reloadNodeEditor = reloadGraph;
// Initial load of graph
setTimeout(() => reloadGraph(), 200);
return container;
}
@ -6882,6 +7043,296 @@ function pianoRoll() {
return canvas;
}
function presetBrowser() {
const container = document.createElement("div");
container.className = "preset-browser-pane";
container.innerHTML = `
<div class="preset-browser-header">
<h3>Instrument Presets</h3>
<button class="preset-btn preset-save-btn" title="Save current graph as preset">
<span>💾</span> Save Preset
</button>
</div>
<div class="preset-filter">
<input type="text" id="preset-search" placeholder="Search presets..." />
<select id="preset-tag-filter">
<option value="">All Tags</option>
</select>
</div>
<div class="preset-categories">
<div class="preset-category">
<h4>Factory Presets</h4>
<div class="preset-list" id="factory-preset-list">
<div class="preset-loading">Loading...</div>
</div>
</div>
<div class="preset-category">
<h4>User Presets</h4>
<div class="preset-list" id="user-preset-list">
<div class="preset-empty">No user presets yet</div>
</div>
</div>
</div>
`;
// Load presets after DOM insertion
setTimeout(async () => {
await loadPresetList(container);
// Set up save button handler
const saveBtn = container.querySelector('.preset-save-btn');
if (saveBtn) {
saveBtn.addEventListener('click', () => showSavePresetDialog(container));
}
// Set up search and filter
const searchInput = container.querySelector('#preset-search');
const tagFilter = container.querySelector('#preset-tag-filter');
if (searchInput) {
searchInput.addEventListener('input', () => filterPresets(container));
}
if (tagFilter) {
tagFilter.addEventListener('change', () => filterPresets(container));
}
}, 0);
return container;
}
async function loadPresetList(container) {
try {
const presets = await invoke('graph_list_presets');
const factoryList = container.querySelector('#factory-preset-list');
const userList = container.querySelector('#user-preset-list');
const tagFilter = container.querySelector('#preset-tag-filter');
// Collect all unique tags
const allTags = new Set();
presets.forEach(preset => {
preset.tags.forEach(tag => allTags.add(tag));
});
// Populate tag filter
if (tagFilter) {
allTags.forEach(tag => {
const option = document.createElement('option');
option.value = tag;
option.textContent = tag.charAt(0).toUpperCase() + tag.slice(1);
tagFilter.appendChild(option);
});
}
// Separate factory and user presets
const factoryPresets = presets.filter(p => p.is_factory);
const userPresets = presets.filter(p => !p.is_factory);
// Render factory presets
if (factoryList) {
if (factoryPresets.length === 0) {
factoryList.innerHTML = '<div class="preset-empty">No factory presets found</div>';
} else {
factoryList.innerHTML = factoryPresets.map(preset => createPresetItem(preset)).join('');
addPresetItemHandlers(factoryList);
}
}
// Render user presets
if (userList) {
if (userPresets.length === 0) {
userList.innerHTML = '<div class="preset-empty">No user presets yet</div>';
} else {
userList.innerHTML = userPresets.map(preset => createPresetItem(preset)).join('');
addPresetItemHandlers(userList);
}
}
} catch (error) {
console.error('Failed to load presets:', error);
const factoryList = container.querySelector('#factory-preset-list');
const userList = container.querySelector('#user-preset-list');
if (factoryList) factoryList.innerHTML = '<div class="preset-error">Failed to load presets</div>';
if (userList) userList.innerHTML = '';
}
}
function createPresetItem(preset) {
const tags = preset.tags.map(tag => `<span class="preset-tag">${tag}</span>`).join('');
const deleteBtn = preset.is_factory ? '' : '<button class="preset-delete-btn" title="Delete preset">🗑️</button>';
return `
<div class="preset-item" data-preset-path="${preset.path}" data-preset-tags="${preset.tags.join(',')}">
<div class="preset-item-header">
<span class="preset-name">${preset.name}</span>
${deleteBtn}
</div>
<div class="preset-description">${preset.description || 'No description'}</div>
<div class="preset-tags">${tags}</div>
<div class="preset-author">by ${preset.author || 'Unknown'}</div>
</div>
`;
}
function addPresetItemHandlers(listElement) {
// Load preset on click
listElement.querySelectorAll('.preset-item').forEach(item => {
item.addEventListener('click', async (e) => {
// Don't trigger if clicking delete button
if (e.target.classList.contains('preset-delete-btn')) return;
const presetPath = item.dataset.presetPath;
await loadPreset(presetPath);
});
});
// Delete preset on delete button click
listElement.querySelectorAll('.preset-delete-btn').forEach(btn => {
btn.addEventListener('click', async (e) => {
e.stopPropagation();
const item = btn.closest('.preset-item');
const presetPath = item.dataset.presetPath;
const presetName = item.querySelector('.preset-name').textContent;
if (confirm(`Delete preset "${presetName}"?`)) {
try {
await invoke('graph_delete_preset', { presetPath });
// Reload preset list
const container = btn.closest('.preset-browser-pane');
await loadPresetList(container);
} catch (error) {
alert(`Failed to delete preset: ${error}`);
}
}
});
});
}
async function loadPreset(presetPath) {
const trackId = getCurrentMidiTrack();
if (trackId === null) {
alert('Please select a MIDI track first');
return;
}
try {
await invoke('graph_load_preset', {
trackId: trackId,
presetPath
});
// Refresh the node editor to show the loaded preset
await context.reloadNodeEditor?.();
console.log('Preset loaded successfully');
} catch (error) {
alert(`Failed to load preset: ${error}`);
}
}
function showSavePresetDialog(container) {
const currentTrackId = getCurrentMidiTrack();
if (currentTrackId === null) {
alert('Please select a MIDI track first');
return;
}
// Create modal dialog
const dialog = document.createElement('div');
dialog.className = 'modal-overlay';
dialog.innerHTML = `
<div class="modal-dialog">
<h3>Save Preset</h3>
<form id="save-preset-form">
<div class="form-group">
<label>Preset Name</label>
<input type="text" id="preset-name" required placeholder="My Awesome Synth" />
</div>
<div class="form-group">
<label>Description</label>
<textarea id="preset-description" placeholder="Describe the sound..." rows="3"></textarea>
</div>
<div class="form-group">
<label>Tags (comma-separated)</label>
<input type="text" id="preset-tags" placeholder="bass, lead, pad" />
</div>
<div class="form-actions">
<button type="button" class="btn-cancel">Cancel</button>
<button type="submit" class="btn-primary">Save</button>
</div>
</form>
</div>
`;
document.body.appendChild(dialog);
// Focus name input
setTimeout(() => dialog.querySelector('#preset-name')?.focus(), 100);
// Handle cancel
dialog.querySelector('.btn-cancel').addEventListener('click', () => {
dialog.remove();
});
// Handle save
dialog.querySelector('#save-preset-form').addEventListener('submit', async (e) => {
e.preventDefault();
const name = dialog.querySelector('#preset-name').value.trim();
const description = dialog.querySelector('#preset-description').value.trim();
const tagsInput = dialog.querySelector('#preset-tags').value.trim();
const tags = tagsInput ? tagsInput.split(',').map(t => t.trim()).filter(t => t) : [];
if (!name) {
alert('Please enter a preset name');
return;
}
try {
await invoke('graph_save_preset', {
trackId: currentTrackId,
presetName: name,
description,
tags
});
dialog.remove();
// Reload preset list
await loadPresetList(container);
alert(`Preset "${name}" saved successfully!`);
} catch (error) {
alert(`Failed to save preset: ${error}`);
}
});
// Close on background click
dialog.addEventListener('click', (e) => {
if (e.target === dialog) {
dialog.remove();
}
});
}
function filterPresets(container) {
const searchTerm = container.querySelector('#preset-search')?.value.toLowerCase() || '';
const selectedTag = container.querySelector('#preset-tag-filter')?.value || '';
const allItems = container.querySelectorAll('.preset-item');
allItems.forEach(item => {
const name = item.querySelector('.preset-name').textContent.toLowerCase();
const description = item.querySelector('.preset-description').textContent.toLowerCase();
const tags = item.dataset.presetTags.split(',');
const matchesSearch = !searchTerm || name.includes(searchTerm) || description.includes(searchTerm);
const matchesTag = !selectedTag || tags.includes(selectedTag);
item.style.display = (matchesSearch && matchesTag) ? 'block' : 'none';
});
}
const panes = {
stage: {
name: "stage",
@ -6919,6 +7370,10 @@ const panes = {
name: "node-editor",
func: nodeEditor,
},
presetBrowser: {
name: "preset-browser",
func: presetBrowser,
},
};
/**
@ -7179,6 +7634,9 @@ async function addEmptyMIDITrack() {
context.timelineWidget.requestRedraw();
}
// Refresh node editor to show empty graph
setTimeout(() => context.reloadNodeEditor?.(), 100);
console.log('Empty MIDI track created:', trackName, 'with ID:', newMIDITrack.audioTrackId);
} catch (error) {
console.error('Failed to create empty MIDI track:', error);

View File

@ -1307,3 +1307,287 @@ button {
0%, 70% { opacity: 1; }
100% { opacity: 0; }
}
/* Preset Browser Pane Styling */
.preset-browser-pane {
display: flex;
flex-direction: column;
height: 100%;
background: #1e1e1e;
color: #ddd;
overflow: hidden;
}
.preset-browser-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 12px 16px;
background: #252525;
border-bottom: 1px solid #3d3d3d;
}
.preset-browser-header h3 {
margin: 0;
font-size: 16px;
font-weight: 500;
color: #fff;
}
.preset-btn {
background: #4CAF50;
color: white;
border: none;
padding: 6px 12px;
border-radius: 4px;
cursor: pointer;
font-size: 13px;
display: flex;
align-items: center;
gap: 6px;
transition: background 0.2s;
}
.preset-btn:hover {
background: #45a049;
}
.preset-btn span {
font-size: 16px;
}
.preset-filter {
padding: 12px 16px;
background: #252525;
border-bottom: 1px solid #3d3d3d;
display: flex;
gap: 8px;
}
.preset-filter input,
.preset-filter select {
flex: 1;
background: #1e1e1e;
color: #ddd;
border: 1px solid #3d3d3d;
padding: 6px 10px;
border-radius: 4px;
font-size: 13px;
}
.preset-filter input:focus,
.preset-filter select:focus {
outline: none;
border-color: #4CAF50;
}
.preset-categories {
flex: 1;
overflow-y: auto;
padding: 12px;
}
.preset-category {
margin-bottom: 24px;
}
.preset-category h4 {
margin: 0 0 12px 0;
font-size: 13px;
font-weight: 600;
color: #999;
text-transform: uppercase;
letter-spacing: 0.5px;
}
.preset-list {
display: flex;
flex-direction: column;
gap: 8px;
}
.preset-item {
background: #252525;
border: 1px solid #3d3d3d;
border-radius: 4px;
padding: 10px 12px;
cursor: pointer;
transition: all 0.2s;
}
.preset-item:hover {
background: #2d2d2d;
border-color: #4CAF50;
}
.preset-item-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 6px;
}
.preset-name {
font-size: 14px;
font-weight: 500;
color: #fff;
}
.preset-delete-btn {
background: transparent;
border: none;
color: #f44336;
cursor: pointer;
font-size: 16px;
padding: 2px 6px;
border-radius: 3px;
transition: background 0.2s;
}
.preset-delete-btn:hover {
background: rgba(244, 67, 54, 0.2);
}
.preset-description {
font-size: 12px;
color: #999;
margin-bottom: 6px;
line-height: 1.4;
}
.preset-tags {
display: flex;
flex-wrap: wrap;
gap: 4px;
margin-bottom: 4px;
}
.preset-tag {
background: #3d3d3d;
color: #aaa;
font-size: 10px;
padding: 2px 6px;
border-radius: 3px;
text-transform: lowercase;
}
.preset-author {
font-size: 11px;
color: #777;
font-style: italic;
}
.preset-loading,
.preset-empty,
.preset-error {
padding: 20px;
text-align: center;
color: #777;
font-size: 13px;
}
.preset-error {
color: #f44336;
}
/* Modal Dialog for Save Preset */
.modal-overlay {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.7);
display: flex;
align-items: center;
justify-content: center;
z-index: 10000;
}
.modal-dialog {
background: #252525;
border: 1px solid #3d3d3d;
border-radius: 6px;
padding: 24px;
min-width: 400px;
max-width: 500px;
box-shadow: 0 4px 20px rgba(0, 0, 0, 0.5);
}
.modal-dialog h3 {
margin: 0 0 20px 0;
font-size: 18px;
color: #fff;
}
.form-group {
margin-bottom: 16px;
}
.form-group label {
display: block;
margin-bottom: 6px;
font-size: 13px;
color: #aaa;
font-weight: 500;
}
.form-group input,
.form-group textarea {
width: 100%;
background: #1e1e1e;
color: #ddd;
border: 1px solid #3d3d3d;
padding: 8px 10px;
border-radius: 4px;
font-size: 13px;
font-family: inherit;
box-sizing: border-box;
}
.form-group input:focus,
.form-group textarea:focus {
outline: none;
border-color: #4CAF50;
}
.form-group textarea {
resize: vertical;
min-height: 60px;
}
.form-actions {
display: flex;
justify-content: flex-end;
gap: 10px;
margin-top: 20px;
}
.btn-cancel,
.btn-primary {
padding: 8px 16px;
border-radius: 4px;
font-size: 13px;
cursor: pointer;
border: none;
font-weight: 500;
transition: background 0.2s;
}
.btn-cancel {
background: #3d3d3d;
color: #ddd;
}
.btn-cancel:hover {
background: #4d4d4d;
}
.btn-primary {
background: #4CAF50;
color: white;
}
.btn-primary:hover {
background: #45a049;
}

View File

@ -2839,6 +2839,9 @@ class TimelineWindowV2 extends Widget {
// Clear selections when selecting layer
this.context.selection = []
this.context.shapeselection = []
// Clear node editor when selecting a non-audio layer
setTimeout(() => this.context.reloadNodeEditor?.(), 50);
} else if (track.type === 'shape') {
// Find the layer this shape belongs to and select it
for (let i = 0; i < this.context.activeObject.allLayers.length; i++) {
@ -2862,6 +2865,11 @@ class TimelineWindowV2 extends Widget {
this.context.activeObject.activeLayer = track.object
this.context.selection = []
this.context.shapeselection = []
// If this is a MIDI track, reload the node editor
if (track.object.type === 'midi') {
setTimeout(() => this.context.reloadNodeEditor?.(), 50);
}
}
// Update the stage UI to reflect selection changes