diff --git a/daw-backend/src/audio/engine.rs b/daw-backend/src/audio/engine.rs
index e36f0ff..77344f4 100644
--- a/daw-backend/src/audio/engine.rs
+++ b/daw-backend/src/audio/engine.rs
@@ -1003,6 +1003,8 @@ impl Engine {
                 if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
                     track.instrument_graph = Some(graph);
                     let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
+                    // Emit preset loaded event after everything is loaded
+                    let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
                 }
             }
             Err(e) => {
diff --git a/daw-backend/src/audio/node_graph/graph.rs b/daw-backend/src/audio/node_graph/graph.rs
index a29a4b7..43a07da 100644
--- a/daw-backend/src/audio/node_graph/graph.rs
+++ b/daw-backend/src/audio/node_graph/graph.rs
@@ -861,7 +861,9 @@ impl InstrumentGraph {
                 } else if let Some(ref path) = file_path {
                     // Fall back to loading from file (resolve path relative to preset)
                     let resolved_path = resolve_sample_path(path);
-                    let _ = sampler_node.load_sample_from_file(&resolved_path);
+                    if let Err(e) = sampler_node.load_sample_from_file(&resolved_path) {
+                        eprintln!("Failed to load sample from {}: {}", resolved_path, e);
+                    }
                 }
             }
         }
@@ -901,14 +903,16 @@ impl InstrumentGraph {
                 } else if let Some(ref path) = layer.file_path {
                     // Fall back to loading from file (resolve path relative to preset)
                     let resolved_path = resolve_sample_path(path);
-                    let _ = multi_sampler_node.load_layer_from_file(
+                    if let Err(e) = multi_sampler_node.load_layer_from_file(
                         &resolved_path,
                         layer.key_min,
                         layer.key_max,
                         layer.root_key,
                         layer.velocity_min,
                         layer.velocity_max,
-                    );
+                    ) {
+                        eprintln!("Failed to load sample layer from {}: {}", resolved_path, e);
+                    }
                 }
             }
         }
diff --git a/daw-backend/src/command/types.rs b/daw-backend/src/command/types.rs
index a7cdb28..302da08 100644
--- a/daw-backend/src/command/types.rs
+++ b/daw-backend/src/command/types.rs
@@ -196,6 +196,8 @@ pub enum AudioEvent {
     GraphConnectionError(TrackId, String),
     /// Graph state changed (for full UI sync)
     GraphStateChanged(TrackId),
+    /// Preset fully loaded (track_id) - emitted after all nodes and samples are loaded
+    GraphPresetLoaded(TrackId),
 }
 
 /// Synchronous queries sent from UI thread to audio thread
diff --git a/src-tauri/src/audio.rs b/src-tauri/src/audio.rs
index a24902e..b44d6c0 100644
--- a/src-tauri/src/audio.rs
+++ b/src-tauri/src/audio.rs
@@ -90,6 +90,9 @@ impl EventEmitter for TauriEventEmitter {
             AudioEvent::GraphStateChanged(track_id) => {
                 SerializedAudioEvent::GraphStateChanged { track_id }
             }
+            AudioEvent::GraphPresetLoaded(track_id) => {
+                SerializedAudioEvent::GraphPresetLoaded { track_id }
+            }
             _ => return, // Ignore other event types for now
         };
 
@@ -999,6 +1002,7 @@ pub async fn multi_sampler_get_layers(
     track_id: u32,
     node_id: u32,
 ) -> Result<Vec<LayerInfo>, String> {
+    eprintln!("[multi_sampler_get_layers] FUNCTION CALLED with track_id: {}, node_id: {}", track_id, node_id);
     use daw_backend::GraphPreset;
 
     let mut audio_state = state.lock().unwrap();
@@ -1011,6 +1015,7 @@ pub async fn multi_sampler_get_layers(
             .as_nanos();
         let temp_path = std::env::temp_dir().join(format!("temp_layers_query_{}_{}_{}.json", track_id, node_id, timestamp));
         let temp_path_str = temp_path.to_string_lossy().to_string();
+        eprintln!("[multi_sampler_get_layers] Temp path: {}", temp_path_str);
 
         controller.graph_save_preset(
             track_id,
@@ -1024,6 +1029,7 @@ pub async fn multi_sampler_get_layers(
         std::thread::sleep(std::time::Duration::from_millis(50));
 
         // Read the temp file and parse it
+        eprintln!("[multi_sampler_get_layers] Reading temp file...");
file..."); match std::fs::read_to_string(&temp_path) { Ok(json) => { // Clean up temp file @@ -1036,10 +1042,16 @@ pub async fn multi_sampler_get_layers( }; // Find the node with the matching ID + eprintln!("[multi_sampler_get_layers] Looking for node_id: {}", node_id); + eprintln!("[multi_sampler_get_layers] Available nodes: {:?}", preset.nodes.iter().map(|n| (n.id, &n.node_type)).collect::>()); + if let Some(node) = preset.nodes.iter().find(|n| n.id == node_id) { + eprintln!("[multi_sampler_get_layers] Found node: {} type: {}", node.id, node.node_type); if let Some(ref sample_data) = node.sample_data { + eprintln!("[multi_sampler_get_layers] Node has sample_data"); // Check if it's a MultiSampler if let daw_backend::audio::node_graph::preset::SampleData::MultiSampler { layers } = sample_data { + eprintln!("[multi_sampler_get_layers] Returning {} layers", layers.len()); return Ok(layers.iter().map(|layer| LayerInfo { file_path: layer.file_path.clone().unwrap_or_default(), key_min: layer.key_min, @@ -1048,15 +1060,25 @@ pub async fn multi_sampler_get_layers( velocity_min: layer.velocity_min, velocity_max: layer.velocity_max, }).collect()); + } else { + eprintln!("[multi_sampler_get_layers] sample_data is not MultiSampler type"); } + } else { + eprintln!("[multi_sampler_get_layers] Node has no sample_data"); } + } else { + eprintln!("[multi_sampler_get_layers] Node not found"); } Ok(Vec::new()) } - Err(_) => Ok(Vec::new()), // Return empty list if file doesn't exist + Err(e) => { + eprintln!("[multi_sampler_get_layers] Failed to read temp file: {}", e); + Ok(Vec::new()) // Return empty list if file doesn't exist + } } } else { + eprintln!("[multi_sampler_get_layers] Audio not initialized"); Err("Audio not initialized".to_string()) } } @@ -1122,6 +1144,7 @@ pub enum SerializedAudioEvent { GraphNodeAdded { track_id: u32, node_id: u32, node_type: String }, GraphConnectionError { track_id: u32, message: String }, GraphStateChanged { track_id: u32 }, + GraphPresetLoaded { track_id: u32 }, } // audio_get_events command removed - events are now pushed via Tauri event system diff --git a/src/assets/instruments/keyboards/piano/piano.json b/src/assets/instruments/keyboards/piano/piano.json index e99fef1..ff2d1ca 100644 --- a/src/assets/instruments/keyboards/piano/piano.json +++ b/src/assets/instruments/keyboards/piano/piano.json @@ -23,7 +23,7 @@ "parameters": { "0": 1.0, "1": 0.001, - "2": 0.5, + "2": 0.25, "3": 0.0 }, "sample_data": { diff --git a/src/main.js b/src/main.js index fea0971..572f59d 100644 --- a/src/main.js +++ b/src/main.js @@ -1202,6 +1202,11 @@ async function handleAudioEvent(event) { context.recordingTrackId = null; context.recordingClipId = null; break; + + case 'GraphPresetLoaded': + // Preset loaded - layers are already populated during graph reload + console.log('GraphPresetLoaded event received for track:', event.track_id); + break; } } @@ -7219,6 +7224,7 @@ function nodeEditor() { // Rebuild from preset const nodeMap = new Map(); // Maps backend node ID to Drawflow node ID + const setupPromises = []; // Track async setup operations // Add all nodes for (const serializedNode of preset.nodes) { @@ -7226,17 +7232,9 @@ function nodeEditor() { const nodeDef = nodeTypes[nodeType]; if (!nodeDef) continue; - // Create node HTML - let html = `
-          ${nodeDef.name}
-          `;
-        for (const param of nodeDef.parameters) {
-          const value = serializedNode.parameters[param.id] || param.default;
-          html += `
-
-
-
-            ${value.toFixed(2)}
-          `;
-        }
-        html += ``;
+        // Create node HTML using the node definition's getHTML function
+        // Use backend node ID as the nodeId for unique element IDs
+        const html = nodeDef.getHTML(serializedNode.id);
 
        // Add node to Drawflow
        const drawflowId = editor.addNode(
@@ -7253,39 +7251,241 @@ function nodeEditor() {
 
        nodeMap.set(serializedNode.id, drawflowId);
 
-        // Style ports
-        setTimeout(() => styleNodePorts(drawflowId, nodeDef), 10);
+        // Style ports (as Promise)
+        setupPromises.push(new Promise(resolve => {
+          setTimeout(() => {
+            styleNodePorts(drawflowId, nodeDef);
+            resolve();
+          }, 10);
+        }));
 
-        // Wire up parameter controls
-        setTimeout(() => {
+        // Wire up parameter controls and set values from preset (as Promise)
+        setupPromises.push(new Promise(resolve => {
+          setTimeout(() => {
          const nodeElement = container.querySelector(`#node-${drawflowId}`);
          if (!nodeElement) return;
 
+          // Set parameter values from preset
          nodeElement.querySelectorAll('input[type="range"]').forEach(slider => {
-            const paramId = parseInt(slider.dataset.paramId);
-            const displaySpan = slider.nextElementSibling;
+            const paramId = parseInt(slider.dataset.param);
+            const value = serializedNode.parameters[paramId];
+            if (value !== undefined) {
+              slider.value = value;
+              // Update display span
+              const param = nodeDef.parameters.find(p => p.id === paramId);
+              const displaySpan = slider.previousElementSibling?.querySelector('span');
+              if (displaySpan && param) {
+                displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2) + (param.unit ? ` ${param.unit}` : '');
+              }
+            }
+          });
 
-            slider.addEventListener('input', (e) => {
-              const value = parseFloat(e.target.value);
-              if (displaySpan) {
-                const param = nodeDef.parameters.find(p => p.id === paramId);
-                displaySpan.textContent = value.toFixed(param?.unit === 'Hz' ? 0 : 2);
+          // Set up event handlers for buttons
+
+          // Handle Load Sample button for SimpleSampler
+          const loadSampleBtn = nodeElement.querySelector(".load-sample-btn");
+          if (loadSampleBtn) {
+            loadSampleBtn.addEventListener("mousedown", (e) => e.stopPropagation());
+            loadSampleBtn.addEventListener("pointerdown", (e) => e.stopPropagation());
+            loadSampleBtn.addEventListener("click", async (e) => {
+              e.stopPropagation();
+
+              const nodeData = editor.getNodeFromId(drawflowId);
+              if (!nodeData || nodeData.data.backendId === null) {
+                showError("Node not yet created on backend");
+                return;
              }
 
              const currentTrackId = getCurrentMidiTrack();
-              if (currentTrackId !== null) {
-                invoke("graph_set_parameter", {
-                  trackId: currentTrackId,
-                  nodeId: serializedNode.id,
-                  paramId: paramId,
-                  value: value
-                }).catch(err => {
-                  console.error("Failed to set parameter:", err);
+              if (currentTrackId === null) {
+                showError("No MIDI track selected");
+                return;
+              }
+
+              try {
+                const filePath = await openFileDialog({
+                  title: "Load Audio Sample",
+                  filters: [{
+                    name: "Audio Files",
+                    extensions: audioExtensions
+                  }]
                });
+
+                if (filePath) {
+                  await invoke("sampler_load_sample", {
+                    trackId: currentTrackId,
+                    nodeId: nodeData.data.backendId,
+                    filePath: filePath
+                  });
+
+                  // Update UI to show filename
+                  const sampleInfo = nodeElement.querySelector(`#sample-info-${drawflowId}`);
+                  if (sampleInfo) {
+                    const filename = filePath.split('/').pop().split('\\').pop();
+                    sampleInfo.textContent = filename;
+                  }
+                }
+              } catch (err) {
+                console.error("Failed to load sample:", err);
+                showError(`Failed to load sample: ${err}`);
              }
            });
-          });
+          }
+
+          // Handle Add Layer button for MultiSampler
+          const addLayerBtn = nodeElement.querySelector(".add-layer-btn");
+          if (addLayerBtn) {
+            addLayerBtn.addEventListener("mousedown", (e) => e.stopPropagation());
+            addLayerBtn.addEventListener("pointerdown", (e) => e.stopPropagation());
+            addLayerBtn.addEventListener("click", async (e) => {
+              e.stopPropagation();
+
+              const nodeData = editor.getNodeFromId(drawflowId);
+              if (!nodeData || nodeData.data.backendId === null) {
+                showError("Node not yet created on backend");
+                return;
+              }
+
+              const currentTrackId = getCurrentMidiTrack();
+              if (currentTrackId === null) {
+                showError("No MIDI track selected");
+                return;
+              }
+
+              try {
+                const filePath = await openFileDialog({
+                  title: "Add Sample Layer",
+                  filters: [{
+                    name: "Audio Files",
+                    extensions: audioExtensions
+                  }]
+                });
+
+                if (filePath) {
+                  // Show dialog to configure layer mapping
+                  const layerConfig = await showLayerConfigDialog(filePath);
+
+                  if (layerConfig) {
+                    await invoke("multi_sampler_add_layer", {
+                      trackId: currentTrackId,
+                      nodeId: nodeData.data.backendId,
+                      filePath: filePath,
+                      keyMin: layerConfig.keyMin,
+                      keyMax: layerConfig.keyMax,
+                      rootKey: layerConfig.rootKey,
+                      velocityMin: layerConfig.velocityMin,
+                      velocityMax: layerConfig.velocityMax
+                    });
+
+                    // Wait a bit for the audio thread to process the add command
+                    await new Promise(resolve => setTimeout(resolve, 100));
+
+                    // Refresh the layers list
+                    await refreshSampleLayersList(drawflowId);
+                  }
+                }
+              } catch (err) {
+                console.error("Failed to add layer:", err);
+                showError(`Failed to add layer: ${err}`);
+              }
+            });
+          }
+
+          // For MultiSampler nodes, populate the layers table from preset data
+          if (nodeType === 'MultiSampler') {
+            console.log(`[reloadGraph] Found MultiSampler node ${drawflowId}, sample_data:`, serializedNode.sample_data);
+            if (serializedNode.sample_data) {
+              console.log(`[reloadGraph] sample_data.type:`, serializedNode.sample_data.type);
+              console.log(`[reloadGraph] sample_data keys:`, Object.keys(serializedNode.sample_data));
+            }
+          }
+
+          if (nodeType === 'MultiSampler' && serializedNode.sample_data && serializedNode.sample_data.type === 'multi_sampler') {
+            console.log(`[reloadGraph] Condition met for node ${drawflowId}, looking for layers list element with backend ID ${serializedNode.id}`);
+            // Use backend ID (serializedNode.id) since that's what was used in getHTML
+            const layersList = nodeElement.querySelector(`#sample-layers-list-${serializedNode.id}`);
+            const layersContainer = nodeElement.querySelector(`#sample-layers-container-${serializedNode.id}`);
+            console.log(`[reloadGraph] layersList:`, layersList);
+            console.log(`[reloadGraph] layersContainer:`, layersContainer);
+
+            if (layersList) {
+              const layers = serializedNode.sample_data.layers || [];
+              console.log(`[reloadGraph] Populating ${layers.length} layers for node ${drawflowId}`);
+
+              // Prevent scroll events from bubbling to canvas
+              if (layersContainer && !layersContainer.dataset.scrollListenerAdded) {
+                layersContainer.addEventListener('wheel', (e) => {
+                  e.stopPropagation();
+                }, { passive: false });
+                layersContainer.dataset.scrollListenerAdded = 'true';
+              }
+
+              if (layers.length === 0) {
+                layersList.innerHTML = 'No layers loaded';
+              } else {
+                layersList.innerHTML = layers.map((layer, index) => {
+                  const filename = layer.file_path.split('/').pop().split('\\').pop();
+                  const keyRange = `${midiToNoteName(layer.key_min)}-${midiToNoteName(layer.key_max)}`;
+                  const rootNote = midiToNoteName(layer.root_key);
+                  const velRange = `${layer.velocity_min}-${layer.velocity_max}`;
+
+                  return `
+
+                      ${filename}
+                      ${keyRange}
+                      ${rootNote}
+                      ${velRange}
+
+
+
+
+                  `;
+                }).join('');
+
+                // Set up event handlers for edit/delete buttons
+                layersList.querySelectorAll('.btn-edit-layer').forEach(btn => {
+                  btn.addEventListener('click', async (e) => {
+                    e.stopPropagation();
+                    const drawflowNodeId = parseInt(btn.dataset.drawflowNode);
+                    const layerIndex = parseInt(btn.dataset.index);
+                    const layer = layers[layerIndex];
+                    await showLayerEditDialog(drawflowNodeId, layerIndex, layer);
+                  });
+                });
+
+                layersList.querySelectorAll('.btn-delete-layer').forEach(btn => {
+                  btn.addEventListener('click', async (e) => {
+                    e.stopPropagation();
+                    const drawflowNodeId = parseInt(btn.dataset.drawflowNode);
+                    const layerIndex = parseInt(btn.dataset.index);
+                    if (confirm('Delete this sample layer?')) {
+                      const nodeData = editor.getNodeFromId(drawflowNodeId);
+                      const currentTrackId = getCurrentMidiTrack();
+                      if (nodeData && currentTrackId !== null) {
+                        try {
+                          await invoke("multi_sampler_remove_layer", {
+                            trackId: currentTrackId,
+                            nodeId: nodeData.data.backendId,
+                            layerIndex: layerIndex
+                          });
+                          await refreshSampleLayersList(drawflowNodeId);
+                        } catch (err) {
+                          showError(`Failed to remove layer: ${err}`);
+                        }
+                      }
+                    }
+                  });
+                });
+              }
+            }
+          }
+
+          resolve();
          }, 100);
+        }));
      }
 
      // Add all connections
@@ -7304,6 +7504,9 @@ function nodeEditor() {
        }
      }
 
+      // Wait for all node setup operations to complete
+      await Promise.all(setupPromises);
+
      console.log('Graph reloaded from backend');
    } catch (error) {
      console.error('Failed to reload graph:', error);
@@ -7312,7 +7515,15 @@ function nodeEditor() {
  }
 
  // Store reload function in context so it can be called from preset browser
-  context.reloadNodeEditor = reloadGraph;
+  // Wrap it to track the promise
+  context.reloadNodeEditor = async () => {
+    context.reloadGraphPromise = reloadGraph();
+    await context.reloadGraphPromise;
+    context.reloadGraphPromise = null;
+  };
+
+  // Store refreshSampleLayersList in context so it can be called from event handlers
+  context.refreshSampleLayersList = refreshSampleLayersList;
 
  // Initial load of graph
  setTimeout(() => reloadGraph(), 200);
diff --git a/src/styles.css b/src/styles.css
index c09c4a1..1746182 100644
--- a/src/styles.css
+++ b/src/styles.css
@@ -1337,6 +1337,7 @@ button {
 
 /* Wider nodes for nodes with sample layers */
 .drawflow .drawflow-node:has(.sample-layers-container) {
+  width: 296px !important; /* Fixed width to prevent dragging issues with table layout */
   min-width: 296px !important; /* 280px content + 8px padding on each side */
 }
 
@@ -1903,6 +1904,7 @@ button {
   border: 1px solid #444;
   border-radius: 3px;
   background: #2a2a2a;
+  padding-right: 4px; /* Space to prevent scrollbar overlap */
 }
 
 .sample-layers-table {
@@ -1928,11 +1930,11 @@ button {
   font-size: 9px;
 }
 
-.sample-layers-table th:nth-child(1) { width: 28%; } /* File */
-.sample-layers-table th:nth-child(2) { width: 24%; } /* Range */
-.sample-layers-table th:nth-child(3) { width: 12%; } /* Root */
+.sample-layers-table th:nth-child(1) { width: 24%; } /* File */
+.sample-layers-table th:nth-child(2) { width: 20%; } /* Range */
+.sample-layers-table th:nth-child(3) { width: 10%; } /* Root */
 .sample-layers-table th:nth-child(4) { width: 14%; } /* Vel */
-.sample-layers-table th:nth-child(5) { width: 22%; } /* Actions */
+.sample-layers-table th:nth-child(5) { width: 32%; } /* Actions - wider to avoid scrollbar */
 
 .sample-layers-table td {
   padding: 3px;