Give metatracks explicit node graphs

This commit is contained in:
Skyler Lehmkuhl 2026-03-10 20:20:46 -04:00
parent f9b62bb090
commit 7a3f522735
16 changed files with 1381 additions and 385 deletions

446
daw-backend/Cargo.lock generated
View File

@ -31,9 +31,9 @@ dependencies = [
[[package]]
name = "alsa"
version = "0.9.1"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed7572b7ba83a31e20d1b48970ee402d2e3e0537dcfe0a3ff4d6eb7508617d43"
checksum = "7c88dbbce13b232b26250e1e2e6ac18b6a891a646b8148285036ebce260ac5c3"
dependencies = [
"alsa-sys",
"bitflags 2.9.4",
@ -69,6 +69,13 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "beamdsp"
version = "0.1.0"
dependencies = [
"serde",
]
[[package]]
name = "bindgen"
version = "0.72.1"
@ -99,6 +106,15 @@ version = "2.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
[[package]]
name = "block2"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5"
dependencies = [
"objc2",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@ -139,8 +155,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
dependencies = [
"find-msvc-tools",
"jobserver",
"libc",
"shlex",
]
@ -176,6 +190,15 @@ dependencies = [
"libloading",
]
[[package]]
name = "cmake"
version = "0.1.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0"
dependencies = [
"cc",
]
[[package]]
name = "combine"
version = "4.6.7"
@ -217,22 +240,16 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "coreaudio-rs"
version = "0.11.3"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "321077172d79c662f64f5071a03120748d5bb652f5231570141be24cfcd2bace"
checksum = "1aae284fbaf7d27aa0e292f7677dfbe26503b0d555026f702940805a630eac17"
dependencies = [
"bitflags 1.3.2",
"core-foundation-sys",
"coreaudio-sys",
]
[[package]]
name = "coreaudio-sys"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ceec7a6067e62d6f931a2baf6f3a751f4a892595bcec1461a3c94ef9949864b6"
dependencies = [
"bindgen",
"libc",
"objc2-audio-toolbox",
"objc2-core-audio",
"objc2-core-audio-types",
"objc2-core-foundation",
]
[[package]]
@ -257,12 +274,11 @@ dependencies = [
[[package]]
name = "cpal"
version = "0.15.3"
version = "0.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "873dab07c8f743075e57f524c583985fbaf745602acbe916a01539364369a779"
checksum = "5b1f9c7312f19fc2fa12fd7acaf38de54e8320ba10d1a02dcbe21038def51ccb"
dependencies = [
"alsa 0.9.1",
"core-foundation-sys",
"alsa 0.10.0",
"coreaudio-rs",
"dasp_sample",
"jni",
@ -271,11 +287,19 @@ dependencies = [
"mach2",
"ndk",
"ndk-context",
"oboe",
"num-derive",
"num-traits",
"objc2",
"objc2-audio-toolbox",
"objc2-avf-audio",
"objc2-core-audio",
"objc2-core-audio-types",
"objc2-core-foundation",
"objc2-foundation",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows 0.54.0",
"windows 0.62.2",
]
[[package]]
@ -447,6 +471,7 @@ name = "daw-backend"
version = "0.1.0"
dependencies = [
"base64",
"beamdsp",
"cpal",
"crossterm",
"dasp_envelope",
@ -457,19 +482,33 @@ dependencies = [
"dasp_rms",
"dasp_sample",
"dasp_signal",
"ffmpeg-next",
"hound",
"memmap2",
"midir",
"midly",
"nam-ffi",
"pathdiff",
"petgraph 0.6.5",
"rand",
"ratatui",
"rayon",
"rtrb",
"serde",
"serde_json",
"symphonia",
]
[[package]]
name = "dispatch2"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38"
dependencies = [
"bitflags 2.9.4",
"objc2",
]
[[package]]
name = "either"
version = "1.15.0"
@ -497,6 +536,31 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af9673d8203fcb076b19dfd17e38b3d4ae9f44959416ea532ce72415a6020365"
[[package]]
name = "ffmpeg-next"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d658424d233cbd993a972dd73a66ca733acd12a494c68995c9ac32ae1fe65b40"
dependencies = [
"bitflags 2.9.4",
"ffmpeg-sys-next",
"libc",
]
[[package]]
name = "ffmpeg-sys-next"
version = "8.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bca20aa4ee774fe384c2490096c122b0b23cf524a9910add0686691003d797b"
dependencies = [
"bindgen",
"cc",
"libc",
"num_cpus",
"pkg-config",
"vcpkg",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.4"
@ -532,18 +596,6 @@ dependencies = [
"wasi",
]
[[package]]
name = "getrandom"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasip2",
]
[[package]]
name = "glob"
version = "0.3.3"
@ -579,6 +631,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
[[package]]
name = "hound"
version = "3.5.1"
@ -651,16 +709,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.4",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.81"
@ -719,9 +767,9 @@ dependencies = [
[[package]]
name = "mach2"
version = "0.4.3"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44"
checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea"
dependencies = [
"libc",
]
@ -732,6 +780,15 @@ version = "2.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
[[package]]
name = "memmap2"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3"
dependencies = [
"libc",
]
[[package]]
name = "midir"
version = "0.9.1"
@ -775,11 +832,18 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "nam-ffi"
version = "0.1.0"
dependencies = [
"cmake",
]
[[package]]
name = "ndk"
version = "0.8.0"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2076a31b7010b17a38c01907c45b945e8f11495ee4dd588309718901b1f7a5b7"
checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4"
dependencies = [
"bitflags 2.9.4",
"jni-sys",
@ -797,9 +861,9 @@ checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b"
[[package]]
name = "ndk-sys"
version = "0.5.0+25.2.9519653"
version = "0.6.0+11769913"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c196769dd60fd4f363e11d948139556a344e79d451aeb2fa2fd040738ef7691"
checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873"
dependencies = [
"jni-sys",
]
@ -845,6 +909,16 @@ dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "num_enum"
version = "0.7.4"
@ -868,26 +942,92 @@ dependencies = [
]
[[package]]
name = "oboe"
version = "0.6.1"
name = "objc2"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8b61bebd49e5d43f5f8cc7ee2891c16e0f41ec7954d36bcb6c14c5e0de867fb"
checksum = "3a12a8ed07aefc768292f076dc3ac8c48f3781c8f2d5851dd3d98950e8c5a89f"
dependencies = [
"jni",
"ndk",
"ndk-context",
"num-derive",
"num-traits",
"oboe-sys",
"objc2-encode",
]
[[package]]
name = "oboe-sys"
version = "0.6.1"
name = "objc2-audio-toolbox"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8bb09a4a2b1d668170cfe0a7d5bc103f8999fb316c98099b6a9939c9f2e79d"
checksum = "6948501a91121d6399b79abaa33a8aa4ea7857fe019f341b8c23ad6e81b79b08"
dependencies = [
"cc",
"bitflags 2.9.4",
"libc",
"objc2",
"objc2-core-audio",
"objc2-core-audio-types",
"objc2-core-foundation",
"objc2-foundation",
]
[[package]]
name = "objc2-avf-audio"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13a380031deed8e99db00065c45937da434ca987c034e13b87e4441f9e4090be"
dependencies = [
"objc2",
"objc2-foundation",
]
[[package]]
name = "objc2-core-audio"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1eebcea8b0dbff5f7c8504f3107c68fc061a3eb44932051c8cf8a68d969c3b2"
dependencies = [
"dispatch2",
"objc2",
"objc2-core-audio-types",
"objc2-core-foundation",
"objc2-foundation",
]
[[package]]
name = "objc2-core-audio-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a89f2ec274a0cf4a32642b2991e8b351a404d290da87bb6a9a9d8632490bd1c"
dependencies = [
"bitflags 2.9.4",
"objc2",
]
[[package]]
name = "objc2-core-foundation"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536"
dependencies = [
"bitflags 2.9.4",
"block2",
"dispatch2",
"libc",
"objc2",
]
[[package]]
name = "objc2-encode"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33"
[[package]]
name = "objc2-foundation"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272"
dependencies = [
"bitflags 2.9.4",
"block2",
"libc",
"objc2",
"objc2-core-foundation",
]
[[package]]
@ -993,12 +1133,6 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rand"
version = "0.8.5"
@ -1026,7 +1160,7 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.16",
"getrandom",
]
[[package]]
@ -1554,6 +1688,12 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "walkdir"
version = "2.5.0"
@ -1570,15 +1710,6 @@ version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.104"
@ -1709,22 +1840,69 @@ dependencies = [
[[package]]
name = "windows"
version = "0.54.0"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49"
checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580"
dependencies = [
"windows-collections",
"windows-core",
"windows-future",
"windows-numerics",
]
[[package]]
name = "windows-collections"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610"
dependencies = [
"windows-core",
"windows-targets 0.52.6",
]
[[package]]
name = "windows-core"
version = "0.54.0"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65"
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
dependencies = [
"windows-implement",
"windows-interface",
"windows-link",
"windows-result",
"windows-targets 0.52.6",
"windows-strings",
]
[[package]]
name = "windows-future"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb"
dependencies = [
"windows-core",
"windows-link",
"windows-threading",
]
[[package]]
name = "windows-implement"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.59.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
@ -1734,12 +1912,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-result"
version = "0.1.2"
name = "windows-numerics"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26"
dependencies = [
"windows-targets 0.52.6",
"windows-core",
"windows-link",
]
[[package]]
name = "windows-result"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
dependencies = [
"windows-link",
]
[[package]]
@ -1800,19 +1997,12 @@ dependencies = [
]
[[package]]
name = "windows-targets"
version = "0.52.6"
name = "windows-threading"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
"windows-link",
]
[[package]]
@ -1827,12 +2017,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
@ -1845,12 +2029,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
@ -1863,18 +2041,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
@ -1887,12 +2053,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
@ -1905,12 +2065,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
@ -1923,12 +2077,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
@ -1941,12 +2089,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.7.13"
@ -1956,12 +2098,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "wit-bindgen"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "zerocopy"
version = "0.8.27"

View File

@ -259,12 +259,6 @@ impl Engine {
}
}
/// Process live MIDI input from all MIDI tracks
fn process_live_midi(&mut self, output: &mut [f32]) {
// Process all MIDI tracks to handle live input
self.project.process_live_midi(output, self.sample_rate, self.channels);
}
/// Process audio callback - called from the audio thread
pub fn process(&mut self, output: &mut [f32]) {
let t_start = if self.debug_audio { Some(std::time::Instant::now()) } else { None };
@ -349,6 +343,7 @@ impl Engine {
playhead_seconds,
self.sample_rate,
self.channels,
false,
);
// Copy mix to output
@ -394,8 +389,25 @@ impl Engine {
}
}
} else {
// Not playing, but process live MIDI input
self.process_live_midi(output);
// Not playing: render live MIDI (keyboard input + note-off tails) through the
// normal group hierarchy so mixer gain is correctly applied.
let playhead_seconds = self.playhead as f64 / self.sample_rate as f64;
if self.mix_buffer.len() != output.len() {
self.mix_buffer.resize(output.len(), 0.0);
}
if self.buffer_pool.buffer_size() != output.len() {
self.buffer_pool = BufferPool::new(8, output.len());
}
self.project.render(
&mut self.mix_buffer,
&self.audio_pool,
&mut self.buffer_pool,
playhead_seconds,
self.sample_rate,
self.channels,
true, // live_only
);
output.copy_from_slice(&self.mix_buffer);
}
// Compute stereo output peaks for master VU meter (independent of playback state)
@ -1224,6 +1236,9 @@ impl Engine {
eprintln!("[DEBUG] Found Audio track, using effects_graph");
Some(&mut track.effects_graph)
},
Some(TrackNode::Group(track)) => {
Some(&mut track.audio_graph)
},
_ => {
eprintln!("[DEBUG] Track not found or invalid type!");
None
@ -1266,6 +1281,7 @@ impl Engine {
eprintln!("[DEBUG] Emitting GraphNodeAdded event: track_id={}, node_id={}, node_type={}", track_id, node_id, node_type);
// Emit success event
let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
self.set_track_graph_is_default(track_id, false);
} else {
eprintln!("[DEBUG] Graph was None, node not added!");
}
@ -1312,6 +1328,7 @@ impl Engine {
let graph = match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1319,6 +1336,7 @@ impl Engine {
graph.remove_node(node_idx);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
self.set_track_graph_is_default(track_id, false);
}
Command::GraphConnect(track_id, from, from_port, to, to_port) => {
@ -1333,6 +1351,9 @@ impl Engine {
eprintln!("[DEBUG] Found Audio track for connection");
Some(&mut track.effects_graph)
},
Some(TrackNode::Group(track)) => {
Some(&mut track.audio_graph)
},
_ => {
eprintln!("[DEBUG] Track not found for connection!");
None
@ -1347,6 +1368,7 @@ impl Engine {
Ok(()) => {
eprintln!("[DEBUG] Connection successful!");
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
self.set_track_graph_is_default(track_id, false);
}
Err(e) => {
eprintln!("[DEBUG] Connection failed: {:?}", e);
@ -1443,6 +1465,7 @@ impl Engine {
eprintln!("[AUDIO ENGINE] Found audio track, disconnecting in effects_graph");
Some(&mut track.effects_graph)
}
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => {
eprintln!("[AUDIO ENGINE] Track not found!");
None
@ -1455,12 +1478,14 @@ impl Engine {
eprintln!("[AUDIO ENGINE] Disconnect completed");
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
self.set_track_graph_is_default(track_id, false);
}
Command::GraphSetParameter(track_id, node_index, param_id, value) => {
let graph = match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1469,12 +1494,14 @@ impl Engine {
graph_node.node.set_parameter(param_id, value);
}
}
self.set_track_graph_is_default(track_id, false);
}
Command::GraphSetNodePosition(track_id, node_index, x, y) => {
let graph = match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1505,6 +1532,7 @@ impl Engine {
let graph = match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1517,6 +1545,7 @@ impl Engine {
let graph = match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => Some(&mut track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&mut track.effects_graph),
Some(TrackNode::Group(track)) => Some(&mut track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1545,6 +1574,7 @@ impl Engine {
let graph = match self.project.get_track(track_id) {
Some(TrackNode::Midi(track)) => Some(&track.instrument_graph),
Some(TrackNode::Audio(track)) => Some(&track.effects_graph),
Some(TrackNode::Group(track)) => Some(&track.audio_graph),
_ => None,
};
if let Some(graph) = graph {
@ -1595,11 +1625,19 @@ impl Engine {
match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => {
track.instrument_graph = graph;
track.graph_is_default = true;
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
}
Some(TrackNode::Audio(track)) => {
track.effects_graph = graph;
track.graph_is_default = true;
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
}
Some(TrackNode::Group(track)) => {
track.audio_graph = graph;
track.graph_is_default = true;
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
let _ = self.event_tx.push(AudioEvent::GraphPresetLoaded(track_id));
}
@ -1655,6 +1693,82 @@ impl Engine {
}
}
Command::SetMetatrackSubtrackGraph(track_id, subtracks) => {
let buffer_size = self.buffer_pool.buffer_size();
if let Some(TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
let current = metatrack.current_subtracks();
// No-op if subtrack list is unchanged (prevents every-frame graph rebuilds)
if current == subtracks {
return;
}
if metatrack.graph_is_default {
// Default graph: full rebuild with new subtrack layout
metatrack.set_subtrack_graph(subtracks.clone(), self.sample_rate, buffer_size);
} else {
// User-modified graph: incremental port changes only
let current_ids: std::collections::HashSet<TrackId> =
current.iter().map(|&(id, _)| id).collect();
let new_ids: std::collections::HashSet<TrackId> =
subtracks.iter().map(|&(id, _)| id).collect();
for (id, name) in &subtracks {
if !current_ids.contains(id) {
metatrack.add_subtrack_to_graph(*id, name.clone(), buffer_size);
}
}
for &(id, _) in &current {
if !new_ids.contains(&id) {
metatrack.remove_subtrack_from_graph(id, buffer_size);
}
}
}
// Sync the group's children list so they render through the mixer graph.
// `move_to_group` removes each child from root_tracks (or another parent)
// and registers it under this group — idempotent if already there.
let new_child_ids: Vec<TrackId> = subtracks.iter().map(|&(id, _)| id).collect();
for &child_id in &new_child_ids {
// Only move if not already a child of this group
let already_child = self.project.get_track(track_id)
.and_then(|t| if let TrackNode::Group(g) = t { Some(g) } else { None })
.map(|g| g.children.contains(&child_id))
.unwrap_or(false);
if !already_child {
self.project.move_to_group(child_id, track_id);
}
}
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
Command::AddMetatrackSubtrack(track_id, subtrack_id, name) => {
let buffer_size = self.buffer_pool.buffer_size();
if let Some(TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.add_subtrack_to_graph(subtrack_id, name, buffer_size);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
Command::RemoveMetatrackSubtrack(track_id, subtrack_id) => {
let buffer_size = self.buffer_pool.buffer_size();
if let Some(TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.remove_subtrack_from_graph(subtrack_id, buffer_size);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
Command::UpdateMetatrackSubtrackIds(track_id, subtracks) => {
let buffer_size = self.buffer_pool.buffer_size();
if let Some(TrackNode::Group(metatrack)) = self.project.get_track_mut(track_id) {
metatrack.update_subtrack_ids(subtracks, buffer_size);
}
}
Command::SetGraphIsDefault(track_id, value) => {
self.set_track_graph_is_default(track_id, value);
}
Command::GraphSetScript(track_id, node_id, source) => {
use crate::audio::node_graph::nodes::ScriptNode;
@ -2169,6 +2283,14 @@ impl Engine {
Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
}
}
Some(TrackNode::Group(track)) => {
let graph = &track.audio_graph;
let preset = graph.to_preset("temp");
match preset.to_json() {
Ok(json) => QueryResponse::GraphState(Ok(json)),
Err(e) => QueryResponse::GraphState(Err(format!("Failed to serialize graph: {:?}", e))),
}
}
_ => {
QueryResponse::GraphState(Err(format!("Track {} not found", track_id)))
}
@ -2596,6 +2718,15 @@ impl Engine {
None => QueryResponse::MidiClipDuplicated(Err(format!("MIDI clip {} not found", clip_id))),
}
}
Query::GetGraphIsDefault(track_id) => {
let is_default = match self.project.get_track(track_id) {
Some(TrackNode::Midi(track)) => track.graph_is_default,
Some(TrackNode::Audio(track)) => track.graph_is_default,
Some(TrackNode::Group(track)) => track.graph_is_default,
_ => false,
};
QueryResponse::GraphIsDefault(is_default)
}
};
// Send response back
@ -2605,6 +2736,16 @@ impl Engine {
}
}
/// Set graph_is_default on any track type.
fn set_track_graph_is_default(&mut self, track_id: TrackId, value: bool) {
match self.project.get_track_mut(track_id) {
Some(TrackNode::Midi(track)) => track.graph_is_default = value,
Some(TrackNode::Audio(track)) => track.graph_is_default = value,
Some(TrackNode::Group(track)) => track.graph_is_default = value,
_ => {}
}
}
/// Handle starting a recording
fn handle_start_recording(&mut self, track_id: TrackId, start_time: f64) {
use crate::io::WavWriter;
@ -3431,6 +3572,47 @@ impl EngineController {
let _ = self.command_tx.push(Command::MultiSamplerClearLayers(track_id, node_id));
}
/// Set the full subtrack list for a metatrack's mixing graph (rebuilds the graph)
pub fn set_metatrack_subtrack_graph(&mut self, track_id: TrackId, subtracks: Vec<(TrackId, String)>) {
let _ = self.command_tx.push(Command::SetMetatrackSubtrackGraph(track_id, subtracks));
}
/// Add a subtrack port to a metatrack's mixing graph
pub fn add_metatrack_subtrack(&mut self, track_id: TrackId, subtrack_id: TrackId, name: String) {
let _ = self.command_tx.push(Command::AddMetatrackSubtrack(track_id, subtrack_id, name));
}
/// Remove a subtrack port from a metatrack's mixing graph
pub fn remove_metatrack_subtrack(&mut self, track_id: TrackId, subtrack_id: TrackId) {
let _ = self.command_tx.push(Command::RemoveMetatrackSubtrack(track_id, subtrack_id));
}
/// Re-associate backend TrackIds with SubtrackInputsNode slots (called after project load)
pub fn update_metatrack_subtrack_ids(&mut self, track_id: TrackId, subtracks: Vec<(TrackId, String)>) {
let _ = self.command_tx.push(Command::UpdateMetatrackSubtrackIds(track_id, subtracks));
}
/// Set the graph_is_default flag on a track (command, processed async)
pub fn set_graph_is_default(&mut self, track_id: TrackId, value: bool) {
let _ = self.command_tx.push(Command::SetGraphIsDefault(track_id, value));
}
/// Query whether a track's graph is the auto-generated default (synchronous)
pub fn get_graph_is_default(&mut self, track_id: TrackId) -> bool {
if let Err(_) = self.query_tx.push(Query::GetGraphIsDefault(track_id)) {
return false;
}
let start = std::time::Instant::now();
let timeout = std::time::Duration::from_millis(500);
while start.elapsed() < timeout {
if let Ok(QueryResponse::GraphIsDefault(v)) = self.query_response_rx.pop() {
return v;
}
std::thread::sleep(std::time::Duration::from_micros(100));
}
false
}
/// Send a synchronous query and wait for the response
/// This blocks until the audio thread processes the query
/// Generic method that works with any Query/QueryResponse pair

View File

@ -180,6 +180,7 @@ pub fn render_to_memory(
playhead,
settings.sample_rate,
settings.channels,
false,
);
// Calculate how many samples we actually need from this chunk
@ -397,6 +398,7 @@ fn export_mp3<P: AsRef<Path>>(
playhead,
settings.sample_rate,
settings.channels,
false,
);
// Calculate how many samples we need from this chunk
@ -564,6 +566,7 @@ fn export_aac<P: AsRef<Path>>(
playhead,
settings.sample_rate,
settings.channels,
false,
);
// Calculate how many samples we need from this chunk

View File

@ -193,6 +193,15 @@ impl AudioGraph {
self.graph.add_edge(from, to, Connection { from_port, to_port });
self.topo_cache = None;
// Auto-grow MixerNode: always keep one spare port beyond the connected count
let n_incoming = self.graph.edges_directed(to, petgraph::Direction::Incoming).count();
if let Some(graph_node) = self.graph.node_weight_mut(to) {
use crate::audio::node_graph::nodes::MixerNode;
if let Some(mixer) = graph_node.node.as_any_mut().downcast_mut::<MixerNode>() {
mixer.ensure_min_ports(n_incoming + 1);
}
}
Ok(())
}
@ -204,12 +213,24 @@ impl AudioGraph {
to: NodeIndex,
to_port: usize,
) {
// Find and remove the edge
let mut did_remove = false;
if let Some(edge_idx) = self.graph.find_edge(from, to) {
let conn = &self.graph[edge_idx];
if conn.from_port == from_port && conn.to_port == to_port {
self.graph.remove_edge(edge_idx);
self.topo_cache = None;
did_remove = true;
}
}
// Shrink MixerNode back to n_remaining + 1 spare after a disconnect
if did_remove {
let n_remaining = self.graph.edges_directed(to, petgraph::Direction::Incoming).count();
if let Some(graph_node) = self.graph.node_weight_mut(to) {
use crate::audio::node_graph::nodes::MixerNode;
if let Some(mixer) = graph_node.node.as_any_mut().downcast_mut::<MixerNode>() {
mixer.resize(n_remaining + 1);
}
}
}
}
@ -716,6 +737,50 @@ impl AudioGraph {
self.graph.node_indices()
}
/// Re-create a node's output buffers so they match its current port list.
///
/// `GraphNode.output_buffers` is sized once at `add_node` time; call this after a
/// dynamic-port node (e.g. `SubtrackInputsNode::update_subtracks`) changes its
/// output port count so buffer count and sizes stay in sync.
pub fn reallocate_node_output_buffers(&mut self, idx: NodeIndex, buffer_size: usize) {
    let Some(graph_node) = self.graph.node_weight_mut(idx) else {
        return;
    };
    // One buffer per output port, sized by signal type: audio is stereo
    // interleaved, CV is mono, MIDI event buffers grow on demand.
    graph_node.output_buffers = graph_node
        .node
        .outputs()
        .iter()
        .map(|port| match port.signal_type {
            super::types::SignalType::Audio => vec![0.0; buffer_size * 2],
            super::types::SignalType::CV => vec![0.0; buffer_size],
            super::types::SignalType::Midi => Vec::new(),
        })
        .collect();
    self.topo_cache = None;
}
/// Remove every edge that leaves `node` from output port `port`.
pub fn disconnect_output_port(&mut self, node: NodeIndex, port: usize) {
    // Collect edge ids first: the graph can't be mutated while its edges are borrowed.
    let doomed: Vec<_> = self
        .graph
        .edges_directed(node, petgraph::Direction::Outgoing)
        .filter_map(|e| (e.weight().from_port == port).then(|| e.id()))
        .collect();
    for id in doomed {
        self.graph.remove_edge(id);
    }
    self.topo_cache = None;
}
/// Remove every edge feeding INTO `node` (all of its input connections).
pub fn disconnect_all_inputs(&mut self, node: NodeIndex) {
    // Remove one incoming edge at a time; re-query after each removal since
    // edge indices are invalidated by remove_edge.
    loop {
        let next_edge = self
            .graph
            .edges_directed(node, petgraph::Direction::Incoming)
            .next()
            .map(|e| e.id());
        let Some(id) = next_edge else { break };
        self.graph.remove_edge(id);
    }
    self.topo_cache = None;
}
/// Get all connections
pub fn connections(&self) -> impl Iterator<Item = (NodeIndex, NodeIndex, &Connection)> + '_ {
self.graph.edge_references().map(|e| (e.source(), e.target(), e.weight()))
@ -787,7 +852,7 @@ impl AudioGraph {
/// Serialize the graph to a preset
pub fn to_preset(&self, name: impl Into<String>) -> crate::audio::node_graph::preset::GraphPreset {
use crate::audio::node_graph::preset::{GraphPreset, SerializedConnection, SerializedNode};
use crate::audio::node_graph::nodes::VoiceAllocatorNode;
use crate::audio::node_graph::nodes::{VoiceAllocatorNode, MixerNode, SubtrackInputsNode};
let mut preset = GraphPreset::new(name);
@ -805,6 +870,19 @@ impl AudioGraph {
serialized.set_parameter(param.id, value);
}
// Save port count for dynamic-port nodes so they round-trip correctly
if node.node_type() == "Mixer" {
if let Some(mixer) = node.as_any().downcast_ref::<MixerNode>() {
serialized.num_ports = Some(mixer.num_inputs() as u32);
}
}
if node.node_type() == "SubtrackInputs" {
if let Some(si) = node.as_any().downcast_ref::<SubtrackInputsNode>() {
serialized.num_ports = Some(si.num_subtracks() as u32);
serialized.port_names = si.subtracks().iter().map(|(_, name)| name.clone()).collect();
}
}
// For VoiceAllocator nodes, serialize the template graph
if node.node_type() == "VoiceAllocator" {
// Downcast using safe Any trait
@ -1002,12 +1080,46 @@ impl AudioGraph {
let mut graph = Self::new(sample_rate, buffer_size);
let mut index_map: HashMap<u32, NodeIndex> = HashMap::new();
// Pre-pass: compute required min port count for dynamic-port nodes from the connection list.
// This ensures old presets (without num_ports) still size correctly regardless of
// connection-restoration order.
let mut required_ports: HashMap<u32, usize> = HashMap::new();
for conn in &preset.connections {
let entry = required_ports.entry(conn.to_node).or_insert(0);
*entry = (*entry).max(conn.to_port + 2); // port N + 1 spare
}
// Create all nodes
for serialized_node in &preset.nodes {
// Create the node based on type
let mut node = crate::audio::node_graph::nodes::create_node(&serialized_node.node_type, sample_rate, buffer_size)
.ok_or_else(|| format!("Unknown node type: {}", serialized_node.node_type))?;
// Pre-size dynamic-port nodes before graph.add_node() so output buffers are
// allocated at the correct size. num_ports takes priority; fall back to
// connection-count inference so old presets without num_ports still work.
if serialized_node.node_type == "Mixer" {
use crate::audio::node_graph::nodes::MixerNode;
if let Some(mixer) = node.as_any_mut().downcast_mut::<MixerNode>() {
let from_conns = required_ports.get(&serialized_node.id).copied().unwrap_or(1);
let target = serialized_node.num_ports.map(|n| n as usize).unwrap_or(0).max(from_conns).max(1);
mixer.resize(target);
}
}
if serialized_node.node_type == "SubtrackInputs" {
use crate::audio::node_graph::nodes::SubtrackInputsNode;
if let Some(si) = node.as_any_mut().downcast_mut::<SubtrackInputsNode>() {
let from_conns = required_ports.get(&serialized_node.id).copied().unwrap_or(0);
let target = serialized_node.num_ports.map(|n| n as usize).unwrap_or(0).max(from_conns);
if target > 0 {
let subtracks = (0..target)
.map(|i| (0u32, format!("Subtrack {}", i + 1)))
.collect();
si.update_subtracks(subtracks, buffer_size);
}
}
}
// VoiceAllocator needs its template graph deserialized and set
if serialized_node.node_type == "VoiceAllocator" {
if let Some(ref template_preset) = serialized_node.template_graph {

View File

@ -1,48 +1,74 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_GAIN_1: u32 = 0;
const PARAM_GAIN_2: u32 = 1;
const PARAM_GAIN_3: u32 = 2;
const PARAM_GAIN_4: u32 = 3;
/// Mixer node - combines multiple audio inputs with independent gain controls
/// Mixer node — combines N audio inputs with independent gain controls.
///
/// The number of input ports is dynamic: one spare unconnected port is always present
/// beyond however many are currently wired, so users can keep patching in without
/// manually adding inputs. Port count is managed by `AudioGraph::connect` /
/// `AudioGraph::disconnect` calling `ensure_min_ports` / `resize`.
///
/// Gain values are stored separately from the port list so they survive resize
/// operations and can be set via `set_parameter` before the port is visible.
pub struct MixerNode {
name: String,
gains: [f32; 4],
/// Displayed input ports. Length = num_ports (connected + 1 spare).
inputs: Vec<NodePort>,
outputs: Vec<NodePort>,
/// Per-channel gains, indexed by port. May be longer than `inputs` if gains
/// were set before ports were created (handled gracefully).
gains: Vec<f32>,
/// Mirrored parameter list so `parameters()` stays in sync with `inputs`.
parameters: Vec<Parameter>,
}
impl MixerNode {
pub fn new(name: impl Into<String>) -> Self {
let name = name.into();
let mut node = Self {
name: name.into(),
inputs: Vec::new(),
outputs: vec![NodePort::new("Mixed Out", SignalType::Audio, 0)],
gains: Vec::new(),
parameters: Vec::new(),
};
node.resize(1); // start with one spare input
node
}
let inputs = vec![
NodePort::new("Input 1", SignalType::Audio, 0),
NodePort::new("Input 2", SignalType::Audio, 1),
NodePort::new("Input 3", SignalType::Audio, 2),
NodePort::new("Input 4", SignalType::Audio, 3),
];
/// Return the current number of input ports (connected + 1 spare).
///
/// The "one spare port" policy is maintained by `AudioGraph::connect` /
/// `AudioGraph::disconnect` via `ensure_min_ports` / `resize`.
pub fn num_inputs(&self) -> usize {
    self.inputs.len()
}
let outputs = vec![
NodePort::new("Mixed Out", SignalType::Audio, 0),
];
/// Set the exact number of input ports.
///
/// Existing gain values are preserved. Truncates spare gains when shrinking,
/// but gain slots that have already been written survive a grow-shrink-grow cycle.
pub fn resize(&mut self, n: usize) {
let n = n.max(1); // always at least one spare
let parameters = vec![
Parameter::new(PARAM_GAIN_1, "Gain 1", 0.0, 2.0, 1.0, ParameterUnit::Generic),
Parameter::new(PARAM_GAIN_2, "Gain 2", 0.0, 2.0, 1.0, ParameterUnit::Generic),
Parameter::new(PARAM_GAIN_3, "Gain 3", 0.0, 2.0, 1.0, ParameterUnit::Generic),
Parameter::new(PARAM_GAIN_4, "Gain 4", 0.0, 2.0, 1.0, ParameterUnit::Generic),
];
self.inputs = (0..n)
.map(|i| NodePort::new(format!("Input {}", i + 1).as_str(), SignalType::Audio, i))
.collect();
Self {
name,
gains: [1.0, 1.0, 1.0, 1.0],
inputs,
outputs,
parameters,
// Extend gains with 1.0 for new slots; preserve existing values.
if self.gains.len() < n {
self.gains.resize(n, 1.0);
}
self.parameters = (0..n)
.map(|i| {
Parameter::new(i as u32, format!("Gain {}", i + 1).as_str(), 0.0, 2.0, 1.0, ParameterUnit::Generic)
})
.collect();
}
/// Grow to at least `n` input ports; never shrinks.
///
/// Invoked from `AudioGraph::connect` after a new edge lands on this mixer.
pub fn ensure_min_ports(&mut self, n: usize) {
    if self.inputs.len() < n {
        self.resize(n);
    }
}
}
@ -65,23 +91,17 @@ impl AudioNode for MixerNode {
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_GAIN_1 => self.gains[0] = value.clamp(0.0, 2.0),
PARAM_GAIN_2 => self.gains[1] = value.clamp(0.0, 2.0),
PARAM_GAIN_3 => self.gains[2] = value.clamp(0.0, 2.0),
PARAM_GAIN_4 => self.gains[3] = value.clamp(0.0, 2.0),
_ => {}
let idx = id as usize;
// Extend gains if this port hasn't been created yet (e.g. loaded from preset
// before connections are restored).
if idx >= self.gains.len() {
self.gains.resize(idx + 1, 1.0);
}
self.gains[idx] = value.clamp(0.0, 2.0);
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_GAIN_1 => self.gains[0],
PARAM_GAIN_2 => self.gains[1],
PARAM_GAIN_3 => self.gains[2],
PARAM_GAIN_4 => self.gains[3],
_ => 0.0,
}
self.gains.get(id as usize).copied().unwrap_or(1.0)
}
fn process(
@ -97,32 +117,23 @@ impl AudioNode for MixerNode {
}
let output = &mut outputs[0];
// Audio signals are stereo (interleaved L/R)
let frames = output.len() / 2;
// Clear output buffer first
output.fill(0.0);
// Mix each input with its gain
for (input_idx, input) in inputs.iter().enumerate().take(4) {
if input_idx >= self.gains.len() {
break;
}
let gain = self.gains[input_idx];
for (input_idx, input) in inputs.iter().enumerate() {
let gain = self.gains.get(input_idx).copied().unwrap_or(1.0);
let input_frames = input.len() / 2;
let process_frames = frames.min(input_frames);
for frame in 0..process_frames {
output[frame * 2] += input[frame * 2] * gain; // Left
output[frame * 2] += input[frame * 2] * gain; // Left
output[frame * 2 + 1] += input[frame * 2 + 1] * gain; // Right
}
}
}
fn reset(&mut self) {
// No state to reset
// No per-frame state
}
fn node_type(&self) -> &str {
@ -136,9 +147,9 @@ impl AudioNode for MixerNode {
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
gains: self.gains,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
gains: self.gains.clone(),
parameters: self.parameters.clone(),
})
}

View File

@ -1,6 +1,7 @@
mod amp_sim;
pub mod bundled_models;
mod adsr;
mod subtrack_inputs;
mod arpeggiator;
mod audio_input;
mod audio_to_cv;
@ -96,6 +97,7 @@ pub use vibrato::VibratoNode;
pub use vocoder::VocoderNode;
pub use voice_allocator::VoiceAllocatorNode;
pub use wavetable_oscillator::WavetableOscillatorNode;
pub use subtrack_inputs::SubtrackInputsNode;
/// Create a node instance by type name string.
///
@ -152,6 +154,7 @@ pub fn create_node(node_type: &str, sample_rate: u32, buffer_size: usize) -> Opt
"Vibrato" => Box::new(VibratoNode::new("Vibrato")),
"AmpSim" => Box::new(AmpSimNode::new("Amp Sim")),
"AudioOutput" => Box::new(AudioOutputNode::new("Output")),
"SubtrackInputs" => Box::new(SubtrackInputsNode::new("Subtrack Inputs", vec![])),
_ => return None,
})
}

View File

@ -0,0 +1,177 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
use crate::audio::track::TrackId;
/// Subtrack inputs node for metatracks.
///
/// Exposes one output port per child track so users can route individual subtracks
/// independently in the mixing graph (e.g., for sidechain effects).
///
/// Audio is injected into pre-allocated per-slot buffers by the render system before
/// the graph is processed — no heap allocation occurs during audio rendering.
pub struct SubtrackInputsNode {
    /// Node display name (returned by `AudioNode::name`).
    name: String,
    /// Ordered list of (TrackId, display_name) for each subtrack slot.
    /// TrackId is used by the render system to match the right buffer to the right slot.
    subtracks: Vec<(TrackId, String)>,
    /// Output port descriptors — rebuilt whenever `subtracks` changes
    /// (one Audio port per slot, in slot order; see `build_outputs`).
    outputs: Vec<NodePort>,
    /// Pre-allocated audio buffers, one per subtrack slot (stereo interleaved,
    /// length = buffer_size * 2). Filled by `inject_subtrack_audio` before graph
    /// processing; no allocation per frame.
    buffers: Vec<Vec<f32>>,
    /// The buffer size (in frames) the buffers were last sized for;
    /// 0 until `update_subtracks` is first called.
    buffer_size: usize,
}
impl SubtrackInputsNode {
    /// Create a node for the given ordered (TrackId, name) subtrack list.
    ///
    /// Slot buffers start zero-length; call `update_subtracks` to size them for a
    /// concrete buffer size before rendering.
    pub fn new(name: impl Into<String>, subtracks: Vec<(TrackId, String)>) -> Self {
        let slot_count = subtracks.len();
        let outputs = Self::build_outputs(&subtracks);
        Self {
            name: name.into(),
            subtracks,
            outputs,
            buffers: vec![Vec::new(); slot_count],
            buffer_size: 0,
        }
    }

    /// Build one audio output port per subtrack, named after the subtrack.
    fn build_outputs(subtracks: &[(TrackId, String)]) -> Vec<NodePort> {
        let mut ports = Vec::with_capacity(subtracks.len());
        for (i, (_, name)) in subtracks.iter().enumerate() {
            ports.push(NodePort::new(name.as_str(), SignalType::Audio, i));
        }
        ports
    }

    /// Copy audio from a child track into its pre-allocated slot buffer.
    ///
    /// `idx` is the slot index (same order as `subtracks`); out-of-range indices are
    /// ignored. If `audio` is shorter than the slot, the tail is zeroed. No allocation.
    pub fn inject_subtrack_audio(&mut self, idx: usize, audio: &[f32]) {
        let Some(buf) = self.buffers.get_mut(idx) else {
            return;
        };
        let copied = buf.len().min(audio.len());
        buf[..copied].copy_from_slice(&audio[..copied]);
        // Zero whatever the copy didn't cover (no-op when audio fills the slot).
        buf[copied..].fill(0.0);
    }

    /// Replace the subtrack list: rebuilds ports and resizes slot buffers.
    ///
    /// Pass `buffer_size` in frames; each slot buffer becomes `buffer_size * 2`
    /// samples (stereo interleaved). Existing allocations are reused when the
    /// size already matches.
    pub fn update_subtracks(&mut self, subtracks: Vec<(TrackId, String)>, buffer_size: usize) {
        self.outputs = Self::build_outputs(&subtracks);
        let slot_count = subtracks.len();
        self.subtracks = subtracks;
        self.buffer_size = buffer_size;

        let samples = buffer_size * 2; // stereo interleaved
        self.buffers.resize_with(slot_count, Vec::new);
        for buf in self.buffers.iter_mut() {
            if buf.len() != samples {
                buf.resize(samples, 0.0);
            }
        }
    }

    /// Slot index for `track_id`, or `None` if it has no slot.
    pub fn subtrack_index_for(&self, track_id: TrackId) -> Option<usize> {
        self.subtracks.iter().position(|(id, _)| *id == track_id)
    }

    /// Number of subtrack slots.
    pub fn num_subtracks(&self) -> usize {
        self.subtracks.len()
    }

    /// Ordered (TrackId, display name) subtrack list.
    pub fn subtracks(&self) -> &[(TrackId, String)] {
        &self.subtracks
    }
}
impl AudioNode for SubtrackInputsNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Input
    }

    fn inputs(&self) -> &[NodePort] {
        // Audio arrives via inject_subtrack_audio, not via graph inputs.
        &[]
    }

    fn outputs(&self) -> &[NodePort] {
        self.outputs.as_slice()
    }

    fn parameters(&self) -> &[Parameter] {
        // Nothing user-tweakable; the port count round-trips via num_ports in serialization.
        &[]
    }

    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    fn process(
        &mut self,
        _inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        // Forward each pre-filled slot buffer to the matching output port;
        // ports without a backing slot emit silence.
        for (slot, out) in outputs.iter_mut().enumerate() {
            match self.buffers.get(slot) {
                Some(src) => {
                    let n = out.len().min(src.len());
                    if n > 0 {
                        out[..n].copy_from_slice(&src[..n]);
                    }
                    // Zero any tail the slot buffer didn't cover (no-op when n == len).
                    out[n..].fill(0.0);
                }
                None => out.fill(0.0),
            }
        }
    }

    fn reset(&mut self) {
        // Clear any stale injected audio.
        for buf in self.buffers.iter_mut() {
            buf.fill(0.0);
        }
    }

    fn node_type(&self) -> &str {
        "SubtrackInputs"
    }

    fn name(&self) -> &str {
        self.name.as_str()
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        // A fresh clone starts silent: buffers are re-zeroed rather than copied.
        let clone = Self {
            name: self.name.clone(),
            subtracks: self.subtracks.clone(),
            outputs: self.outputs.clone(),
            buffers: vec![vec![0.0; self.buffer_size * 2]; self.subtracks.len()],
            buffer_size: self.buffer_size,
        };
        Box::new(clone)
    }

    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
}

View File

@ -131,6 +131,16 @@ pub struct SerializedNode {
/// For AmpSim nodes: path to the .nam model file
#[serde(skip_serializing_if = "Option::is_none")]
pub nam_model_path: Option<String>,
/// For dynamic-port nodes (Mixer, SubtrackInputs): saved port count so ports
/// round-trip correctly through save/load independent of connection order.
#[serde(skip_serializing_if = "Option::is_none")]
pub num_ports: Option<u32>,
/// For SubtrackInputs: ordered port names (one per subtrack slot).
/// Allows the UI to display actual track names on the node's output ports.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub port_names: Vec<String>,
}
/// Serialized group definition (frontend-only visual grouping, stored opaquely by backend)
@ -227,6 +237,8 @@ impl SerializedNode {
sample_data: None,
script_source: None,
nam_model_path: None,
num_ports: None,
port_names: Vec::new(),
}
}

View File

@ -356,7 +356,11 @@ impl Project {
}
}
/// Render all root tracks into the output buffer
/// Render all root tracks into the output buffer.
///
/// When `live_only` is true, MIDI tracks skip clip event collection and only process
/// their live MIDI queue (note-off tails + keyboard input). Audio tracks produce silence.
/// This lets the caller use the same group-hierarchy render path regardless of play state.
pub fn render(
&mut self,
output: &mut [f32],
@ -365,18 +369,17 @@ impl Project {
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
live_only: bool,
) {
output.fill(0.0);
let any_solo = self.any_solo();
// Create initial render context
let ctx = RenderContext::new(
playhead_seconds,
sample_rate,
channels,
output.len(),
);
let ctx = RenderContext {
live_only,
..RenderContext::new(playhead_seconds, sample_rate, channels, output.len())
};
// Render each root track (index-based to avoid clone)
for i in 0..self.root_tracks.len() {
@ -441,6 +444,10 @@ impl Project {
// Handle audio track vs MIDI track vs group track
match self.tracks.get_mut(&track_id) {
Some(TrackNode::Audio(track)) => {
// Audio tracks have no live input; skip in live_only mode.
if ctx.live_only {
return;
}
// Render audio track into a temp buffer for peak measurement
let mut track_buffer = buffer_pool.acquire();
track_buffer.resize(output.len(), 0.0);
@ -460,7 +467,7 @@ impl Project {
let mut track_buffer = buffer_pool.acquire();
track_buffer.resize(output.len(), 0.0);
track_buffer.fill(0.0);
track.render(&mut track_buffer, &self.midi_clip_pool, ctx.playhead_seconds, ctx.sample_rate, ctx.channels);
track.render(&mut track_buffer, &self.midi_clip_pool, ctx);
// Accumulate peak level for VU metering (max over meter interval)
let buffer_peak = track_buffer.iter().map(|s| s.abs()).fold(0.0f32, f32::max);
track.peak_level = track.peak_level.max(buffer_peak);
@ -471,72 +478,75 @@ impl Project {
buffer_pool.release(track_buffer);
}
Some(TrackNode::Group(group)) => {
// Skip rendering if playhead is outside the metatrack's trim window
if !group.is_active_at_time(ctx.playhead_seconds) {
// Skip rendering if playhead is outside the metatrack's trim window.
// In live_only mode always render so note-off tails pass through the mixer.
if !ctx.live_only && !group.is_active_at_time(ctx.playhead_seconds) {
return;
}
// Read group properties and transform context (index-based child iteration to avoid clone)
// Read group properties and transform context before any mutable borrows
let num_children = group.children.len();
let this_group_is_soloed = group.solo;
let child_ctx = group.transform_context(ctx);
// Acquire a temporary buffer for the group mix
let mut group_buffer = buffer_pool.acquire();
group_buffer.resize(output.len(), 0.0);
group_buffer.fill(0.0);
// Recursively render all children into the group buffer
// If this group is soloed (or parent was soloed), children inherit that state
let children_parent_soloed = parent_is_soloed || this_group_is_soloed;
// Render each child into its own buffer and inject into SubtrackInputsNode.
// One pool buffer is reused per child (no extra allocation per frame).
for i in 0..num_children {
let child_id = match self.tracks.get(&track_id) {
Some(TrackNode::Group(g)) => g.children[i],
_ => break,
};
let mut child_buffer = buffer_pool.acquire();
child_buffer.resize(output.len(), 0.0);
child_buffer.fill(0.0);
self.render_track(
child_id,
&mut group_buffer,
&mut child_buffer,
audio_pool,
buffer_pool,
child_ctx,
any_solo,
children_parent_soloed,
);
}
// Route children's mix through metatrack's audio graph
if let Some(TrackNode::Group(group)) = self.tracks.get_mut(&track_id) {
// Inject children's mix into audio graph's input node
let node_indices: Vec<_> = group.audio_graph.node_indices().collect();
for node_idx in node_indices {
if let Some(graph_node) = group.audio_graph.get_graph_node_mut(node_idx) {
if graph_node.node.node_type() == "AudioInput" {
if let Some(input_node) = graph_node.node.as_any_mut()
.downcast_mut::<super::node_graph::nodes::AudioInputNode>()
{
input_node.inject_audio(&group_buffer);
// Inject into the SubtrackInputsNode slot for this child
if let Some(TrackNode::Group(group)) = self.tracks.get_mut(&track_id) {
use super::node_graph::nodes::SubtrackInputsNode;
let node_indices: Vec<_> = group.audio_graph.node_indices().collect();
for node_idx in node_indices {
if let Some(gn) = group.audio_graph.get_graph_node_mut(node_idx) {
if gn.node.node_type() == "SubtrackInputs" {
if let Some(si) = gn.node.as_any_mut()
.downcast_mut::<SubtrackInputsNode>()
{
if let Some(slot) = si.subtrack_index_for(child_id) {
si.inject_subtrack_audio(slot, &child_buffer);
}
}
break;
}
}
}
}
// Process through the audio graph into a fresh buffer
buffer_pool.release(child_buffer);
}
// Process children's audio through the metatrack's mixing graph
if let Some(TrackNode::Group(group)) = self.tracks.get_mut(&track_id) {
let mut graph_output = buffer_pool.acquire();
graph_output.resize(output.len(), 0.0);
graph_output.fill(0.0);
group.audio_graph.process(&mut graph_output, &[], ctx.playhead_seconds);
// Apply group volume and mix into output
for (out_sample, graph_sample) in output.iter_mut().zip(graph_output.iter()) {
*out_sample += graph_sample * group.volume;
}
buffer_pool.release(graph_output);
}
// Release children mix buffer back to pool
buffer_pool.release(group_buffer);
}
None => {}
}
@ -620,17 +630,6 @@ impl Project {
}
}
/// Process live MIDI input from every MIDI track (runs even while not playing).
pub fn process_live_midi(&mut self, output: &mut [f32], sample_rate: u32, channels: u32) {
    // Only queued live events are handled here — clip playback is untouched.
    self.tracks
        .values_mut()
        .filter_map(|node| match node {
            TrackNode::Midi(midi) => Some(midi),
            _ => None,
        })
        .for_each(|midi| midi.process_live_input(output, sample_rate, channels));
}
/// Send a live MIDI note on event to a track's instrument
/// Note: With node-based instruments, MIDI events are handled during the process() call
pub fn send_midi_note_on(&mut self, track_id: TrackId, note: u8, velocity: u8) {

View File

@ -35,6 +35,10 @@ pub struct RenderContext {
pub buffer_size: usize,
/// Accumulated time stretch factor (1.0 = normal, 0.5 = half speed, 2.0 = double speed)
pub time_stretch: f32,
/// When true: skip clip event collection; only render instrument state and live MIDI queue.
/// Used after pause/stop to route note-off tails through the normal group hierarchy
/// without re-triggering notes from clips at the paused position.
pub live_only: bool,
}
impl RenderContext {
@ -51,6 +55,7 @@ impl RenderContext {
channels,
buffer_size,
time_stretch: 1.0,
live_only: false,
}
}
@ -181,6 +186,10 @@ pub struct Metatrack {
pub audio_graph: AudioGraph,
/// Saved graph preset for serialization
audio_graph_preset: Option<GraphPreset>,
/// True while the mixing graph is still the auto-generated default (no user edits).
/// Used to auto-connect new subtracks and to prompt before loading a preset.
#[serde(default)]
pub graph_is_default: bool,
}
impl Clone for Metatrack {
@ -201,15 +210,17 @@ impl Clone for Metatrack {
next_automation_id: self.next_automation_id,
audio_graph: default_audio_graph(), // Create fresh graph, not cloned
audio_graph_preset: self.audio_graph_preset.clone(),
graph_is_default: self.graph_is_default,
}
}
}
impl Metatrack {
/// Create a new metatrack with an audio graph (input → output)
/// Create a new metatrack. The mixing graph is set up later via `set_subtrack_graph`
/// once the child track list is known.
pub fn new(id: TrackId, name: String, sample_rate: u32) -> Self {
let default_buffer_size = 8192;
let audio_graph = Self::create_default_graph(sample_rate, default_buffer_size);
let audio_graph = Self::create_empty_graph(sample_rate, default_buffer_size);
Self {
id,
@ -227,25 +238,192 @@ impl Metatrack {
next_automation_id: 0,
audio_graph,
audio_graph_preset: None,
graph_is_default: true,
}
}
/// Create a default audio graph with AudioInput -> AudioOutput
fn create_default_graph(sample_rate: u32, buffer_size: usize) -> AudioGraph {
/// Minimal graph used before subtracks are known (just an AudioOutput node).
fn create_empty_graph(sample_rate: u32, buffer_size: usize) -> AudioGraph {
let mut graph = AudioGraph::new(sample_rate, buffer_size);
let input_node = Box::new(AudioInputNode::new("Audio Input"));
let input_id = graph.add_node(input_node);
graph.set_node_position(input_id, 100.0, 150.0);
let output_node = Box::new(AudioOutputNode::new("Audio Output"));
let output_id = graph.add_node(output_node);
graph.set_node_position(output_id, 500.0, 150.0);
graph.set_output_node(Some(output_id));
graph
}
let _ = graph.connect(input_id, 0, output_id, 0);
/// Build the explicit subtrack mixing graph: SubtrackInputs → Mixer → AudioOutput.
///
/// `subtracks` is an ordered list of (backend TrackId, display name) for each child.
/// Replaces the current graph, clears any saved preset, and marks
/// `graph_is_default = true`.
pub fn set_subtrack_graph(
    &mut self,
    subtracks: Vec<(TrackId, String)>,
    sample_rate: u32,
    buffer_size: usize,
) {
    use super::node_graph::nodes::{MixerNode, SubtrackInputsNode};

    let n = subtracks.len();
    let mut graph = AudioGraph::new(sample_rate, buffer_size);

    // SubtrackInputs node (N outputs, one per child).
    // `new()` leaves the per-slot buffers zero-length; `update_subtracks` allocates
    // the stereo interleaved buffers (buffer_size * 2 samples each) up front so no
    // allocation happens during rendering.
    let mut inputs_node = SubtrackInputsNode::new("Subtrack Inputs", subtracks.clone());
    inputs_node.update_subtracks(subtracks, buffer_size);
    let inputs_id = graph.add_node(Box::new(inputs_node));
    graph.set_node_position(inputs_id, 100.0, 150.0);

    // Mixer node (starts with 1 spare input; grows as connections are made).
    let mixer_id = graph.add_node(Box::new(MixerNode::new("Mixer")));
    graph.set_node_position(mixer_id, 350.0, 150.0);

    // AudioOutput node — the graph's terminal.
    let output_id = graph.add_node(Box::new(AudioOutputNode::new("Audio Output")));
    graph.set_node_position(output_id, 600.0, 150.0);

    // Default wiring: SubtrackInputs[i] → Mixer[i] for each child, then Mixer → Output.
    for i in 0..n {
        let _ = graph.connect(inputs_id, i, mixer_id, i);
    }
    let _ = graph.connect(mixer_id, 0, output_id, 0);
    graph.set_output_node(Some(output_id));

    self.audio_graph = graph;
    // Any previously saved preset no longer matches the freshly built graph.
    self.audio_graph_preset = None;
    self.graph_is_default = true;
}
/// Add a new subtrack port to the existing graph.
///
/// When the graph is still the auto-generated default, the fresh port is also wired
/// to a new Mixer input; on a user-modified graph the port is left unconnected.
pub fn add_subtrack_to_graph(&mut self, track_id: TrackId, name: String, buffer_size: usize) {
    use super::node_graph::nodes::SubtrackInputsNode;

    // Locate the SubtrackInputs node; without one there is no subtrack graph yet.
    let Some(si_idx) = self.audio_graph.node_indices().find(|&idx| {
        self.audio_graph
            .get_graph_node(idx)
            .map(|n| n.node.node_type() == "SubtrackInputs")
            .unwrap_or(false)
    }) else {
        return;
    };

    // Append the new slot and rebuild the node's ports/buffers.
    let new_slot = {
        let gn = self.audio_graph.get_graph_node_mut(si_idx).unwrap();
        let si = gn.node.as_any_mut().downcast_mut::<SubtrackInputsNode>().unwrap();
        let mut list = si.subtracks().to_vec();
        list.push((track_id, name));
        let count = list.len();
        si.update_subtracks(list, buffer_size);
        count - 1 // index of the slot we just appended
    };

    // The GraphNode's output buffers were sized at add_node time; re-size them
    // to cover the extra output port.
    self.audio_graph.reallocate_node_output_buffers(si_idx, buffer_size);

    if !self.graph_is_default {
        return;
    }

    // Default graph: wire the new port straight into the Mixer
    // (connect() auto-grows the mixer's input count).
    let mixer_idx = self.audio_graph.node_indices().find(|&idx| {
        self.audio_graph
            .get_graph_node(idx)
            .map(|n| n.node.node_type() == "Mixer")
            .unwrap_or(false)
    });
    if let Some(mixer_idx) = mixer_idx {
        let _ = self.audio_graph.connect(si_idx, new_slot, mixer_idx, new_slot);
    }
}
/// Remove a subtrack from the graph (by TrackId).
///
/// Drops every connection leaving the removed port, then deletes the port itself.
/// On the auto-generated default graph, the Mixer wiring is rebuilt so the
/// remaining slots stay compactly connected 1:1.
pub fn remove_subtrack_from_graph(&mut self, track_id: TrackId, buffer_size: usize) {
    use super::node_graph::nodes::SubtrackInputsNode;

    // Locate the SubtrackInputs node.
    let Some(si_idx) = self.audio_graph.node_indices().find(|&idx| {
        self.audio_graph
            .get_graph_node(idx)
            .map(|n| n.node.node_type() == "SubtrackInputs")
            .unwrap_or(false)
    }) else {
        return;
    };

    // Which slot belongs to this track?
    let Some(slot) = self
        .audio_graph
        .get_graph_node(si_idx)
        .unwrap()
        .node
        .as_any()
        .downcast_ref::<SubtrackInputsNode>()
        .unwrap()
        .subtrack_index_for(track_id)
    else {
        return;
    };

    // Sever anything fed from the doomed output port, then drop the slot itself.
    self.audio_graph.disconnect_output_port(si_idx, slot);
    {
        let gn = self.audio_graph.get_graph_node_mut(si_idx).unwrap();
        let si = gn.node.as_any_mut().downcast_mut::<SubtrackInputsNode>().unwrap();
        let mut list = si.subtracks().to_vec();
        list.remove(slot);
        si.update_subtracks(list, buffer_size);
    }
    self.audio_graph.reallocate_node_output_buffers(si_idx, buffer_size);

    if !self.graph_is_default {
        return;
    }

    // Default graph: ports above `slot` shifted down, so rebuild the 1:1 Mixer wiring.
    let mixer_idx = self.audio_graph.node_indices().find(|&idx| {
        self.audio_graph
            .get_graph_node(idx)
            .map(|n| n.node.node_type() == "Mixer")
            .unwrap_or(false)
    });
    if let Some(mixer_idx) = mixer_idx {
        self.audio_graph.disconnect_all_inputs(mixer_idx);

        // Remaining slot count after removal.
        let remaining = self
            .audio_graph
            .get_graph_node(si_idx)
            .unwrap()
            .node
            .as_any()
            .downcast_ref::<SubtrackInputsNode>()
            .unwrap()
            .num_subtracks();

        // Shrink the mixer to remaining + 1 spare, then reconnect each slot 1:1.
        {
            let gn = self.audio_graph.get_graph_node_mut(mixer_idx).unwrap();
            let mixer = gn
                .node
                .as_any_mut()
                .downcast_mut::<super::node_graph::nodes::MixerNode>()
                .unwrap();
            mixer.resize(remaining + 1);
        }
        for i in 0..remaining {
            let _ = self.audio_graph.connect(si_idx, i, mixer_idx, i);
        }
    }
}
/// Return the current ordered subtrack list from SubtrackInputsNode, or empty vec if none.
///
/// Scans the graph lazily for the first `SubtrackInputs` node and clones its
/// `(TrackId, name)` list. Returns an empty `Vec` when the graph has no such
/// node (e.g. the user deleted it from a custom graph).
pub fn current_subtracks(&self) -> Vec<(TrackId, String)> {
    use super::node_graph::nodes::SubtrackInputsNode;
    // Only shared borrows are needed here, so we can iterate `node_indices()`
    // directly instead of collecting them into a Vec first — the previous
    // version's `collect::<Vec<_>>()` was a needless allocation
    // (clippy::needless_collect).
    self.audio_graph
        .node_indices()
        .find_map(|idx| {
            self.audio_graph
                .get_graph_node(idx)
                .and_then(|gn| gn.node.as_any().downcast_ref::<SubtrackInputsNode>())
                .map(|si| si.subtracks().to_vec())
        })
        .unwrap_or_default()
}
/// Prepare for serialization by saving the audio graph as a preset
@ -253,20 +431,42 @@ impl Metatrack {
self.audio_graph_preset = Some(self.audio_graph.to_preset("Metatrack Graph"));
}
/// Rebuild the audio graph from preset after deserialization
/// Rebuild the audio graph from preset after deserialization.
///
/// After loading, the caller must call `update_subtrack_ids` to re-associate
/// backend TrackIds with the SubtrackInputsNode's port slots.
pub fn rebuild_audio_graph(&mut self, sample_rate: u32, buffer_size: usize) -> Result<(), String> {
if let Some(preset) = &self.audio_graph_preset {
if !preset.nodes.is_empty() && preset.output_node.is_some() {
self.audio_graph = AudioGraph::from_preset(preset, sample_rate, buffer_size, None)?;
// graph_is_default remains as serialized (false for user-modified graphs)
} else {
self.audio_graph = Self::create_default_graph(sample_rate, buffer_size);
self.audio_graph = Self::create_empty_graph(sample_rate, buffer_size);
self.graph_is_default = true;
}
} else {
self.audio_graph = Self::create_default_graph(sample_rate, buffer_size);
self.audio_graph = Self::create_empty_graph(sample_rate, buffer_size);
self.graph_is_default = true;
}
Ok(())
}
/// Re-associate backend TrackIds with the SubtrackInputsNode's port slots after reload.
///
/// The preset stores placeholder TrackId=0 entries; this call fills in the real IDs.
pub fn update_subtrack_ids(&mut self, subtracks: Vec<(TrackId, String)>, buffer_size: usize) {
    use super::node_graph::nodes::SubtrackInputsNode;
    // Snapshot the indices up front: the index iterator holds a shared borrow
    // of the graph, which would conflict with the mutable lookup below.
    let indices: Vec<_> = self.audio_graph.node_indices().collect();
    for idx in indices {
        if let Some(gn) = self.audio_graph.get_graph_node_mut(idx) {
            if let Some(si) = gn.node.as_any_mut().downcast_mut::<SubtrackInputsNode>() {
                // At most one SubtrackInputs node exists; hand it the real IDs
                // and stop scanning.
                si.update_subtracks(subtracks, buffer_size);
                return;
            }
        }
    }
}
/// Add an automation lane to this metatrack
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;
@ -439,6 +639,11 @@ pub struct MidiTrack {
/// Peak level of last render() call (for VU metering)
#[serde(skip, default)]
pub peak_level: f32,
/// True while the instrument graph is still the auto-generated default (no user edits).
/// Used to prompt before loading a preset.
#[serde(default)]
pub graph_is_default: bool,
}
impl Clone for MidiTrack {
@ -457,6 +662,7 @@ impl Clone for MidiTrack {
live_midi_queue: Vec::new(), // Don't clone live MIDI queue
prev_active_instances: HashSet::new(),
peak_level: 0.0,
graph_is_default: self.graph_is_default,
}
}
}
@ -485,6 +691,7 @@ impl MidiTrack {
live_midi_queue: Vec::new(),
prev_active_instances: HashSet::new(),
peak_level: 0.0,
graph_is_default: true,
}
}
@ -584,77 +791,55 @@ impl MidiTrack {
self.live_midi_queue.clear();
}
/// Process only live MIDI input (queued events) without rendering clips.
/// This is used when playback is stopped but we want to hear live input.
pub fn process_live_input(
    &mut self,
    output: &mut [f32],
    _sample_rate: u32,
    _channels: u32,
) {
    // Synthesize audio from whatever live events (virtual keyboard / MIDI
    // controllers) have been queued since the last call.
    self.instrument_graph.process(output, &self.live_midi_queue, 0.0);
    // Events are one-shot: drop them now that they've been rendered.
    self.live_midi_queue.clear();
    // Scale by the static track volume; automation is not evaluated while
    // the transport is stopped.
    output.iter_mut().for_each(|sample| *sample *= self.volume);
}
/// Render this MIDI track into the output buffer
/// Render this MIDI track into the output buffer.
///
/// When `ctx.live_only` is true, clip event collection is skipped and only the live MIDI
/// queue is processed. This lets note-off tails (and live keyboard input) route through
/// the normal group hierarchy without re-triggering notes from clips at the paused position.
pub fn render(
&mut self,
output: &mut [f32],
midi_pool: &MidiClipPool,
playhead_seconds: f64,
sample_rate: u32,
channels: u32,
ctx: RenderContext,
) {
let buffer_duration_seconds = output.len() as f64 / (sample_rate as f64 * channels as f64);
let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;
// Collect MIDI events from all clip instances that overlap with current time range
let mut midi_events = Vec::new();
let mut currently_active = HashSet::new();
for instance in &self.clip_instances {
if instance.overlaps_range(playhead_seconds, buffer_end_seconds) {
currently_active.insert(instance.id);
}
// Get the clip content from the pool
if let Some(clip) = midi_pool.get_clip(instance.clip_id) {
let events = instance.get_events_in_range(
clip,
playhead_seconds,
buffer_end_seconds,
);
midi_events.extend(events);
}
}
// Send all-notes-off for clip instances that just became inactive
// (playhead exited the clip). This prevents stuck notes from malformed clips.
for prev_id in &self.prev_active_instances {
if !currently_active.contains(prev_id) {
for note in 0..128u8 {
midi_events.push(MidiEvent::note_off(playhead_seconds, 0, note, 0));
if !ctx.live_only {
let buffer_duration_seconds = output.len() as f64 / (ctx.sample_rate as f64 * ctx.channels as f64);
let buffer_end_seconds = ctx.playhead_seconds + buffer_duration_seconds;
// Collect MIDI events from all clip instances that overlap with current time range
let mut currently_active = HashSet::new();
for instance in &self.clip_instances {
if instance.overlaps_range(ctx.playhead_seconds, buffer_end_seconds) {
currently_active.insert(instance.id);
}
if let Some(clip) = midi_pool.get_clip(instance.clip_id) {
let events = instance.get_events_in_range(clip, ctx.playhead_seconds, buffer_end_seconds);
midi_events.extend(events);
}
break; // One round of all-notes-off is enough
}
// Send all-notes-off for clip instances that just became inactive
for prev_id in &self.prev_active_instances {
if !currently_active.contains(prev_id) {
for note in 0..128u8 {
midi_events.push(MidiEvent::note_off(ctx.playhead_seconds, 0, note, 0));
}
break;
}
}
self.prev_active_instances = currently_active;
}
self.prev_active_instances = currently_active;
// Add live MIDI events (from virtual keyboard or MIDI controllers)
// This allows real-time input to be heard during playback/recording
midi_events.extend(self.live_midi_queue.drain(..));
// Generate audio using instrument graph
self.instrument_graph.process(output, &midi_events, playhead_seconds);
self.instrument_graph.process(output, &midi_events, ctx.playhead_seconds);
// Evaluate and apply automation
let effective_volume = self.evaluate_automation_at_time(playhead_seconds);
// Evaluate and apply automation (skip automation in live_only mode — no playhead to evaluate at)
let effective_volume = if ctx.live_only { self.volume } else { self.evaluate_automation_at_time(ctx.playhead_seconds) };
// Apply track volume
for sample in output.iter_mut() {
@ -715,6 +900,11 @@ pub struct AudioTrack {
/// Peak level of last render() call (for VU metering)
#[serde(skip, default)]
pub peak_level: f32,
/// True while the effects graph is still the auto-generated default (no user edits).
/// Used to prompt before loading a preset.
#[serde(default)]
pub graph_is_default: bool,
}
impl Clone for AudioTrack {
@ -732,6 +922,7 @@ impl Clone for AudioTrack {
effects_graph: default_audio_graph(), // Create fresh graph, not cloned
clip_render_buffer: Vec::new(),
peak_level: 0.0,
graph_is_default: self.graph_is_default,
}
}
}
@ -776,6 +967,7 @@ impl AudioTrack {
effects_graph,
clip_render_buffer: Vec::new(),
peak_level: 0.0,
graph_is_default: true,
}
}

View File

@ -180,6 +180,22 @@ pub enum Command {
GraphSavePreset(TrackId, String, String, String, Vec<String>),
/// Load a preset into a track's graph (track_id, preset_path)
GraphLoadPreset(TrackId, String),
// Metatrack subtrack graph commands
/// Replace a metatrack's mixing graph with the default SubtrackInputs→Mixer→Output layout.
/// (metatrack_id, ordered list of (child_track_id, display_name))
SetMetatrackSubtrackGraph(TrackId, Vec<(TrackId, String)>),
/// Add a new subtrack port to a metatrack's SubtrackInputsNode.
/// (metatrack_id, child_track_id, display_name)
AddMetatrackSubtrack(TrackId, TrackId, String),
/// Remove a subtrack port from a metatrack's SubtrackInputsNode.
/// (metatrack_id, child_track_id)
RemoveMetatrackSubtrack(TrackId, TrackId),
/// Re-associate backend TrackIds with SubtrackInputsNode slots after project reload.
/// (metatrack_id, ordered list of (child_track_id, display_name))
UpdateMetatrackSubtrackIds(TrackId, Vec<(TrackId, String)>),
/// Set or clear the graph_is_default flag on any track (track_id, value)
SetGraphIsDefault(TrackId, bool),
/// Save a VoiceAllocator's template graph as a preset (track_id, voice_allocator_id, preset_path, preset_name)
GraphSaveTemplatePreset(TrackId, u32, String, String),
@ -422,6 +438,8 @@ pub enum Query {
SetProject(Box<crate::audio::project::Project>),
/// Duplicate a MIDI clip in the pool, returning the new clip's ID
DuplicateMidiClipSync(MidiClipId),
/// Get whether a track's graph is still the auto-generated default
GetGraphIsDefault(TrackId),
}
/// Oscilloscope data from a node
@ -497,4 +515,6 @@ pub enum QueryResponse {
ProjectSet(Result<(), String>),
/// MIDI clip duplicated (returns new clip ID)
MidiClipDuplicated(Result<MidiClipId, String>),
/// Whether a track's graph is the auto-generated default
GraphIsDefault(bool),
}

View File

@ -479,7 +479,6 @@ enum FileCommand {
path: std::path::PathBuf,
document: lightningbeam_core::document::Document,
layer_to_track_map: std::collections::HashMap<uuid::Uuid, u32>,
clip_to_metatrack_map: std::collections::HashMap<uuid::Uuid, u32>,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
},
Load {
@ -554,8 +553,8 @@ impl FileOperationsWorker {
fn run(self) {
while let Ok(command) = self.command_rx.recv() {
match command {
FileCommand::Save { path, document, layer_to_track_map, clip_to_metatrack_map, progress_tx } => {
self.handle_save(path, document, &layer_to_track_map, &clip_to_metatrack_map, progress_tx);
FileCommand::Save { path, document, layer_to_track_map, progress_tx } => {
self.handle_save(path, document, &layer_to_track_map, progress_tx);
}
FileCommand::Load { path, progress_tx } => {
self.handle_load(path, progress_tx);
@ -570,7 +569,6 @@ impl FileOperationsWorker {
path: std::path::PathBuf,
document: lightningbeam_core::document::Document,
layer_to_track_map: &std::collections::HashMap<uuid::Uuid, u32>,
clip_to_metatrack_map: &std::collections::HashMap<uuid::Uuid, u32>,
progress_tx: std::sync::mpsc::Sender<FileProgress>,
) {
use lightningbeam_core::file_io::{save_beam, SaveSettings};
@ -613,7 +611,7 @@ impl FileOperationsWorker {
let step3_start = std::time::Instant::now();
let settings = SaveSettings::default();
match save_beam(&path, &document, &mut audio_project, audio_pool_entries, layer_to_track_map, clip_to_metatrack_map, &settings) {
match save_beam(&path, &document, &mut audio_project, audio_pool_entries, layer_to_track_map, &settings) {
Ok(()) => {
eprintln!("📊 [SAVE] Step 3: save_beam() took {:.2}ms", step3_start.elapsed().as_secs_f64() * 1000.0);
eprintln!("📊 [SAVE] ✅ Total save time: {:.2}ms", save_start.elapsed().as_secs_f64() * 1000.0);
@ -829,8 +827,6 @@ struct EditorApp {
// Track ID mapping (Document layer UUIDs <-> daw-backend TrackIds)
layer_to_track_map: HashMap<Uuid, daw_backend::TrackId>,
track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>,
// Movie clip ID -> backend metatrack (group track) mapping
clip_to_metatrack_map: HashMap<Uuid, daw_backend::TrackId>,
/// Generation counter - incremented on project load to force UI components to reload
project_generation: u64,
// Clip instance ID mapping (Document clip instance UUIDs <-> backend clip instance IDs)
@ -1106,7 +1102,6 @@ impl EditorApp {
webcam_record_command: None,
layer_to_track_map: HashMap::new(),
track_to_layer_map: HashMap::new(),
clip_to_metatrack_map: HashMap::new(),
project_generation: 0,
clip_instance_to_backend_map: HashMap::new(),
playback_time: 0.0, // Start at beginning
@ -1623,13 +1618,103 @@ impl EditorApp {
}
}
}
// After all tracks are created, push subtrack mixing graph commands for group metatracks
self.update_metatrack_subtrack_graphs();
}
/// Collect all group layers (depth-first) and push their subtrack graph to the backend.
/// Also creates metatracks for any groups that don't have one yet (e.g., empty groups).
/// Called at the end of sync_audio_layers_to_backend, after all child tracks exist.
fn update_metatrack_subtrack_graphs(&mut self) {
    use lightningbeam_core::layer::AnyLayer;
    // Collect (group_layer_id, group_name, children snapshot) for every group.
    // Snapshot the name too so we can create metatracks for groups not yet in the map.
    // NOTE(review): `g.children.clone()` copies each group's entire child layer
    // tree — confirm AnyLayer clones are cheap enough for deep documents.
    let mut group_snapshots: Vec<(uuid::Uuid, String, Vec<AnyLayer>)> = Vec::new();
    // Depth-first walk: push each group, then recurse into its children so
    // nested groups are also snapshotted.
    fn collect_groups(layers: &[AnyLayer], out: &mut Vec<(uuid::Uuid, String, Vec<AnyLayer>)>) {
        for layer in layers {
            if let AnyLayer::Group(g) = layer {
                out.push((g.layer.id, g.layer.name.clone(), g.children.clone()));
                collect_groups(&g.children, out);
            }
        }
    }
    collect_groups(
        &self.action_executor.document().root.children,
        &mut group_snapshots,
    );
    // Ensure metatracks exist for ALL groups, not just those with audio children.
    // This allows the node graph pane to open for empty group layers too.
    // Phase 1: create missing metatracks. The controller mutex is locked only
    // for the duration of each synchronous create call.
    if let Some(ref controller_arc) = self.audio_controller {
        for (group_id, group_name, _) in &group_snapshots {
            if !self.layer_to_track_map.contains_key(group_id) {
                let track_id_result = {
                    let mut controller = controller_arc.lock().unwrap();
                    controller.create_group_track_sync(format!("[{}]", group_name), None)
                };
                match track_id_result {
                    Ok(track_id) => {
                        self.layer_to_track_map.insert(*group_id, track_id);
                        println!("✅ Created metatrack for group '{}' (TrackId: {})", group_name, track_id);
                    }
                    Err(e) => {
                        eprintln!("⚠️ Failed to create metatrack for group '{}': {}", group_name, e);
                    }
                }
            }
        }
    }
    // Push subtrack graph commands for every group that now has a metatrack.
    // Phase 2 runs after phase 1 so nested groups created above are resolvable
    // as subtracks of their parents.
    if let Some(ref controller_arc) = self.audio_controller {
        let mut controller = controller_arc.lock().unwrap();
        for (group_id, _, children) in &group_snapshots {
            if let Some(&metatrack_id) = self.layer_to_track_map.get(group_id) {
                let subtracks = self.build_subtrack_list_for_group(children);
                controller.set_metatrack_subtrack_graph(metatrack_id, subtracks);
            }
        }
    }
}
/// Build the ordered subtrack list for a group layer's direct children.
/// Audio child layers → looked up via layer_to_track_map.
/// Nested group children → looked up via layer_to_track_map.
fn build_subtrack_list_for_group(
    &self,
    children: &[lightningbeam_core::layer::AnyLayer],
) -> Vec<(daw_backend::TrackId, String)> {
    use lightningbeam_core::layer::AnyLayer;
    // Iterate in reverse: timeline layers are stored top-to-bottom
    // (index 0 = topmost) and node ports also render top-to-bottom, so
    // reversing makes the topmost timeline layer land on port 0.
    // Children without a backend track mapping (or of other layer kinds)
    // are silently skipped.
    children
        .iter()
        .rev()
        .filter_map(|child| match child {
            AnyLayer::Audio(audio_layer) => self
                .layer_to_track_map
                .get(&audio_layer.layer.id)
                .map(|&track_id| (track_id, audio_layer.layer.name.clone())),
            AnyLayer::Group(group_layer) => self
                .layer_to_track_map
                .get(&group_layer.layer.id)
                .map(|&meta_id| (meta_id, group_layer.layer.name.clone())),
            _ => None,
        })
        .collect()
}
/// Ensure a backend metatrack exists for a parent container (VectorClip or GroupLayer).
/// Checks if the ID belongs to a GroupLayer first, then falls back to VectorClip.
fn ensure_metatrack_for_parent(&mut self, parent_id: Uuid) -> Option<daw_backend::TrackId> {
// Return existing metatrack if already mapped
if let Some(&track_id) = self.clip_to_metatrack_map.get(&parent_id) {
if let Some(&track_id) = self.layer_to_track_map.get(&parent_id) {
return Some(track_id);
}
@ -1647,7 +1732,7 @@ impl EditorApp {
/// Ensure a backend metatrack (group track) exists for a GroupLayer.
fn ensure_metatrack_for_group(&mut self, group_layer_id: Uuid) -> Option<daw_backend::TrackId> {
if let Some(&track_id) = self.clip_to_metatrack_map.get(&group_layer_id) {
if let Some(&track_id) = self.layer_to_track_map.get(&group_layer_id) {
return Some(track_id);
}
@ -1660,7 +1745,7 @@ impl EditorApp {
let mut controller = controller_arc.lock().unwrap();
match controller.create_group_track_sync(format!("[{}]", group_name), None) {
Ok(track_id) => {
self.clip_to_metatrack_map.insert(group_layer_id, track_id);
self.layer_to_track_map.insert(group_layer_id, track_id);
println!("✅ Created metatrack for group '{}' (TrackId: {})", group_name, track_id);
return Some(track_id);
}
@ -1676,7 +1761,7 @@ impl EditorApp {
/// Returns the metatrack's TrackId, creating one if needed.
fn ensure_metatrack_for_clip(&mut self, clip_id: Uuid) -> Option<daw_backend::TrackId> {
// Return existing metatrack if already mapped
if let Some(&track_id) = self.clip_to_metatrack_map.get(&clip_id) {
if let Some(&track_id) = self.layer_to_track_map.get(&clip_id) {
return Some(track_id);
}
@ -1690,7 +1775,7 @@ impl EditorApp {
let mut controller = controller_arc.lock().unwrap();
match controller.create_group_track_sync(format!("[{}]", clip_name), None) {
Ok(track_id) => {
self.clip_to_metatrack_map.insert(clip_id, track_id);
self.layer_to_track_map.insert(clip_id, track_id);
println!("✅ Created metatrack for clip '{}' (TrackId: {})", clip_name, track_id);
return Some(track_id);
}
@ -1817,7 +1902,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
@ -2269,7 +2353,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self
.action_executor
@ -2457,7 +2540,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self
.action_executor
@ -2689,7 +2771,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
eprintln!("Duplicate clip failed: {}", e);
@ -2788,7 +2869,7 @@ impl EditorApp {
// Reset state and return to start screen
self.layer_to_track_map.clear();
self.track_to_layer_map.clear();
self.clip_to_metatrack_map.clear();
self.layer_to_track_map.clear();
self.clip_instance_to_backend_map.clear();
self.current_file_path = None;
self.selection.clear();
@ -3037,7 +3118,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
match self.action_executor.undo_with_backend(&mut backend_context) {
Ok(true) => {
@ -3076,7 +3156,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
match self.action_executor.redo_with_backend(&mut backend_context) {
Ok(true) => {
@ -3555,7 +3634,6 @@ impl EditorApp {
path: path.clone(),
document,
layer_to_track_map: self.layer_to_track_map.clone(),
clip_to_metatrack_map: self.clip_to_metatrack_map.clone(),
progress_tx,
};
@ -3712,15 +3790,6 @@ impl EditorApp {
eprintln!("📊 [APPLY] Step 5: No saved track mappings (old file format)");
}
// Restore clip-to-metatrack mappings
if !loaded_project.clip_to_metatrack_map.is_empty() {
for (&clip_id, &track_id) in &loaded_project.clip_to_metatrack_map {
self.clip_to_metatrack_map.insert(clip_id, track_id);
}
eprintln!("📊 [APPLY] Step 5b: Restored {} clip-to-metatrack mappings",
loaded_project.clip_to_metatrack_map.len());
}
// Sync any audio layers that don't have a mapping yet (new layers, or old file format)
let step6_start = std::time::Instant::now();
self.sync_audio_layers_to_backend();
@ -4357,7 +4426,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(action), &mut backend_context) {
@ -4403,7 +4471,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(audio_action), &mut backend_context) {
@ -4521,7 +4588,6 @@ impl EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
if let Err(e) = self.action_executor.execute_with_backend(Box::new(audio_action), &mut backend_context) {
@ -5797,7 +5863,6 @@ impl eframe::App for EditorApp {
audio_controller: Some(&mut *controller),
layer_to_track_map: &self.layer_to_track_map,
clip_instance_to_backend_map: &mut self.clip_instance_to_backend_map,
clip_to_metatrack_map: &self.clip_to_metatrack_map,
};
// Execute action with backend synchronization

View File

@ -200,7 +200,7 @@ pub struct SharedPaneState<'a> {
pub audio_buffer_size: u32,
/// Video manager for video decoding and frame caching
pub video_manager: &'a std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>,
/// Mapping from Document layer UUIDs to daw-backend TrackIds
/// Maps all layer/group/clip UUIDs to backend track IDs (audio, MIDI, and metatracks)
pub layer_to_track_map: &'a std::collections::HashMap<Uuid, daw_backend::TrackId>,
/// Global playback state
pub playback_time: &'a mut f64, // Current playback position in seconds

View File

@ -149,6 +149,8 @@ node_templates! {
// Subgraph I/O
TemplateInput, "TemplateInput", "Template Input", "Subgraph I/O", false;
TemplateOutput, "TemplateOutput", "Template Output", "Subgraph I/O", false;
// Auto-generated (not user-addable)
SubtrackInputs, "SubtrackInputs", "Subtrack Inputs", "Inputs", false;
// Outputs
AudioOutput, "AudioOutput", "Audio Output", "Outputs", true;
}
@ -936,6 +938,10 @@ impl NodeTemplateTrait for NodeTemplate {
graph.add_input_param(node_id, "Audio In".into(), DataType::Audio, ValueType::float(0.0), InputParamKind::ConnectionOnly, true);
graph.add_output_param(node_id, "Audio Out".into(), DataType::Audio);
}
NodeTemplate::SubtrackInputs => {
// Ports are dynamic — populated from backend graph state when loaded.
// No static ports at construction time.
}
}
}
}

View File

@ -515,6 +515,28 @@ impl NodeGraphPane {
)
));
self.pending_action = Some(action);
// If disconnecting from a Mixer, shrink it by removing the
// last (spare) audio input and its corresponding gain param.
// The spare is always last; removing it keeps N+1 invariant.
{
let to_frontend_id = to_node_id;
let is_mixer = self.state.graph.nodes.get(to_frontend_id)
.map(|n| n.user_data.template == NodeTemplate::Mixer)
.unwrap_or(false);
if is_mixer {
let ids: Vec<_> = self.state.graph.nodes
.get(to_frontend_id)
.map(|n| n.inputs.iter().map(|(_, id)| *id).collect())
.unwrap_or_default();
let n = ids.len() / 2; // audio count = total / 2
if n > 1 {
// Remove last gain param first (index 2n-1), then last audio (index n-1)
self.state.graph.remove_input_param(ids[2 * n - 1]);
self.state.graph.remove_input_param(ids[n - 1]);
}
}
}
}
}
}
@ -650,12 +672,10 @@ impl NodeGraphPane {
let mut controller = audio_controller.lock().unwrap();
// Node graph actions don't use clip instances, so we use an empty map
let mut empty_clip_map = std::collections::HashMap::new();
let empty_metatrack_map = std::collections::HashMap::new();
let mut backend_context = lightningbeam_core::action::BackendContext {
audio_controller: Some(&mut *controller),
layer_to_track_map: shared.layer_to_track_map,
clip_instance_to_backend_map: &mut empty_clip_map,
clip_to_metatrack_map: &empty_metatrack_map,
};
if let Err(e) = shared.action_executor.execute_with_backend(action, &mut backend_context) {
@ -1421,7 +1441,7 @@ impl NodeGraphPane {
}
};
let frontend_id = self.add_node_to_editor(node_template, &node.node_type, node.position, node.id, &node.parameters);
let frontend_id = self.add_node_to_editor(node_template, &node.node_type, node.position, node.id, &node.parameters, node.num_ports, &node.port_names);
// For Script nodes: rebuild ports now (before connections), defer script_id resolution
if node.node_type == "Script" {
@ -1872,7 +1892,7 @@ impl NodeGraphPane {
}
};
self.add_node_to_editor(node_template, &node.node_type, node.position, node.id, &node.parameters);
self.add_node_to_editor(node_template, &node.node_type, node.position, node.id, &node.parameters, node.num_ports, &node.port_names);
}
// Add sub-group placeholder nodes
@ -2129,7 +2149,9 @@ impl NodeGraphPane {
}
}
/// Helper: add a node to the editor state and return its frontend ID
/// Helper: add a node to the editor state and return its frontend ID.
/// `num_ports` overrides the static port count for dynamic-port nodes (Mixer, SubtrackInputs).
/// `port_names` provides per-port display names for SubtrackInputs nodes.
fn add_node_to_editor(
&mut self,
node_template: NodeTemplate,
@ -2137,6 +2159,8 @@ impl NodeGraphPane {
position: (f32, f32),
backend_node_id: u32,
parameters: &std::collections::HashMap<u32, f32>,
num_ports: Option<u32>,
port_names: &[String],
) -> Option<NodeId> {
let frontend_id = self.state.graph.nodes.insert(egui_node_graph2::Node {
id: NodeId::default(),
@ -2148,6 +2172,54 @@ impl NodeGraphPane {
node_template.build_node(&mut self.state.graph, &mut self.user_state, frontend_id);
// For dynamic-port nodes loaded from backend state, resize to the serialized port count.
if let Some(n) = num_ports {
let n = n as usize;
match node_template {
NodeTemplate::SubtrackInputs => {
// build_node created 0 outputs; add n audio outputs using actual track names
for i in 0..n {
let name = port_names.get(i)
.cloned()
.unwrap_or_else(|| format!("Subtrack {}", i + 1));
self.state.graph.add_output_param(frontend_id, name, DataType::Audio);
}
}
NodeTemplate::Mixer => {
// build_node added static inputs; remove them all and add n dynamic inputs
let input_ids: Vec<InputId> = self.state.graph.nodes.get(frontend_id)
.map(|node| node.inputs.iter().map(|(_, id)| *id).collect())
.unwrap_or_default();
for id in input_ids {
self.state.graph.remove_input_param(id);
}
// Audio inputs
for i in 0..n {
self.state.graph.add_input_param(
frontend_id,
format!("Input {}", i + 1),
DataType::Audio,
ValueType::float(0.0),
InputParamKind::ConnectionOnly,
true,
);
}
// Level/gain parameters
for i in 0..n {
self.state.graph.add_input_param(
frontend_id,
format!("Level {}", i + 1),
DataType::CV,
ValueType::float_param(1.0, 0.0, 1.0, "", i as u32, None),
InputParamKind::ConstantOnly,
true,
);
}
}
_ => {}
}
}
self.state.node_positions.insert(frontend_id, egui::pos2(position.0, position.1));
self.state.node_order.push(frontend_id);
@ -2227,8 +2299,9 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
// If selected track changed or project was reloaded, reload the graph
if self.track_id != current_track || (generation_changed && current_track.is_some()) {
if let Some(new_track_id) = current_track {
// Get backend track ID
if let Some(&backend_track_id) = shared.layer_to_track_map.get(&new_track_id) {
// Get backend track ID — check audio/MIDI layers first, then group/metatrack layers
if let Some(&backend_track_id) = shared.layer_to_track_map.get(&new_track_id)
{
// Check if track is MIDI or Audio
if let Some(audio_controller) = &shared.audio_controller {
let is_valid_track = {
@ -2272,7 +2345,7 @@ impl crate::panes::PaneRenderer for NodeGraphPane {
let bg_color = shared.theme.bg_color(&["#node-editor", ".pane-content"], ui.ctx(), egui::Color32::from_gray(30));
painter.rect_filled(rect, 0.0, bg_color);
let text = "Select a MIDI or Audio track to view its node graph";
let text = "Select a track to view its node graph";
let font_id = egui::FontId::proportional(16.0);
let text_color = shared.theme.text_color(&["#node-editor", ".text-secondary"], ui.ctx(), egui::Color32::from_gray(150));

View File

@ -837,10 +837,15 @@ impl PaneRenderer for VirtualPianoPane {
self.release_all_keyboard_notes(shared);
}
// Show message if no active MIDI track
ui.centered_and_justified(|ui| {
ui.label("No MIDI track selected. Create a MIDI track to use the virtual piano.");
});
// Draw message centered in rect using painter — avoids allocating in the full
// parent UI (which could block header interactions in other panes).
ui.painter_at(rect).text(
rect.center(),
egui::Align2::CENTER_CENTER,
"No MIDI track selected. Create a MIDI track to use the virtual piano.",
egui::FontId::proportional(16.0),
egui::Color32::from_gray(150),
);
return;
}