Add audio node graph editing

This commit is contained in:
Skyler Lehmkuhl 2025-10-25 03:29:54 -04:00
parent 19e99fa8bf
commit 16f4a2a359
43 changed files with 7110 additions and 824 deletions

daw-backend/Cargo.lock (generated) — 575 lines changed
View File

@ -11,6 +11,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "alsa"
version = "0.9.1"
@ -54,7 +60,7 @@ dependencies = [
"bitflags 2.9.4",
"cexpr",
"clang-sys",
"itertools",
"itertools 0.13.0",
"proc-macro2",
"quote",
"regex",
@ -93,6 +99,21 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
[[package]]
name = "cassowary"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]]
name = "castaway"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
dependencies = [
"rustversion",
]
[[package]]
name = "cc"
version = "1.2.41"
@ -147,6 +168,19 @@ dependencies = [
"memchr",
]
[[package]]
name = "compact_str"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f"
dependencies = [
"castaway",
"cfg-if",
"itoa",
"ryu",
"static_assertions",
]
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
@ -221,20 +255,165 @@ version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crossterm"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
dependencies = [
"bitflags 2.9.4",
"crossterm_winapi",
"libc",
"mio",
"parking_lot",
"signal-hook",
"signal-hook-mio",
"winapi",
]
[[package]]
name = "crossterm_winapi"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b"
dependencies = [
"winapi",
]
[[package]]
name = "dasp_envelope"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ec617ce7016f101a87fe85ed44180839744265fae73bb4aa43e7ece1b7668b6"
dependencies = [
"dasp_frame",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
]
[[package]]
name = "dasp_frame"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2a3937f5fe2135702897535c8d4a5553f8b116f76c1529088797f2eee7c5cd6"
dependencies = [
"dasp_sample",
]
[[package]]
name = "dasp_graph"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39b17b071a1fa4c78054730085620c3bb22dc5fded00483312557a3fdf26d7c4"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_signal",
"dasp_slice",
"petgraph 0.5.1",
]
[[package]]
name = "dasp_interpolate"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fc975a6563bb7ca7ec0a6c784ead49983a21c24835b0bc96eea11ee407c7486"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_sample",
]
[[package]]
name = "dasp_peak"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cf88559d79c21f3d8523d91250c397f9a15b5fc72fbb3f87fdb0a37b79915bf"
dependencies = [
"dasp_frame",
"dasp_sample",
]
[[package]]
name = "dasp_ring_buffer"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07d79e19b89618a543c4adec9c5a347fe378a19041699b3278e616e387511ea1"
[[package]]
name = "dasp_rms"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6c5dcb30b7e5014486e2822537ea2beae50b19722ffe2ed7549ab03774575aa"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_sample",
]
[[package]]
name = "dasp_sample"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f"
[[package]]
name = "dasp_signal"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa1ab7d01689c6ed4eae3d38fe1cea08cba761573fbd2d592528d55b421077e7"
dependencies = [
"dasp_envelope",
"dasp_frame",
"dasp_interpolate",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
"dasp_window",
]
[[package]]
name = "dasp_slice"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e1c7335d58e7baedafa516cb361360ff38d6f4d3f9d9d5ee2a2fc8e27178fa1"
dependencies = [
"dasp_frame",
"dasp_sample",
]
[[package]]
name = "dasp_window"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99ded7b88821d2ce4e8b842c9f1c86ac911891ab89443cc1de750cae764c5076"
dependencies = [
"dasp_sample",
]
[[package]]
name = "daw-backend"
version = "0.1.0"
dependencies = [
"cpal",
"crossterm",
"dasp_envelope",
"dasp_graph",
"dasp_interpolate",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
"dasp_signal",
"midly",
"petgraph 0.6.5",
"ratatui",
"rtrb",
"serde",
"serde_json",
"symphonia",
]
@ -271,6 +450,24 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
[[package]]
name = "fixedbitset"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
[[package]]
name = "fixedbitset"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "getrandom"
version = "0.3.4"
@ -289,12 +486,45 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
name = "hashbrown"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
]
[[package]]
name = "indexmap"
version = "2.11.4"
@ -302,7 +532,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [
"equivalent",
"hashbrown",
"hashbrown 0.16.0",
]
[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
dependencies = [
"either",
]
[[package]]
@ -314,6 +553,12 @@ dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jni"
version = "0.21.1"
@ -378,12 +623,30 @@ dependencies = [
"windows-link",
]
[[package]]
name = "lock_api"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
dependencies = [
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
[[package]]
name = "lru"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
dependencies = [
"hashbrown 0.15.5",
]
[[package]]
name = "mach2"
version = "0.4.3"
@ -414,6 +677,18 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "mio"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"log",
"wasi",
"windows-sys 0.48.0",
]
[[package]]
name = "ndk"
version = "0.8.0"
@ -524,6 +799,55 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "parking_lot"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-link",
]
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "petgraph"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
dependencies = [
"fixedbitset 0.2.0",
"indexmap 1.9.3",
]
[[package]]
name = "petgraph"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
dependencies = [
"fixedbitset 0.4.2",
"indexmap 2.11.4",
]
[[package]]
name = "pkg-config"
version = "0.3.32"
@ -563,6 +887,26 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "ratatui"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f44c9e68fd46eda15c646fbb85e1040b657a58cdc8c98db1d97a55930d991eef"
dependencies = [
"bitflags 2.9.4",
"cassowary",
"compact_str",
"crossterm",
"itertools 0.12.1",
"lru",
"paste",
"stability",
"strum",
"unicode-segmentation",
"unicode-truncate",
"unicode-width",
]
[[package]]
name = "rayon"
version = "1.11.0"
@ -583,6 +927,15 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
"bitflags 2.9.4",
]
[[package]]
name = "regex"
version = "1.12.2"
@ -630,6 +983,12 @@ version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
[[package]]
name = "ryu"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "same-file"
version = "1.0.6"
@ -639,6 +998,12 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
version = "1.0.228"
@ -669,12 +1034,99 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
"serde_core",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2"
dependencies = [
"libc",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-mio"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd"
dependencies = [
"libc",
"mio",
"signal-hook",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b"
dependencies = [
"libc",
]
[[package]]
name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "stability"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strum"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "symphonia"
version = "0.5.5"
@ -916,7 +1368,7 @@ version = "0.23.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d"
dependencies = [
"indexmap",
"indexmap 2.11.4",
"toml_datetime",
"toml_parser",
"winnow",
@ -937,6 +1389,29 @@ version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-truncate"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf"
dependencies = [
"itertools 0.13.0",
"unicode-segmentation",
"unicode-width",
]
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "walkdir"
version = "2.5.0"
@ -947,6 +1422,12 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
@ -1038,6 +1519,22 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.11"
@ -1047,6 +1544,12 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.54.0"
@ -1091,6 +1594,15 @@ dependencies = [
"windows-targets 0.42.2",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.61.2"
@ -1115,6 +1627,21 @@ dependencies = [
"windows_x86_64_msvc 0.42.2",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
@ -1137,6 +1664,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
@ -1149,6 +1682,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
@ -1161,6 +1700,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
@ -1179,6 +1724,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
@ -1191,6 +1742,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
@ -1203,6 +1760,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
@ -1215,6 +1778,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"

View File

@ -9,6 +9,20 @@ symphonia = { version = "0.5", features = ["all"] }
rtrb = "0.3"
midly = "0.5"
serde = { version = "1.0", features = ["derive"] }
ratatui = "0.26"
crossterm = "0.27"
# Node-based audio graph dependencies
dasp_graph = "0.11"
dasp_signal = "0.11"
dasp_sample = "0.11"
dasp_interpolate = "0.11"
dasp_envelope = "0.11"
dasp_ring_buffer = "0.11"
dasp_peak = "0.11"
dasp_rms = "0.11"
petgraph = "0.6"
serde_json = "1.0"
[dev-dependencies]

View File

@ -1,12 +1,14 @@
use crate::audio::buffer_pool::BufferPool;
use crate::audio::clip::ClipId;
use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
use crate::audio::node_graph::{nodes::*, InstrumentGraph};
use crate::audio::pool::AudioPool;
use crate::audio::project::Project;
use crate::audio::recording::RecordingState;
use crate::audio::track::{Track, TrackId};
use crate::audio::track::{Track, TrackId, TrackNode};
use crate::command::{AudioEvent, Command};
use crate::effects::{Effect, GainEffect, PanEffect, SimpleEQ};
use petgraph::stable_graph::NodeIndex;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
@ -718,6 +720,193 @@ impl Engine {
// Send a live MIDI note off event to the specified track's instrument
self.project.send_midi_note_off(track_id, note);
}
// Node graph commands
Command::GraphAddNode(track_id, node_type, _x, _y) => {
// Get MIDI track (graphs are only for MIDI tracks currently)
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
// Create graph if it doesn't exist
if track.instrument_graph.is_none() {
// Use large buffer to accommodate any audio callback size
track.instrument_graph = Some(InstrumentGraph::new(self.sample_rate, 8192));
}
if let Some(graph) = &mut track.instrument_graph {
// Create the node based on type
let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
"Oscillator" => Box::new(OscillatorNode::new("Oscillator".to_string())),
"Gain" => Box::new(GainNode::new("Gain".to_string())),
"Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
"Filter" => Box::new(FilterNode::new("Filter".to_string())),
"ADSR" => Box::new(ADSRNode::new("ADSR".to_string())),
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())),
"VoiceAllocator" => Box::new(VoiceAllocatorNode::new("VoiceAllocator".to_string(), self.sample_rate, 8192)),
"AudioOutput" => Box::new(AudioOutputNode::new("Output".to_string())),
_ => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Unknown node type: {}", node_type)
));
return;
}
};
// Add node to graph
let node_idx = graph.add_node(node);
let node_id = node_idx.index() as u32;
// Automatically set MIDI-receiving nodes as MIDI targets
if node_type == "MidiInput" || node_type == "VoiceAllocator" {
graph.set_midi_target(node_idx, true);
}
// Emit success event
let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
}
}
}
Command::GraphAddNodeToTemplate(track_id, voice_allocator_id, node_type, _x, _y) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let va_idx = NodeIndex::new(voice_allocator_id as usize);
// Create the node
let node: Box<dyn crate::audio::node_graph::AudioNode> = match node_type.as_str() {
"Oscillator" => Box::new(OscillatorNode::new("Oscillator".to_string())),
"Gain" => Box::new(GainNode::new("Gain".to_string())),
"Mixer" => Box::new(MixerNode::new("Mixer".to_string())),
"Filter" => Box::new(FilterNode::new("Filter".to_string())),
"ADSR" => Box::new(ADSRNode::new("ADSR".to_string())),
"MidiInput" => Box::new(MidiInputNode::new("MIDI Input".to_string())),
"MidiToCV" => Box::new(MidiToCVNode::new("MIDI→CV".to_string())),
"AudioToCV" => Box::new(AudioToCVNode::new("Audio→CV".to_string())),
"Oscilloscope" => Box::new(OscilloscopeNode::new("Oscilloscope".to_string())),
"TemplateInput" => Box::new(TemplateInputNode::new("Template Input".to_string())),
"TemplateOutput" => Box::new(TemplateOutputNode::new("Template Output".to_string())),
"AudioOutput" => Box::new(AudioOutputNode::new("Output".to_string())),
_ => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Unknown node type: {}", node_type)
));
return;
}
};
// Add node to VoiceAllocator's template graph
match graph.add_node_to_voice_allocator_template(va_idx, node) {
Ok(node_id) => {
println!("Added node {} (ID: {}) to VoiceAllocator {} template", node_type, node_id, voice_allocator_id);
let _ = self.event_tx.push(AudioEvent::GraphNodeAdded(track_id, node_id, node_type.clone()));
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to add node to template: {}", e)
));
}
}
}
}
}
Command::GraphRemoveNode(track_id, node_index) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let node_idx = NodeIndex::new(node_index as usize);
graph.remove_node(node_idx);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
}
Command::GraphConnect(track_id, from, from_port, to, to_port) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let from_idx = NodeIndex::new(from as usize);
let to_idx = NodeIndex::new(to as usize);
match graph.connect(from_idx, from_port, to_idx, to_port) {
Ok(()) => {
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("{:?}", e)
));
}
}
}
}
}
Command::GraphConnectInTemplate(track_id, voice_allocator_id, from, from_port, to, to_port) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let va_idx = NodeIndex::new(voice_allocator_id as usize);
match graph.connect_in_voice_allocator_template(va_idx, from, from_port, to, to_port) {
Ok(()) => {
println!("Connected nodes in VoiceAllocator {} template: {} -> {}", voice_allocator_id, from, to);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
Err(e) => {
let _ = self.event_tx.push(AudioEvent::GraphConnectionError(
track_id,
format!("Failed to connect in template: {}", e)
));
}
}
}
}
}
Command::GraphDisconnect(track_id, from, from_port, to, to_port) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let from_idx = NodeIndex::new(from as usize);
let to_idx = NodeIndex::new(to as usize);
graph.disconnect(from_idx, from_port, to_idx, to_port);
let _ = self.event_tx.push(AudioEvent::GraphStateChanged(track_id));
}
}
}
Command::GraphSetParameter(track_id, node_index, param_id, value) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let node_idx = NodeIndex::new(node_index as usize);
if let Some(graph_node) = graph.get_graph_node_mut(node_idx) {
graph_node.node.set_parameter(param_id, value);
}
}
}
}
Command::GraphSetMidiTarget(track_id, node_index, enabled) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let node_idx = NodeIndex::new(node_index as usize);
graph.set_midi_target(node_idx, enabled);
}
}
}
Command::GraphSetOutputNode(track_id, node_index) => {
if let Some(TrackNode::Midi(track)) = self.project.get_track_mut(track_id) {
if let Some(graph) = &mut track.instrument_graph {
let node_idx = NodeIndex::new(node_index as usize);
graph.set_output_node(Some(node_idx));
}
}
}
}
}
@ -1143,4 +1332,49 @@ impl EngineController {
/// Send a live MIDI note-off event for `note` to the given track's instrument.
pub fn send_midi_note_off(&mut self, track_id: TrackId, note: u8) {
    // Best-effort: if the command ring buffer is full, the event is dropped.
    self.command_tx
        .push(Command::SendMidiNoteOff(track_id, note))
        .ok();
}
// Node graph operations

/// Add a node of `node_type` to a track's instrument graph at editor
/// position (`x`, `y`).
pub fn graph_add_node(&mut self, track_id: TrackId, node_type: String, x: f32, y: f32) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphAddNode(track_id, node_type, x, y))
        .ok();
}
/// Add a node of `node_type` to the voice template of the VoiceAllocator
/// identified by `voice_allocator_id` in a track's instrument graph.
pub fn graph_add_node_to_template(&mut self, track_id: TrackId, voice_allocator_id: u32, node_type: String, x: f32, y: f32) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphAddNodeToTemplate(track_id, voice_allocator_id, node_type, x, y))
        .ok();
}
/// Connect two nodes inside a VoiceAllocator's template graph.
pub fn graph_connect_in_template(&mut self, track_id: TrackId, voice_allocator_id: u32, from_node: u32, from_port: usize, to_node: u32, to_port: usize) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphConnectInTemplate(track_id, voice_allocator_id, from_node, from_port, to_node, to_port))
        .ok();
}
/// Remove the node identified by `node_id` from a track's instrument graph.
pub fn graph_remove_node(&mut self, track_id: TrackId, node_id: u32) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphRemoveNode(track_id, node_id))
        .ok();
}
/// Connect `from_node`'s output port `from_port` to `to_node`'s input port
/// `to_port` in a track's instrument graph.
pub fn graph_connect(&mut self, track_id: TrackId, from_node: u32, from_port: usize, to_node: u32, to_port: usize) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphConnect(track_id, from_node, from_port, to_node, to_port))
        .ok();
}
/// Remove the connection from `from_node`/`from_port` to `to_node`/`to_port`
/// in a track's instrument graph.
pub fn graph_disconnect(&mut self, track_id: TrackId, from_node: u32, from_port: usize, to_node: u32, to_port: usize) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphDisconnect(track_id, from_node, from_port, to_node, to_port))
        .ok();
}
/// Set parameter `param_id` to `value` on the node identified by `node_id`
/// in a track's instrument graph.
pub fn graph_set_parameter(&mut self, track_id: TrackId, node_id: u32, param_id: u32, value: f32) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphSetParameter(track_id, node_id, param_id, value))
        .ok();
}
/// Enable or disable MIDI event delivery to the node identified by `node_id`
/// in a track's instrument graph.
pub fn graph_set_midi_target(&mut self, track_id: TrackId, node_id: u32, enabled: bool) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphSetMidiTarget(track_id, node_id, enabled))
        .ok();
}
/// Select the node identified by `node_id` as the audio output of a track's
/// instrument graph.
pub fn graph_set_output_node(&mut self, track_id: TrackId, node_id: u32) {
    // Best-effort: a full command queue silently drops the request.
    self.command_tx
        .push(Command::GraphSetOutputNode(track_id, node_id))
        .ok();
}
}

View File

@ -3,6 +3,7 @@ pub mod buffer_pool;
pub mod clip;
pub mod engine;
pub mod midi;
pub mod node_graph;
pub mod pool;
pub mod project;
pub mod recording;

View File

@ -0,0 +1,546 @@
use super::node_trait::AudioNode;
use super::types::{ConnectionError, SignalType};
use crate::audio::midi::MidiEvent;
use petgraph::algo::has_path_connecting;
use petgraph::stable_graph::{NodeIndex, StableGraph};
use petgraph::visit::{EdgeRef, IntoEdgeReferences};
use petgraph::Direction;
/// Connection information between nodes
///
/// Stored as the edge weight in the instrument graph. The port values are
/// indices into the source node's `outputs()` and the target node's
/// `inputs()` port lists.
#[derive(Debug, Clone)]
pub struct Connection {
    /// Output port index on the source (upstream) node.
    pub from_port: usize,
    /// Input port index on the target (downstream) node.
    pub to_port: usize,
}
/// Wrapper for audio nodes in the graph
///
/// Pairs the boxed node with pre-allocated per-port output storage so the
/// audio thread can process without allocating.
pub struct GraphNode {
    pub node: Box<dyn AudioNode>,
    /// Buffers for each audio/CV output port.
    /// Audio ports hold interleaved stereo, CV ports hold mono; MIDI ports
    /// get an empty placeholder here so indices stay aligned with `outputs()`
    /// (see `GraphNode::new`).
    pub output_buffers: Vec<Vec<f32>>,
    /// Buffers for each MIDI output port
    pub midi_output_buffers: Vec<Vec<MidiEvent>>,
}
impl GraphNode {
    /// Wrap `node`, pre-allocating one output buffer per output port.
    ///
    /// Buffer sizing follows the graph's signal conventions:
    /// * `Audio` ports get `buffer_size * 2` samples (interleaved L/R stereo),
    /// * `CV` ports get `buffer_size` samples (mono),
    /// * `Midi` ports get an empty placeholder in `output_buffers` (keeping
    ///   port indices aligned) plus a pre-reserved event vector in
    ///   `midi_output_buffers`.
    pub fn new(node: Box<dyn AudioNode>, buffer_size: usize) -> Self {
        let outputs = node.outputs();
        // Allocate buffers based on signal type
        // Audio signals are stereo (2 samples per frame), CV is mono (1 sample per frame)
        let mut output_buffers = Vec::new();
        let mut midi_output_buffers = Vec::new();
        for port in outputs.iter() {
            match port.signal_type {
                SignalType::Audio => {
                    output_buffers.push(vec![0.0; buffer_size * 2]); // Stereo (interleaved L/R)
                }
                SignalType::CV => {
                    output_buffers.push(vec![0.0; buffer_size]); // Mono
                }
                SignalType::Midi => {
                    output_buffers.push(vec![]); // Placeholder for indexing alignment
                    // Reserve space up front so pushing events on the audio
                    // thread does not normally allocate. 128 is a soft
                    // expectation of events per cycle, not a hard cap.
                    midi_output_buffers.push(Vec::with_capacity(128));
                }
            }
        }
        Self {
            node,
            output_buffers,
            midi_output_buffers,
        }
    }
}
/// Audio processing graph for instruments/effects
///
/// Owns the node graph plus scratch buffers reused every processing cycle so
/// the audio callback avoids per-cycle allocation.
pub struct InstrumentGraph {
    /// The audio graph (StableGraph allows node removal without index invalidation)
    graph: StableGraph<GraphNode, Connection>,
    /// MIDI input mapping (which nodes receive MIDI)
    midi_targets: Vec<NodeIndex>,
    /// Audio output node index (where we read final audio)
    output_node: Option<NodeIndex>,
    /// Sample rate
    sample_rate: u32,
    /// Buffer size for internal processing
    buffer_size: usize,
    /// Temporary buffers for node audio/CV inputs during processing
    /// (sized for stereo; CV inputs use only the leading half).
    input_buffers: Vec<Vec<f32>>,
    /// Temporary buffers for node MIDI inputs during processing
    midi_input_buffers: Vec<Vec<MidiEvent>>,
}
impl InstrumentGraph {
    /// Create a new empty instrument graph
    pub fn new(sample_rate: u32, buffer_size: usize) -> Self {
        Self {
            graph: StableGraph::new(),
            midi_targets: Vec::new(),
            output_node: None,
            sample_rate,
            buffer_size,
            // Pre-allocate input buffers with stereo size (2x) to accommodate Audio signals
            // CV signals will only use the first half
            input_buffers: vec![vec![0.0; buffer_size * 2]; 16],
            // Pre-allocate MIDI input buffers (max 128 events per port)
            midi_input_buffers: (0..16).map(|_| Vec::with_capacity(128)).collect(),
        }
    }
    /// Add a node to the graph, returning its stable index.
    pub fn add_node(&mut self, node: Box<dyn AudioNode>) -> NodeIndex {
        let graph_node = GraphNode::new(node, self.buffer_size);
        self.graph.add_node(graph_node)
    }
    /// Connect two nodes with type checking.
    ///
    /// # Errors
    /// Returns `ConnectionError` if a node/port index is invalid, the signal
    /// types differ, or the edge would create a cycle.
    pub fn connect(
        &mut self,
        from: NodeIndex,
        from_port: usize,
        to: NodeIndex,
        to_port: usize,
    ) -> Result<(), ConnectionError> {
        // Validate the connection
        self.validate_connection(from, from_port, to, to_port)?;
        // Add the edge
        self.graph.add_edge(from, to, Connection { from_port, to_port });
        Ok(())
    }
    /// Disconnect two nodes.
    ///
    /// Removes only the edge matching BOTH port indices; other parallel
    /// edges between the same pair of nodes are left intact.
    pub fn disconnect(
        &mut self,
        from: NodeIndex,
        from_port: usize,
        to: NodeIndex,
        to_port: usize,
    ) {
        // BUGFIX: `find_edge` returns an arbitrary edge between the two nodes,
        // so when parallel edges exist (same nodes, different ports) the
        // requested connection could be missed entirely. Scan all edges
        // between the pair and remove the one whose ports match.
        let edge_idx = self
            .graph
            .edges_connecting(from, to)
            .find(|e| e.weight().from_port == from_port && e.weight().to_port == to_port)
            .map(|e| e.id());
        if let Some(edge_idx) = edge_idx {
            self.graph.remove_edge(edge_idx);
        }
    }
    /// Remove a node from the graph, along with any bookkeeping that
    /// referenced it (MIDI targets, output designation).
    pub fn remove_node(&mut self, node: NodeIndex) {
        self.graph.remove_node(node);
        // Update MIDI targets
        self.midi_targets.retain(|&idx| idx != node);
        // Update output node
        if self.output_node == Some(node) {
            self.output_node = None;
        }
    }
    /// Validate a connection is type-compatible and wouldn't create a cycle
    fn validate_connection(
        &self,
        from: NodeIndex,
        from_port: usize,
        to: NodeIndex,
        to_port: usize,
    ) -> Result<(), ConnectionError> {
        // Check nodes exist (missing nodes are reported as InvalidPort)
        let from_node = self.graph.node_weight(from).ok_or(ConnectionError::InvalidPort)?;
        let to_node = self.graph.node_weight(to).ok_or(ConnectionError::InvalidPort)?;
        // Check ports are valid
        let from_outputs = from_node.node.outputs();
        let to_inputs = to_node.node.inputs();
        if from_port >= from_outputs.len() || to_port >= to_inputs.len() {
            return Err(ConnectionError::InvalidPort);
        }
        // Check signal types match
        let from_type = from_outputs[from_port].signal_type;
        let to_type = to_inputs[to_port].signal_type;
        if from_type != to_type {
            return Err(ConnectionError::TypeMismatch {
                expected: to_type,
                got: from_type,
            });
        }
        // Check for cycles: if there's already a path from 'to' to 'from',
        // then adding 'from' -> 'to' would create a cycle
        if has_path_connecting(&self.graph, to, from, None) {
            return Err(ConnectionError::WouldCreateCycle);
        }
        Ok(())
    }
    /// Set which node receives MIDI events
    pub fn set_midi_target(&mut self, node: NodeIndex, enabled: bool) {
        if enabled {
            if !self.midi_targets.contains(&node) {
                self.midi_targets.push(node);
            }
        } else {
            self.midi_targets.retain(|&idx| idx != node);
        }
    }
    /// Set the output node (where final audio is read from)
    pub fn set_output_node(&mut self, node: Option<NodeIndex>) {
        self.output_node = node;
    }
    /// Add a node to a VoiceAllocator's template graph.
    ///
    /// Returns the template-local node id on success.
    ///
    /// # Errors
    /// Fails if `voice_allocator_idx` does not exist or is not a
    /// VoiceAllocator node.
    pub fn add_node_to_voice_allocator_template(
        &mut self,
        voice_allocator_idx: NodeIndex,
        node: Box<dyn AudioNode>,
    ) -> Result<u32, String> {
        use crate::audio::node_graph::nodes::VoiceAllocatorNode;
        // Get the VoiceAllocator node
        if let Some(graph_node) = self.graph.node_weight_mut(voice_allocator_idx) {
            // The trait object must be downcast to the concrete type; the
            // trait has no `Any` support, so a raw-pointer cast is used after
            // an explicit type-name check.
            if graph_node.node.node_type() != "VoiceAllocator" {
                return Err("Node is not a VoiceAllocator".to_string());
            }
            // Get mutable reference and downcast using raw pointers
            let node_ptr = &mut *graph_node.node as *mut dyn AudioNode;
            // SAFETY: `node_type()` returned "VoiceAllocator", which (by the
            // crate's convention) only `VoiceAllocatorNode` reports, so the
            // concrete type behind the trait object is `VoiceAllocatorNode`
            // and the pointer cast is valid. The reference is used only
            // within this scope while `graph_node` is exclusively borrowed.
            unsafe {
                let va_ptr = node_ptr as *mut VoiceAllocatorNode;
                let va = &mut *va_ptr;
                // Add node to template graph
                let node_idx = va.template_graph_mut().add_node(node);
                let node_id = node_idx.index() as u32;
                // Rebuild voice instances from template
                va.rebuild_voices();
                return Ok(node_id);
            }
        }
        Err("VoiceAllocator node not found".to_string())
    }
    /// Connect nodes in a VoiceAllocator's template graph.
    ///
    /// # Errors
    /// Fails if the target node is missing / not a VoiceAllocator, or if the
    /// template-graph connection itself is rejected (bad port, type
    /// mismatch, cycle).
    pub fn connect_in_voice_allocator_template(
        &mut self,
        voice_allocator_idx: NodeIndex,
        from_node: u32,
        from_port: usize,
        to_node: u32,
        to_port: usize,
    ) -> Result<(), String> {
        use crate::audio::node_graph::nodes::VoiceAllocatorNode;
        // Get the VoiceAllocator node
        if let Some(graph_node) = self.graph.node_weight_mut(voice_allocator_idx) {
            // Check node type first
            if graph_node.node.node_type() != "VoiceAllocator" {
                return Err("Node is not a VoiceAllocator".to_string());
            }
            // Get mutable reference and downcast using raw pointers
            let node_ptr = &mut *graph_node.node as *mut dyn AudioNode;
            // SAFETY: the node_type() check above guarantees the concrete
            // type is VoiceAllocatorNode (see
            // add_node_to_voice_allocator_template for details).
            unsafe {
                let va_ptr = node_ptr as *mut VoiceAllocatorNode;
                let va = &mut *va_ptr;
                // Connect in template graph
                let from_idx = NodeIndex::new(from_node as usize);
                let to_idx = NodeIndex::new(to_node as usize);
                va.template_graph_mut().connect(from_idx, from_port, to_idx, to_port)
                    .map_err(|e| format!("{:?}", e))?;
                // Rebuild voice instances from template
                va.rebuild_voices();
                return Ok(());
            }
        }
        Err("VoiceAllocator node not found".to_string())
    }
    /// Process the graph and produce audio output.
    ///
    /// `output_buffer` is interleaved stereo; its length determines how many
    /// samples are processed this cycle. `midi_events` are delivered to every
    /// registered MIDI target node before processing begins.
    pub fn process(&mut self, output_buffer: &mut [f32], midi_events: &[MidiEvent]) {
        // Use the requested output buffer size for processing
        let process_size = output_buffer.len();
        // Clear all output buffers (audio/CV and MIDI)
        for node in self.graph.node_weights_mut() {
            for buffer in &mut node.output_buffers {
                let len = buffer.len();
                buffer[..process_size.min(len)].fill(0.0);
            }
            for midi_buffer in &mut node.midi_output_buffers {
                midi_buffer.clear();
            }
        }
        // Distribute incoming MIDI events to target nodes' MIDI output buffers
        // This puts MIDI into the graph so it can flow through connections
        for &target_idx in &self.midi_targets {
            if let Some(node) = self.graph.node_weight_mut(target_idx) {
                // Find the first MIDI output port and add events there
                if !node.midi_output_buffers.is_empty() {
                    node.midi_output_buffers[0].extend_from_slice(midi_events);
                }
            }
        }
        // Topological sort for processing order.
        // NOTE(review): toposort allocates each cycle; if this shows up in
        // profiles it could be cached and invalidated on topology edits.
        let topo = petgraph::algo::toposort(&self.graph, None)
            .unwrap_or_else(|_| {
                // If there's a cycle (shouldn't happen due to validation), just process in index order
                self.graph.node_indices().collect()
            });
        // Process nodes in topological order
        for node_idx in topo {
            // Get input port information
            let inputs = self.graph[node_idx].node.inputs();
            let num_audio_cv_inputs = inputs.iter().filter(|p| p.signal_type != SignalType::Midi).count();
            let num_midi_inputs = inputs.iter().filter(|p| p.signal_type == SignalType::Midi).count();
            // Clear audio/CV input buffers
            for i in 0..num_audio_cv_inputs {
                if i < self.input_buffers.len() {
                    self.input_buffers[i].fill(0.0);
                }
            }
            // Clear MIDI input buffers
            for i in 0..num_midi_inputs {
                if i < self.midi_input_buffers.len() {
                    self.midi_input_buffers[i].clear();
                }
            }
            // Collect inputs from connected nodes (summing multiple sources)
            let incoming = self.graph.edges_directed(node_idx, Direction::Incoming).collect::<Vec<_>>();
            for edge in incoming {
                let source_idx = edge.source();
                let conn = edge.weight();
                let source_node = &self.graph[source_idx];
                // Determine source port type
                if conn.from_port < source_node.node.outputs().len() {
                    let source_port_type = source_node.node.outputs()[conn.from_port].signal_type;
                    match source_port_type {
                        SignalType::Audio | SignalType::CV => {
                            // Mix (sum) audio/CV data into the input buffer
                            if conn.to_port < num_audio_cv_inputs && conn.from_port < source_node.output_buffers.len() {
                                let source_buffer = &source_node.output_buffers[conn.from_port];
                                if conn.to_port < self.input_buffers.len() {
                                    for (dst, src) in self.input_buffers[conn.to_port].iter_mut().zip(source_buffer.iter()) {
                                        *dst += src;
                                    }
                                }
                            }
                        }
                        SignalType::Midi => {
                            // Copy MIDI events
                            // Map from global port index to MIDI-only port index
                            let midi_port_idx = inputs.iter()
                                .take(conn.to_port + 1)
                                .filter(|p| p.signal_type == SignalType::Midi)
                                .count() - 1;
                            let source_midi_idx = source_node.node.outputs().iter()
                                .take(conn.from_port + 1)
                                .filter(|p| p.signal_type == SignalType::Midi)
                                .count() - 1;
                            if midi_port_idx < self.midi_input_buffers.len() &&
                               source_midi_idx < source_node.midi_output_buffers.len() {
                                self.midi_input_buffers[midi_port_idx]
                                    .extend_from_slice(&source_node.midi_output_buffers[source_midi_idx]);
                            }
                        }
                    }
                }
            }
            // Prepare audio/CV input slices
            let input_slices: Vec<&[f32]> = (0..num_audio_cv_inputs)
                .map(|i| {
                    if i < self.input_buffers.len() {
                        &self.input_buffers[i][..process_size.min(self.input_buffers[i].len())]
                    } else {
                        &[][..]
                    }
                })
                .collect();
            // Prepare MIDI input slices
            let midi_input_slices: Vec<&[MidiEvent]> = (0..num_midi_inputs)
                .map(|i| {
                    if i < self.midi_input_buffers.len() {
                        &self.midi_input_buffers[i][..]
                    } else {
                        &[][..]
                    }
                })
                .collect();
            // Get mutable access to output buffers
            let node = &mut self.graph[node_idx];
            let outputs = node.node.outputs();
            let num_audio_cv_outputs = outputs.iter().filter(|p| p.signal_type != SignalType::Midi).count();
            let num_midi_outputs = outputs.iter().filter(|p| p.signal_type == SignalType::Midi).count();
            // Create mutable slices for audio/CV outputs
            let mut output_slices: Vec<&mut [f32]> = Vec::with_capacity(num_audio_cv_outputs);
            for i in 0..num_audio_cv_outputs {
                if i < node.output_buffers.len() {
                    // SAFETY: each iteration borrows a distinct element of
                    // `node.output_buffers`, so the resulting mutable slices
                    // never alias; the Vec itself is not resized while these
                    // raw-derived borrows are alive.
                    let buffer = &mut node.output_buffers[i] as *mut Vec<f32>;
                    unsafe {
                        let slice = &mut (*buffer)[..process_size.min((*buffer).len())];
                        output_slices.push(slice);
                    }
                }
            }
            // Create mutable references for MIDI outputs
            let mut midi_output_refs: Vec<&mut Vec<MidiEvent>> = Vec::with_capacity(num_midi_outputs);
            for i in 0..num_midi_outputs {
                if i < node.midi_output_buffers.len() {
                    // SAFETY: same disjoint-element argument as above.
                    let buffer = &mut node.midi_output_buffers[i] as *mut Vec<MidiEvent>;
                    unsafe {
                        midi_output_refs.push(&mut *buffer);
                    }
                }
            }
            // Process the node with both audio/CV and MIDI
            node.node.process(&input_slices, &mut output_slices, &midi_input_slices, &mut midi_output_refs, self.sample_rate);
        }
        // Copy output node's first output to the provided buffer
        if let Some(output_idx) = self.output_node {
            if let Some(output_node) = self.graph.node_weight(output_idx) {
                if !output_node.output_buffers.is_empty() {
                    let len = output_buffer.len().min(output_node.output_buffers[0].len());
                    output_buffer[..len].copy_from_slice(&output_node.output_buffers[0][..len]);
                }
            }
        }
    }
    /// Get node by index
    pub fn get_node(&self, idx: NodeIndex) -> Option<&dyn AudioNode> {
        self.graph.node_weight(idx).map(|n| &*n.node)
    }
    /// Get oscilloscope data from a specific node
    pub fn get_oscilloscope_data(&self, idx: NodeIndex, sample_count: usize) -> Option<Vec<f32>> {
        self.get_node(idx).and_then(|node| node.get_oscilloscope_data(sample_count))
    }
    /// Get node mutably by index
    /// Note: Due to lifetime constraints with trait objects, this returns a mutable reference
    /// to the GraphNode, from which you can access the node
    pub fn get_graph_node_mut(&mut self, idx: NodeIndex) -> Option<&mut GraphNode> {
        self.graph.node_weight_mut(idx)
    }
    /// Get all node indices
    pub fn node_indices(&self) -> impl Iterator<Item = NodeIndex> + '_ {
        self.graph.node_indices()
    }
    /// Get all connections as (source, target, connection) triples
    pub fn connections(&self) -> impl Iterator<Item = (NodeIndex, NodeIndex, &Connection)> + '_ {
        self.graph.edge_references().map(|e| (e.source(), e.target(), e.weight()))
    }
    /// Reset all nodes in the graph
    pub fn reset(&mut self) {
        // Collect indices first to avoid borrow checker issues
        let indices: Vec<_> = self.graph.node_indices().collect();
        for node_idx in indices {
            if let Some(node) = self.graph.node_weight_mut(node_idx) {
                node.node.reset();
            }
        }
    }
    /// Clone the graph structure with all nodes and connections.
    ///
    /// Node runtime state is reset according to each node's `clone_node`
    /// implementation; topology, MIDI targets, and the output designation
    /// are preserved.
    pub fn clone_graph(&self) -> Self {
        let mut new_graph = Self::new(self.sample_rate, self.buffer_size);
        // Map from old NodeIndex to new NodeIndex
        let mut index_map = std::collections::HashMap::new();
        // Clone all nodes
        for node_idx in self.graph.node_indices() {
            if let Some(graph_node) = self.graph.node_weight(node_idx) {
                let cloned_node = graph_node.node.clone_node();
                let new_idx = new_graph.add_node(cloned_node);
                index_map.insert(node_idx, new_idx);
            }
        }
        // Clone all connections
        for edge in self.graph.edge_references() {
            let source = edge.source();
            let target = edge.target();
            let conn = edge.weight();
            if let (Some(&new_source), Some(&new_target)) = (index_map.get(&source), index_map.get(&target)) {
                let _ = new_graph.connect(new_source, conn.from_port, new_target, conn.to_port);
            }
        }
        // Clone MIDI targets
        for &old_target in &self.midi_targets {
            if let Some(&new_target) = index_map.get(&old_target) {
                new_graph.set_midi_target(new_target, true);
            }
        }
        // Clone output node reference
        if let Some(old_output) = self.output_node {
            if let Some(&new_output) = index_map.get(&old_output) {
                new_graph.output_node = Some(new_output);
            }
        }
        new_graph
    }
}

View File

@ -0,0 +1,8 @@
mod graph;
mod node_trait;
mod types;
pub mod nodes;
pub use graph::{Connection, GraphNode, InstrumentGraph};
pub use node_trait::AudioNode;
pub use types::{ConnectionError, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};

View File

@ -0,0 +1,67 @@
use super::types::{NodeCategory, NodePort, Parameter};
use crate::audio::midi::MidiEvent;
/// Custom node trait for audio processing nodes
///
/// All nodes must be Send to be usable in the audio thread.
/// Nodes should be real-time safe: no allocations, no blocking operations.
pub trait AudioNode: Send {
    /// Node category for UI organization
    fn category(&self) -> NodeCategory;
    /// Input port definitions
    fn inputs(&self) -> &[NodePort];
    /// Output port definitions
    fn outputs(&self) -> &[NodePort];
    /// User-facing parameters
    fn parameters(&self) -> &[Parameter];
    /// Set parameter by ID (implementations ignore unknown IDs)
    fn set_parameter(&mut self, id: u32, value: f32);
    /// Get parameter by ID
    fn get_parameter(&self, id: u32) -> f32;
    /// Process audio buffers
    ///
    /// Buffer convention (see `GraphNode::new`): Audio buffers are
    /// interleaved stereo (2 samples per frame), CV buffers are mono.
    ///
    /// # Arguments
    /// * `inputs` - Audio/CV input buffers for each input port
    /// * `outputs` - Audio/CV output buffers for each output port
    /// * `midi_inputs` - MIDI event buffers for each MIDI input port
    /// * `midi_outputs` - MIDI event buffers for each MIDI output port
    /// * `sample_rate` - Current sample rate in Hz
    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        midi_inputs: &[&[MidiEvent]],
        midi_outputs: &mut [&mut Vec<MidiEvent>],
        sample_rate: u32,
    );
    /// Handle MIDI events (for nodes with MIDI inputs)
    fn handle_midi(&mut self, _event: &MidiEvent) {
        // Default: do nothing
    }
    /// Reset internal state (clear delays, resonances, etc.)
    fn reset(&mut self);
    /// Get the node type name (for serialization)
    fn node_type(&self) -> &str;
    /// Get a unique identifier for this node instance
    fn name(&self) -> &str;
    /// Clone this node into a new boxed instance
    /// Required for VoiceAllocator to create multiple instances
    fn clone_node(&self) -> Box<dyn AudioNode>;
    /// Get oscilloscope data if this is an oscilloscope node
    /// Returns None for non-oscilloscope nodes
    fn get_oscilloscope_data(&self, _sample_count: usize) -> Option<Vec<f32>> {
        None
    }
}

View File

@ -0,0 +1,215 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
// Parameter IDs for ADSRNode.
const PARAM_ATTACK: u32 = 0;
const PARAM_DECAY: u32 = 1;
const PARAM_SUSTAIN: u32 = 2;
const PARAM_RELEASE: u32 = 3;
/// Phase of the ADSR envelope state machine.
#[derive(Debug, Clone, Copy, PartialEq)]
enum EnvelopeStage {
    Idle,
    Attack,
    Decay,
    Sustain,
    Release,
}
/// ADSR Envelope Generator
/// Outputs a CV signal (0.0-1.0) based on gate input and ADSR parameters
pub struct ADSRNode {
    name: String,
    attack: f32,  // seconds
    decay: f32,   // seconds
    sustain: f32, // level (0.0-1.0)
    release: f32, // seconds
    stage: EnvelopeStage, // current phase of the state machine
    level: f32,   // current envelope level
    gate_was_high: bool,  // previous gate state, for edge detection
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}
impl ADSRNode {
    /// Build an ADSR node with default settings:
    /// attack 10 ms, decay 100 ms, sustain 0.7, release 200 ms.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            attack: 0.01,
            decay: 0.1,
            sustain: 0.7,
            release: 0.2,
            stage: EnvelopeStage::Idle,
            level: 0.0,
            gate_was_high: false,
            inputs: vec![NodePort::new("Gate", SignalType::CV, 0)],
            outputs: vec![NodePort::new("Envelope Out", SignalType::CV, 0)],
            parameters: vec![
                Parameter::new(PARAM_ATTACK, "Attack", 0.001, 5.0, 0.01, ParameterUnit::Time),
                Parameter::new(PARAM_DECAY, "Decay", 0.001, 5.0, 0.1, ParameterUnit::Time),
                Parameter::new(PARAM_SUSTAIN, "Sustain", 0.0, 1.0, 0.7, ParameterUnit::Generic),
                Parameter::new(PARAM_RELEASE, "Release", 0.001, 5.0, 0.2, ParameterUnit::Time),
            ],
        }
    }
}
impl AudioNode for ADSRNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Utility
    }
    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }
    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }
    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }
    fn set_parameter(&mut self, id: u32, value: f32) {
        // Values are clamped to the same ranges declared in the Parameter list.
        match id {
            PARAM_ATTACK => self.attack = value.clamp(0.001, 5.0),
            PARAM_DECAY => self.decay = value.clamp(0.001, 5.0),
            PARAM_SUSTAIN => self.sustain = value.clamp(0.0, 1.0),
            PARAM_RELEASE => self.release = value.clamp(0.001, 5.0),
            _ => {}
        }
    }
    fn get_parameter(&self, id: u32) -> f32 {
        match id {
            PARAM_ATTACK => self.attack,
            PARAM_DECAY => self.decay,
            PARAM_SUSTAIN => self.sustain,
            PARAM_RELEASE => self.release,
            _ => 0.0,
        }
    }
    /// Per-sample envelope generation: reads the gate CV on input 0,
    /// writes the envelope level (0.0-1.0) to the mono CV output.
    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        sample_rate: u32,
    ) {
        if outputs.is_empty() {
            return;
        }
        let output = &mut outputs[0];
        let sample_rate_f32 = sample_rate as f32;
        // CV signals are mono, so output length == frame count
        let frames = output.len();
        for frame in 0..frames {
            // Read gate input (if available); missing/short input reads as gate-low
            let gate_high = if !inputs.is_empty() && frame < inputs[0].len() {
                inputs[0][frame] > 0.5 // Gate is high if CV > 0.5
            } else {
                false
            };
            // Detect gate transitions (edge-triggered, so retrigger restarts
            // the attack from the current level rather than from zero)
            if gate_high && !self.gate_was_high {
                // Note on: Start attack
                self.stage = EnvelopeStage::Attack;
            } else if !gate_high && self.gate_was_high {
                // Note off: Start release
                self.stage = EnvelopeStage::Release;
            }
            self.gate_was_high = gate_high;
            // Process envelope stage
            match self.stage {
                EnvelopeStage::Idle => {
                    self.level = 0.0;
                }
                EnvelopeStage::Attack => {
                    // Linear rise from current level to 1.0 over `attack` seconds
                    let increment = 1.0 / (self.attack * sample_rate_f32);
                    self.level += increment;
                    if self.level >= 1.0 {
                        self.level = 1.0;
                        self.stage = EnvelopeStage::Decay;
                    }
                }
                EnvelopeStage::Decay => {
                    // Linear fall from 1.0 to the sustain level over `decay` seconds
                    let target = self.sustain;
                    let decrement = (1.0 - target) / (self.decay * sample_rate_f32);
                    self.level -= decrement;
                    if self.level <= target {
                        self.level = target;
                        self.stage = EnvelopeStage::Sustain;
                    }
                }
                EnvelopeStage::Sustain => {
                    // Hold at sustain level (tracks parameter changes immediately)
                    self.level = self.sustain;
                }
                EnvelopeStage::Release => {
                    // Fall from current level toward 0.0; the decrement is
                    // recomputed from the current level each sample, so the
                    // tail decays exponentially rather than linearly
                    let decrement = self.level / (self.release * sample_rate_f32);
                    self.level -= decrement;
                    if self.level <= 0.001 {
                        self.level = 0.0;
                        self.stage = EnvelopeStage::Idle;
                    }
                }
            }
            // Write envelope value (CV is mono)
            output[frame] = self.level;
        }
    }
    fn reset(&mut self) {
        self.stage = EnvelopeStage::Idle;
        self.level = 0.0;
        self.gate_was_high = false;
    }
    fn node_type(&self) -> &str {
        "ADSR"
    }
    fn name(&self) -> &str {
        &self.name
    }
    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Clones copy the parameters but start from a silent, idle state.
        Box::new(Self {
            name: self.name.clone(),
            attack: self.attack,
            decay: self.decay,
            sustain: self.sustain,
            release: self.release,
            stage: EnvelopeStage::Idle, // Reset state
            level: 0.0,                 // Reset level
            gate_was_high: false,       // Reset gate
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,151 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
// Parameter IDs for AudioToCVNode.
const PARAM_ATTACK: u32 = 0;
const PARAM_RELEASE: u32 = 1;
/// Audio to CV converter (Envelope Follower)
/// Converts audio amplitude to control voltage
pub struct AudioToCVNode {
    name: String,
    envelope: f32, // Current envelope value (follower state)
    attack: f32,   // Attack time in seconds
    release: f32,  // Release time in seconds
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}
impl AudioToCVNode {
    /// Build an envelope follower with default times:
    /// attack 10 ms, release 100 ms.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            envelope: 0.0,
            attack: 0.01,
            release: 0.1,
            inputs: vec![NodePort::new("Audio In", SignalType::Audio, 0)],
            outputs: vec![NodePort::new("CV Out", SignalType::CV, 0)],
            parameters: vec![
                Parameter::new(PARAM_ATTACK, "Attack", 0.001, 1.0, 0.01, ParameterUnit::Time),
                Parameter::new(PARAM_RELEASE, "Release", 0.001, 1.0, 0.1, ParameterUnit::Time),
            ],
        }
    }
}
impl AudioNode for AudioToCVNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Utility
    }
    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }
    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }
    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }
    fn set_parameter(&mut self, id: u32, value: f32) {
        // Values are clamped to the same ranges declared in the Parameter list.
        match id {
            PARAM_ATTACK => self.attack = value.clamp(0.001, 1.0),
            PARAM_RELEASE => self.release = value.clamp(0.001, 1.0),
            _ => {}
        }
    }
    fn get_parameter(&self, id: u32) -> f32 {
        match id {
            PARAM_ATTACK => self.attack,
            PARAM_RELEASE => self.release,
            _ => 0.0,
        }
    }
    /// Envelope-follow the stereo audio input and write the (mono) CV output.
    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        sample_rate: u32,
    ) {
        if inputs.is_empty() || outputs.is_empty() {
            return;
        }
        let input = inputs[0];
        let output = &mut outputs[0];
        // Audio input is stereo (interleaved L/R), CV output is mono,
        // so process only as many frames as both buffers can cover
        let audio_frames = input.len() / 2;
        let cv_frames = output.len();
        let frames = audio_frames.min(cv_frames);
        // Calculate one-pole smoothing coefficients from the time constants
        // (coeff close to 1.0 = slow response, close to 0.0 = fast)
        let sample_rate_f32 = sample_rate as f32;
        let attack_coeff = (-1.0 / (self.attack * sample_rate_f32)).exp();
        let release_coeff = (-1.0 / (self.release * sample_rate_f32)).exp();
        for frame in 0..frames {
            // Get stereo samples
            let left = input[frame * 2];
            let right = input[frame * 2 + 1];
            // Calculate RMS-like value (average of absolute values for simplicity)
            let amplitude = (left.abs() + right.abs()) / 2.0;
            // Envelope follower with attack/release: pick the coefficient
            // based on whether the signal is rising or falling
            if amplitude > self.envelope {
                // Attack: follow signal up quickly
                self.envelope = amplitude * (1.0 - attack_coeff) + self.envelope * attack_coeff;
            } else {
                // Release: decay slowly
                self.envelope = amplitude * (1.0 - release_coeff) + self.envelope * release_coeff;
            }
            // Output CV (mono)
            output[frame] = self.envelope;
        }
    }
    fn reset(&mut self) {
        self.envelope = 0.0;
    }
    fn node_type(&self) -> &str {
        "AudioToCV"
    }
    fn name(&self) -> &str {
        &self.name
    }
    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Clones copy parameters but start with a cleared follower state.
        Box::new(Self {
            name: self.name.clone(),
            envelope: 0.0, // Reset envelope
            attack: self.attack,
            release: self.release,
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,201 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
use crate::dsp::biquad::BiquadFilter;
// Parameter IDs for FilterNode.
const PARAM_CUTOFF: u32 = 0;
const PARAM_RESONANCE: u32 = 1;
const PARAM_TYPE: u32 = 2;
/// Filter response type; discriminants match the `Type` parameter values.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum FilterType {
    Lowpass = 0,
    Highpass = 1,
}
impl FilterType {
    /// Map a raw parameter value onto a filter type:
    /// values rounding to 1 select highpass, everything else lowpass.
    fn from_f32(value: f32) -> Self {
        if value.round() as i32 == 1 {
            FilterType::Highpass
        } else {
            FilterType::Lowpass
        }
    }
}
/// Filter node using biquad implementation
pub struct FilterNode {
    name: String,
    filter: BiquadFilter, // coefficient/state holder, rebuilt on parameter change
    cutoff: f32,          // Hz
    resonance: f32,       // Q factor
    filter_type: FilterType,
    sample_rate: u32,     // last seen sample rate; coefficients depend on it
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}
impl FilterNode {
    /// Build a filter node with defaults: 1 kHz lowpass, Q = 0.707, 44.1 kHz.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            filter: BiquadFilter::lowpass(1000.0, 0.707, 44100.0),
            cutoff: 1000.0,
            resonance: 0.707,
            filter_type: FilterType::Lowpass,
            sample_rate: 44100,
            inputs: vec![
                NodePort::new("Audio In", SignalType::Audio, 0),
                NodePort::new("Cutoff CV", SignalType::CV, 1),
            ],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_CUTOFF, "Cutoff", 20.0, 20000.0, 1000.0, ParameterUnit::Frequency),
                Parameter::new(PARAM_RESONANCE, "Resonance", 0.1, 10.0, 0.707, ParameterUnit::Generic),
                Parameter::new(PARAM_TYPE, "Type", 0.0, 1.0, 0.0, ParameterUnit::Generic),
            ],
        }
    }

    /// Re-derive the biquad coefficients from the current
    /// cutoff / resonance / type / sample rate.
    fn update_filter(&mut self) {
        let sr = self.sample_rate as f32;
        match self.filter_type {
            FilterType::Lowpass => self.filter.set_lowpass(self.cutoff, self.resonance, sr),
            FilterType::Highpass => self.filter.set_highpass(self.cutoff, self.resonance, sr),
        }
    }
}
impl AudioNode for FilterNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Effect
    }
    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }
    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }
    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }
    fn set_parameter(&mut self, id: u32, value: f32) {
        // Every accepted parameter change requires recomputing coefficients.
        match id {
            PARAM_CUTOFF => {
                self.cutoff = value.clamp(20.0, 20000.0);
                self.update_filter();
            }
            PARAM_RESONANCE => {
                self.resonance = value.clamp(0.1, 10.0);
                self.update_filter();
            }
            PARAM_TYPE => {
                self.filter_type = FilterType::from_f32(value);
                self.update_filter();
            }
            _ => {}
        }
    }
    fn get_parameter(&self, id: u32) -> f32 {
        match id {
            PARAM_CUTOFF => self.cutoff,
            PARAM_RESONANCE => self.resonance,
            PARAM_TYPE => self.filter_type as i32 as f32,
            _ => 0.0,
        }
    }
    /// Copy input to output, then filter the stereo buffer in place.
    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        sample_rate: u32,
    ) {
        if inputs.is_empty() || outputs.is_empty() {
            return;
        }
        // Update coefficients if the host sample rate changed
        if self.sample_rate != sample_rate {
            self.sample_rate = sample_rate;
            self.update_filter();
        }
        let input = inputs[0];
        let output = &mut outputs[0];
        let len = input.len().min(output.len());
        // Copy input to output
        output[..len].copy_from_slice(&input[..len]);
        // Check for CV modulation (modulates cutoff)
        if inputs.len() > 1 && !inputs[1].is_empty() {
            // CV input modulates cutoff frequency
            // For now, just use the base cutoff - per-sample modulation would be expensive
            // TODO: Sample CV at frame rate and update filter periodically
        }
        // Apply filter (processes stereo interleaved)
        self.filter.process_buffer(&mut output[..len], 2);
    }
    fn reset(&mut self) {
        self.filter.reset();
    }
    fn node_type(&self) -> &str {
        "Filter"
    }
    fn name(&self) -> &str {
        &self.name
    }
    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Create new filter with same parameters but reset (silent) state.
        let mut new_filter = BiquadFilter::new();
        // BUGFIX: argument order must be (cutoff, resonance, sample_rate),
        // matching `update_filter` and `BiquadFilter::lowpass(1000.0, 0.707,
        // 44100.0)` in `FilterNode::new`. The previous code passed the sample
        // rate where the cutoff belongs, so cloned instances (e.g. voices
        // built from a template) were configured with a nonsensical filter.
        match self.filter_type {
            FilterType::Lowpass => {
                new_filter.set_lowpass(self.cutoff, self.resonance, self.sample_rate as f32);
            }
            FilterType::Highpass => {
                new_filter.set_highpass(self.cutoff, self.resonance, self.sample_rate as f32);
            }
        }
        Box::new(Self {
            name: self.name.clone(),
            filter: new_filter,
            cutoff: self.cutoff,
            resonance: self.resonance,
            filter_type: self.filter_type,
            sample_rate: self.sample_rate,
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,130 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
// Parameter ID for GainNode.
const PARAM_GAIN: u32 = 0;
/// Gain/volume control node
pub struct GainNode {
    name: String,
    gain: f32, // linear gain factor (0.0-2.0)
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}
impl GainNode {
    /// Build a gain node at unity gain with a CV (VCA) control input.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            gain: 1.0,
            inputs: vec![
                NodePort::new("Audio In", SignalType::Audio, 0),
                NodePort::new("Gain CV", SignalType::CV, 1),
            ],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_GAIN, "Gain", 0.0, 2.0, 1.0, ParameterUnit::Generic),
            ],
        }
    }
}
impl AudioNode for GainNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_GAIN => self.gain = value.clamp(0.0, 2.0),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_GAIN => self.gain,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
}
let input = inputs[0];
let output = &mut outputs[0];
// Audio signals are stereo (interleaved L/R)
// Process by frames, not samples
let frames = input.len().min(output.len()) / 2;
for frame in 0..frames {
// Calculate final gain
let mut final_gain = self.gain;
// CV input acts as a VCA (voltage-controlled amplifier)
// CV ranges from 0.0 (silence) to 1.0 (full gain parameter value)
if inputs.len() > 1 && frame < inputs[1].len() {
let cv = inputs[1][frame];
final_gain *= cv; // Multiply gain by CV (0.0 = silence, 1.0 = full gain)
}
// Apply gain to both channels
output[frame * 2] = input[frame * 2] * final_gain; // Left
output[frame * 2 + 1] = input[frame * 2 + 1] * final_gain; // Right
}
}
fn reset(&mut self) {
// No state to reset
}
fn node_type(&self) -> &str {
"Gain"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
gain: self.gain,
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}

View File

@ -0,0 +1,105 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
/// MIDI Input node - receives MIDI events from the track and passes them through
///
/// A source node: it has no input ports, just a single MIDI output port,
/// plus an internal queue for events injected from outside the graph.
pub struct MidiInputNode {
    name: String,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
    pending_events: Vec<MidiEvent>,
}

impl MidiInputNode {
    /// Create a MIDI input with an empty event queue.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            inputs: Vec::new(),
            outputs: vec![NodePort::new("MIDI Out", SignalType::Midi, 0)],
            parameters: Vec::new(),
            pending_events: Vec::new(),
        }
    }

    /// Queue MIDI events for later routing through the graph.
    pub fn add_midi_events(&mut self, events: Vec<MidiEvent>) {
        self.pending_events.extend(events);
    }

    /// Drain and return all queued events, leaving the queue empty.
    pub fn take_midi_events(&mut self) -> Vec<MidiEvent> {
        std::mem::take(&mut self.pending_events)
    }
}
impl AudioNode for MidiInputNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Input
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    // This node exposes no parameters, so the setter is a no-op and the
    // getter always reports zero.
    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    fn process(
        &mut self,
        _inputs: &[&[f32]],
        _outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        // Nothing to compute per buffer: this node is registered as a MIDI
        // target and the graph has already placed incoming events into
        // midi_outputs before process() is invoked.
    }

    fn reset(&mut self) {
        self.pending_events.clear();
    }

    fn node_type(&self) -> &str {
        "MidiInput"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Clones start with an empty event queue.
        Box::new(Self {
            name: self.name.clone(),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
            pending_events: Vec::new(),
        })
    }

    fn handle_midi(&mut self, event: &MidiEvent) {
        // Externally injected events are queued for routing.
        self.pending_events.push(*event);
    }
}

View File

@ -0,0 +1,185 @@
use crate::audio::midi::MidiEvent;
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
/// MIDI to CV converter
/// Converts MIDI note events to control voltage signals
///
/// Holds the last note it saw and emits three mono CV streams:
/// V/Oct pitch (0.0-1.0), Gate (1.0 while a note is held) and
/// Velocity (0.0-1.0).
pub struct MidiToCVNode {
    name: String,
    note: u8,        // most recently triggered MIDI note number
    gate: f32,       // 1.0 while that note is held, else 0.0
    velocity: f32,   // normalized velocity of the held note
    pitch_cv: f32,   // pitch CV derived from `note`
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl MidiToCVNode {
    /// Build the converter in its idle state (middle C, gate low).
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            note: 60, // middle C
            gate: 0.0,
            velocity: 0.0,
            pitch_cv: Self::midi_note_to_voct(60),
            inputs: vec![NodePort::new("MIDI In", SignalType::Midi, 0)],
            outputs: vec![
                NodePort::new("V/Oct", SignalType::CV, 0),
                NodePort::new("Gate", SignalType::CV, 1),
                NodePort::new("Velocity", SignalType::CV, 2),
            ],
            parameters: Vec::new(), // no user parameters
        }
    }

    /// Map MIDI note 0-127 linearly onto 0.0-1.0 pitch CV
    /// (each semitone is 1/127 of the CV range).
    fn midi_note_to_voct(note: u8) -> f32 {
        f32::from(note) / 127.0
    }
}
impl AudioNode for MidiToCVNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Input
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    // No user-facing parameters on this node.
    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    /// Update the held note / gate / velocity state from one MIDI event.
    fn handle_midi(&mut self, event: &MidiEvent) {
        match event.status & 0xF0 {
            // Note on with non-zero velocity: retrigger on the new pitch.
            0x90 if event.data2 > 0 => {
                self.note = event.data1;
                self.pitch_cv = Self::midi_note_to_voct(self.note);
                self.velocity = event.data2 as f32 / 127.0;
                self.gate = 1.0;
            }
            // Note off, or note on with velocity 0 (running-status off).
            // Only drop the gate if it is the note currently held.
            0x80 | 0x90 => {
                if event.data1 == self.note {
                    self.gate = 0.0;
                }
            }
            _ => {}
        }
    }

    fn process(
        &mut self,
        _inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        // Fold MIDI delivered through the graph into the held state.
        // Later events in the same buffer win; the CV outputs are constant
        // across the buffer.
        if let Some(events) = midi_inputs.first() {
            for event in events.iter() {
                match event.status & 0xF0 {
                    // Note on (velocity > 0)
                    0x90 if event.data2 > 0 => {
                        self.note = event.data1;
                        self.pitch_cv = Self::midi_note_to_voct(self.note);
                        self.velocity = event.data2 as f32 / 127.0;
                        self.gate = 1.0;
                    }
                    // Note off (or note on with velocity 0)
                    0x80 | 0x90 => {
                        if event.data1 == self.note {
                            self.gate = 0.0;
                        }
                    }
                    _ => {}
                }
            }
        }

        // CV signals are mono; all three ports (V/Oct, Gate, Velocity)
        // must be present or nothing is written.
        if let [pitch_out, gate_out, velocity_out, ..] = outputs {
            for frame in 0..pitch_out.len() {
                pitch_out[frame] = self.pitch_cv;
                gate_out[frame] = self.gate;
                velocity_out[frame] = self.velocity;
            }
        }
    }

    fn reset(&mut self) {
        self.gate = 0.0;
        self.velocity = 0.0;
    }

    fn node_type(&self) -> &str {
        "MidiToCV"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Clones start from the default middle-C, silent state.
        Box::new(Self {
            name: self.name.clone(),
            note: 60,
            gate: 0.0,
            velocity: 0.0,
            pitch_cv: Self::midi_note_to_voct(60),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,145 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
const PARAM_GAIN_1: u32 = 0;
const PARAM_GAIN_2: u32 = 1;
const PARAM_GAIN_3: u32 = 2;
const PARAM_GAIN_4: u32 = 3;

/// Mixer node - combines multiple audio inputs with independent gain controls
///
/// Four stereo inputs, one summed stereo output; each channel has its own
/// 0.0..=2.0 gain parameter (ids 0..=3 double as channel indices).
pub struct MixerNode {
    name: String,
    gains: [f32; 4],
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl MixerNode {
    /// Build a four-channel mixer with every channel at unity gain.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            gains: [1.0; 4],
            inputs: vec![
                NodePort::new("Input 1", SignalType::Audio, 0),
                NodePort::new("Input 2", SignalType::Audio, 1),
                NodePort::new("Input 3", SignalType::Audio, 2),
                NodePort::new("Input 4", SignalType::Audio, 3),
            ],
            outputs: vec![NodePort::new("Mixed Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_GAIN_1, "Gain 1", 0.0, 2.0, 1.0, ParameterUnit::Generic),
                Parameter::new(PARAM_GAIN_2, "Gain 2", 0.0, 2.0, 1.0, ParameterUnit::Generic),
                Parameter::new(PARAM_GAIN_3, "Gain 3", 0.0, 2.0, 1.0, ParameterUnit::Generic),
                Parameter::new(PARAM_GAIN_4, "Gain 4", 0.0, 2.0, 1.0, ParameterUnit::Generic),
            ],
        }
    }
}
impl AudioNode for MixerNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Utility
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    fn set_parameter(&mut self, id: u32, value: f32) {
        // PARAM_GAIN_1..=PARAM_GAIN_4 are 0..=3, so the id doubles as the
        // channel index; out-of-range ids are ignored.
        if let Some(gain) = self.gains.get_mut(id as usize) {
            *gain = value.clamp(0.0, 2.0);
        }
    }

    fn get_parameter(&self, id: u32) -> f32 {
        self.gains.get(id as usize).copied().unwrap_or(0.0)
    }

    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        if outputs.is_empty() {
            return;
        }
        let output = &mut outputs[0];
        // Audio buffers are interleaved stereo.
        let out_frames = output.len() / 2;

        // Start from silence, then accumulate each input scaled by its
        // channel gain. Zipping against `gains` caps us at four inputs.
        output.fill(0.0);
        for (input, &gain) in inputs.iter().zip(self.gains.iter()) {
            let frames = out_frames.min(input.len() / 2);
            for frame in 0..frames {
                output[frame * 2] += input[frame * 2] * gain;
                output[frame * 2 + 1] += input[frame * 2 + 1] * gain;
            }
        }
    }

    fn reset(&mut self) {
        // Stateless mixer: nothing to reset.
    }

    fn node_type(&self) -> &str {
        "Mixer"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            gains: self.gains,
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,25 @@
//! Built-in node implementations for the audio node graph.
//!
//! Each submodule defines one node type (or a closely related pair) and is
//! re-exported here so callers can import nodes from this module directly
//! without knowing the file layout.

mod adsr;
mod audio_to_cv;
mod filter;
mod gain;
mod midi_input;
mod midi_to_cv;
mod mixer;
mod oscillator;
mod oscilloscope;
mod output;
mod template_io;
mod voice_allocator;

// Public re-exports: one node type per module, except template_io which
// provides the input/output pair used inside VoiceAllocator templates.
pub use adsr::ADSRNode;
pub use audio_to_cv::AudioToCVNode;
pub use filter::FilterNode;
pub use gain::GainNode;
pub use midi_input::MidiInputNode;
pub use midi_to_cv::MidiToCVNode;
pub use mixer::MixerNode;
pub use oscillator::OscillatorNode;
pub use oscilloscope::OscilloscopeNode;
pub use output::AudioOutputNode;
pub use template_io::{TemplateInputNode, TemplateOutputNode};
pub use voice_allocator::VoiceAllocatorNode;

View File

@ -0,0 +1,198 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use crate::audio::midi::MidiEvent;
use std::f32::consts::PI;
const PARAM_FREQUENCY: u32 = 0;
const PARAM_AMPLITUDE: u32 = 1;
const PARAM_WAVEFORM: u32 = 2;

/// Oscillator shapes, stored as small integer codes so the selection can
/// round-trip through a generic f32 parameter.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Waveform {
    Sine = 0,
    Saw = 1,
    Square = 2,
    Triangle = 3,
}

impl Waveform {
    /// Decode a waveform from its f32 parameter encoding. The value is
    /// rounded to the nearest integer; any code outside 1..=3 (including
    /// negatives) falls back to `Sine`.
    fn from_f32(value: f32) -> Self {
        let code = value.round() as i32;
        if code == 1 {
            Waveform::Saw
        } else if code == 2 {
            Waveform::Square
        } else if code == 3 {
            Waveform::Triangle
        } else {
            Waveform::Sine
        }
    }
}
/// Oscillator node with multiple waveforms
///
/// Generates a mono waveform (duplicated to stereo) controlled by a base
/// frequency parameter, a V/Oct pitch CV input and an FM CV input.
pub struct OscillatorNode {
    name: String,
    frequency: f32,     // base frequency in Hz (used when no V/Oct CV)
    amplitude: f32,     // linear output level, 0.0..=1.0
    waveform: Waveform,
    phase: f32,         // normalized phase accumulator in [0, 1)
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl OscillatorNode {
    /// Build an oscillator: 440 Hz sine at half amplitude.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            frequency: 440.0,
            amplitude: 0.5,
            waveform: Waveform::Sine,
            phase: 0.0,
            inputs: vec![
                NodePort::new("V/Oct", SignalType::CV, 0),
                NodePort::new("FM", SignalType::CV, 1),
            ],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_FREQUENCY, "Frequency", 20.0, 20000.0, 440.0, ParameterUnit::Frequency),
                Parameter::new(PARAM_AMPLITUDE, "Amplitude", 0.0, 1.0, 0.5, ParameterUnit::Generic),
                Parameter::new(PARAM_WAVEFORM, "Waveform", 0.0, 3.0, 0.0, ParameterUnit::Generic),
            ],
        }
    }
}
impl AudioNode for OscillatorNode {
fn category(&self) -> NodeCategory {
NodeCategory::Generator
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_FREQUENCY => self.frequency = value.clamp(20.0, 20000.0),
PARAM_AMPLITUDE => self.amplitude = value.clamp(0.0, 1.0),
PARAM_WAVEFORM => self.waveform = Waveform::from_f32(value),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_FREQUENCY => self.frequency,
PARAM_AMPLITUDE => self.amplitude,
PARAM_WAVEFORM => self.waveform as i32 as f32,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
sample_rate: u32,
) {
if outputs.is_empty() {
return;
}
let output = &mut outputs[0];
let sample_rate_f32 = sample_rate as f32;
// Audio signals are stereo (interleaved L/R)
// Process by frames, not samples
let frames = output.len() / 2;
for frame in 0..frames {
// Start with base frequency
let mut frequency = self.frequency;
// V/Oct input: 0.0-1.0 maps to MIDI notes 0-127
if !inputs.is_empty() && frame < inputs[0].len() {
let voct = inputs[0][frame]; // Read V/Oct CV (mono)
if voct > 0.001 {
// Convert CV to MIDI note number (0-1 -> 0-127)
let midi_note = voct * 127.0;
// Convert MIDI note to frequency: f = 440 * 2^((n-69)/12)
frequency = 440.0 * 2.0_f32.powf((midi_note - 69.0) / 12.0);
}
}
// FM input: modulates the frequency
if inputs.len() > 1 && frame < inputs[1].len() {
let fm = inputs[1][frame]; // Read FM CV (mono)
frequency *= 1.0 + fm;
}
let freq_mod = frequency;
// Generate waveform sample based on waveform type
let sample = match self.waveform {
Waveform::Sine => (self.phase * 2.0 * PI).sin(),
Waveform::Saw => 2.0 * self.phase - 1.0, // Ramp from -1 to 1
Waveform::Square => {
if self.phase < 0.5 { 1.0 } else { -1.0 }
}
Waveform::Triangle => {
// Triangle: rises from -1 to 1, falls back to -1
4.0 * (self.phase - 0.5).abs() - 1.0
}
} * self.amplitude;
// Write to both channels (mono source duplicated to stereo)
output[frame * 2] = sample; // Left
output[frame * 2 + 1] = sample; // Right
// Update phase once per frame
self.phase += freq_mod / sample_rate_f32;
if self.phase >= 1.0 {
self.phase -= 1.0;
}
}
}
fn reset(&mut self) {
self.phase = 0.0;
}
fn node_type(&self) -> &str {
"Oscillator"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
frequency: self.frequency,
amplitude: self.amplitude,
waveform: self.waveform,
phase: 0.0, // Reset phase for new instance
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
}

View File

@ -0,0 +1,253 @@
use crate::audio::midi::MidiEvent;
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
use std::sync::{Arc, Mutex};
const PARAM_TIME_SCALE: u32 = 0;
const PARAM_TRIGGER_MODE: u32 = 1;
const PARAM_TRIGGER_LEVEL: u32 = 2;
// Capture capacity in samples. The node stores whatever interleaved audio
// it receives; at 48 kHz this is 2 s of mono or 1 s of interleaved stereo.
const BUFFER_SIZE: usize = 96000;

/// How the oscilloscope display is (re)armed.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TriggerMode {
    FreeRunning = 0,
    RisingEdge = 1,
    FallingEdge = 2,
}

impl TriggerMode {
    /// Decode a mode from its f32 parameter encoding (rounded to the
    /// nearest integer); unknown codes fall back to `FreeRunning`.
    fn from_f32(value: f32) -> Self {
        let code = value.round() as i32;
        if code == 1 {
            TriggerMode::RisingEdge
        } else if code == 2 {
            TriggerMode::FallingEdge
        } else {
            TriggerMode::FreeRunning
        }
    }
}
/// Circular buffer for storing audio samples.
///
/// `pub` because `OscilloscopeNode::get_buffer` (a `pub fn`) returns
/// `Arc<Mutex<CircularBuffer>>`; exposing a private type through a public
/// interface trips E0446 / the `private_interfaces` lint. Widening the
/// visibility is backward-compatible.
pub struct CircularBuffer {
    buffer: Vec<f32>, // backing storage, always `capacity` long
    write_pos: usize, // next index to write, wraps at `capacity`
    capacity: usize,
}

impl CircularBuffer {
    /// Create a buffer holding `capacity` samples, zero-initialized.
    /// Note: a zero capacity will panic on the first `write` (mod by 0),
    /// same as before — callers pass the fixed BUFFER_SIZE.
    pub fn new(capacity: usize) -> Self {
        Self {
            buffer: vec![0.0; capacity],
            write_pos: 0,
            capacity,
        }
    }

    /// Append samples, overwriting the oldest data once full.
    pub fn write(&mut self, samples: &[f32]) {
        for &sample in samples {
            self.buffer[self.write_pos] = sample;
            self.write_pos = (self.write_pos + 1) % self.capacity;
        }
    }

    /// Return the most recent `count` samples in chronological order
    /// (oldest first). `count` is clamped to the capacity.
    pub fn read(&self, count: usize) -> Vec<f32> {
        let count = count.min(self.capacity);
        // Oldest requested sample: walk back `count` from the write
        // position, wrapping around the ring if needed.
        let start_pos = if self.write_pos >= count {
            self.write_pos - count
        } else {
            self.capacity - (count - self.write_pos)
        };
        (0..count)
            .map(|i| self.buffer[(start_pos + i) % self.capacity])
            .collect()
    }

    /// Zero the contents and rewind the write position.
    pub fn clear(&mut self) {
        self.buffer.fill(0.0);
        self.write_pos = 0;
    }
}
/// Oscilloscope node for visualizing audio signals
///
/// Passes audio through unchanged while copying it into a shared ring
/// buffer that code outside the audio thread (Tauri commands) can read.
pub struct OscilloscopeNode {
    name: String,
    time_scale: f32,           // displayed window in milliseconds (10-1000)
    trigger_mode: TriggerMode,
    trigger_level: f32,        // threshold in -1.0..=1.0
    last_sample: f32,          // previous sample, kept for edge detection
    // Captured audio, shared with readers via get_buffer()/read_samples().
    buffer: Arc<Mutex<CircularBuffer>>,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl OscilloscopeNode {
    /// Build a free-running scope with a fresh, empty capture buffer.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            time_scale: 100.0,
            trigger_mode: TriggerMode::FreeRunning,
            trigger_level: 0.0,
            last_sample: 0.0,
            buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
            inputs: vec![NodePort::new("Audio In", SignalType::Audio, 0)],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: vec![
                Parameter::new(PARAM_TIME_SCALE, "Time Scale", 10.0, 1000.0, 100.0, ParameterUnit::Milliseconds),
                Parameter::new(PARAM_TRIGGER_MODE, "Trigger", 0.0, 2.0, 0.0, ParameterUnit::Generic),
                Parameter::new(PARAM_TRIGGER_LEVEL, "Trigger Level", -1.0, 1.0, 0.0, ParameterUnit::Generic),
            ],
        }
    }

    /// Hand out a shared handle to the capture buffer (e.g. for Tauri commands).
    pub fn get_buffer(&self) -> Arc<Mutex<CircularBuffer>> {
        Arc::clone(&self.buffer)
    }

    /// Read the most recent `count` captured samples; returns silence if
    /// the buffer lock is poisoned.
    pub fn read_samples(&self, count: usize) -> Vec<f32> {
        match self.buffer.lock() {
            Ok(buffer) => buffer.read(count),
            Err(_) => vec![0.0; count],
        }
    }

    /// Drop all captured audio.
    pub fn clear_buffer(&self) {
        if let Ok(mut buffer) = self.buffer.lock() {
            buffer.clear();
        }
    }

    /// Whether `current_sample` crosses the configured trigger relative to
    /// the previously stored sample.
    /// NOTE(review): not called anywhere in this file yet — presumably
    /// intended for display alignment; confirm before deleting.
    fn is_triggered(&self, current_sample: f32) -> bool {
        match self.trigger_mode {
            TriggerMode::FreeRunning => true,
            TriggerMode::RisingEdge => {
                self.last_sample <= self.trigger_level && current_sample > self.trigger_level
            }
            TriggerMode::FallingEdge => {
                self.last_sample >= self.trigger_level && current_sample < self.trigger_level
            }
        }
    }
}
impl AudioNode for OscilloscopeNode {
fn category(&self) -> NodeCategory {
NodeCategory::Utility
}
fn inputs(&self) -> &[NodePort] {
&self.inputs
}
fn outputs(&self) -> &[NodePort] {
&self.outputs
}
fn parameters(&self) -> &[Parameter] {
&self.parameters
}
fn set_parameter(&mut self, id: u32, value: f32) {
match id {
PARAM_TIME_SCALE => self.time_scale = value.clamp(10.0, 1000.0),
PARAM_TRIGGER_MODE => self.trigger_mode = TriggerMode::from_f32(value),
PARAM_TRIGGER_LEVEL => self.trigger_level = value.clamp(-1.0, 1.0),
_ => {}
}
}
fn get_parameter(&self, id: u32) -> f32 {
match id {
PARAM_TIME_SCALE => self.time_scale,
PARAM_TRIGGER_MODE => self.trigger_mode as i32 as f32,
PARAM_TRIGGER_LEVEL => self.trigger_level,
_ => 0.0,
}
}
fn process(
&mut self,
inputs: &[&[f32]],
outputs: &mut [&mut [f32]],
_midi_inputs: &[&[MidiEvent]],
_midi_outputs: &mut [&mut Vec<MidiEvent>],
_sample_rate: u32,
) {
if inputs.is_empty() || outputs.is_empty() {
return;
}
let input = inputs[0];
let output = &mut outputs[0];
let len = input.len().min(output.len());
// Pass through audio (copy input to output)
output[..len].copy_from_slice(&input[..len]);
// Capture samples to buffer
if let Ok(mut buffer) = self.buffer.lock() {
buffer.write(&input[..len]);
}
// Update last sample for trigger detection (use left channel, frame 0)
if !input.is_empty() {
self.last_sample = input[0];
}
}
fn reset(&mut self) {
self.last_sample = 0.0;
self.clear_buffer();
}
fn node_type(&self) -> &str {
"Oscilloscope"
}
fn name(&self) -> &str {
&self.name
}
fn clone_node(&self) -> Box<dyn AudioNode> {
Box::new(Self {
name: self.name.clone(),
time_scale: self.time_scale,
trigger_mode: self.trigger_mode,
trigger_level: self.trigger_level,
last_sample: 0.0,
buffer: Arc::new(Mutex::new(CircularBuffer::new(BUFFER_SIZE))),
inputs: self.inputs.clone(),
outputs: self.outputs.clone(),
parameters: self.parameters.clone(),
})
}
fn get_oscilloscope_data(&self, sample_count: usize) -> Option<Vec<f32>> {
Some(self.read_samples(sample_count))
}
}

View File

@ -0,0 +1,96 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
/// Audio output node - collects audio and passes it to the main output
///
/// Typically the final node of a graph; it keeps an output port anyway so
/// the graph's port bookkeeping stays uniform.
pub struct AudioOutputNode {
    name: String,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
}

impl AudioOutputNode {
    /// Build an output node with one stereo input and one stereo output.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            inputs: vec![NodePort::new("Audio In", SignalType::Audio, 0)],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
        }
    }
}
impl AudioNode for AudioOutputNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Output
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        // This node has no parameters at all.
        &[]
    }

    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        if inputs.is_empty() || outputs.is_empty() {
            return;
        }
        // Straight pass-through: copy as much input as fits in the output.
        let src = inputs[0];
        let dst = &mut outputs[0];
        let len = src.len().min(dst.len());
        dst[..len].copy_from_slice(&src[..len]);
    }

    fn reset(&mut self) {
        // Stateless: nothing to reset.
    }

    fn node_type(&self) -> &str {
        "AudioOutput"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
        })
    }
}

View File

@ -0,0 +1,176 @@
use crate::audio::node_graph::{AudioNode, NodeCategory, NodePort, Parameter, SignalType};
use crate::audio::midi::MidiEvent;
/// Template Input node - represents the MIDI input for one voice in a VoiceAllocator
///
/// A pure source: no input ports, a single MIDI output port, no parameters.
pub struct TemplateInputNode {
    name: String,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl TemplateInputNode {
    /// Build the voice-template MIDI entry point.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            inputs: Vec::new(),
            outputs: vec![NodePort::new("MIDI Out", SignalType::Midi, 0)],
            parameters: Vec::new(),
        }
    }
}
impl AudioNode for TemplateInputNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Input
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    // No parameters to set or get.
    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    fn process(
        &mut self,
        _inputs: &[&[f32]],
        _outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        // Nothing to compute: the VoiceAllocator marks this node as the
        // voice's MIDI target, and the graph has already placed its MIDI
        // into midi_outputs before process() runs.
    }

    fn reset(&mut self) {}

    fn node_type(&self) -> &str {
        "TemplateInput"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }

    fn handle_midi(&mut self, _event: &MidiEvent) {
        // Intentionally empty: MIDI reaches downstream nodes through graph
        // connections, not through this callback.
    }
}
/// Template Output node - represents the audio output from one voice in a VoiceAllocator
///
/// A stereo pass-through whose output buffer the enclosing VoiceAllocator
/// reads when mixing voices.
pub struct TemplateOutputNode {
    name: String,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}

impl TemplateOutputNode {
    /// Build the voice-template audio exit point.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            inputs: vec![NodePort::new("Audio In", SignalType::Audio, 0)],
            outputs: vec![NodePort::new("Audio Out", SignalType::Audio, 0)],
            parameters: Vec::new(),
        }
    }
}
impl AudioNode for TemplateOutputNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Output
    }

    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }

    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }

    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }

    // No parameters to set or get.
    fn set_parameter(&mut self, _id: u32, _value: f32) {}

    fn get_parameter(&self, _id: u32) -> f32 {
        0.0
    }

    fn process(
        &mut self,
        inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        _midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        if inputs.is_empty() || outputs.is_empty() {
            return;
        }
        // Copy input to output; the graph reads this node's output buffer
        // as the voice's rendered audio.
        let src = inputs[0];
        let dst = &mut outputs[0];
        let len = src.len().min(dst.len());
        dst[..len].copy_from_slice(&src[..len]);
    }

    fn reset(&mut self) {}

    fn node_type(&self) -> &str {
        "TemplateOutput"
    }

    fn name(&self) -> &str {
        &self.name
    }

    fn clone_node(&self) -> Box<dyn AudioNode> {
        Box::new(Self {
            name: self.name.clone(),
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,344 @@
use crate::audio::midi::MidiEvent;
use crate::audio::node_graph::{AudioNode, InstrumentGraph, NodeCategory, NodePort, Parameter, ParameterUnit, SignalType};
const PARAM_VOICE_COUNT: u32 = 0;
const MAX_VOICES: usize = 16; // hard upper bound on simultaneous voices
const DEFAULT_VOICES: usize = 8;

/// Per-voice bookkeeping used for allocation and stealing.
#[derive(Clone)]
struct VoiceState {
    active: bool,                   // currently sounding a note
    note: u8,                       // MIDI note this voice is playing
    age: u32,                       // buffers since allocation; oldest gets stolen
    pending_events: Vec<MidiEvent>, // MIDI queued for this voice's graph
}

impl VoiceState {
    /// A fresh, silent voice with an empty event queue.
    fn new() -> Self {
        Self {
            active: false,
            note: 0,
            age: 0,
            pending_events: Vec::new(),
        }
    }
}
/// VoiceAllocatorNode - A group node that creates N polyphonic instances of its internal graph
///
/// This node acts as a container for a "voice template" graph. At runtime, it creates
/// N instances of that graph (one per voice) and routes MIDI note events to them.
/// All voice outputs are mixed together into a single output.
pub struct VoiceAllocatorNode {
    name: String,
    /// The template graph (edited by user via UI)
    template_graph: InstrumentGraph,
    /// Runtime voice instances (clones of template)
    voice_instances: Vec<InstrumentGraph>,
    /// Voice allocation state
    voices: [VoiceState; MAX_VOICES],
    /// Number of active voices (configurable parameter)
    voice_count: usize,
    /// Mix buffer for combining voice outputs
    mix_buffer: Vec<f32>,
    inputs: Vec<NodePort>,
    outputs: Vec<NodePort>,
    parameters: Vec<Parameter>,
}
impl VoiceAllocatorNode {
    /// Create an allocator with an empty template and MAX_VOICES empty
    /// voice graphs. `buffer_size` is in frames; the mix buffer is sized
    /// for interleaved stereo (buffer_size * 2 samples).
    pub fn new(name: impl Into<String>, sample_rate: u32, buffer_size: usize) -> Self {
        let name = name.into();
        // MIDI input for receiving note events
        let inputs = vec![
            NodePort::new("MIDI In", SignalType::Midi, 0),
        ];
        // Single mixed audio output
        let outputs = vec![
            NodePort::new("Mixed Out", SignalType::Audio, 0),
        ];
        // Voice count parameter
        let parameters = vec![
            Parameter::new(PARAM_VOICE_COUNT, "Voices", 1.0, MAX_VOICES as f32, DEFAULT_VOICES as f32, ParameterUnit::Generic),
        ];
        // Create empty template graph
        let template_graph = InstrumentGraph::new(sample_rate, buffer_size);
        // Create voice instances (initially empty clones of template)
        let voice_instances: Vec<InstrumentGraph> = (0..MAX_VOICES)
            .map(|_| InstrumentGraph::new(sample_rate, buffer_size))
            .collect();
        Self {
            name,
            template_graph,
            voice_instances,
            voices: std::array::from_fn(|_| VoiceState::new()),
            voice_count: DEFAULT_VOICES,
            mix_buffer: vec![0.0; buffer_size * 2], // Stereo
            inputs,
            outputs,
            parameters,
        }
    }
    /// Get mutable reference to template graph (for UI editing)
    pub fn template_graph_mut(&mut self) -> &mut InstrumentGraph {
        &mut self.template_graph
    }
    /// Get reference to template graph (for serialization)
    pub fn template_graph(&self) -> &InstrumentGraph {
        &self.template_graph
    }
    /// Rebuild voice instances from template (called after template is edited)
    ///
    /// All MAX_VOICES instances are rebuilt regardless of the current
    /// voice_count, so raising the count later needs no extra work.
    pub fn rebuild_voices(&mut self) {
        // Clone template to all voice instances
        for voice in &mut self.voice_instances {
            *voice = self.template_graph.clone_graph();
            // Find TemplateInput and TemplateOutput nodes
            let mut template_input_idx = None;
            let mut template_output_idx = None;
            for node_idx in voice.node_indices() {
                if let Some(node) = voice.get_node(node_idx) {
                    match node.node_type() {
                        "TemplateInput" => template_input_idx = Some(node_idx),
                        "TemplateOutput" => template_output_idx = Some(node_idx),
                        _ => {}
                    }
                }
            }
            // Mark ONLY TemplateInput as a MIDI target
            // MIDI will flow through graph connections to other nodes (like MidiToCV)
            if let Some(input_idx) = template_input_idx {
                voice.set_midi_target(input_idx, true);
            }
            // Set TemplateOutput as output node
            // (passes None when the template has no TemplateOutput node)
            voice.set_output_node(template_output_idx);
        }
    }
    /// Find a free voice, or steal the oldest one
    ///
    /// Returns an index into `voices`, always < voice_count (falls back to
    /// voice 0 if the slice is somehow empty).
    fn find_voice_for_note_on(&mut self) -> usize {
        // Only search within active voice_count
        // First, look for an inactive voice
        for (i, voice) in self.voices[..self.voice_count].iter().enumerate() {
            if !voice.active {
                return i;
            }
        }
        // No free voices, steal the oldest one within voice_count
        self.voices[..self.voice_count]
            .iter()
            .enumerate()
            .max_by_key(|(_, v)| v.age)
            .map(|(i, _)| i)
            .unwrap_or(0)
    }
    /// Find all voices playing a specific note
    ///
    /// Only active voices are reported, so a stolen voice no longer
    /// receives note-offs for the note it used to play.
    fn find_voices_for_note_off(&self, note: u8) -> Vec<usize> {
        self.voices[..self.voice_count]
            .iter()
            .enumerate()
            .filter_map(|(i, v)| {
                if v.active && v.note == note {
                    Some(i)
                } else {
                    None
                }
            })
            .collect()
    }
}
impl AudioNode for VoiceAllocatorNode {
    fn category(&self) -> NodeCategory {
        NodeCategory::Utility
    }
    fn inputs(&self) -> &[NodePort] {
        &self.inputs
    }
    fn outputs(&self) -> &[NodePort] {
        &self.outputs
    }
    fn parameters(&self) -> &[Parameter] {
        &self.parameters
    }
    /// Set the polyphony (PARAM_VOICE_COUNT, clamped to 1..=MAX_VOICES).
    /// Shrinking the count immediately deactivates voices above the limit.
    fn set_parameter(&mut self, id: u32, value: f32) {
        match id {
            PARAM_VOICE_COUNT => {
                let new_count = (value.round() as usize).clamp(1, MAX_VOICES);
                if new_count != self.voice_count {
                    self.voice_count = new_count;
                    // Stop voices beyond the new count
                    for voice in &mut self.voices[new_count..] {
                        voice.active = false;
                    }
                }
            }
            _ => {}
        }
    }
    fn get_parameter(&self, id: u32) -> f32 {
        match id {
            PARAM_VOICE_COUNT => self.voice_count as f32,
            _ => 0.0,
        }
    }
    /// Route one incoming MIDI event: note-ons allocate (or steal) a voice,
    /// note-offs go to every voice holding that note, everything else
    /// (CC, pitch bend, ...) is broadcast to all active voices.
    fn handle_midi(&mut self, event: &MidiEvent) {
        let status = event.status & 0xF0;
        match status {
            0x90 => {
                // Note on
                if event.data2 > 0 {
                    let voice_idx = self.find_voice_for_note_on();
                    self.voices[voice_idx].active = true;
                    self.voices[voice_idx].note = event.data1;
                    self.voices[voice_idx].age = 0;
                    // Store MIDI event for this voice to process
                    self.voices[voice_idx].pending_events.push(*event);
                } else {
                    // Velocity = 0 means note off - send to ALL voices playing this note
                    let voice_indices = self.find_voices_for_note_off(event.data1);
                    for voice_idx in voice_indices {
                        self.voices[voice_idx].active = false;
                        self.voices[voice_idx].pending_events.push(*event);
                    }
                }
            }
            0x80 => {
                // Note off - send to ALL voices playing this note
                let voice_indices = self.find_voices_for_note_off(event.data1);
                for voice_idx in voice_indices {
                    self.voices[voice_idx].active = false;
                    self.voices[voice_idx].pending_events.push(*event);
                }
            }
            _ => {
                // Other MIDI events (CC, pitch bend, etc.) - send to all active voices
                for voice_idx in 0..self.voice_count {
                    if self.voices[voice_idx].active {
                        self.voices[voice_idx].pending_events.push(*event);
                    }
                }
            }
        }
    }
    fn process(
        &mut self,
        _inputs: &[&[f32]],
        outputs: &mut [&mut [f32]],
        midi_inputs: &[&[MidiEvent]],
        _midi_outputs: &mut [&mut Vec<MidiEvent>],
        _sample_rate: u32,
    ) {
        // Allocate incoming note events to voices first.
        if !midi_inputs.is_empty() {
            for event in midi_inputs[0] {
                self.handle_midi(event);
            }
        }
        if outputs.is_empty() {
            return;
        }
        let output = &mut outputs[0];
        let output_len = output.len();
        // Start from silence so stale data in the output buffer is never
        // mixed into (matches MixerNode, which also zeroes its output
        // before accumulating).
        output.fill(0.0);
        // Process voices and mix (only up to voice_count).
        for voice_idx in 0..self.voice_count {
            let voice_state = &mut self.voices[voice_idx];
            // BUGFIX: also run voices that were just deactivated but still
            // hold queued events. handle_midi() marks a voice inactive on
            // note-off AND queues the event; skipping inactive voices meant
            // the note-off was never delivered to the voice's internal
            // graph, leaving its gate/envelope stuck open. (Release tails
            // longer than one buffer are still truncated; a full fix needs
            // envelope-activity feedback from the voice graph.)
            if !voice_state.active && voice_state.pending_events.is_empty() {
                continue;
            }
            if voice_state.active {
                voice_state.age = voice_state.age.saturating_add(1);
            }
            // Get pending MIDI events for this voice
            let midi_events = std::mem::take(&mut voice_state.pending_events);
            // IMPORTANT: Process only the slice of mix_buffer that matches
            // the output size. This prevents phase discontinuities in
            // oscillators from rendering frames that are never heard.
            let mix_slice = &mut self.mix_buffer[..output_len];
            mix_slice.fill(0.0);
            // Process this voice's graph with its MIDI events
            self.voice_instances[voice_idx].process(mix_slice, &midi_events);
            // Mix into output (accumulate)
            for (out, &sample) in output.iter_mut().zip(mix_slice.iter()) {
                *out += sample;
            }
        }
        // Soft normalization to prevent clipping: sqrt of the active voice
        // count keeps perceived loudness roughly stable as polyphony grows.
        let active_count = self.voices[..self.voice_count].iter().filter(|v| v.active).count();
        if active_count > 1 {
            let scale = 1.0 / (active_count as f32).sqrt();
            for sample in output.iter_mut() {
                *sample *= scale;
            }
        }
    }
    fn reset(&mut self) {
        // Silence every voice slot and drop any undelivered MIDI.
        for voice in &mut self.voices {
            voice.active = false;
            voice.pending_events.clear();
        }
        for graph in &mut self.voice_instances {
            graph.reset();
        }
        self.template_graph.reset();
    }
    fn node_type(&self) -> &str {
        "VoiceAllocator"
    }
    fn name(&self) -> &str {
        &self.name
    }
    fn clone_node(&self) -> Box<dyn AudioNode> {
        // Clone creates a new VoiceAllocator with the same template graph;
        // voice allocation state starts from scratch.
        Box::new(Self {
            name: self.name.clone(),
            template_graph: self.template_graph.clone_graph(),
            voice_instances: self.voice_instances.iter().map(|g| g.clone_graph()).collect(),
            voices: std::array::from_fn(|_| VoiceState::new()), // Reset voices
            voice_count: self.voice_count,
            mix_buffer: vec![0.0; self.mix_buffer.len()],
            inputs: self.inputs.clone(),
            outputs: self.outputs.clone(),
            parameters: self.parameters.clone(),
        })
    }
}

View File

@ -0,0 +1,96 @@
use serde::{Deserialize, Serialize};
/// Three distinct signal types for graph edges.
///
/// A connection is only legal between ports of the same signal type; see
/// `ConnectionError::TypeMismatch`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum SignalType {
    /// Audio-rate signals (typically -1.0 to 1.0) — shown blue in the UI.
    Audio,
    /// MIDI events (discrete messages) — shown green in the UI.
    Midi,
    /// Control voltage (modulation signals, typically 0.0 to 1.0) — shown orange in the UI.
    CV,
}
/// A single input or output slot on a graph node.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NodePort {
    /// Human-readable port label.
    pub name: String,
    /// Signal kind this port carries; connections must match types.
    pub signal_type: SignalType,
    /// Position of the port within its node's port list.
    pub index: usize,
}

impl NodePort {
    /// Builds a port with the given label, signal kind, and slot index.
    pub fn new(name: impl Into<String>, signal_type: SignalType, index: usize) -> Self {
        let name = name.into();
        NodePort {
            name,
            signal_type,
            index,
        }
    }
}
/// Node category used to group nodes in the UI.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum NodeCategory {
    /// Input nodes.
    Input,
    /// Signal-generating nodes.
    Generator,
    /// Signal-processing nodes.
    Effect,
    /// Utility/helper nodes.
    Utility,
    /// Output nodes.
    Output,
}
/// User-facing parameter definition exposed by a node.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Parameter {
    /// Identifier used to address this parameter on its node.
    pub id: u32,
    /// Display name.
    pub name: String,
    /// Minimum allowed value.
    pub min: f32,
    /// Maximum allowed value.
    pub max: f32,
    /// Default value.
    pub default: f32,
    /// Unit used when presenting the value.
    pub unit: ParameterUnit,
}

impl Parameter {
    /// Constructs a parameter descriptor; `name` accepts anything convertible to `String`.
    pub fn new(id: u32, name: impl Into<String>, min: f32, max: f32, default: f32, unit: ParameterUnit) -> Self {
        let name = name.into();
        Parameter {
            id,
            name,
            min,
            max,
            default,
            unit,
        }
    }
}
/// Units for parameter values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ParameterUnit {
    /// Dimensionless value.
    Generic,
    /// Frequency in hertz (Hz).
    Frequency,
    /// Level in decibels (dB).
    Decibels,
    /// Time in seconds.
    Time,
    /// Percentage (0-100).
    Percent,
}
/// Errors that can occur during graph operations.
#[derive(Debug, Clone)]
pub enum ConnectionError {
    /// The two ports carry different signal types.
    TypeMismatch { expected: SignalType, got: SignalType },
    /// A port index was out of range.
    InvalidPort,
    /// Adding the connection would make the graph cyclic.
    WouldCreateCycle,
}
impl std::fmt::Display for ConnectionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ConnectionError::TypeMismatch { expected, got } => {
write!(f, "Signal type mismatch: expected {:?}, got {:?}", expected, got)
}
ConnectionError::InvalidPort => write!(f, "Invalid port index"),
ConnectionError::WouldCreateCycle => write!(f, "Connection would create a cycle"),
}
}
}
impl std::error::Error for ConnectionError {}

View File

@ -198,6 +198,17 @@ impl Project {
self.tracks.get_mut(&track_id)
}
/// Get oscilloscope data from a node in a track's graph
pub fn get_oscilloscope_data(&self, track_id: TrackId, node_id: u32, sample_count: usize) -> Option<Vec<f32>> {
if let Some(TrackNode::Midi(track)) = self.tracks.get(&track_id) {
if let Some(ref graph) = track.instrument_graph {
let node_idx = petgraph::stable_graph::NodeIndex::new(node_id as usize);
return graph.get_oscilloscope_data(node_idx, sample_count);
}
}
None
}
/// Get all root-level track IDs
pub fn root_tracks(&self) -> &[TrackId] {
&self.root_tracks

View File

@ -1,6 +1,7 @@
use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
use super::clip::Clip;
use super::midi::MidiClip;
use super::node_graph::InstrumentGraph;
use super::pool::AudioPool;
use crate::effects::{Effect, SimpleSynth};
use std::collections::HashMap;
@ -309,6 +310,8 @@ pub struct MidiTrack {
/// Automation lanes for this track
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId,
/// Optional instrument graph (replaces SimpleSynth when present)
pub instrument_graph: Option<InstrumentGraph>,
}
impl MidiTrack {
@ -325,6 +328,7 @@ impl MidiTrack {
solo: false,
automation_lanes: HashMap::new(),
next_automation_id: 0,
instrument_graph: None,
}
}
@ -401,8 +405,19 @@ impl MidiTrack {
sample_rate: u32,
channels: u32,
) {
// Generate audio from the instrument (which processes queued events)
// Generate audio - use instrument graph if available, otherwise SimpleSynth
if let Some(graph) = &mut self.instrument_graph {
// Get pending MIDI events from SimpleSynth (they're queued there by send_midi_note_on/off)
// We need to drain them so they're not processed again
let events: Vec<crate::audio::midi::MidiEvent> =
self.instrument.pending_events.drain(..).collect();
// Process graph with MIDI events
graph.process(output, &events);
} else {
// Fallback to SimpleSynth (which processes queued events)
self.instrument.process(output, channels as usize, sample_rate);
}
// Apply effect chain
for effect in &mut self.effects {
@ -427,6 +442,7 @@ impl MidiTrack {
let buffer_end_seconds = playhead_seconds + buffer_duration_seconds;
// Collect MIDI events from all clips that overlap with current time range
let mut midi_events = Vec::new();
for clip in &self.clips {
let events = clip.get_events_in_range(
playhead_seconds,
@ -434,14 +450,22 @@ impl MidiTrack {
sample_rate,
);
// Queue events in the instrument
for (_timestamp, event) in events {
self.instrument.queue_event(event);
midi_events.push(event);
}
}
// Generate audio from the instrument
// Generate audio - use instrument graph if available, otherwise SimpleSynth
if let Some(graph) = &mut self.instrument_graph {
// Use node graph for audio generation
graph.process(output, &midi_events);
} else {
// Fallback to SimpleSynth
for event in &midi_events {
self.instrument.queue_event(*event);
}
self.instrument.process(output, channels as usize, sample_rate);
}
// Apply effect chain
for effect in &mut self.effects {

View File

@ -117,6 +117,26 @@ pub enum Command {
SendMidiNoteOn(TrackId, u8, u8),
/// Send a live MIDI note off event to a track's instrument (track_id, note)
SendMidiNoteOff(TrackId, u8),
// Node graph commands
/// Add a node to a track's instrument graph (track_id, node_type, position_x, position_y)
GraphAddNode(TrackId, String, f32, f32),
/// Add a node to a VoiceAllocator's template graph (track_id, voice_allocator_node_id, node_type, position_x, position_y)
GraphAddNodeToTemplate(TrackId, u32, String, f32, f32),
/// Remove a node from a track's instrument graph (track_id, node_index)
GraphRemoveNode(TrackId, u32),
/// Connect two nodes in a track's graph (track_id, from_node, from_port, to_node, to_port)
GraphConnect(TrackId, u32, usize, u32, usize),
/// Connect nodes in a VoiceAllocator template (track_id, voice_allocator_node_id, from_node, from_port, to_node, to_port)
GraphConnectInTemplate(TrackId, u32, u32, usize, u32, usize),
/// Disconnect two nodes in a track's graph (track_id, from_node, from_port, to_node, to_port)
GraphDisconnect(TrackId, u32, usize, u32, usize),
/// Set a parameter on a node (track_id, node_index, param_id, value)
GraphSetParameter(TrackId, u32, u32, f32),
/// Set which node receives MIDI events (track_id, node_index, enabled)
GraphSetMidiTarget(TrackId, u32, bool),
/// Set which node is the audio output (track_id, node_index)
GraphSetOutputNode(TrackId, u32),
}
/// Events sent from audio thread back to UI/control thread
@ -152,4 +172,12 @@ pub enum AudioEvent {
NoteOn(u8, u8),
/// MIDI note stopped playing (note)
NoteOff(u8),
// Node graph events
/// Node added to graph (track_id, node_index, node_type)
GraphNodeAdded(TrackId, u32, String),
/// Connection error occurred (track_id, error_message)
GraphConnectionError(TrackId, String),
/// Graph state changed (for full UI sync)
GraphStateChanged(TrackId),
}

View File

@ -130,7 +130,7 @@ impl SynthVoice {
pub struct SimpleSynth {
voices: Vec<SynthVoice>,
sample_rate: f32,
pending_events: Vec<MidiEvent>,
pub pending_events: Vec<MidiEvent>,
}
impl SimpleSynth {

View File

@ -9,6 +9,7 @@ pub mod command;
pub mod dsp;
pub mod effects;
pub mod io;
pub mod tui;
// Re-export commonly used types
pub use audio::{

View File

@ -1,826 +1,84 @@
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use daw_backend::{load_midi_file, AudioEvent, AudioFile, Clip, CurveType, Engine, ParameterId, PoolAudioFile, Track};
use daw_backend::{AudioEvent, AudioSystem, EventEmitter};
use daw_backend::tui::run_tui;
use std::env;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use std::time::Duration;
use std::sync::{Arc, Mutex};
/// Event emitter that pushes events to a ring buffer consumed by the TUI.
struct TuiEventEmitter {
    /// Producer half of the event ring buffer. Wrapped in a `Mutex` so `emit`
    /// can push through `&self` (the emitter is shared behind an `Arc`).
    tx: Arc<Mutex<rtrb::Producer<AudioEvent>>>,
}
impl TuiEventEmitter {
fn new(tx: rtrb::Producer<AudioEvent>) -> Self {
Self {
tx: Arc::new(Mutex::new(tx)),
}
}
}
impl EventEmitter for TuiEventEmitter {
    /// Pushes the event into the ring buffer. Best effort: if the lock is
    /// poisoned or the buffer is full, the event is silently dropped.
    fn emit(&self, event: AudioEvent) {
        match self.tx.lock() {
            Ok(mut producer) => {
                producer.push(event).ok();
            }
            Err(_) => {}
        }
    }
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Get audio file paths from command line arguments
// Check if user wants the old CLI mode
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
eprintln!("Usage: {} <audio_file1> [audio_file2] [audio_file3] ...", args[0]);
eprintln!("Example: {} track1.wav track2.wav", args[0]);
if args.len() > 1 && args[1] == "--help" {
print_usage();
return Ok(());
}
println!("DAW Backend - Phase 6: Hierarchical Tracks\n");
println!("Lightningbeam DAW - Starting TUI...\n");
println!("Controls:");
println!(" ESC - Enter Command mode (type commands like 'track MyTrack')");
println!(" i - Enter Play mode (play MIDI notes with keyboard)");
println!(" awsedftgyhujkolp;' - Play MIDI notes (chromatic scale in Play mode)");
println!(" r - Release all notes (in Play mode)");
println!(" SPACE - Play/Pause");
println!(" Ctrl+Q - Quit");
println!("\nStarting audio system...");
// Load all audio files
let mut audio_files = Vec::new();
let mut max_sample_rate = 0;
let mut max_channels = 0;
// Create event channel for TUI
let (event_tx, event_rx) = rtrb::RingBuffer::new(256);
let emitter = Arc::new(TuiEventEmitter::new(event_tx));
for (i, path) in args.iter().skip(1).enumerate() {
println!("Loading file {}: {}", i + 1, path);
match AudioFile::load(path) {
Ok(audio_file) => {
let duration = audio_file.frames as f64 / audio_file.sample_rate as f64;
println!(
" {} Hz, {} channels, {} frames ({:.2}s)",
audio_file.sample_rate, audio_file.channels, audio_file.frames, duration
);
// Initialize audio system with event emitter
let mut audio_system = AudioSystem::new(Some(emitter))?;
max_sample_rate = max_sample_rate.max(audio_file.sample_rate);
max_channels = max_channels.max(audio_file.channels);
println!("Audio system initialized:");
println!(" Sample rate: {} Hz", audio_system.sample_rate);
println!(" Channels: {}", audio_system.channels);
audio_files.push((
Path::new(path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
PathBuf::from(path),
audio_file,
));
}
Err(e) => {
eprintln!(" Error loading {}: {}", path, e);
eprintln!(" Skipping this file...");
}
}
}
// Create a test MIDI track to verify event handling
audio_system.controller.create_midi_track("Test Track".to_string());
if audio_files.is_empty() {
eprintln!("No audio files loaded. Exiting.");
return Ok(());
}
println!("\nTUI starting...\n");
std::thread::sleep(std::time::Duration::from_millis(100)); // Give time for event
println!("\nProject settings:");
println!(" Sample rate: {} Hz", max_sample_rate);
println!(" Channels: {}", max_channels);
println!(" Files: {}", audio_files.len());
// Initialize cpal
let host = cpal::default_host();
let device = host
.default_output_device()
.ok_or("No output device available")?;
println!("\nUsing audio device: {}", device.name()?);
// Get the default output config to determine sample format
let default_config = device.default_output_config()?;
let sample_format = default_config.sample_format();
// Create a custom config matching the project settings
let config = cpal::StreamConfig {
channels: max_channels as u16,
sample_rate: cpal::SampleRate(max_sample_rate),
buffer_size: cpal::BufferSize::Default,
};
println!("Output config: {:?} with format {:?}", config, sample_format);
// Create lock-free command and event queues
let (command_tx, command_rx) = rtrb::RingBuffer::<daw_backend::Command>::new(256);
let (event_tx, event_rx) = rtrb::RingBuffer::<AudioEvent>::new(256);
// Create the audio engine
let mut engine = Engine::new(max_sample_rate, max_channels, command_rx, event_tx);
// Add all files to the audio pool and create tracks with clips
let track_ids = Arc::new(Mutex::new(Vec::new()));
let mut clip_info = Vec::new(); // Store (track_id, clip_id, name, duration)
let mut max_duration = 0.0f64;
let mut clip_id_counter = 0u32;
println!("\nCreating tracks and clips:");
for (name, path, audio_file) in audio_files.into_iter() {
let duration = audio_file.frames as f64 / audio_file.sample_rate as f64;
max_duration = max_duration.max(duration);
// Add audio file to pool
let pool_file = PoolAudioFile::new(
path,
audio_file.data,
audio_file.channels,
audio_file.sample_rate,
);
let pool_index = engine.audio_pool_mut().add_file(pool_file);
// Create track (the ID passed to Track::new is ignored; Project assigns IDs)
let mut track = Track::new(0, name.clone());
// Create clip that plays the entire file starting at time 0
let clip_id = clip_id_counter;
let clip = Clip::new(
clip_id,
pool_index,
0.0, // start at beginning of timeline
duration, // full duration
0.0, // no offset into file
);
clip_id_counter += 1;
track.add_clip(clip);
// Capture the ACTUAL track ID assigned by the project
let actual_track_id = engine.add_track(track);
track_ids.lock().unwrap().push(actual_track_id);
clip_info.push((actual_track_id, clip_id, name.clone(), duration));
println!(" Track {}: {} (clip {} at 0.0s, duration {:.2}s)", actual_track_id, name, clip_id, duration);
}
println!("\nTimeline duration: {:.2}s", max_duration);
let mut controller = engine.get_controller(command_tx);
// Build the output stream - Engine moves into the audio thread (no Arc, no Mutex!)
let stream = match sample_format {
cpal::SampleFormat::F32 => build_stream::<f32>(&device, &config, engine)?,
cpal::SampleFormat::I16 => build_stream::<i16>(&device, &config, engine)?,
cpal::SampleFormat::U16 => build_stream::<u16>(&device, &config, engine)?,
_ => return Err("Unsupported sample format".into()),
};
// Start the audio stream
stream.play()?;
println!("\nAudio stream started!");
print_help();
{
let ids = track_ids.lock().unwrap();
print_status(0.0, max_duration, &ids);
}
// Spawn event listener thread
// Wrap event receiver for TUI
let event_rx = Arc::new(Mutex::new(event_rx));
let event_rx_clone = Arc::clone(&event_rx);
let track_ids_clone = Arc::clone(&track_ids);
let _event_thread = thread::spawn(move || {
loop {
thread::sleep(Duration::from_millis(50));
let mut rx = event_rx_clone.lock().unwrap();
while let Ok(event) = rx.pop() {
match event {
AudioEvent::PlaybackPosition(pos) => {
// Clear the line and show position
print!("\r\x1b[K");
print!("Position: {:.2}s / {:.2}s", pos, max_duration);
print!(" [");
let bar_width = 30;
let filled = ((pos / max_duration) * bar_width as f64) as usize;
for i in 0..bar_width {
if i < filled {
print!("=");
} else if i == filled {
print!(">");
} else {
print!(" ");
}
}
print!("]");
io::stdout().flush().ok();
}
AudioEvent::PlaybackStopped => {
print!("\r\x1b[K");
println!("Playback stopped (end of timeline)");
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::BufferUnderrun => {
eprintln!("\nWarning: Buffer underrun detected");
}
AudioEvent::TrackCreated(track_id, is_metatrack, name) => {
print!("\r\x1b[K");
if is_metatrack {
println!("Metatrack {} created: '{}' (ID: {})", track_id, name, track_id);
} else {
println!("Track {} created: '{}' (ID: {})", track_id, name, track_id);
}
track_ids_clone.lock().unwrap().push(track_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::BufferPoolStats(stats) => {
print!("\r\x1b[K");
println!("\n=== Buffer Pool Statistics ===");
println!(" Total buffers: {}", stats.total_buffers);
println!(" Available buffers: {}", stats.available_buffers);
println!(" In-use buffers: {}", stats.in_use_buffers);
println!(" Peak usage: {}", stats.peak_usage);
println!(" Total allocations: {}", stats.total_allocations);
println!(" Buffer size: {} samples", stats.buffer_size);
if stats.total_allocations == 0 {
println!(" Status: \x1b[32mOK\x1b[0m - Zero allocations during playback");
} else {
println!(" Status: \x1b[33mWARNING\x1b[0m - {} allocation(s) occurred", stats.total_allocations);
println!(" Recommendation: Increase initial buffer pool capacity to {}", stats.peak_usage + 2);
}
println!();
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::AutomationLaneCreated(track_id, lane_id, parameter_id) => {
print!("\r\x1b[K");
println!("Automation lane {} created on track {} for parameter {:?}",
lane_id, track_id, parameter_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::AudioFileAdded(pool_index, path) => {
print!("\r\x1b[K");
println!("Audio file added to pool at index {}: '{}'", pool_index, path);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::ClipAdded(track_id, clip_id) => {
print!("\r\x1b[K");
println!("Clip {} added to track {}", clip_id, track_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingStarted(track_id, clip_id) => {
print!("\r\x1b[K");
println!("Recording started on track {} (clip {})", track_id, clip_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingProgress(clip_id, duration) => {
print!("\r\x1b[K");
print!("Recording clip {}: {:.2}s", clip_id, duration);
io::stdout().flush().ok();
}
AudioEvent::RecordingStopped(clip_id, pool_index, _waveform) => {
print!("\r\x1b[K");
println!("Recording stopped (clip {}, pool index {})", clip_id, pool_index);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingError(error) => {
print!("\r\x1b[K");
println!("Recording error: {}", error);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::ProjectReset => {
print!("\r\x1b[K");
println!("Project reset - all tracks and audio cleared");
// Clear the local track list
track_ids_clone.lock().unwrap().clear();
print!("> ");
io::stdout().flush().ok();
}
}
}
}
});
// Simple command loop
loop {
let mut input = String::new();
print!("\r\x1b[K> ");
io::stdout().flush()?;
io::stdin().read_line(&mut input)?;
let input = input.trim();
// Parse input
if input.is_empty() {
controller.play();
println!("Playing...");
} else if input == "q" || input == "quit" {
println!("Quitting...");
break;
} else if input == "s" || input == "stop" {
controller.stop();
println!("Stopped (reset to beginning)");
} else if input == "p" || input == "play" {
controller.play();
println!("Playing...");
} else if input == "pause" {
controller.pause();
println!("Paused");
} else if input.starts_with("seek ") {
// Parse seek time
if let Ok(seconds) = input[5..].trim().parse::<f64>() {
if seconds >= 0.0 {
controller.seek(seconds);
println!("Seeking to {:.2}s", seconds);
} else {
println!("Invalid seek time (must be >= 0.0)");
}
} else {
println!("Invalid seek format. Usage: seek <seconds>");
}
} else if input.starts_with("volume ") {
// Parse: volume <track_id> <volume>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(volume)) = (parts[1].parse::<u32>(), parts[2].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_track_volume(track_id, volume);
println!("Set track {} volume to {:.2}", track_id, volume);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: volume <track_id> <volume>");
}
} else {
println!("Usage: volume <track_id> <volume>");
}
} else if input.starts_with("mute ") {
// Parse: mute <track_id>
if let Ok(track_id) = input[5..].trim().parse::<u32>() {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_track_mute(track_id, true);
println!("Muted track {}", track_id);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Usage: mute <track_id>");
}
} else if input.starts_with("unmute ") {
// Parse: unmute <track_id>
if let Ok(track_id) = input[7..].trim().parse::<u32>() {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_track_mute(track_id, false);
println!("Unmuted track {}", track_id);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Usage: unmute <track_id>");
}
} else if input.starts_with("solo ") {
// Parse: solo <track_id>
if let Ok(track_id) = input[5..].trim().parse::<u32>() {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_track_solo(track_id, true);
println!("Soloed track {}", track_id);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Usage: solo <track_id>");
}
} else if input.starts_with("unsolo ") {
// Parse: unsolo <track_id>
if let Ok(track_id) = input[7..].trim().parse::<u32>() {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_track_solo(track_id, false);
println!("Unsoloed track {}", track_id);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Usage: unsolo <track_id>");
}
} else if input.starts_with("move ") {
// Parse: move <track_id> <clip_id> <new_start_time>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 4 {
if let (Ok(track_id), Ok(clip_id), Ok(time)) =
(parts[1].parse::<u32>(), parts[2].parse::<u32>(), parts[3].parse::<f64>()) {
// Validate track and clip exist
if let Some((_tid, _cid, name, _)) = clip_info.iter().find(|(t, c, _, _)| *t == track_id && *c == clip_id) {
controller.move_clip(track_id, clip_id, time);
println!("Moved clip {} ('{}') on track {} to {:.2}s", clip_id, name, track_id, time);
} else {
println!("Invalid track ID or clip ID");
println!("Available clips:");
for (tid, cid, name, dur) in &clip_info {
println!(" Track {}, Clip {} ('{}', duration {:.2}s)", tid, cid, name, dur);
}
}
} else {
println!("Invalid format. Usage: move <track_id> <clip_id> <time>");
}
} else {
println!("Usage: move <track_id> <clip_id> <time>");
}
} else if input == "tracks" {
let ids = track_ids.lock().unwrap();
println!("Available tracks: {:?}", *ids);
} else if input == "clips" {
// Display clips from the tracked clip_info
println!("Available clips:");
if clip_info.is_empty() {
println!(" (no clips)");
} else {
for (tid, cid, name, dur) in &clip_info {
println!(" Track {}, Clip {} ('{}', duration {:.2}s)", tid, cid, name, dur);
}
}
} else if input.starts_with("gain ") {
// Parse: gain <track_id> <gain_db>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(gain_db)) = (parts[1].parse::<u32>(), parts[2].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.add_gain_effect(track_id, gain_db);
println!("Set gain on track {} to {:.1} dB", track_id, gain_db);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: gain <track_id> <gain_db>");
}
} else {
println!("Usage: gain <track_id> <gain_db>");
}
} else if input.starts_with("pan ") {
// Parse: pan <track_id> <pan>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(pan)) = (parts[1].parse::<u32>(), parts[2].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
let clamped_pan = pan.clamp(-1.0, 1.0);
controller.add_pan_effect(track_id, clamped_pan);
let pos = if clamped_pan < -0.01 {
format!("{:.0}% left", -clamped_pan * 100.0)
} else if clamped_pan > 0.01 {
format!("{:.0}% right", clamped_pan * 100.0)
} else {
"center".to_string()
};
println!("Set pan on track {} to {} ({:.2})", track_id, pos, clamped_pan);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: pan <track_id> <pan>");
}
} else {
println!("Usage: pan <track_id> <pan> (where pan is -1.0=left, 0.0=center, 1.0=right)");
}
} else if input.starts_with("eq ") {
// Parse: eq <track_id> <low_db> <mid_db> <high_db>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 5 {
if let (Ok(track_id), Ok(low), Ok(mid), Ok(high)) =
(parts[1].parse::<u32>(), parts[2].parse::<f32>(), parts[3].parse::<f32>(), parts[4].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.add_eq_effect(track_id, low, mid, high);
println!("Set EQ on track {}: Low {:.1} dB, Mid {:.1} dB, High {:.1} dB",
track_id, low, mid, high);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: eq <track_id> <low_db> <mid_db> <high_db>");
}
} else {
println!("Usage: eq <track_id> <low_db> <mid_db> <high_db>");
}
} else if input.starts_with("clearfx ") {
// Parse: clearfx <track_id>
if let Ok(track_id) = input[8..].trim().parse::<u32>() {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.clear_effects(track_id);
println!("Cleared all effects from track {}", track_id);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Usage: clearfx <track_id>");
}
} else if input.starts_with("meta ") {
// Parse: meta <name>
let name = input[5..].trim().to_string();
if !name.is_empty() {
controller.create_metatrack(name.clone());
println!("Created metatrack '{}'", name);
} else {
println!("Usage: meta <name>");
}
} else if input.starts_with("addtometa ") {
// Parse: addtometa <track_id> <metatrack_id>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(metatrack_id)) = (parts[1].parse::<u32>(), parts[2].parse::<u32>()) {
controller.add_to_metatrack(track_id, metatrack_id);
println!("Added track {} to metatrack {}", track_id, metatrack_id);
} else {
println!("Invalid format. Usage: addtometa <track_id> <metatrack_id>");
}
} else {
println!("Usage: addtometa <track_id> <metatrack_id>");
}
} else if input.starts_with("removefrommeta ") {
// Parse: removefrommeta <track_id>
if let Ok(track_id) = input[15..].trim().parse::<u32>() {
controller.remove_from_metatrack(track_id);
println!("Removed track {} from its metatrack", track_id);
} else {
println!("Usage: removefrommeta <track_id>");
}
} else if input.starts_with("midi ") {
// Parse: midi <name>
let name = input[5..].trim().to_string();
if !name.is_empty() {
controller.create_midi_track(name.clone());
println!("Created MIDI track '{}'", name);
} else {
println!("Usage: midi <name>");
}
} else if input.starts_with("midiclip ") {
// Parse: midiclip <track_id> <start_time> <duration>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 4 {
if let (Ok(track_id), Ok(start_time), Ok(duration)) =
(parts[1].parse::<u32>(), parts[2].parse::<f64>(), parts[3].parse::<f64>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.create_midi_clip(track_id, start_time, duration);
println!("Created MIDI clip on track {} at {:.2}s (duration {:.2}s)",
track_id, start_time, duration);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: midiclip <track_id> <start_time> <duration>");
}
} else {
println!("Usage: midiclip <track_id> <start_time> <duration>");
}
} else if input.starts_with("note ") {
// Parse: note <track_id> <clip_id> <time_offset> <note> <velocity> <duration>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 7 {
if let (Ok(track_id), Ok(clip_id), Ok(time_offset), Ok(note), Ok(velocity), Ok(duration)) =
(parts[1].parse::<u32>(), parts[2].parse::<u32>(), parts[3].parse::<f64>(),
parts[4].parse::<u8>(), parts[5].parse::<u8>(), parts[6].parse::<f64>()) {
if note > 127 || velocity > 127 {
println!("Note and velocity must be 0-127");
} else {
controller.add_midi_note(track_id, clip_id, time_offset, note, velocity, duration);
println!("Added note {} (velocity {}) to clip {} on track {} at offset {:.2}s (duration {:.2}s)",
note, velocity, clip_id, track_id, time_offset, duration);
}
} else {
println!("Invalid format. Usage: note <track_id> <clip_id> <time_offset> <note> <velocity> <duration>");
}
} else {
println!("Usage: note <track_id> <clip_id> <time_offset> <note> <velocity> <duration>");
}
} else if input.starts_with("loadmidi ") {
// Parse: loadmidi <track_id> <file_path> [start_time]
let parts: Vec<&str> = input.splitn(4, ' ').collect();
if parts.len() >= 3 {
if let Ok(track_id) = parts[1].parse::<u32>() {
let file_path = parts[2];
let start_time = if parts.len() == 4 {
parts[3].parse::<f64>().unwrap_or(0.0)
} else {
0.0
};
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
// Load the MIDI file (this happens on the UI thread, not audio thread)
match load_midi_file(file_path, clip_id_counter, max_sample_rate) {
Ok(mut clip) => {
clip.start_time = start_time;
let event_count = clip.events.len();
let duration = clip.duration;
let clip_id = clip.id;
clip_id_counter += 1;
controller.add_loaded_midi_clip(track_id, clip);
println!("Loaded MIDI file '{}' to track {} as clip {} at {:.2}s ({} events, duration {:.2}s)",
file_path, track_id, clip_id, start_time, event_count, duration);
}
Err(e) => {
println!("Error loading MIDI file: {}", e);
}
}
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: loadmidi <track_id> <file_path> [start_time]");
}
} else {
println!("Usage: loadmidi <track_id> <file_path> [start_time]");
}
} else if input.starts_with("stretch ") {
// Parse: stretch <track_id> <factor>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(stretch)) = (parts[1].parse::<u32>(), parts[2].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_time_stretch(track_id, stretch);
let speed = if stretch < 0.99 {
format!("{:.0}% speed (slower)", stretch * 100.0)
} else if stretch > 1.01 {
format!("{:.0}% speed (faster)", stretch * 100.0)
} else {
"normal speed".to_string()
};
println!("Set time stretch on track {} to {:.2}x ({})", track_id, stretch, speed);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: stretch <track_id> <factor>");
}
} else {
println!("Usage: stretch <track_id> <factor> (0.5=half speed, 1.0=normal, 2.0=double speed)");
}
} else if input.starts_with("offset ") {
// Parse: offset <track_id> <seconds>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 3 {
if let (Ok(track_id), Ok(offset)) = (parts[1].parse::<u32>(), parts[2].parse::<f64>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
controller.set_offset(track_id, offset);
let direction = if offset > 0.01 {
format!("{:.2}s later", offset)
} else if offset < -0.01 {
format!("{:.2}s earlier", -offset)
} else {
"no offset".to_string()
};
println!("Set time offset on track {} to {:.2}s (content shifted {})", track_id, offset, direction);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: offset <track_id> <seconds>");
}
} else {
println!("Usage: offset <track_id> <seconds> (positive=later, negative=earlier)");
}
} else if input == "stats" || input == "buffers" {
controller.request_buffer_pool_stats();
} else if input.starts_with("autovolume ") {
// Parse: autovolume <track_id> <time> <value>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 4 {
if let (Ok(track_id), Ok(time), Ok(value)) =
(parts[1].parse::<u32>(), parts[2].parse::<f64>(), parts[3].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
// Create automation lane (if not exists, will be reused)
controller.create_automation_lane(track_id, ParameterId::TrackVolume);
// Add automation point (note: lane_id=0 is assumed, real app would track this)
controller.add_automation_point(track_id, 0, time, value, CurveType::Linear);
println!("Added volume automation point on track {} at {:.2}s: {:.2}", track_id, time, value);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: autovolume <track_id> <time> <value>");
}
} else {
println!("Usage: autovolume <track_id> <time> <value>");
println!(" Example: autovolume 0 2.0 0.5 (set volume to 0.5 at 2 seconds)");
}
} else if input == "reset" {
controller.reset();
// Clear local clip info tracking
clip_info.clear();
println!("Resetting project...");
} else if input == "help" || input == "h" {
print_help();
} else {
println!("Unknown command: {}. Type 'help' for commands.", input);
}
}
// Drop the stream to stop playback
drop(stream);
println!("Goodbye!");
// Run the TUI
run_tui(audio_system.controller, event_rx)?;
println!("\nGoodbye!");
Ok(())
}
/// Print the full command reference for the interactive CLI to stdout.
///
/// The text is kept identical to the historical help output; empty
/// entries in the table below render as blank separator lines.
fn print_help() {
    let help_lines = [
        "",
        "Transport Commands:",
        " ENTER - Play",
        " p, play - Play",
        " pause - Pause",
        " s, stop - Stop and reset to beginning",
        " seek <time> - Seek to position in seconds (e.g. 'seek 10.5')",
        "",
        "Track Commands:",
        " tracks - List all track IDs",
        " volume <id> <v> - Set track volume (e.g. 'volume 0 0.5' for 50%)",
        " mute <id> - Mute a track",
        " unmute <id> - Unmute a track",
        " solo <id> - Solo a track (only soloed tracks play)",
        " unsolo <id> - Unsolo a track",
        "",
        "Clip Commands:",
        " clips - List all clips",
        " move <t> <c> <s> - Move clip to new timeline position",
        " (e.g. 'move 0 0 5.0' moves clip 0 on track 0 to 5.0s)",
        "",
        "Effect Commands:",
        " gain <id> <db> - Add/update gain effect (e.g. 'gain 0 6.0' for +6dB)",
        " pan <id> <pan> - Add/update pan effect (-1.0=left, 0.0=center, 1.0=right)",
        " eq <id> <l> <m> <h> - Add/update 3-band EQ (low, mid, high in dB)",
        " (e.g. 'eq 0 3.0 0.0 -2.0')",
        " clearfx <id> - Clear all effects from a track",
        "",
        "Metatrack Commands:",
        " meta <name> - Create a new metatrack",
        " addtometa <t> <m> - Add track to metatrack (e.g. 'addtometa 0 2')",
        " removefrommeta <t> - Remove track from its parent metatrack",
        " stretch <id> <f> - Set time stretch (0.5=half speed, 1.0=normal, 2.0=double)",
        " offset <id> <s> - Set time offset in seconds (positive=later, negative=earlier)",
        "",
        "MIDI Commands:",
        " midi <name> - Create a new MIDI track",
        " midiclip <t> <s> <d> - Create MIDI clip on track (start, duration)",
        " (e.g. 'midiclip 0 0.0 4.0')",
        " note <t> <c> <o> <n> <v> <d> - Add note to MIDI clip",
        " (track, clip, time_offset, note, velocity, duration)",
        " (e.g. 'note 0 0 0.0 60 100 0.5' adds middle C)",
        " loadmidi <t> <file> [start] - Load .mid file into track",
        " (e.g. 'loadmidi 0 song.mid 0.0')",
        "",
        "Diagnostics:",
        " stats, buffers - Show buffer pool statistics",
        "",
        "Project Commands:",
        " reset - Clear all tracks and audio (reset to empty project)",
        "",
        "Other:",
        " h, help - Show this help",
        " q, quit - Quit",
        "",
    ];
    for line in help_lines.iter() {
        println!("{}", line);
    }
}
/// Print the transport position against the total duration, then the
/// list of known track IDs.
fn print_status(position: f64, duration: f64, track_ids: &[u32]) {
    // One call, two lines: the embedded '\n' keeps both on stdout together.
    println!(
        "Position: {:.2}s / {:.2}s\nTracks: {:?}",
        position, duration, track_ids
    );
}
/// Build a cpal output stream that renders audio by pulling from `engine`.
///
/// `T` is the device's native sample format; the engine always produces
/// f32, and the callback converts each sample into `T` at the end.
///
/// `engine` is moved into the audio callback, so after this call the engine
/// can only be reached through whatever lock-free channels it exposes —
/// the callback itself takes no locks.
fn build_stream<T>(
    device: &cpal::Device,
    config: &cpal::StreamConfig,
    mut engine: Engine,
) -> Result<cpal::Stream, Box<dyn std::error::Error>>
where
    T: cpal::Sample + cpal::SizedSample + cpal::FromSample<f32>,
{
    let err_fn = |err| eprintln!("Audio stream error: {}", err);
    // Preallocate a large buffer for format conversion to avoid allocations in audio callback
    // Size it generously to handle typical buffer sizes (up to 8192 samples = 2048 frames * stereo * 2x safety)
    let mut conversion_buffer = vec![0.0f32; 16384];
    let stream = device.build_output_stream(
        config,
        move |data: &mut [T], _: &cpal::OutputCallbackInfo| {
            // NO MUTEX LOCK! Engine lives entirely on audio thread with ownership
            // Safety check - if buffer is too small, we have a problem
            // (bail out and emit silence rather than allocate on the real-time thread)
            if conversion_buffer.len() < data.len() {
                eprintln!("ERROR: Audio buffer size {} exceeds preallocated buffer size {}",
                    data.len(), conversion_buffer.len());
                return;
            }
            // Get a slice of the preallocated buffer
            let buffer_slice = &mut conversion_buffer[..data.len()];
            buffer_slice.fill(0.0);
            // Process audio - completely lock-free!
            engine.process(buffer_slice);
            // Convert f32 samples to output format
            for (i, sample) in data.iter_mut().enumerate() {
                *sample = cpal::Sample::from_sample(buffer_slice[i]);
            }
        },
        err_fn,
        None,
    )?;
    Ok(stream)
/// Print command-line usage information for the binary to stdout.
fn print_usage() {
    // argv[0] is always supplied by the OS, so next() cannot be None here.
    let program = env::args().next().unwrap();
    println!("Lightningbeam DAW - Terminal User Interface");
    println!("\nUsage: {} [OPTIONS]", program);
    let remainder = [
        "\nOptions:",
        " --help Show this help message",
        "\nThe DAW will start in TUI mode with an empty project.",
        "Use commands to create tracks and load audio:",
        " :track <name> - Create MIDI track",
        " :audiotrack <name> - Create audio track",
        " :play - Start playback",
        " :stop - Stop playback",
        " :quit - Exit application",
    ];
    for line in remainder.iter() {
        println!("{}", line);
    }
}

923
daw-backend/src/tui/mod.rs Normal file
View File

@ -0,0 +1,923 @@
use crate::audio::EngineController;
use crate::command::AudioEvent;
use crate::io::load_midi_file;
use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyModifiers},
execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};
use ratatui::{
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, Paragraph},
Frame, Terminal,
};
use std::io;
use std::sync::{Arc, Mutex};
use std::time::Duration;
/// TUI application mode
///
/// The interface is modal (vim-style): the same keystrokes are interpreted
/// differently depending on which mode is active.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum AppMode {
    /// Command mode - type vim-style commands
    Command,
    /// Play mode - use keyboard to play MIDI notes
    Play,
}
/// TUI application state
///
/// Holds everything the renderer needs to draw a frame. This struct is pure
/// UI bookkeeping: engine interaction happens through the `EngineController`
/// handled by `run_tui`, not through these fields.
pub struct TuiApp {
    /// Current application mode
    mode: AppMode,
    /// Command input buffer (for Command mode)
    command_input: String,
    /// Current playback position (seconds)
    playback_position: f64,
    /// Whether playback is active
    is_playing: bool,
    /// Status message to display
    status_message: String,
    /// List of tracks (track_id, name)
    tracks: Vec<(u32, String)>,
    /// Currently selected track for MIDI input
    selected_track: Option<u32>,
    /// Active MIDI notes (currently held down)
    active_notes: Vec<u8>,
    /// Command history for up/down navigation
    command_history: Vec<String>,
    /// Current position in command history (None = editing a fresh line)
    history_index: Option<usize>,
    /// Clips on timeline: (track_id, clip_id, start_time, duration, name, notes)
    /// Notes: Vec<(pitch, time_offset, duration)> — offsets are relative to the clip start
    clips: Vec<(u32, u32, f64, f64, String, Vec<(u8, f64, f64)>)>,
    /// Next clip ID for locally created clips
    next_clip_id: u32,
    /// Timeline scroll offset in seconds (start of visible window)
    timeline_scroll: f64,
    /// Timeline visible duration in seconds (zoom level)
    timeline_visible_duration: f64,
}
impl TuiApp {
    /// Construct the initial UI state: Command mode, empty project,
    /// and a 10-second timeline window starting at t = 0.
    pub fn new() -> Self {
        Self {
            mode: AppMode::Command,
            command_input: String::new(),
            playback_position: 0.0,
            is_playing: false,
            status_message: "SPACE=play/pause | ←/→ scroll | -/+ zoom | 'i'=Play mode | Type 'help'".to_string(),
            tracks: Vec::new(),
            selected_track: None,
            active_notes: Vec::new(),
            command_history: Vec::new(),
            history_index: None,
            clips: Vec::new(),
            next_clip_id: 0,
            timeline_scroll: 0.0,
            timeline_visible_duration: 10.0, // default window: 10 seconds
        }
    }

    /// Switch to Command mode, discarding any partial input and leaving
    /// history navigation.
    pub fn enter_command_mode(&mut self) {
        self.mode = AppMode::Command;
        self.command_input.clear();
        self.history_index = None;
        self.status_message = "-- COMMAND -- SPACE=play/pause | ←/→ scroll | -/+ zoom | 'i' for Play mode | Type 'help'".to_string();
    }

    /// Switch to Play mode (the keyboard acts as a MIDI keyboard).
    pub fn enter_play_mode(&mut self) {
        self.mode = AppMode::Play;
        self.command_input.clear();
        self.status_message = "-- PLAY -- Press '?' for help, 'ESC' for Command mode".to_string();
    }

    /// Append one character to the command buffer.
    pub fn push_command_char(&mut self, c: char) {
        self.command_input.push(c);
    }

    /// Delete the last character of the command buffer (no-op when empty).
    pub fn pop_command_char(&mut self) {
        self.command_input.pop();
    }

    /// Borrow the current command buffer.
    pub fn command_input(&self) -> &str {
        self.command_input.as_str()
    }

    /// Empty the command buffer and reset history navigation.
    pub fn clear_command(&mut self) {
        self.command_input.clear();
        self.history_index = None;
    }

    /// Record an executed command, skipping blanks and immediate repeats.
    pub fn add_to_history(&mut self, command: String) {
        let is_repeat = self.command_history.last() == Some(&command);
        if !command.is_empty() && !is_repeat {
            self.command_history.push(command);
        }
    }

    /// Step backwards through command history (towards older entries);
    /// stays pinned at the oldest entry.
    pub fn history_up(&mut self) {
        if self.command_history.is_empty() {
            return;
        }
        // Start from the newest entry, otherwise move one back (floor at 0).
        let idx = match self.history_index {
            None => self.command_history.len() - 1,
            Some(i) => i.saturating_sub(1),
        };
        self.history_index = Some(idx);
        self.command_input = self.command_history[idx].clone();
    }

    /// Step forwards through command history (towards newer entries);
    /// stepping past the newest entry returns to a fresh empty line.
    pub fn history_down(&mut self) {
        let i = match self.history_index {
            Some(i) => i,
            None => return, // not navigating history
        };
        if i + 1 < self.command_history.len() {
            self.history_index = Some(i + 1);
            self.command_input = self.command_history[i + 1].clone();
        } else {
            self.history_index = None;
            self.command_input.clear();
        }
    }

    /// Record the engine's playhead position and, while playing, keep the
    /// playhead inside the visible timeline window.
    pub fn update_playback_position(&mut self, position: f64) {
        self.playback_position = position;
        if !self.is_playing {
            return;
        }
        // 10% of the window is reserved as a margin on either edge.
        let margin = self.timeline_visible_duration * 0.1;
        let window_end = self.timeline_scroll + self.timeline_visible_duration;
        if position > window_end - margin {
            // Playhead ran ahead of the window: re-centre on it.
            self.timeline_scroll = (position - self.timeline_visible_duration * 0.5).max(0.0);
        } else if position < self.timeline_scroll + margin {
            // Playhead fell behind the window: scroll back.
            self.timeline_scroll = (position - margin).max(0.0);
        }
    }

    /// Record whether the transport is running.
    pub fn set_playing(&mut self, playing: bool) {
        self.is_playing = playing;
    }

    /// Replace the status-bar message.
    pub fn set_status(&mut self, message: String) {
        self.status_message = message;
    }

    /// Register a track; the first track added becomes the default
    /// target for keyboard MIDI input.
    pub fn add_track(&mut self, track_id: u32, name: String) {
        if self.selected_track.is_none() {
            self.selected_track = Some(track_id);
        }
        self.tracks.push((track_id, name));
    }

    /// Forget all tracks and clips and rewind the view (project reset).
    pub fn clear_tracks(&mut self) {
        self.tracks.clear();
        self.clips.clear();
        self.selected_track = None;
        self.next_clip_id = 0;
        self.timeline_scroll = 0.0;
    }

    /// Select the track at `index` in the track list; out-of-range is a no-op.
    pub fn select_track(&mut self, index: usize) {
        if let Some(&(track_id, _)) = self.tracks.get(index) {
            self.selected_track = Some(track_id);
        }
    }

    /// The track currently targeted by keyboard MIDI input, if any.
    pub fn selected_track(&self) -> Option<u32> {
        self.selected_track
    }

    /// Register a clip for timeline rendering.
    /// `notes` holds (pitch, offset_within_clip, duration) tuples.
    pub fn add_clip(&mut self, track_id: u32, clip_id: u32, start_time: f64, duration: f64, name: String, notes: Vec<(u8, f64, f64)>) {
        self.clips.push((track_id, clip_id, start_time, duration, name, notes));
    }

    /// End time of the latest-ending clip, or 10 s for an empty project.
    pub fn get_timeline_duration(&self) -> f64 {
        let mut latest_end: Option<f64> = None;
        for (_, _, start, dur, _, _) in &self.clips {
            let end = start + dur;
            if latest_end.map_or(true, |current| end > current) {
                latest_end = Some(end);
            }
        }
        latest_end.unwrap_or(10.0)
    }

    /// Mark a MIDI note as held (idempotent).
    pub fn add_active_note(&mut self, note: u8) {
        if !self.active_notes.contains(&note) {
            self.active_notes.push(note);
        }
    }

    /// Mark a MIDI note as released.
    pub fn remove_active_note(&mut self, note: u8) {
        self.active_notes.retain(|&held| held != note);
    }

    /// Current input mode.
    pub fn mode(&self) -> AppMode {
        self.mode
    }

    /// Scroll the visible window left by 20% of its width, clamped at 0.
    pub fn scroll_timeline_left(&mut self) {
        let step = self.timeline_visible_duration * 0.2;
        self.timeline_scroll = (self.timeline_scroll - step).max(0.0);
    }

    /// Scroll the visible window right by 20% of its width, clamped so it
    /// does not run past the end of the last clip.
    pub fn scroll_timeline_right(&mut self) {
        let step = self.timeline_visible_duration * 0.2;
        let total = self.get_timeline_duration();
        // min() before max(): the upper bound may be negative when the
        // project is shorter than the visible window, and 0 must win then.
        self.timeline_scroll = (self.timeline_scroll + step)
            .min(total - self.timeline_visible_duration)
            .max(0.0);
    }

    /// Zoom in (shrink the visible window); never below 1 second.
    pub fn zoom_timeline_in(&mut self) {
        self.timeline_visible_duration = (self.timeline_visible_duration * 0.8).max(1.0);
    }

    /// Zoom out (grow the visible window); capped at the project length
    /// but never below 1 second.
    pub fn zoom_timeline_out(&mut self) {
        let total = self.get_timeline_duration();
        self.timeline_visible_duration = (self.timeline_visible_duration * 1.25).min(total).max(1.0);
    }
}
/// Map keyboard keys to MIDI notes.
///
/// Uses the chromatic layout "awsedftgyhujkolp;'" — 1.5 octaves starting
/// from C4 (MIDI note 60). Keys outside the layout map to `None`.
pub fn key_to_midi_note(key: KeyCode) -> Option<u8> {
    // A character's position in this string is its semitone offset from C4;
    // this reproduces the original explicit per-key mapping.
    const KEY_ROW: &str = "awsedftgyhujkolp;'";
    const C4: u8 = 60;
    match key {
        // All layout characters are ASCII, so the byte index returned by
        // find() equals the character index (the semitone offset).
        KeyCode::Char(c) => KEY_ROW.find(c).map(|offset| C4 + offset as u8),
        _ => None,
    }
}
/// Convert pitch % 8 to braille dot bit position.
///
/// Braille dot numbering is column-major (dots 1-3 and 7 down the left
/// column, 4-6 and 8 down the right), so the bit order is deliberately
/// non-monotonic. Inputs above 7 yield 0 (no dot).
fn pitch_to_braille_bit(pitch_mod_8: u8) -> u8 {
    // Index = pitch class 0..=7, value = Unicode braille dot bit.
    const DOT_BITS: [u8; 8] = [0x01, 0x02, 0x04, 0x40, 0x08, 0x10, 0x20, 0x80];
    DOT_BITS
        .get(pitch_mod_8 as usize)
        .copied()
        .unwrap_or(0x00)
}
/// Draw the timeline view with clips
///
/// Renders one 2-line lane per track. Each terminal cell covers
/// `visible_duration / width` seconds; every note sounding inside that
/// slice contributes one braille dot (chosen by `pitch % 8`), so up to
/// eight pitch classes are visible per cell. The playhead column gets a
/// red background; the selected track's notes render on yellow, others
/// on cyan.
fn draw_timeline(f: &mut Frame, area: ratatui::layout::Rect, app: &TuiApp) {
    let num_tracks = app.tracks.len();
    // Use visible duration for the timeline window
    let visible_start = app.timeline_scroll;
    let visible_end = app.timeline_scroll + app.timeline_visible_duration;
    // Create the timeline block with visible range
    let block = Block::default()
        .borders(Borders::ALL)
        .title(format!("Timeline ({:.1}s - {:.1}s) | ←/→ scroll | -/+ zoom", visible_start, visible_end));
    let inner_area = block.inner(area);
    f.render_widget(block, area);
    // Calculate dimensions
    let width = inner_area.width as usize;
    if width == 0 || num_tracks == 0 {
        // Nothing to draw (degenerate area or empty project).
        return;
    }
    // Fixed track height: 2 lines per track
    let track_height = 2;
    // Build timeline content with braille characters
    let mut lines: Vec<Line> = Vec::new();
    for track_idx in 0..num_tracks {
        let track_id = if let Some((id, _)) = app.tracks.get(track_idx) {
            *id
        } else {
            continue;
        };
        // Create exactly 2 lines for this track
        for _ in 0..track_height {
            let mut spans = Vec::new();
            // Build the timeline character by character
            for char_x in 0..width {
                // Map character position to time, using scroll offset
                let time_pos = visible_start + (char_x as f64 / width as f64) * app.timeline_visible_duration;
                // Check if playhead is at this position
                // (tolerance is one cell's worth of time, so about one column matches)
                let is_playhead = (time_pos - app.playback_position).abs() < (app.timeline_visible_duration / width as f64);
                // Find all notes active at this time position on this track
                let mut braille_pattern: u8 = 0;
                let mut has_notes = false;
                for (clip_track_id, _clip_id, clip_start, _clip_duration, _name, notes) in &app.clips {
                    if *clip_track_id == track_id {
                        // Check each note in this clip
                        for (pitch, note_offset, note_duration) in notes {
                            // Note offsets are stored relative to the clip start.
                            let note_start = clip_start + note_offset;
                            let note_end = note_start + note_duration;
                            // Is this note active at current time position?
                            if time_pos >= note_start && time_pos < note_end {
                                let pitch_mod = pitch % 8;
                                // OR the dot in: concurrent notes stack within one cell.
                                braille_pattern |= pitch_to_braille_bit(pitch_mod);
                                has_notes = true;
                            }
                        }
                    }
                }
                // Determine color
                let color = if Some(track_id) == app.selected_track {
                    Color::Yellow
                } else {
                    Color::Cyan
                };
                // Create span
                if is_playhead {
                    // Playhead: red background
                    if has_notes {
                        // Show white notes with red background
                        // (0x2800 is the base of the Unicode braille block)
                        let braille_char = char::from_u32(0x2800 + braille_pattern as u32).unwrap_or(' ');
                        spans.push(Span::styled(braille_char.to_string(), Style::default().fg(Color::White).bg(Color::Red)));
                    } else {
                        spans.push(Span::styled(" ", Style::default().bg(Color::Red)));
                    }
                } else if has_notes {
                    // Show white braille pattern on colored background
                    let braille_char = char::from_u32(0x2800 + braille_pattern as u32).unwrap_or(' ');
                    spans.push(Span::styled(braille_char.to_string(), Style::default().fg(Color::White).bg(color)));
                } else {
                    // Empty space
                    spans.push(Span::raw(" "));
                }
            }
            lines.push(Line::from(spans));
        }
    }
    let paragraph = Paragraph::new(lines);
    f.render_widget(paragraph, inner_area);
}
/// Draw the TUI
///
/// Layout, top to bottom: title bar (3 rows), main content, status bar
/// (3 rows), command line (1 row). The main content splits 20/80 into the
/// track list and the timeline pane; the timeline pane further splits into
/// a playback-info header (4 rows) and the clip view.
pub fn draw_ui(f: &mut Frame, app: &TuiApp) {
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(3), // Title bar
            Constraint::Min(10),   // Main content
            Constraint::Length(3), // Status bar
            Constraint::Length(1), // Command line
        ])
        .split(f.size());
    // Title bar
    let title = Paragraph::new("Lightningbeam DAW")
        .style(Style::default().fg(Color::Cyan).add_modifier(Modifier::BOLD))
        .block(Block::default().borders(Borders::ALL));
    f.render_widget(title, chunks[0]);
    // Main content area - split into tracks and timeline
    let content_chunks = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(20), Constraint::Percentage(80)])
        .split(chunks[1]);
    // Tracks list - each track gets 2 lines to match timeline
    let track_items: Vec<ListItem> = app
        .tracks
        .iter()
        .map(|(id, name)| {
            // Highlight the track that receives keyboard MIDI input.
            let style = if app.selected_track == Some(*id) {
                Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD)
            } else {
                Style::default()
            };
            // Create a 2-line item: track info on first line, empty second line
            let lines = vec![
                Line::from(format!("T{}: {}", id, name)),
                Line::from(""),
            ];
            ListItem::new(lines).style(style)
        })
        .collect();
    let tracks_list = List::new(track_items)
        .block(Block::default().borders(Borders::ALL).title("Tracks"));
    f.render_widget(tracks_list, content_chunks[0]);
    // Timeline area - split vertically into playback info and timeline view
    let timeline_chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(4), Constraint::Min(5)])
        .split(content_chunks[1]);
    // Playback info
    let playback_info = vec![
        Line::from(vec![
            Span::raw("Position: "),
            Span::styled(
                format!("{:.2}s", app.playback_position),
                Style::default().fg(Color::Green),
            ),
            Span::raw(" | Status: "),
            Span::styled(
                if app.is_playing { "Playing" } else { "Stopped" },
                // Green while playing, red while stopped.
                if app.is_playing {
                    Style::default().fg(Color::Green)
                } else {
                    Style::default().fg(Color::Red)
                },
            ),
        ]),
        Line::from(format!("Active Notes: {}",
            app.active_notes
                .iter()
                .map(|n| format!("{} ", n))
                .collect::<String>()
        )),
    ];
    let info = Paragraph::new(playback_info)
        .block(Block::default().borders(Borders::ALL).title("Playback"));
    f.render_widget(info, timeline_chunks[0]);
    // Draw timeline
    draw_timeline(f, timeline_chunks[1], app);
    // Status bar
    let mode_indicator = match app.mode {
        AppMode::Command => "COMMAND",
        AppMode::Play => "PLAY",
    };
    let status_text = format!("Mode: {} | {}", mode_indicator, app.status_message);
    let status_bar = Paragraph::new(status_text)
        .style(Style::default().fg(Color::White))
        .block(Block::default().borders(Borders::ALL));
    f.render_widget(status_bar, chunks[2]);
    // Command line
    // Command mode shows a vim-style ':' prompt; Play mode shows a key legend.
    let command_line = if app.mode == AppMode::Command {
        format!(":{}", app.command_input)
    } else {
        String::from("ESC=cmd mode | awsedftgyhujkolp;'=notes | R=release notes | ?=help | SPACE=play/pause")
    };
    let cmd_widget = Paragraph::new(command_line).style(Style::default().fg(Color::Yellow));
    f.render_widget(cmd_widget, chunks[3]);
}
/// Run the TUI application
///
/// Owns the terminal for its lifetime: enables raw mode and the alternate
/// screen on entry and restores both before returning. Each loop iteration
/// redraws the UI, drains queued `AudioEvent`s from the engine, then waits
/// up to 100 ms for one keyboard event and dispatches it according to the
/// current mode. Exits on the `quit` command (Command mode) or Ctrl+Q
/// (Play mode).
pub fn run_tui(
    mut controller: EngineController,
    event_rx: Arc<Mutex<rtrb::Consumer<AudioEvent>>>,
) -> Result<(), Box<dyn std::error::Error>> {
    // Setup terminal
    enable_raw_mode()?;
    let mut stdout = io::stdout();
    execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;
    // Create app state
    let mut app = TuiApp::new();
    // Main loop
    loop {
        // Draw UI
        terminal.draw(|f| draw_ui(f, &app))?;
        // Poll for audio events
        // (drain everything queued since the last frame; pop never blocks)
        if let Ok(mut rx) = event_rx.lock() {
            while let Ok(event) = rx.pop() {
                match event {
                    AudioEvent::PlaybackPosition(pos) => {
                        app.update_playback_position(pos);
                    }
                    AudioEvent::PlaybackStopped => {
                        app.set_playing(false);
                    }
                    AudioEvent::TrackCreated(track_id, _, name) => {
                        app.add_track(track_id, name);
                    }
                    AudioEvent::RecordingStopped(clip_id, _pool_index, _waveform) => {
                        // Update status
                        app.set_status(format!("Recording stopped - Clip {}", clip_id));
                    }
                    AudioEvent::ProjectReset => {
                        app.clear_tracks();
                        app.set_status("Project reset".to_string());
                    }
                    _ => {}
                }
            }
        }
        // Handle keyboard input with timeout
        // (the 100 ms poll timeout doubles as the UI refresh interval)
        if event::poll(Duration::from_millis(100))? {
            if let Event::Key(key) = event::read()? {
                match app.mode() {
                    AppMode::Command => {
                        match key.code {
                            KeyCode::Left => {
                                // Scroll timeline left only if command buffer is empty
                                if app.command_input().is_empty() {
                                    app.scroll_timeline_left();
                                }
                            }
                            KeyCode::Right => {
                                // Scroll timeline right only if command buffer is empty
                                if app.command_input().is_empty() {
                                    app.scroll_timeline_right();
                                }
                            }
                            KeyCode::Char('-') | KeyCode::Char('_') => {
                                // Zoom out only if command buffer is empty
                                if app.command_input().is_empty() {
                                    app.zoom_timeline_out();
                                }
                            }
                            KeyCode::Char('+') | KeyCode::Char('=') => {
                                // Zoom in only if command buffer is empty
                                if app.command_input().is_empty() {
                                    app.zoom_timeline_in();
                                }
                            }
                            KeyCode::Char(' ') => {
                                // Spacebar toggles play/pause only if command buffer is empty
                                // Otherwise, add space to command
                                if app.command_input().is_empty() {
                                    if app.is_playing {
                                        controller.pause();
                                        app.set_playing(false);
                                        app.set_status("Paused".to_string());
                                    } else {
                                        controller.play();
                                        app.set_playing(true);
                                        app.set_status("Playing".to_string());
                                    }
                                } else {
                                    app.push_command_char(' ');
                                }
                            }
                            KeyCode::Esc => {
                                app.clear_command();
                            }
                            KeyCode::Enter => {
                                let command = app.command_input().to_string();
                                app.add_to_history(command.clone());
                                // Execute command
                                match execute_command(&command, &mut controller, &mut app) {
                                    // "Quit requested" is the sentinel error string
                                    // execute_command returns for the quit command.
                                    Err(e) if e == "Quit requested" => {
                                        break; // Exit the application
                                    }
                                    Err(e) => {
                                        app.set_status(format!("Error: {}", e));
                                    }
                                    Ok(_) => {}
                                }
                                app.clear_command();
                            }
                            KeyCode::Backspace => {
                                app.pop_command_char();
                            }
                            KeyCode::Up => {
                                app.history_up();
                            }
                            KeyCode::Down => {
                                app.history_down();
                            }
                            KeyCode::Char('i') => {
                                // Only switch to Play mode if command buffer is empty
                                if app.command_input().is_empty() {
                                    app.enter_play_mode();
                                } else {
                                    app.push_command_char('i');
                                }
                            }
                            KeyCode::Char(c) => {
                                app.push_command_char(c);
                            }
                            _ => {}
                        }
                    }
                    AppMode::Play => {
                        // Check for mode switch first
                        if key.code == KeyCode::Esc {
                            app.enter_command_mode();
                            continue;
                        }
                        // Check for quit
                        if key.code == KeyCode::Char('q') && key.modifiers.contains(KeyModifiers::CONTROL) {
                            break;
                        }
                        // Handle MIDI note playing
                        if let Some(note) = key_to_midi_note(key.code) {
                            if let Some(track_id) = app.selected_track() {
                                // Release all previous notes before playing new one
                                // (effectively monophonic: only the newest note stays held)
                                for prev_note in app.active_notes.clone() {
                                    controller.send_midi_note_off(track_id, prev_note);
                                }
                                app.active_notes.clear();
                                // Play the new note
                                controller.send_midi_note_on(track_id, note, 100);
                                app.add_active_note(note);
                            }
                        } else {
                            // Handle other play mode shortcuts
                            match key.code {
                                KeyCode::Char(' ') => {
                                    // Release all notes and toggle play/pause
                                    if let Some(track_id) = app.selected_track() {
                                        for note in app.active_notes.clone() {
                                            controller.send_midi_note_off(track_id, note);
                                        }
                                        app.active_notes.clear();
                                    }
                                    if app.is_playing {
                                        controller.pause();
                                        app.set_playing(false);
                                    } else {
                                        controller.play();
                                        app.set_playing(true);
                                    }
                                }
                                KeyCode::Char('s') if key.modifiers.contains(KeyModifiers::CONTROL) => {
                                    // Release all notes and stop
                                    if let Some(track_id) = app.selected_track() {
                                        for note in app.active_notes.clone() {
                                            controller.send_midi_note_off(track_id, note);
                                        }
                                        app.active_notes.clear();
                                    }
                                    controller.stop();
                                    app.set_playing(false);
                                }
                                KeyCode::Char('r') | KeyCode::Char('R') => {
                                    // Release all notes manually (r for release)
                                    if let Some(track_id) = app.selected_track() {
                                        for note in app.active_notes.clone() {
                                            controller.send_midi_note_off(track_id, note);
                                        }
                                        app.active_notes.clear();
                                    }
                                    app.set_status("All notes released".to_string());
                                }
                                KeyCode::Char('?') | KeyCode::Char('h') | KeyCode::Char('H') => {
                                    app.set_status("Play Mode: awsedftgyhujkolp;'=notes | R=release | SPACE=play/pause | ESC=command | Ctrl+Q=quit".to_string());
                                }
                                _ => {}
                            }
                        }
                    }
                }
            }
        }
    }
    // Restore terminal
    disable_raw_mode()?;
    execute!(
        terminal.backend_mut(),
        LeaveAlternateScreen,
        DisableMouseCapture
    )?;
    terminal.show_cursor()?;
    Ok(())
}
/// Execute a command string
///
/// Splits `command` on whitespace and dispatches on the first word.
/// Returns `Err(message)` for malformed input or failures; the exact
/// string "Quit requested" is a sentinel the caller (run_tui) treats
/// as a request to exit the application.
fn execute_command(
    command: &str,
    controller: &mut EngineController,
    app: &mut TuiApp,
) -> Result<(), String> {
    let parts: Vec<&str> = command.trim().split_whitespace().collect();
    if parts.is_empty() {
        // A blank command line is a no-op, not an error.
        return Ok(());
    }
    match parts[0] {
        "play" => {
            controller.play();
            app.set_playing(true);
            app.set_status("Playing".to_string());
        }
        "pause" => {
            controller.pause();
            app.set_playing(false);
            app.set_status("Paused".to_string());
        }
        "stop" => {
            controller.stop();
            app.set_playing(false);
            app.set_status("Stopped".to_string());
        }
        "seek" => {
            if parts.len() < 2 {
                return Err("Usage: seek <seconds>".to_string());
            }
            let pos: f64 = parts[1].parse().map_err(|_| "Invalid position")?;
            controller.seek(pos);
            app.set_status(format!("Seeked to {:.2}s", pos));
        }
        "track" => {
            if parts.len() < 2 {
                return Err("Usage: track <name>".to_string());
            }
            // Track names may contain spaces; everything after the verb is the name.
            let name = parts[1..].join(" ");
            controller.create_midi_track(name.clone());
            app.set_status(format!("Created MIDI track: {}", name));
        }
        "audiotrack" => {
            if parts.len() < 2 {
                return Err("Usage: audiotrack <name>".to_string());
            }
            let name = parts[1..].join(" ");
            controller.create_audio_track(name.clone());
            app.set_status(format!("Created audio track: {}", name));
        }
        "select" => {
            if parts.len() < 2 {
                return Err("Usage: select <track_number>".to_string());
            }
            // Index into the UI's track list (not an engine track ID).
            let idx: usize = parts[1].parse().map_err(|_| "Invalid track number")?;
            app.select_track(idx);
            app.set_status(format!("Selected track {}", idx));
        }
        "clip" => {
            if parts.len() < 4 {
                return Err("Usage: clip <track_id> <start_time> <duration>".to_string());
            }
            let track_id: u32 = parts[1].parse().map_err(|_| "Invalid track ID")?;
            let start_time: f64 = parts[2].parse().map_err(|_| "Invalid start time")?;
            let duration: f64 = parts[3].parse().map_err(|_| "Invalid duration")?;
            // Add clip to local UI state (empty clip, no notes)
            let clip_id = app.next_clip_id;
            app.next_clip_id += 1;
            app.add_clip(track_id, clip_id, start_time, duration, format!("Clip {}", clip_id), Vec::new());
            controller.create_midi_clip(track_id, start_time, duration);
            app.set_status(format!("Created MIDI clip on track {} at {:.2}s for {:.2}s", track_id, start_time, duration));
        }
        "loadmidi" => {
            if parts.len() < 3 {
                return Err("Usage: loadmidi <track_id> <file_path> [start_time]".to_string());
            }
            let track_id: u32 = parts[1].parse().map_err(|_| "Invalid track ID")?;
            let file_path = parts[2];
            // Optional start time; a malformed value silently falls back to 0.0.
            let start_time: f64 = if parts.len() >= 4 {
                parts[3].parse().unwrap_or(0.0)
            } else {
                0.0
            };
            // Load the MIDI file
            // NOTE(review): 48000 Hz is hard-coded here for event timestamping —
            // confirm it matches the engine's actual sample rate.
            match load_midi_file(file_path, app.next_clip_id, 48000) {
                Ok(mut midi_clip) => {
                    midi_clip.start_time = start_time;
                    let clip_id = midi_clip.id;
                    let duration = midi_clip.duration;
                    let event_count = midi_clip.events.len();
                    // Extract note data for visualization
                    // by pairing each note-on with its matching note-off.
                    let mut notes = Vec::new();
                    let mut active_notes: std::collections::HashMap<u8, f64> = std::collections::HashMap::new();
                    let sample_rate = 48000.0; // Sample rate used for loading MIDI
                    for event in &midi_clip.events {
                        // High nibble of the status byte is the message type.
                        let status = event.status & 0xF0;
                        let time_seconds = event.timestamp as f64 / sample_rate;
                        match status {
                            0x90 if event.data2 > 0 => {
                                // Note on
                                active_notes.insert(event.data1, time_seconds);
                            }
                            0x80 | 0x90 => {
                                // Note off (or note on with velocity 0)
                                if let Some(start) = active_notes.remove(&event.data1) {
                                    let note_duration = time_seconds - start;
                                    notes.push((event.data1, start, note_duration));
                                }
                            }
                            _ => {}
                        }
                    }
                    // Add to local UI state with note data
                    app.add_clip(track_id, clip_id, start_time, duration, file_path.to_string(), notes);
                    app.next_clip_id += 1;
                    // Send to audio engine
                    controller.add_loaded_midi_clip(track_id, midi_clip);
                    app.set_status(format!("Loaded {} ({} events, {:.2}s) to track {} at {:.2}s",
                        file_path, event_count, duration, track_id, start_time));
                }
                Err(e) => {
                    return Err(format!("Failed to load MIDI file: {}", e));
                }
            }
        }
        "reset" => {
            controller.reset();
            app.clear_tracks();
            app.set_status("Project reset".to_string());
        }
        "q" | "quit" => {
            // Sentinel error matched by run_tui to break its main loop.
            return Err("Quit requested".to_string());
        }
        "help" | "h" | "?" => {
            // Show comprehensive help
            let help_msg = concat!(
                "Commands: ",
                "play | pause | stop | seek <s> | ",
                "track <name> | audiotrack <name> | select <idx> | ",
                "clip <track_id> <start> <dur> | ",
                "loadmidi <track_id> <file> [start] | ",
                "reset | quit | help | ",
                "Keys: ←/→ scroll | -/+ zoom"
            );
            app.set_status(help_msg.to_string());
        }
        _ => {
            return Err(format!("Unknown command: '{}'. Type 'help' for commands", parts[0]));
        }
    }
    Ok(())
}

View File

@ -0,0 +1,109 @@
use daw_backend::audio::node_graph::{
nodes::{AudioOutputNode, GainNode, OscillatorNode},
ConnectionError, InstrumentGraph, SignalType,
};
#[test]
fn test_basic_node_graph() {
    // Build a minimal synth graph at 44.1 kHz with 512-sample buffers.
    let mut graph = InstrumentGraph::new(44100, 512);
    // Nodes: a tone source, a gain stage, and the terminal audio output.
    let osc_idx = graph.add_node(Box::new(OscillatorNode::new("Oscillator")));
    let gain_idx = graph.add_node(Box::new(GainNode::new("Gain")));
    let output_idx = graph.add_node(Box::new(AudioOutputNode::new("Output")));
    // Wire the chain: Oscillator -> Gain -> Output.
    assert!(graph.connect(osc_idx, 0, gain_idx, 0).is_ok());
    assert!(graph.connect(gain_idx, 0, output_idx, 0).is_ok());
    // The output node is where the graph reads its rendered audio from.
    graph.set_output_node(Some(output_idx));
    // Parameter 0 is frequency; tune the oscillator to A440.
    if let Some(node) = graph.get_graph_node_mut(osc_idx) {
        node.node.set_parameter(0, 440.0);
    }
    // Render one buffer with no incoming MIDI events.
    let mut rendered = vec![0.0f32; 512];
    graph.process(&mut rendered, &[]);
    // The oscillator must produce a signal...
    let produced_audio = rendered.iter().any(|&s| s != 0.0);
    assert!(produced_audio, "Expected non-zero audio output from oscillator");
    // ...and the peak must stay within the normalized [-1, 1] range.
    let peak = rendered.iter().map(|s| s.abs()).fold(0.0f32, f32::max);
    assert!(peak <= 1.0, "Output amplitude too high: {}", peak);
}
#[test]
fn test_connection_type_validation() {
    let mut graph = InstrumentGraph::new(44100, 512);

    let osc_idx = graph.add_node(Box::new(OscillatorNode::new("Oscillator")));
    let output_idx = graph.add_node(Box::new(AudioOutputNode::new("Output")));

    // Audio output -> audio input is type-matched and must succeed.
    let result = graph.connect(osc_idx, 0, output_idx, 0);
    assert!(result.is_ok());

    // A second oscillator's input port 0 is a CV port, so routing an audio
    // output into it must be rejected with a TypeMismatch error that reports
    // both the expected (CV) and actual (Audio) signal types.
    let osc2_idx = graph.add_node(Box::new(OscillatorNode::new("Oscillator2")));
    let result = graph.connect(osc_idx, 0, osc2_idx, 0);
    assert!(result.is_err());
    match result {
        Err(ConnectionError::TypeMismatch { expected, got }) => {
            assert_eq!(expected, SignalType::CV);
            assert_eq!(got, SignalType::Audio);
        }
        _ => panic!("Expected TypeMismatch error"),
    }
}
#[test]
fn test_cycle_detection() {
    let mut graph = InstrumentGraph::new(44100, 512);

    let g1 = graph.add_node(Box::new(GainNode::new("Gain1")));
    let g2 = graph.add_node(Box::new(GainNode::new("Gain2")));
    let g3 = graph.add_node(Box::new(GainNode::new("Gain3")));

    // A linear chain g1 -> g2 -> g3 is acyclic and must be accepted.
    for (from, to) in [(g1, g2), (g2, g3)] {
        assert!(graph.connect(from, 0, to, 0).is_ok());
    }

    // Closing the loop (g3 -> g1) would form a cycle and must be rejected.
    let result = graph.connect(g3, 0, g1, 0);
    assert!(result.is_err());
    match result {
        Err(ConnectionError::WouldCreateCycle) => {
            // Expected: the graph detected the back-edge.
        }
        _ => panic!("Expected WouldCreateCycle error"),
    }
}

352
src-tauri/Cargo.lock generated
View File

@ -52,6 +52,12 @@ dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "alsa"
version = "0.9.1"
@ -238,7 +244,7 @@ dependencies = [
"bitflags 2.8.0",
"cexpr",
"clang-sys",
"itertools",
"itertools 0.13.0",
"proc-macro2",
"quote",
"regex",
@ -469,6 +475,21 @@ dependencies = [
"toml 0.8.19",
]
[[package]]
name = "cassowary"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]]
name = "castaway"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
dependencies = [
"rustversion",
]
[[package]]
name = "cc"
version = "1.2.10"
@ -594,6 +615,19 @@ dependencies = [
"memchr",
]
[[package]]
name = "compact_str"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f"
dependencies = [
"castaway",
"cfg-if",
"itoa 1.0.14",
"ryu",
"static_assertions",
]
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@ -754,6 +788,31 @@ version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crossterm"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
dependencies = [
"bitflags 2.8.0",
"crossterm_winapi",
"libc",
"mio 0.8.11",
"parking_lot",
"signal-hook",
"signal-hook-mio",
"winapi",
]
[[package]]
name = "crossterm_winapi"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b"
dependencies = [
"winapi",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
@ -836,20 +895,140 @@ dependencies = [
"syn 2.0.96",
]
[[package]]
name = "dasp_envelope"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ec617ce7016f101a87fe85ed44180839744265fae73bb4aa43e7ece1b7668b6"
dependencies = [
"dasp_frame",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
]
[[package]]
name = "dasp_frame"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2a3937f5fe2135702897535c8d4a5553f8b116f76c1529088797f2eee7c5cd6"
dependencies = [
"dasp_sample",
]
[[package]]
name = "dasp_graph"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39b17b071a1fa4c78054730085620c3bb22dc5fded00483312557a3fdf26d7c4"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_signal",
"dasp_slice",
"petgraph 0.5.1",
]
[[package]]
name = "dasp_interpolate"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fc975a6563bb7ca7ec0a6c784ead49983a21c24835b0bc96eea11ee407c7486"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_sample",
]
[[package]]
name = "dasp_peak"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cf88559d79c21f3d8523d91250c397f9a15b5fc72fbb3f87fdb0a37b79915bf"
dependencies = [
"dasp_frame",
"dasp_sample",
]
[[package]]
name = "dasp_ring_buffer"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07d79e19b89618a543c4adec9c5a347fe378a19041699b3278e616e387511ea1"
[[package]]
name = "dasp_rms"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6c5dcb30b7e5014486e2822537ea2beae50b19722ffe2ed7549ab03774575aa"
dependencies = [
"dasp_frame",
"dasp_ring_buffer",
"dasp_sample",
]
[[package]]
name = "dasp_sample"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f"
[[package]]
name = "dasp_signal"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa1ab7d01689c6ed4eae3d38fe1cea08cba761573fbd2d592528d55b421077e7"
dependencies = [
"dasp_envelope",
"dasp_frame",
"dasp_interpolate",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
"dasp_window",
]
[[package]]
name = "dasp_slice"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e1c7335d58e7baedafa516cb361360ff38d6f4d3f9d9d5ee2a2fc8e27178fa1"
dependencies = [
"dasp_frame",
"dasp_sample",
]
[[package]]
name = "dasp_window"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99ded7b88821d2ce4e8b842c9f1c86ac911891ab89443cc1de750cae764c5076"
dependencies = [
"dasp_sample",
]
[[package]]
name = "daw-backend"
version = "0.1.0"
dependencies = [
"cpal",
"crossterm",
"dasp_envelope",
"dasp_graph",
"dasp_interpolate",
"dasp_peak",
"dasp_ring_buffer",
"dasp_rms",
"dasp_sample",
"dasp_signal",
"midly",
"petgraph 0.6.5",
"ratatui",
"rtrb",
"serde",
"serde_json",
"symphonia",
]
@ -1157,6 +1336,18 @@ dependencies = [
"rustc_version",
]
[[package]]
name = "fixedbitset"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
[[package]]
name = "fixedbitset"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
version = "1.0.35"
@ -1173,6 +1364,12 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "foreign-types"
version = "0.5.0"
@ -1605,6 +1802,11 @@ name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
name = "heck"
@ -1956,6 +2158,15 @@ dependencies = [
"once_cell",
]
[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.13.0"
@ -2205,6 +2416,15 @@ dependencies = [
"value-bag",
]
[[package]]
name = "lru"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
dependencies = [
"hashbrown 0.15.2",
]
[[package]]
name = "mac"
version = "0.1.1"
@ -2304,6 +2524,18 @@ dependencies = [
"simd-adler32",
]
[[package]]
name = "mio"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.48.0",
]
[[package]]
name = "mio"
version = "1.0.3"
@ -2853,6 +3085,12 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pathdiff"
version = "0.2.3"
@ -2865,6 +3103,26 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "petgraph"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
dependencies = [
"fixedbitset 0.2.0",
"indexmap 1.9.3",
]
[[package]]
name = "petgraph"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
dependencies = [
"fixedbitset 0.4.2",
"indexmap 2.7.0",
]
[[package]]
name = "phf"
version = "0.8.0"
@ -3265,6 +3523,26 @@ dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "ratatui"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f44c9e68fd46eda15c646fbb85e1040b657a58cdc8c98db1d97a55930d991eef"
dependencies = [
"bitflags 2.8.0",
"cassowary",
"compact_str",
"crossterm",
"itertools 0.12.1",
"lru",
"paste",
"stability",
"strum",
"unicode-segmentation",
"unicode-truncate",
"unicode-width",
]
[[package]]
name = "raw-window-handle"
version = "0.6.2"
@ -3791,6 +4069,27 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2"
dependencies = [
"libc",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-mio"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd"
dependencies = [
"libc",
"mio 0.8.11",
"signal-hook",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.2"
@ -3897,6 +4196,16 @@ dependencies = [
"system-deps",
]
[[package]]
name = "stability"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac"
dependencies = [
"quote",
"syn 2.0.96",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@ -3941,6 +4250,28 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.96",
]
[[package]]
name = "swift-rs"
version = "1.0.7"
@ -4718,7 +5049,7 @@ dependencies = [
"backtrace",
"bytes",
"libc",
"mio",
"mio 1.0.3",
"pin-project-lite",
"signal-hook-registry",
"socket2",
@ -5000,6 +5331,23 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-truncate"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf"
dependencies = [
"itertools 0.13.0",
"unicode-segmentation",
"unicode-width",
]
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "url"
version = "2.5.4"

View File

@ -1,5 +1,6 @@
use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use tauri::{Emitter};
#[derive(serde::Serialize)]
@ -31,6 +32,9 @@ pub struct AudioState {
channels: u32,
next_track_id: u32,
next_pool_index: usize,
next_graph_node_id: u32,
// Track next node ID for each VoiceAllocator template (VoiceAllocator backend ID -> next template node ID)
template_node_counters: HashMap<u32, u32>,
}
impl Default for AudioState {
@ -41,6 +45,8 @@ impl Default for AudioState {
channels: 0,
next_track_id: 0,
next_pool_index: 0,
next_graph_node_id: 0,
template_node_counters: HashMap::new(),
}
}
}
@ -75,6 +81,15 @@ impl EventEmitter for TauriEventEmitter {
AudioEvent::NoteOff(note) => {
SerializedAudioEvent::NoteOff { note }
}
AudioEvent::GraphNodeAdded(track_id, node_id, node_type) => {
SerializedAudioEvent::GraphNodeAdded { track_id, node_id, node_type }
}
AudioEvent::GraphConnectionError(track_id, message) => {
SerializedAudioEvent::GraphConnectionError { track_id, message }
}
AudioEvent::GraphStateChanged(track_id) => {
SerializedAudioEvent::GraphStateChanged { track_id }
}
_ => return, // Ignore other event types for now
};
@ -97,6 +112,7 @@ pub async fn audio_init(
controller.reset();
audio_state.next_track_id = 0;
audio_state.next_pool_index = 0;
audio_state.next_graph_node_id = 0;
return Ok(format!(
"Audio already initialized (DAW state reset): {} Hz, {} ch",
audio_state.sample_rate, audio_state.channels
@ -123,6 +139,7 @@ pub async fn audio_init(
audio_state.channels = system.channels;
audio_state.next_track_id = 0;
audio_state.next_pool_index = 0;
audio_state.next_graph_node_id = 0;
Ok(info)
}
@ -524,6 +541,158 @@ pub async fn audio_update_midi_clip_notes(
}
}
// Node graph commands
/// Add a node of `node_type` to the node graph of track `track_id` at canvas
/// position (`x`, `y`).
///
/// Returns the UI-side node ID assigned to the new node, or an error if the
/// audio engine has not been initialized.
///
/// NOTE(review): IDs are assigned optimistically here and assume the engine
/// numbers nodes sequentially in the same order — confirm this stays in sync
/// with the backend's own node numbering.
#[tauri::command]
pub async fn graph_add_node(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    node_type: String,
    x: f32,
    y: f32,
) -> Result<u32, String> {
    let mut audio_state = state.lock().unwrap();
    // Bail out BEFORE consuming an ID: the previous version incremented
    // `next_graph_node_id` even when the engine was not running, which would
    // permanently desynchronize UI node IDs from the engine's numbering.
    if audio_state.controller.is_none() {
        return Err("Audio not initialized".to_string());
    }
    let node_id = audio_state.next_graph_node_id;
    audio_state.next_graph_node_id += 1;
    let controller = audio_state
        .controller
        .as_mut()
        .expect("controller presence checked above");
    controller.graph_add_node(track_id, node_type, x, y);
    Ok(node_id)
}
/// Add a node of `node_type` to the voice template owned by the
/// VoiceAllocator `voice_allocator_id` on track `track_id`, at canvas
/// position (`x`, `y`).
///
/// Returns the template-local node ID (a per-VoiceAllocator counter), or an
/// error if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_add_node_to_template(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    voice_allocator_id: u32,
    node_type: String,
    x: f32,
    y: f32,
) -> Result<u32, String> {
    let mut audio_state = state.lock().unwrap();
    // Bail out BEFORE consuming a template-local ID: the previous version
    // advanced the counter even when the engine was not running, which would
    // desynchronize template node IDs from the engine's numbering.
    if audio_state.controller.is_none() {
        return Err("Audio not initialized".to_string());
    }
    // Per-VoiceAllocator counter; starts at 0 the first time one is seen.
    let counter = audio_state
        .template_node_counters
        .entry(voice_allocator_id)
        .or_insert(0);
    let template_node_id = *counter;
    *counter += 1;
    let controller = audio_state
        .controller
        .as_mut()
        .expect("controller presence checked above");
    controller.graph_add_node_to_template(track_id, voice_allocator_id, node_type, x, y);
    Ok(template_node_id)
}
/// Remove node `node_id` from the node graph of track `track_id`.
/// Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_remove_node(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    node_id: u32,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_remove_node(track_id, node_id);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
/// Connect output port `from_port` of node `from_node` to input port
/// `to_port` of node `to_node` in the node graph of track `track_id`.
/// Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_connect(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    from_node: u32,
    from_port: usize,
    to_node: u32,
    to_port: usize,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_connect(track_id, from_node, from_port, to_node, to_port);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
/// Connect output port `from_port` of node `from_node` to input port
/// `to_port` of node `to_node` inside the voice template owned by
/// VoiceAllocator `voice_allocator_id` on track `track_id`.
/// Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_connect_in_template(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    voice_allocator_id: u32,
    from_node: u32,
    from_port: usize,
    to_node: u32,
    to_port: usize,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_connect_in_template(track_id, voice_allocator_id, from_node, from_port, to_node, to_port);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
/// Remove the connection from output port `from_port` of node `from_node`
/// to input port `to_port` of node `to_node` in the node graph of track
/// `track_id`. Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_disconnect(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    from_node: u32,
    from_port: usize,
    to_node: u32,
    to_port: usize,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_disconnect(track_id, from_node, from_port, to_node, to_port);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
/// Set parameter `param_id` of node `node_id` to `value` in the node graph
/// of track `track_id`. Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_set_parameter(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    node_id: u32,
    param_id: u32,
    value: f32,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_set_parameter(track_id, node_id, param_id, value);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
/// Designate node `node_id` as the output node of the node graph of track
/// `track_id`. Fails if the audio engine has not been initialized.
#[tauri::command]
pub async fn graph_set_output_node(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    node_id: u32,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    match audio_state.controller.as_mut() {
        Some(controller) => {
            controller.graph_set_output_node(track_id, node_id);
            Ok(())
        }
        None => Err("Audio not initialized".to_string()),
    }
}
#[derive(serde::Serialize, Clone)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {
@ -534,6 +703,9 @@ pub enum SerializedAudioEvent {
RecordingError { message: String },
NoteOn { note: u8, velocity: u8 },
NoteOff { note: u8 },
GraphNodeAdded { track_id: u32, node_id: u32, node_type: String },
GraphConnectionError { track_id: u32, message: String },
GraphStateChanged { track_id: u32 },
}
// audio_get_events command removed - events are now pushed via Tauri event system

View File

@ -214,6 +214,14 @@ pub fn run() {
audio::audio_update_midi_clip_notes,
audio::audio_send_midi_note_on,
audio::audio_send_midi_note_off,
audio::graph_add_node,
audio::graph_add_node_to_template,
audio::graph_remove_node,
audio::graph_connect,
audio::graph_connect_in_template,
audio::graph_disconnect,
audio::graph_set_parameter,
audio::graph_set_output_node,
])
// .manage(window_counter)
.build(tauri::generate_context!())

View File

@ -0,0 +1,22 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<!-- Three connected nodes -->
<!-- Node 1 (left) -->
<rect x="2" y="8" width="6" height="8" rx="1" fill="#4d4d4d" stroke="#fff" stroke-width="1.5"/>
<!-- Node 2 (top right) -->
<rect x="16" y="2" width="6" height="6" rx="1" fill="#4d4d4d" stroke="#fff" stroke-width="1.5"/>
<!-- Node 3 (bottom right) -->
<rect x="16" y="16" width="6" height="6" rx="1" fill="#4d4d4d" stroke="#fff" stroke-width="1.5"/>
<!-- Connection lines -->
<path d="M 8 10 L 16 5" stroke="#4CAF50" stroke-width="2" stroke-linecap="round"/>
<path d="M 8 14 L 16 19" stroke="#2196F3" stroke-width="2" stroke-linecap="round"/>
<!-- Port dots -->
<circle cx="8" cy="10" r="2" fill="#4CAF50"/>
<circle cx="16" cy="5" r="2" fill="#4CAF50"/>
<circle cx="8" cy="14" r="2" fill="#2196F3"/>
<circle cx="16" cy="19" r="2" fill="#2196F3"/>
</svg>

After

Width:  |  Height:  |  Size: 939 B

217
src/drawflow.css Normal file
View File

@ -0,0 +1,217 @@
/* Drawflow node-editor theme variables — dark palette matching the DAW UI.
   These custom properties are consumed via var() by the drawflow rules. */
:root {
/* Editor canvas: dark background with a dotted grid. */
--dfBackgroundColor: rgba(47, 47, 47, 1);
--dfBackgroundSize: 64px;
--dfBackgroundImage: radial-gradient(rgba(77, 77, 77, 1) 1px, transparent 1px);
/* Node box, default state. */
--dfNodeType: flex;
--dfNodeTypeFloat: none;
--dfNodeBackgroundColor: rgba(88, 88, 88, 1);
--dfNodeTextColor: rgba(255, 255, 255, 1);
--dfNodeBorderSize: 2px;
--dfNodeBorderColor: rgba(99, 99, 99, 1);
--dfNodeBorderRadius: 12px;
--dfNodeMinHeight: 40px;
--dfNodeMinWidth: 160px;
--dfNodePaddingTop: 15px;
--dfNodePaddingBottom: 15px;
--dfNodeBoxShadowHL: 0px;
--dfNodeBoxShadowVL: 2px;
--dfNodeBoxShadowBR: 15px;
--dfNodeBoxShadowS: 2px;
--dfNodeBoxShadowColor: rgba(0, 0, 0, 0.37);
/* Node box, hover state. */
--dfNodeHoverBackgroundColor: rgba(115, 115, 115, 1);
--dfNodeHoverTextColor: rgba(255, 255, 255, 1);
--dfNodeHoverBorderSize: 2px;
--dfNodeHoverBorderColor: rgba(135, 135, 135, 1);
--dfNodeHoverBorderRadius: 12px;
--dfNodeHoverBoxShadowHL: 0px;
--dfNodeHoverBoxShadowVL: 2px;
--dfNodeHoverBoxShadowBR: 15px;
--dfNodeHoverBoxShadowS: 2px;
--dfNodeHoverBoxShadowColor: rgba(24, 24, 24, 1);
/* Node box, selected state. */
--dfNodeSelectedBackgroundColor: rgba(132, 132, 132, 1);
--dfNodeSelectedTextColor: #ffffff;
--dfNodeSelectedBorderSize: 2px;
--dfNodeSelectedBorderColor: rgba(183, 183, 183, 1);
--dfNodeSelectedBorderRadius: 12px;
--dfNodeSelectedBoxShadowHL: 0px;
--dfNodeSelectedBoxShadowVL: 2px;
--dfNodeSelectedBoxShadowBR: 15px;
--dfNodeSelectedBoxShadowS: 2px;
--dfNodeSelectedBoxShadowColor: rgba(0, 0, 0, 0.28);
/* Input ports (left side of a node). */
--dfInputBackgroundColor: rgba(135, 135, 135, 1);
--dfInputBorderSize: 2px;
--dfInputBorderColor: #000000;
--dfInputBorderRadius: 10px;
--dfInputLeft: -27px;
--dfInputHeight: 15px;
--dfInputWidth: 15px;
--dfInputHoverBackgroundColor: #ffffff;
--dfInputHoverBorderSize: 2px;
--dfInputHoverBorderColor: #000000;
--dfInputHoverBorderRadius: 50px;
/* Output ports (right side of a node). */
--dfOutputBackgroundColor: rgba(147, 147, 147, 1);
--dfOutputBorderSize: 2px;
--dfOutputBorderColor: #000000;
--dfOutputBorderRadius: 50px;
--dfOutputRight: -3px;
--dfOutputHeight: 15px;
--dfOutputWidth: 15px;
--dfOutputHoverBackgroundColor: #ffffff;
--dfOutputHoverBorderSize: 2px;
--dfOutputHoverBorderColor: #000000;
--dfOutputHoverBorderRadius: 50px;
/* Connection wires between ports. */
--dfLineWidth: 5px;
--dfLineColor: rgba(137, 137, 137, 1);
--dfLineHoverColor: #4682b4;
--dfLineSelectedColor: #43b993;
/* Reroute points dropped on a connection. */
--dfRerouteBorderWidth: 2px;
--dfRerouteBorderColor: #000000;
--dfRerouteBackgroundColor: rgba(193, 193, 193, 1);
--dfRerouteHoverBorderWidth: 2px;
--dfRerouteHoverBorderColor: #000000;
--dfRerouteHoverBackgroundColor: #ffffff;
/* Per-node delete button. */
--dfDeleteDisplay: block;
--dfDeleteColor: #ffffff;
--dfDeleteBackgroundColor: #000000;
--dfDeleteBorderSize: 2px;
--dfDeleteBorderColor: #ffffff;
--dfDeleteBorderRadius: 50px;
--dfDeleteTop: -15px;
--dfDeleteHoverColor: #000000;
--dfDeleteHoverBackgroundColor: #ffffff;
--dfDeleteHoverBorderSize: 2px;
--dfDeleteHoverBorderColor: #000000;
--dfDeleteHoverBorderRadius: 50px;
}
#drawflow {
background: var(--dfBackgroundColor);
background-size: var(--dfBackgroundSize) var(--dfBackgroundSize);
background-image: var(--dfBackgroundImage);
}
.drawflow .drawflow-node {
display: var(--dfNodeType);
background: var(--dfNodeBackgroundColor);
color: var(--dfNodeTextColor);
border: var(--dfNodeBorderSize) solid var(--dfNodeBorderColor);
border-radius: var(--dfNodeBorderRadius);
min-height: var(--dfNodeMinHeight);
width: auto;
min-width: var(--dfNodeMinWidth);
padding-top: var(--dfNodePaddingTop);
padding-bottom: var(--dfNodePaddingBottom);
-webkit-box-shadow: var(--dfNodeBoxShadowHL) var(--dfNodeBoxShadowVL) var(--dfNodeBoxShadowBR) var(--dfNodeBoxShadowS) var(--dfNodeBoxShadowColor);
box-shadow: var(--dfNodeBoxShadowHL) var(--dfNodeBoxShadowVL) var(--dfNodeBoxShadowBR) var(--dfNodeBoxShadowS) var(--dfNodeBoxShadowColor);
}
.drawflow .drawflow-node:hover {
background: var(--dfNodeHoverBackgroundColor);
color: var(--dfNodeHoverTextColor);
border: var(--dfNodeHoverBorderSize) solid var(--dfNodeHoverBorderColor);
border-radius: var(--dfNodeHoverBorderRadius);
-webkit-box-shadow: var(--dfNodeHoverBoxShadowHL) var(--dfNodeHoverBoxShadowVL) var(--dfNodeHoverBoxShadowBR) var(--dfNodeHoverBoxShadowS) var(--dfNodeHoverBoxShadowColor);
box-shadow: var(--dfNodeHoverBoxShadowHL) var(--dfNodeHoverBoxShadowVL) var(--dfNodeHoverBoxShadowBR) var(--dfNodeHoverBoxShadowS) var(--dfNodeHoverBoxShadowColor);
}
.drawflow .drawflow-node.selected {
background: var(--dfNodeSelectedBackgroundColor);
color: var(--dfNodeSelectedTextColor);
border: var(--dfNodeSelectedBorderSize) solid var(--dfNodeSelectedBorderColor);
border-radius: var(--dfNodeSelectedBorderRadius);
-webkit-box-shadow: var(--dfNodeSelectedBoxShadowHL) var(--dfNodeSelectedBoxShadowVL) var(--dfNodeSelectedBoxShadowBR) var(--dfNodeSelectedBoxShadowS) var(--dfNodeSelectedBoxShadowColor);
box-shadow: var(--dfNodeSelectedBoxShadowHL) var(--dfNodeSelectedBoxShadowVL) var(--dfNodeSelectedBoxShadowBR) var(--dfNodeSelectedBoxShadowS) var(--dfNodeSelectedBoxShadowColor);
}
.drawflow .drawflow-node .input {
left: var(--dfInputLeft);
background: var(--dfInputBackgroundColor);
border: var(--dfInputBorderSize) solid var(--dfInputBorderColor);
border-radius: var(--dfInputBorderRadius);
height: var(--dfInputHeight);
width: var(--dfInputWidth);
}
.drawflow .drawflow-node .input:hover {
background: var(--dfInputHoverBackgroundColor);
border: var(--dfInputHoverBorderSize) solid var(--dfInputHoverBorderColor);
border-radius: var(--dfInputHoverBorderRadius);
}
.drawflow .drawflow-node .outputs {
float: var(--dfNodeTypeFloat);
}
.drawflow .drawflow-node .output {
right: var(--dfOutputRight);
background: var(--dfOutputBackgroundColor);
border: var(--dfOutputBorderSize) solid var(--dfOutputBorderColor);
border-radius: var(--dfOutputBorderRadius);
height: var(--dfOutputHeight);
width: var(--dfOutputWidth);
}
.drawflow .drawflow-node .output:hover {
background: var(--dfOutputHoverBackgroundColor);
border: var(--dfOutputHoverBorderSize) solid var(--dfOutputHoverBorderColor);
border-radius: var(--dfOutputHoverBorderRadius);
}
.drawflow .connection .main-path {
stroke-width: var(--dfLineWidth);
stroke: var(--dfLineColor);
}
.drawflow .connection .main-path:hover {
stroke: var(--dfLineHoverColor);
}
.drawflow .connection .main-path.selected {
stroke: var(--dfLineSelectedColor);
}
.drawflow .connection .point {
stroke: var(--dfRerouteBorderColor);
stroke-width: var(--dfRerouteBorderWidth);
fill: var(--dfRerouteBackgroundColor);
}
.drawflow .connection .point:hover {
stroke: var(--dfRerouteHoverBorderColor);
stroke-width: var(--dfRerouteHoverBorderWidth);
fill: var(--dfRerouteHoverBackgroundColor);
}
.drawflow-delete {
display: var(--dfDeleteDisplay);
color: var(--dfDeleteColor);
background: var(--dfDeleteBackgroundColor);
border: var(--dfDeleteBorderSize) solid var(--dfDeleteBorderColor);
border-radius: var(--dfDeleteBorderRadius);
}
.parent-node .drawflow-delete {
top: var(--dfDeleteTop);
}
.drawflow-delete:hover {
color: var(--dfDeleteHoverColor);
background: var(--dfDeleteHoverBackgroundColor);
border: var(--dfDeleteHoverBorderSize) solid var(--dfDeleteHoverBorderColor);
border-radius: var(--dfDeleteHoverBorderRadius);
}

1
src/drawflow.min.css vendored Normal file
View File

@ -0,0 +1 @@
.drawflow,.drawflow .parent-node{position:relative}.parent-drawflow{display:flex;overflow:hidden;touch-action:none;outline:0}.drawflow{width:100%;height:100%;user-select:none;perspective:0}.drawflow .drawflow-node{display:flex;align-items:center;position:absolute;background:#0ff;width:160px;min-height:40px;border-radius:4px;border:2px solid #000;color:#000;z-index:2;padding:15px}.drawflow .drawflow-node.selected{background:red}.drawflow .drawflow-node:hover{cursor:move}.drawflow .drawflow-node .inputs,.drawflow .drawflow-node .outputs{width:0}.drawflow .drawflow-node .drawflow_content_node{width:100%;display:block}.drawflow .drawflow-node .input,.drawflow .drawflow-node .output{position:relative;width:20px;height:20px;background:#fff;border-radius:50%;border:2px solid #000;cursor:crosshair;z-index:1;margin-bottom:5px}.drawflow .drawflow-node .input{left:-27px;top:2px;background:#ff0}.drawflow .drawflow-node .output{right:-3px;top:2px}.drawflow svg{z-index:0;position:absolute;overflow:visible!important}.drawflow .connection{position:absolute;pointer-events:none;aspect-ratio:1/1}.drawflow .connection .main-path{fill:none;stroke-width:5px;stroke:#4682b4;pointer-events:all}.drawflow .connection .main-path:hover{stroke:#1266ab;cursor:pointer}.drawflow .connection .main-path.selected{stroke:#43b993}.drawflow .connection .point{cursor:move;stroke:#000;stroke-width:2;fill:#fff;pointer-events:all}.drawflow .connection .point.selected,.drawflow .connection .point:hover{fill:#1266ab}.drawflow .main-path{fill:none;stroke-width:5px;stroke:#4682b4}.drawflow-delete{position:absolute;display:block;width:30px;height:30px;background:#000;color:#fff;z-index:4;border:2px solid #fff;line-height:30px;font-weight:700;text-align:center;border-radius:50%;font-family:monospace;cursor:pointer}.drawflow>.drawflow-delete{margin-left:-15px;margin-top:15px}.parent-node .drawflow-delete{right:-15px;top:-15px}

1
src/drawflow.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -25,6 +25,11 @@
<script src="UPNG.js"></script>
<script src="pako.js"></script>
<script type="module" src="/d3-interpolate-path.js"></script>
<!-- Drawflow for node-based editor -->
<link rel="stylesheet" href="/drawflow.min.css">
<script src="/drawflow.min.js"></script>
<script type="module" src="/main.js" defer></script>
</head>

View File

@ -79,10 +79,10 @@ export const defaultLayouts = {
children: [
{
type: "vertical-grid",
percent: 70,
percent: 50,
children: [
{ type: "pane", name: "timelineV2" },
{ type: "pane", name: "piano"}
{ type: "pane", name: "nodeEditor"}
]
},
{ type: "pane", name: "infopanel" }

View File

@ -62,6 +62,7 @@ import {
} from "./styles.js";
import { Icon } from "./icon.js";
import { AlphaSelectionBar, ColorSelectorWidget, ColorWidget, HueSelectionBar, SaturationValueSelectionGradient, TimelineWindow, TimelineWindowV2, VirtualPiano, PianoRollEditor, Widget } from "./widgets.js";
import { nodeTypes, SignalType, getPortClass } from "./nodeTypes.js";
// State management
import {
@ -6041,6 +6042,686 @@ async function renderMenu() {
}
updateMenu();
/**
 * Build the node-based audio graph editor pane.
 * Creates a Drawflow canvas plus a node palette, wires up palette
 * drag-and-drop, connection/node lifecycle events, and returns the
 * container element for the pane system to insert.
 * NOTE(review): relies on the global `Drawflow` constructor loaded by
 * <script src="/drawflow.min.js"> — confirm it is loaded before this runs.
 */
function nodeEditor() {
// Create container for the node editor
const container = document.createElement("div");
container.id = "node-editor-container";
// Create the Drawflow canvas
const editorDiv = document.createElement("div");
editorDiv.id = "drawflow";
editorDiv.style.width = "100%";
editorDiv.style.height = "100%";
editorDiv.style.position = "relative";
container.appendChild(editorDiv);
// Create node palette
const palette = document.createElement("div");
palette.className = "node-palette";
palette.innerHTML = `
<h3>Nodes</h3>
${Object.entries(nodeTypes)
.filter(([type, def]) => type !== 'TemplateInput' && type !== 'TemplateOutput') // Hide template nodes
.map(([type, def]) => `
<div class="node-palette-item" data-node-type="${type}">
${def.name}
</div>
`).join('')}
`;
container.appendChild(palette);
// Initialize Drawflow editor (will be set up after DOM insertion)
// `editor` is shared by every helper below via closure.
let editor = null;
let nodeIdCounter = 1;
// Track expanded VoiceAllocator nodes
const expandedNodes = new Set(); // Set of node IDs that are expanded
const nodeParents = new Map(); // Map of child node ID -> parent VoiceAllocator ID
// Wait for DOM insertion
// NOTE(review): assumes the caller inserts `container` into the document
// within 100ms — confirm the pane system attaches synchronously.
setTimeout(() => {
const drawflowDiv = container.querySelector("#drawflow");
if (!drawflowDiv) return;
editor = new Drawflow(drawflowDiv);
editor.reroute = true;
editor.reroute_fix_curvature = true;
editor.force_first_input = false;
editor.start();
// Store editor reference in context
context.nodeEditor = editor;
// Add palette item drag-and-drop handlers
const paletteItems = container.querySelectorAll(".node-palette-item");
let draggedNodeType = null;
paletteItems.forEach(item => {
// Make items draggable
item.setAttribute('draggable', 'true');
// Click handler for quick add
item.addEventListener("click", () => {
const nodeType = item.getAttribute("data-node-type");
addNode(nodeType, 100, 100, null);
});
// Drag start
item.addEventListener('dragstart', (e) => {
draggedNodeType = item.getAttribute('data-node-type');
e.dataTransfer.effectAllowed = 'copy';
e.dataTransfer.setData('text/plain', draggedNodeType); // Required for drag to work
console.log('Drag started:', draggedNodeType);
});
// Drag end
item.addEventListener('dragend', () => {
console.log('Drag ended');
draggedNodeType = null;
});
});
// Add drop handler to drawflow canvas
drawflowDiv.addEventListener('dragover', (e) => {
e.preventDefault();
e.dataTransfer.dropEffect = 'copy';
});
drawflowDiv.addEventListener('drop', (e) => {
e.preventDefault();
// Get node type from dataTransfer instead of global variable
const nodeType = e.dataTransfer.getData('text/plain');
console.log('Drop event fired, nodeType:', nodeType);
if (!nodeType) {
console.log('No nodeType in drop data, aborting');
return;
}
// Get drop position relative to the editor
// Undo the canvas pan (precanvas offset) and zoom to get graph coordinates.
const rect = drawflowDiv.getBoundingClientRect();
const precanvasX = editor.precanvas?.x || 0;
const precanvasY = editor.precanvas?.y || 0;
const zoom = editor.zoom || 1;
const x = (e.clientX - rect.left - precanvasX) / zoom;
const y = (e.clientY - rect.top - precanvasY) / zoom;
console.log('Position calculation:', { clientX: e.clientX, clientY: e.clientY, rectLeft: rect.left, rectTop: rect.top, precanvasX, precanvasY, zoom, x, y });
// Check if dropping into an expanded VoiceAllocator
// Hit-test the drop point against each expanded allocator's contents area.
let parentNodeId = null;
for (const expandedNodeId of expandedNodes) {
const contentsArea = document.getElementById(`voice-allocator-contents-${expandedNodeId}`);
if (contentsArea) {
const contentsRect = contentsArea.getBoundingClientRect();
if (e.clientX >= contentsRect.left && e.clientX <= contentsRect.right &&
e.clientY >= contentsRect.top && e.clientY <= contentsRect.bottom) {
parentNodeId = expandedNodeId;
console.log(`Dropping into VoiceAllocator ${expandedNodeId} at position (${x}, ${y})`);
break;
}
}
}
// Add the node
console.log(`Adding node ${nodeType} at (${x}, ${y}) with parent ${parentNodeId}`);
addNode(nodeType, x, y, parentNodeId);
// Clear the draggedNodeType
draggedNodeType = null;
});
// Connection event handlers
editor.on("connectionCreated", (connection) => {
handleConnectionCreated(connection);
});
editor.on("connectionRemoved", (connection) => {
handleConnectionRemoved(connection);
});
// Node events
editor.on("nodeCreated", (nodeId) => {
setupNodeParameters(nodeId);
// Add double-click handler for VoiceAllocator expansion
setTimeout(() => {
const nodeElement = document.getElementById(`node-${nodeId}`);
if (nodeElement) {
nodeElement.addEventListener('dblclick', (e) => {
// Prevent double-click from bubbling to canvas
e.stopPropagation();
handleNodeDoubleClick(nodeId);
});
}
}, 50);
});
// Node moved - resize parent VoiceAllocator
editor.on("nodeMoved", (nodeId) => {
const node = editor.getNodeFromId(nodeId);
if (node && node.data.parentNodeId) {
resizeVoiceAllocatorToFit(node.data.parentNodeId);
}
});
// Node removed - prevent deletion of template nodes
// NOTE(review): by the time "nodeRemoved" fires, Drawflow has likely
// already deleted the element, so getElementById probably returns null
// and this template-node guard never triggers — confirm against the
// Drawflow event order and move the check to a "beforeRemove" hook if so.
editor.on("nodeRemoved", (nodeId) => {
const nodeElement = document.getElementById(`node-${nodeId}`);
if (nodeElement && nodeElement.getAttribute('data-template-node') === 'true') {
console.warn('Cannot delete template nodes');
// TODO: Re-add the node if it was deleted
return;
}
// Clean up parent-child tracking
const parentId = nodeParents.get(nodeId);
nodeParents.delete(nodeId);
// Resize parent if needed
if (parentId) {
resizeVoiceAllocatorToFit(parentId);
}
});
}, 100);
// Add a node to the graph.
// nodeType: key into `nodeTypes`; x/y: graph coordinates; parentNodeId:
// Drawflow id of an expanded VoiceAllocator when the node is dropped inside
// one, else null. Registers the node with the backend asynchronously and
// stores the returned backend id in the node's data.
// NOTE(review): `nodeIdCounter` (baked into the HTML ids) and the id
// returned by `editor.addNode` are independent counters — they coincide
// only until a node is removed; confirm whether the HTML ids are relied on.
function addNode(nodeType, x, y, parentNodeId = null) {
if (!editor) return;
const nodeDef = nodeTypes[nodeType];
if (!nodeDef) return;
const nodeId = nodeIdCounter++;
const html = nodeDef.getHTML(nodeId);
// Count inputs and outputs by type
const inputsCount = nodeDef.inputs.length;
const outputsCount = nodeDef.outputs.length;
// Add node to Drawflow
const drawflowNodeId = editor.addNode(
nodeType,
inputsCount,
outputsCount,
x,
y,
`node-${nodeType.toLowerCase()}`,
{ nodeType, backendId: null, parentNodeId: parentNodeId },
html
);
// Track parent-child relationship
if (parentNodeId !== null) {
nodeParents.set(drawflowNodeId, parentNodeId);
console.log(`Node ${drawflowNodeId} (${nodeType}) is child of VoiceAllocator ${parentNodeId}`);
// Mark template nodes as non-deletable
const isTemplateNode = (nodeType === 'TemplateInput' || nodeType === 'TemplateOutput');
// Add CSS class to mark as child node
setTimeout(() => {
const nodeElement = document.getElementById(`node-${drawflowNodeId}`);
if (nodeElement) {
nodeElement.classList.add('child-node');
nodeElement.setAttribute('data-parent-node', parentNodeId);
if (isTemplateNode) {
nodeElement.classList.add('template-node');
nodeElement.setAttribute('data-template-node', 'true');
}
// Only show if parent is currently expanded
if (!expandedNodes.has(parentNodeId)) {
nodeElement.style.display = 'none';
}
}
// Auto-resize parent VoiceAllocator after adding child node
resizeVoiceAllocatorToFit(parentNodeId);
}, 10);
}
// Apply port styling based on signal types
setTimeout(() => {
styleNodePorts(drawflowNodeId, nodeDef);
}, 10);
// Send command to backend
// If parent node exists, add to VoiceAllocator template; otherwise add to main graph
const commandName = parentNodeId ? "graph_add_node_to_template" : "graph_add_node";
const commandArgs = parentNodeId
? {
trackId: 0,
voiceAllocatorId: editor.getNodeFromId(parentNodeId).data.backendId,
nodeType: nodeType,
x: x,
y: y
}
: {
trackId: 0,
nodeType: nodeType,
x: x,
y: y
};
invoke(commandName, commandArgs).then(backendNodeId => {
console.log(`Node ${nodeType} added with backend ID: ${backendNodeId} (parent: ${parentNodeId})`);
// Store backend node ID using Drawflow's update method
editor.updateNodeDataFromId(drawflowNodeId, { nodeType, backendId: backendNodeId, parentNodeId: parentNodeId });
console.log("Verifying stored backend ID:", editor.getNodeFromId(drawflowNodeId).data.backendId);
// If this is an AudioOutput node, automatically set it as the graph output
if (nodeType === "AudioOutput") {
console.log(`Setting node ${backendNodeId} as graph output`);
invoke("graph_set_output_node", {
trackId: 0,
nodeId: backendNodeId
}).then(() => {
console.log("Output node set successfully");
}).catch(err => {
console.error("Failed to set output node:", err);
});
}
// If this is a VoiceAllocator, automatically create template I/O nodes inside it
// (recursive calls to addNode with this node as the parent).
if (nodeType === "VoiceAllocator") {
setTimeout(() => {
// Get the node position
const node = editor.getNodeFromId(drawflowNodeId);
if (node) {
// Create TemplateInput on the left
addNode("TemplateInput", node.pos_x + 50, node.pos_y + 100, drawflowNodeId);
// Create TemplateOutput on the right
addNode("TemplateOutput", node.pos_x + 450, node.pos_y + 100, drawflowNodeId);
}
}, 100);
}
}).catch(err => {
console.error("Failed to add node to backend:", err);
showError("Failed to add node: " + err);
});
}
// Grow an expanded VoiceAllocator node so every registered child node fits
// inside it. Does nothing for a missing id, an allocator with no children,
// or one that is currently collapsed.
function resizeVoiceAllocatorToFit(voiceAllocatorNodeId) {
    if (!voiceAllocatorNodeId) return;
    const parentNode = editor.getNodeFromId(voiceAllocatorNodeId);
    const parentElement = document.getElementById(`node-${voiceAllocatorNodeId}`);
    if (!parentNode || !parentElement) return;
    // Collect the ids of every child registered under this allocator.
    const childIds = [...nodeParents.entries()]
        .filter(([, parentId]) => parentId === voiceAllocatorNodeId)
        .map(([childId]) => childId);
    if (childIds.length === 0) return;
    // Bounding box of the children in graph coordinates.
    let minX = Infinity;
    let minY = Infinity;
    let maxX = -Infinity;
    let maxY = -Infinity;
    for (const childId of childIds) {
        const childNode = editor.getNodeFromId(childId);
        const childElement = document.getElementById(`node-${childId}`);
        if (!childNode || !childElement) continue;
        // Fall back to a nominal footprint when the element reports no size.
        const width = childElement.offsetWidth || 200;
        const height = childElement.offsetHeight || 150;
        minX = Math.min(minX, childNode.pos_x);
        minY = Math.min(minY, childNode.pos_y);
        maxX = Math.max(maxX, childNode.pos_x + width);
        maxY = Math.max(maxY, childNode.pos_y + height);
    }
    // Generous padding on all sides, plus room for the allocator header.
    const margin = 60;
    const headerHeight = 120;
    const requiredWidth = (maxX - minX) + (margin * 2);
    const requiredHeight = (maxY - minY) + (margin * 2) + headerHeight;
    // Never shrink below the default expanded footprint.
    const finalWidth = Math.max(requiredWidth, 600);
    const finalHeight = Math.max(requiredHeight, 400);
    // A collapsed allocator keeps its natural size; only resize when expanded.
    if (expandedNodes.has(voiceAllocatorNodeId)) {
        parentElement.style.width = `${finalWidth}px`;
        parentElement.style.height = `${finalHeight}px`;
        parentElement.style.minWidth = `${finalWidth}px`;
        parentElement.style.minHeight = `${finalHeight}px`;
        console.log(`Resized VoiceAllocator ${voiceAllocatorNodeId} to ${finalWidth}x${finalHeight}`);
    }
}
// Style node ports based on signal types.
// Colors each port with the CSS class for its signal type (see getPortClass)
// and attaches the port's name as a label: prepended for inputs, appended
// for outputs. Ports beyond the node definition's declared count are ignored.
function styleNodePorts(nodeId, nodeDef) {
    const nodeElement = document.getElementById(`node-${nodeId}`);
    if (!nodeElement) return;
    // Style input ports.
    const inputs = nodeElement.querySelectorAll(".input");
    inputs.forEach((input, index) => {
        if (index < nodeDef.inputs.length) {
            const portDef = nodeDef.inputs[index];
            // Drawflow puts the `input_N` class on the `.input` element
            // itself, so apply the type class directly to it. (The previous
            // descendant querySelector never matched anything, and only
            // enumerated ports 0-3.)
            input.classList.add(getPortClass(portDef.type));
            // Add label before the connector.
            const label = document.createElement("span");
            label.textContent = portDef.name;
            input.insertBefore(label, input.firstChild);
        }
    });
    // Style output ports.
    const outputs = nodeElement.querySelectorAll(".output");
    outputs.forEach((output, index) => {
        if (index < nodeDef.outputs.length) {
            const portDef = nodeDef.outputs[index];
            // Same reasoning as inputs: the `.output` element is the connector.
            output.classList.add(getPortClass(portDef.type));
            // Add label after the connector.
            const label = document.createElement("span");
            label.textContent = portDef.name;
            output.appendChild(label);
        }
    });
}
// Setup parameter event listeners for a node.
// Wires every `.node-slider` in the node's HTML so that dragging it
// (a) does not drag the node itself, (b) updates the on-node value readout,
// and (c) forwards the new value to the audio backend once the node has a
// backend id. Deferred 100ms so Drawflow has inserted the node into the DOM.
function setupNodeParameters(nodeId) {
    setTimeout(() => {
        const nodeElement = document.getElementById(`node-${nodeId}`);
        if (!nodeElement) return;
        const sliders = nodeElement.querySelectorAll(".node-slider");
        sliders.forEach(slider => {
            // Prevent node dragging when interacting with slider.
            slider.addEventListener("mousedown", (e) => {
                e.stopPropagation();
            });
            slider.addEventListener("pointerdown", (e) => {
                e.stopPropagation();
            });
            slider.addEventListener("input", (e) => {
                const paramId = parseInt(e.target.getAttribute("data-param"), 10);
                const value = parseFloat(e.target.value);
                const nodeData = editor.getNodeFromId(nodeId);
                if (!nodeData) return;
                const nodeDef = nodeTypes[nodeData.name];
                const param = nodeDef && nodeDef.parameters[paramId];
                if (param) {
                    // Locate the readout <span> inside this slider's own
                    // .node-param wrapper. (The previous id-based lookup
                    // `#${param.name}-${nodeId}` never matched: the HTML ids
                    // are abbreviations like "freq-"/"g1-" and embed a
                    // counter that need not equal the Drawflow node id.)
                    const wrapper = e.target.closest(".node-param");
                    const displaySpan = wrapper ? wrapper.querySelector("label span") : null;
                    if (displaySpan) {
                        // Hz values are shown as integers, everything else with 2 decimals.
                        displaySpan.textContent = value.toFixed(param.unit === 'Hz' ? 0 : 2);
                    }
                }
                // Send to backend (only once the async add has returned an id).
                if (nodeData.data.backendId !== null) {
                    invoke("graph_set_parameter", {
                        trackId: 0,
                        nodeId: nodeData.data.backendId,
                        paramId: paramId,
                        value: value
                    }).catch(err => {
                        console.error("Failed to set parameter:", err);
                    });
                }
            });
        });
    }, 100);
}
// Handle double-click on nodes: toggles a VoiceAllocator between its
// collapsed header-only view and an expanded view that reveals its contents
// area and child nodes. Other node types are ignored.
function handleNodeDoubleClick(nodeId) {
    const node = editor.getNodeFromId(nodeId);
    if (!node) return;
    // Only VoiceAllocator nodes can be expanded.
    if (node.data.nodeType !== 'VoiceAllocator') return;
    const nodeElement = document.getElementById(`node-${nodeId}`);
    if (!nodeElement) return;
    const contentsArea = document.getElementById(`voice-allocator-contents-${nodeId}`);
    if (!contentsArea) return;
    // Toggle the expanded state, then apply the matching presentation.
    const expanding = !expandedNodes.has(nodeId);
    if (expanding) {
        expandedNodes.add(nodeId);
        nodeElement.classList.add('expanded');
    } else {
        expandedNodes.delete(nodeId);
        nodeElement.classList.remove('expanded');
    }
    // Expanded nodes get a fixed 600x400 footprint; collapsing clears the
    // inline sizes so the node returns to its natural dimensions.
    const width = expanding ? '600px' : '';
    const height = expanding ? '400px' : '';
    nodeElement.style.width = width;
    nodeElement.style.height = height;
    nodeElement.style.minWidth = width;
    nodeElement.style.minHeight = height;
    contentsArea.style.display = expanding ? 'block' : 'none';
    // Child nodes are only visible while their parent is expanded.
    for (const [childId, parentId] of nodeParents.entries()) {
        if (parentId !== nodeId) continue;
        const childElement = document.getElementById(`node-${childId}`);
        if (childElement) {
            childElement.style.display = expanding ? 'block' : 'none';
        }
    }
    if (expanding) {
        console.log('Expanded VoiceAllocator node:', nodeId);
    } else {
        console.log('Collapsed VoiceAllocator node:', nodeId);
    }
}
// Handle connection creation.
// Validates a connection the user just drew on the canvas (signal types must
// match), styles it by signal type, and mirrors it into the backend — either
// inside a VoiceAllocator template when both endpoints share the same parent
// allocator, or in the track's main graph otherwise. Invalid or failed
// connections are removed from the canvas again.
function handleConnectionCreated(connection) {
console.log("handleConnectionCreated called:", connection);
const outputNode = editor.getNodeFromId(connection.output_id);
const inputNode = editor.getNodeFromId(connection.input_id);
console.log("Output node:", outputNode, "Input node:", inputNode);
if (!outputNode || !inputNode) {
console.log("Missing node - returning");
return;
}
console.log("Output node name:", outputNode.name, "Input node name:", inputNode.name);
// Node names are the nodeTypes keys (addNode passes the type as the name).
const outputDef = nodeTypes[outputNode.name];
const inputDef = nodeTypes[inputNode.name];
console.log("Output def:", outputDef, "Input def:", inputDef);
if (!outputDef || !inputDef) {
console.log("Missing node definition - returning");
return;
}
// Extract port indices from connection class names
// Drawflow uses 1-based indexing, but our arrays are 0-based
const outputPort = parseInt(connection.output_class.replace("output_", "")) - 1;
const inputPort = parseInt(connection.input_class.replace("input_", "")) - 1;
console.log("Port indices (0-based) - output:", outputPort, "input:", inputPort);
console.log("Output class:", connection.output_class, "Input class:", connection.input_class);
// Validate signal types
console.log("Checking port bounds - outputPort:", outputPort, "< outputs.length:", outputDef.outputs.length, "inputPort:", inputPort, "< inputs.length:", inputDef.inputs.length);
if (outputPort < outputDef.outputs.length && inputPort < inputDef.inputs.length) {
const outputType = outputDef.outputs[outputPort].type;
const inputType = inputDef.inputs[inputPort].type;
console.log("Signal types - output:", outputType, "input:", inputType);
if (outputType !== inputType) {
console.log("TYPE MISMATCH! Removing connection");
// Type mismatch - remove connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
showError(`Cannot connect ${outputType} to ${inputType}`);
return;
}
console.log("Types match - proceeding with connection");
// Style the connection based on signal type
// (deferred so Drawflow has rendered the connection SVG first)
setTimeout(() => {
const connectionElement = document.querySelector(
`.connection.node_in_node-${connection.input_id}.node_out_node-${connection.output_id}`
);
if (connectionElement) {
connectionElement.classList.add(`connection-${outputType}`);
}
}, 10);
// Send to backend
console.log("Backend IDs - output:", outputNode.data.backendId, "input:", inputNode.data.backendId);
// NOTE(review): if either backend id is still null (the async addNode
// round-trip has not finished) the connection stays canvas-only and is
// silently never mirrored to the backend — confirm whether that is intended.
if (outputNode.data.backendId !== null && inputNode.data.backendId !== null) {
// Check if both nodes are inside the same VoiceAllocator
// Convert connection IDs to numbers to match Map keys
const outputId = parseInt(connection.output_id);
const inputId = parseInt(connection.input_id);
const outputParent = nodeParents.get(outputId);
const inputParent = nodeParents.get(inputId);
console.log(`Parent detection - output node ${outputId} parent: ${outputParent}, input node ${inputId} parent: ${inputParent}`);
if (outputParent && inputParent && outputParent === inputParent) {
// Both nodes are inside the same VoiceAllocator - connect in template
const parentNode = editor.getNodeFromId(outputParent);
console.log(`Connecting in VoiceAllocator template ${parentNode.data.backendId}: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
invoke("graph_connect_in_template", {
trackId: 0,
voiceAllocatorId: parentNode.data.backendId,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).then(() => {
console.log("Template connection successful");
}).catch(err => {
console.error("Failed to connect nodes in template:", err);
showError("Template connection failed: " + err);
// Remove the connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
});
} else {
// Normal connection in main graph
console.log(`Connecting: node ${outputNode.data.backendId} port ${outputPort} -> node ${inputNode.data.backendId} port ${inputPort}`);
invoke("graph_connect", {
trackId: 0,
fromNode: outputNode.data.backendId,
fromPort: outputPort,
toNode: inputNode.data.backendId,
toPort: inputPort
}).then(() => {
console.log("Connection successful");
}).catch(err => {
console.error("Failed to connect nodes:", err);
showError("Connection failed: " + err);
// Remove the connection
editor.removeSingleConnection(
connection.output_id,
connection.input_id,
connection.output_class,
connection.input_class
);
});
}
}
} else {
console.log("Port validation FAILED - ports out of bounds");
}
}
// Handle connection removal: mirror a connection the user deleted on the
// canvas into the backend graph. Nodes without a backend id (still pending
// registration) are skipped.
function handleConnectionRemoved(connection) {
    const outputNode = editor.getNodeFromId(connection.output_id);
    const inputNode = editor.getNodeFromId(connection.input_id);
    if (!outputNode || !inputNode) return;
    // Drawflow port classes are 1-based ("output_1"); backend ports are 0-based.
    const outputPort = parseInt(connection.output_class.replace("output_", ""), 10) - 1;
    const inputPort = parseInt(connection.input_class.replace("input_", ""), 10) - 1;
    // Only nodes known to the backend can be disconnected there.
    if (outputNode.data.backendId === null || inputNode.data.backendId === null) return;
    invoke("graph_disconnect", {
        trackId: 0,
        fromNode: outputNode.data.backendId,
        fromPort: outputPort,
        toNode: inputNode.data.backendId,
        toPort: inputPort
    }).catch(err => {
        console.error("Failed to disconnect nodes:", err);
    });
}
// Show error message
function showError(message) {
const errorDiv = document.createElement("div");
errorDiv.className = "node-editor-error";
errorDiv.textContent = message;
container.appendChild(errorDiv);
setTimeout(() => {
errorDiv.remove();
}, 3000);
}
return container;
}
function piano() {
let piano_cvs = document.createElement("canvas");
piano_cvs.className = "piano";
@ -6234,6 +6915,10 @@ const panes = {
name: "piano-roll",
func: pianoRoll,
},
nodeEditor: {
name: "node-editor",
func: nodeEditor,
},
};
/**

444
src/nodeTypes.js Normal file
View File

@ -0,0 +1,444 @@
// Node type definitions for the audio node graph editor
// Each node type defines its inputs, outputs, parameters, and HTML template
/**
* Signal types for node ports
* These match the backend SignalType enum
*/
export const SignalType = {
AUDIO: 'audio', // Blue circles
MIDI: 'midi', // Green squares
CV: 'cv' // Orange diamonds
};
/**
* Node category for organization in the palette
*/
export const NodeCategory = {
INPUT: 'input',
GENERATOR: 'generator',
EFFECT: 'effect',
UTILITY: 'utility',
OUTPUT: 'output'
};
/**
* Get CSS class for a port based on its signal type
*/
export function getPortClass(signalType) {
return `connector-${signalType}`;
}
/**
 * Node type catalog
 * Maps node type names to their definitions.
 * Each definition provides: display name, palette category, description,
 * typed input/output port lists (indices match backend port numbers),
 * parameter descriptors (ids match backend parameter ids), and getHTML(),
 * which returns the node's inner HTML for Drawflow.
 * NOTE(review): the <span> ids inside getHTML embed abbreviations ("freq-",
 * "g1-", ...) that do not equal the corresponding parameter names — anything
 * looking spans up by parameter name will miss; confirm intended id scheme.
 */
export const nodeTypes = {
Oscillator: {
name: 'Oscillator',
category: NodeCategory.GENERATOR,
description: 'Oscillator with multiple waveforms and CV modulation',
inputs: [
{ name: 'V/Oct', type: SignalType.CV, index: 0 },
{ name: 'FM', type: SignalType.CV, index: 1 }
],
outputs: [
{ name: 'Audio', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'frequency', label: 'Frequency', min: 20, max: 20000, default: 440, unit: 'Hz' },
{ id: 1, name: 'amplitude', label: 'Amplitude', min: 0, max: 1, default: 0.5, unit: '' },
{ id: 2, name: 'waveform', label: 'Waveform', min: 0, max: 3, default: 0, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Oscillator</div>
<div class="node-param">
<label>Waveform: <span id="wave-${nodeId}">Sine</span></label>
<input type="range"
class="node-slider"
data-node="${nodeId}"
data-param="2"
min="0"
max="3"
value="0"
step="1">
</div>
<div class="node-param">
<label>Frequency: <span id="freq-${nodeId}">440</span> Hz</label>
<input type="range"
class="node-slider"
data-node="${nodeId}"
data-param="0"
min="20"
max="20000"
value="440"
step="1">
</div>
<div class="node-param">
<label>Amplitude: <span id="amp-${nodeId}">0.5</span></label>
<input type="range"
class="node-slider"
data-node="${nodeId}"
data-param="1"
min="0"
max="1"
value="0.5"
step="0.01">
</div>
</div>
`
},
Gain: {
name: 'Gain',
category: NodeCategory.UTILITY,
description: 'VCA (voltage-controlled amplifier) - CV multiplies gain',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 },
{ name: 'CV', type: SignalType.CV, index: 1 }
],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'gain', label: 'Gain', min: 0, max: 2, default: 1, unit: 'x' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Gain</div>
<div class="node-param">
<label>Gain: <span id="gain-${nodeId}">1.0</span>x</label>
<input type="range"
class="node-slider"
data-node="${nodeId}"
data-param="0"
min="0"
max="2"
value="1"
step="0.01">
</div>
</div>
`
},
Mixer: {
name: 'Mixer',
category: NodeCategory.UTILITY,
description: 'Mix up to 4 audio inputs with independent gain controls',
inputs: [
{ name: 'Input 1', type: SignalType.AUDIO, index: 0 },
{ name: 'Input 2', type: SignalType.AUDIO, index: 1 },
{ name: 'Input 3', type: SignalType.AUDIO, index: 2 },
{ name: 'Input 4', type: SignalType.AUDIO, index: 3 }
],
outputs: [
{ name: 'Mixed Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'gain1', label: 'Gain 1', min: 0, max: 2, default: 1, unit: 'x' },
{ id: 1, name: 'gain2', label: 'Gain 2', min: 0, max: 2, default: 1, unit: 'x' },
{ id: 2, name: 'gain3', label: 'Gain 3', min: 0, max: 2, default: 1, unit: 'x' },
{ id: 3, name: 'gain4', label: 'Gain 4', min: 0, max: 2, default: 1, unit: 'x' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Mixer</div>
<div class="node-param">
<label>Gain 1: <span id="g1-${nodeId}">1.0</span>x</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="0" max="2" value="1" step="0.01">
</div>
<div class="node-param">
<label>Gain 2: <span id="g2-${nodeId}">1.0</span>x</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0" max="2" value="1" step="0.01">
</div>
<div class="node-param">
<label>Gain 3: <span id="g3-${nodeId}">1.0</span>x</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="2" min="0" max="2" value="1" step="0.01">
</div>
<div class="node-param">
<label>Gain 4: <span id="g4-${nodeId}">1.0</span>x</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="3" min="0" max="2" value="1" step="0.01">
</div>
</div>
`
},
Filter: {
name: 'Filter',
category: NodeCategory.EFFECT,
description: 'Biquad filter with lowpass/highpass modes',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 },
{ name: 'Cutoff CV', type: SignalType.CV, index: 1 }
],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'cutoff', label: 'Cutoff', min: 20, max: 20000, default: 1000, unit: 'Hz' },
{ id: 1, name: 'resonance', label: 'Resonance', min: 0.1, max: 10, default: 0.707, unit: 'Q' },
{ id: 2, name: 'type', label: 'Type', min: 0, max: 1, default: 0, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Filter</div>
<div class="node-param">
<label>Cutoff: <span id="cutoff-${nodeId}">1000</span> Hz</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="20" max="20000" value="1000" step="1">
</div>
<div class="node-param">
<label>Resonance: <span id="res-${nodeId}">0.707</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0.1" max="10" value="0.707" step="0.01">
</div>
<div class="node-param">
<label>Type: <span id="ftype-${nodeId}">LP</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="2" min="0" max="1" value="0" step="1">
</div>
</div>
`
},
ADSR: {
name: 'ADSR',
category: NodeCategory.UTILITY,
description: 'Attack-Decay-Sustain-Release envelope generator',
inputs: [
{ name: 'Gate', type: SignalType.CV, index: 0 }
],
outputs: [
{ name: 'Envelope', type: SignalType.CV, index: 0 }
],
parameters: [
{ id: 0, name: 'attack', label: 'Attack', min: 0.001, max: 5, default: 0.01, unit: 's' },
{ id: 1, name: 'decay', label: 'Decay', min: 0.001, max: 5, default: 0.1, unit: 's' },
{ id: 2, name: 'sustain', label: 'Sustain', min: 0, max: 1, default: 0.7, unit: '' },
{ id: 3, name: 'release', label: 'Release', min: 0.001, max: 5, default: 0.2, unit: 's' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">ADSR</div>
<div class="node-param">
<label>A: <span id="a-${nodeId}">0.01</span>s</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="0.001" max="5" value="0.01" step="0.001">
</div>
<div class="node-param">
<label>D: <span id="d-${nodeId}">0.1</span>s</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0.001" max="5" value="0.1" step="0.001">
</div>
<div class="node-param">
<label>S: <span id="s-${nodeId}">0.7</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="2" min="0" max="1" value="0.7" step="0.01">
</div>
<div class="node-param">
<label>R: <span id="r-${nodeId}">0.2</span>s</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="3" min="0.001" max="5" value="0.2" step="0.001">
</div>
</div>
`
},
MidiInput: {
name: 'MidiInput',
category: NodeCategory.INPUT,
description: 'MIDI input - receives MIDI events from track',
inputs: [],
outputs: [
{ name: 'MIDI Out', type: SignalType.MIDI, index: 0 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">MIDI Input</div>
<div class="node-info">Receives MIDI from track</div>
</div>
`
},
MidiToCV: {
name: 'MidiToCV',
category: NodeCategory.UTILITY,
description: 'Convert MIDI notes to CV signals',
inputs: [
{ name: 'MIDI In', type: SignalType.MIDI, index: 0 }
],
outputs: [
{ name: 'V/Oct', type: SignalType.CV, index: 0 },
{ name: 'Gate', type: SignalType.CV, index: 1 },
{ name: 'Velocity', type: SignalType.CV, index: 2 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">MIDICV</div>
<div class="node-info">Converts MIDI to CV signals</div>
</div>
`
},
AudioToCV: {
name: 'AudioToCV',
category: NodeCategory.UTILITY,
description: 'Envelope follower - converts audio amplitude to CV',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 }
],
outputs: [
{ name: 'CV Out', type: SignalType.CV, index: 0 }
],
parameters: [
{ id: 0, name: 'attack', label: 'Attack', min: 0.001, max: 1.0, default: 0.01, unit: 's' },
{ id: 1, name: 'release', label: 'Release', min: 0.001, max: 1.0, default: 0.1, unit: 's' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">AudioCV</div>
<div class="node-param">
<label>Attack: <span id="att-${nodeId}">0.01</span>s</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="0.001" max="1.0" value="0.01" step="0.001">
</div>
<div class="node-param">
<label>Release: <span id="rel-${nodeId}">0.1</span>s</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0.001" max="1.0" value="0.1" step="0.001">
</div>
</div>
`
},
Oscilloscope: {
name: 'Oscilloscope',
category: NodeCategory.UTILITY,
description: 'Visual audio signal monitor (pass-through)',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 }
],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'time_scale', label: 'Time Scale', min: 10, max: 1000, default: 100, unit: 'ms' },
{ id: 1, name: 'trigger_mode', label: 'Trigger', min: 0, max: 2, default: 0, unit: '' },
{ id: 2, name: 'trigger_level', label: 'Trigger Level', min: -1, max: 1, default: 0, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Oscilloscope</div>
<div class="node-param">
<label>Time: <span id="time-${nodeId}">100</span>ms</label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="10" max="1000" value="100" step="10">
</div>
<div class="node-param">
<label>Trigger: <span id="trig-${nodeId}">Free</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="1" min="0" max="2" value="0" step="1">
</div>
<div class="node-info" style="margin-top: 4px; font-size: 10px;">Pass-through monitor</div>
</div>
`
},
// Container node: double-click expands it so template nodes can be built
// inside (see handleNodeDoubleClick / resizeVoiceAllocatorToFit in main.js).
VoiceAllocator: {
name: 'VoiceAllocator',
category: NodeCategory.UTILITY,
description: 'Polyphonic voice allocator - creates N instances of internal graph',
inputs: [
{ name: 'MIDI In', type: SignalType.MIDI, index: 0 }
],
outputs: [
{ name: 'Mixed Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [
{ id: 0, name: 'voices', label: 'Voices', min: 1, max: 16, default: 8, unit: '' }
],
getHTML: (nodeId) => `
<div class="node-content">
<div class="voice-allocator-header">
<div class="node-title">Voice Allocator</div>
<div class="node-param">
<label>Voices: <span id="voices-${nodeId}">8</span></label>
<input type="range" class="node-slider" data-node="${nodeId}" data-param="0" min="1" max="16" value="8" step="1">
</div>
<div class="node-info" style="margin-top: 4px; font-size: 10px;">Double-click to edit</div>
</div>
<div class="voice-allocator-contents" id="voice-allocator-contents-${nodeId}" style="display: none;">
<div class="node-info" style="font-size: 10px; color: #aaa; margin-bottom: 8px;">Build a synth voice template:</div>
<div class="node-info" style="font-size: 9px; color: #c77dff;">Purple nodes are Template Input/Output (non-deletable)</div>
<div class="node-info" style="font-size: 9px; color: #888;">Connect MIDI from Template Input MidiToCV</div>
<div class="node-info" style="font-size: 9px; color: #888;">Add synth nodes: Oscillator, ADSR, Gain, etc.</div>
<div class="node-info" style="font-size: 9px; color: #888;">Connect final audio Template Output</div>
<div class="node-info" style="font-size: 9px; color: #666; margin-top: 8px;">Drag nodes from palette Container auto-resizes</div>
</div>
</div>
`
},
AudioOutput: {
name: 'AudioOutput',
category: NodeCategory.OUTPUT,
description: 'Final audio output',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 }
],
outputs: [
{ name: 'Audio Out', type: SignalType.AUDIO, index: 0 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Audio Output</div>
<div class="node-info">Final output to speakers</div>
</div>
`
},
// Template nodes below are auto-created inside a VoiceAllocator by addNode
// and are hidden from the palette; they are marked non-deletable in the UI.
TemplateInput: {
name: 'TemplateInput',
category: NodeCategory.INPUT,
description: 'VoiceAllocator template input - receives MIDI for one voice',
inputs: [],
outputs: [
{ name: 'MIDI Out', type: SignalType.MIDI, index: 0 }
],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Template Input</div>
<div class="node-info" style="font-size: 9px;">MIDI for one voice</div>
</div>
`
},
TemplateOutput: {
name: 'TemplateOutput',
category: NodeCategory.OUTPUT,
description: 'VoiceAllocator template output - sends audio to voice mixer',
inputs: [
{ name: 'Audio In', type: SignalType.AUDIO, index: 0 }
],
outputs: [],
parameters: [],
getHTML: (nodeId) => `
<div class="node-content">
<div class="node-title">Template Output</div>
<div class="node-info" style="font-size: 9px;">Audio to mixer</div>
</div>
`
}
};
/**
 * List every node type that belongs to the given category.
 * Returns objects of shape { type, ...definition }.
 */
export function getNodesByCategory(category) {
    const matches = [];
    for (const [type, def] of Object.entries(nodeTypes)) {
        if (def.category === category) {
            matches.push({ type, ...def });
        }
    }
    return matches;
}
/**
 * Return the distinct categories that have at least one registered node type,
 * in first-seen registry order.
 */
export function getCategories() {
    const all = Object.values(nodeTypes).map(def => def.category);
    return [...new Set(all)];
}

View File

@ -1038,3 +1038,272 @@ button {
color: #999;
}
}
/* ============================================
NODE EDITOR STYLES
============================================ */
/* Node editor container */
/* Fills its host panel; position: relative anchors the absolutely-positioned
palette and error overlay below. */
#node-editor-container {
width: 100%;
height: 100%;
position: relative;
background: #1e1e1e;
}
/* Node palette */
/* Floating panel pinned to the top-left corner; z-index keeps it above the
node canvas. */
.node-palette {
position: absolute;
top: 10px;
left: 10px;
background: #2d2d2d;
border: 1px solid #3d3d3d;
border-radius: 4px;
padding: 8px;
max-width: 200px;
z-index: 100;
}
.node-palette h3 {
margin: 0 0 8px 0;
font-size: 12px;
color: #ccc;
text-transform: uppercase;
}
/* One clickable/draggable entry per node type. */
.node-palette-item {
padding: 6px 8px;
margin: 4px 0;
background: #3d3d3d;
border: 1px solid #4d4d4d;
border-radius: 3px;
cursor: pointer;
color: #ddd;
font-size: 13px;
transition: background 0.2s;
}
/* Progressive highlight feedback: hover, then press. */
.node-palette-item:hover {
background: #4d4d4d;
}
.node-palette-item:active {
background: #5d5d5d;
}
/* Node content styling */
/* Inner body of every node card (produced by each nodeType's getHTML). */
.node-content {
padding: 8px;
min-width: 180px;
}
/* Expanded VoiceAllocator node */
/* !important is used throughout to beat Drawflow's own node styling. */
.drawflow .drawflow-node.expanded {
background: rgba(60, 60, 80, 0.95) !important;
border: 2px solid #7c7cff !important;
box-shadow: 0 0 20px rgba(124, 124, 255, 0.4);
}
.drawflow .drawflow-node.expanded .node-content {
display: flex;
flex-direction: column;
height: 100%;
}
/* Scrollable inner canvas that hosts the voice template's child nodes. */
.drawflow .drawflow-node.expanded .voice-allocator-contents {
flex: 1;
background: rgba(40, 40, 50, 0.8);
border-radius: 4px;
margin-top: 8px;
padding: 8px;
position: relative;
overflow: auto;
}
/* Child nodes (inside VoiceAllocator) */
.drawflow .drawflow-node.child-node {
opacity: 0.9;
border: 1px solid #5a5aaa !important;
box-shadow: 0 2px 8px rgba(90, 90, 170, 0.3);
z-index: 10;
}
.drawflow .drawflow-node.child-node .node-title {
font-size: 11px;
}
/* Template nodes (non-deletable I/O nodes) */
/* Purple accent matches the hint text in the VoiceAllocator help card. */
.drawflow .drawflow-node.template-node {
border: 2px solid #9d4edd !important;
background: rgba(157, 78, 221, 0.15) !important;
box-shadow: 0 0 12px rgba(157, 78, 221, 0.4);
pointer-events: auto;
cursor: default;
}
.drawflow .drawflow-node.template-node .node-title {
color: #c77dff;
font-weight: bold;
}
/* Heading line of a node card. */
.node-title {
font-weight: bold;
font-size: 13px;
margin-bottom: 6px;
color: #fff;
text-align: center;
}
/* Secondary descriptive text inside a node card. */
.node-info {
font-size: 11px;
color: #999;
text-align: center;
padding: 4px 0;
}
/* One labelled parameter row (label + slider). */
.node-param {
margin: 3px 0;
}
.node-param label {
display: block;
font-size: 10px;
color: #ccc;
margin-bottom: 2px;
}
/* Custom-drawn range input: appearance none so the thumb rules below apply. */
.node-slider {
width: calc(100% - 8px);
max-width: 140px;
height: 3px;
-webkit-appearance: none;
appearance: none;
background: #4d4d4d;
outline: none;
border-radius: 2px;
}
/* Thumb styling must be duplicated per engine: WebKit and Gecko use
different vendor pseudo-elements that cannot share one rule. */
.node-slider::-webkit-slider-thumb {
-webkit-appearance: none;
appearance: none;
width: 12px;
height: 12px;
background: #4CAF50;
cursor: pointer;
border-radius: 50%;
}
.node-slider::-moz-range-thumb {
width: 12px;
height: 12px;
background: #4CAF50;
cursor: pointer;
border-radius: 50%;
border: none;
}
/* Signal Type Connectors */
/* Each signal type gets a distinct port shape + color so connections are
readable at a glance; !important overrides Drawflow's default port style. */
/* Audio ports - Blue circles (matches audio clips) */
.connector-audio {
width: 14px !important;
height: 14px !important;
border-radius: 50% !important;
background: #2196F3 !important;
border: 2px solid #1565C0 !important;
}
/* MIDI ports - Green squares (matches MIDI clips) */
.connector-midi {
width: 14px !important;
height: 14px !important;
border-radius: 2px !important;
background: #4CAF50 !important;
border: 2px solid #2E7D32 !important;
}
/* CV ports - Orange diamonds */
/* Diamond = a square rotated 45deg. */
.connector-cv {
width: 12px !important;
height: 12px !important;
background: #FF9800 !important;
border: 2px solid #E65100 !important;
transform: rotate(45deg) !important;
border-radius: 2px !important;
}
/* Connection line styling - Override Drawflow defaults */
.drawflow .connection .main-path {
stroke-width: 3px;
}
/* Wire colors mirror the port colors above; MIDI wires are dashed. */
.connection-audio .main-path {
stroke: #2196F3 !important;
stroke-width: 4px !important;
}
.connection-midi .main-path {
stroke: #4CAF50 !important;
stroke-width: 3px !important;
stroke-dasharray: 8, 4 !important;
}
.connection-cv .main-path {
stroke: #FF9800 !important;
stroke-width: 2px !important;
}
/* Port label text styling - position labels away from connectors */
.drawflow .drawflow-node .input > span,
.drawflow .drawflow-node .output > span {
font-size: 9px;
color: #999;
pointer-events: none;
position: absolute;
line-height: 20px;
top: 0;
}
/* Input labels - position to the right of the connector */
.drawflow .drawflow-node .input > span {
left: 24px;
}
/* Output labels - position to the left of the connector */
.drawflow .drawflow-node .output > span {
right: 24px;
}
/* Node styling overrides for Drawflow */
/* Base dark theme for all nodes; !important beats the library defaults. */
.drawflow .drawflow-node {
background: #2d2d2d !important;
border: 2px solid #4d4d4d !important;
border-radius: 6px !important;
color: #ddd !important;
padding: 8px !important;
}
.drawflow .drawflow-node.selected {
border-color: #4CAF50 !important;
box-shadow: 0 0 10px rgba(76, 175, 80, 0.5) !important;
}
/* Error message styling */
/* Centered toast over the editor; fades out via the animation below. */
.node-editor-error {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
background: rgba(244, 67, 54, 0.9);
color: white;
padding: 12px 20px;
border-radius: 4px;
font-size: 14px;
z-index: 200;
animation: fadeOut 3s forwards;
}
/* Hold fully visible for 70% of the 3s, then fade; `forwards` keeps the
final (invisible) state. */
@keyframes fadeOut {
0%, 70% { opacity: 1; }
100% { opacity: 0; }
}

View File

@ -4146,9 +4146,10 @@ class VirtualPiano extends Widget {
}
if (info.isBlack) {
// Black key positioning - place it between the white keys
// The black key goes after the white key at position whiteKeysBefore
const x = offsetX + whiteKeysBefore * whiteKeyWidth + whiteKeyWidth - blackKeyWidth / 2;
// Black key positioning - place it at the right edge of the preceding white key
// whiteKeysBefore is the number of white keys to the left, so multiply by width
// and subtract half the black key width to center it at the gap
const x = offsetX + whiteKeysBefore * whiteKeyWidth - blackKeyWidth / 2;
return {
x,