Moved files

This commit is contained in:
2025-06-10 21:14:10 +02:00
parent b70026d562
commit 06600b8341
85 changed files with 194 additions and 37 deletions

1
audio_engine/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

743
audio_engine/Cargo.lock generated Normal file
View File

@@ -0,0 +1,743 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "addr2line"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
dependencies = [
"gimli",
]
[[package]]
name = "adler2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "audio_engine"
version = "0.1.0"
dependencies = [
"dirs",
"hound",
"jack",
"kanal",
"log",
"serde",
"serde_json",
"simple_logger",
"thiserror",
"tokio",
"wmidi",
]
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "backtrace"
version = "0.3.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
dependencies = [
"addr2line",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
"windows-targets 0.52.6",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "bytes"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "colored"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.59.0",
]
[[package]]
name = "deranged"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
dependencies = [
"powerfmt",
]
[[package]]
name = "dirs"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-sys"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.48.0",
]
[[package]]
name = "futures-core"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "getrandom"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "gimli"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
[[package]]
name = "hound"
version = "3.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f"
[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jack"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f70ca699f44c04a32d419fc9ed699aaea89657fc09014bf3fa238e91d13041b9"
dependencies = [
"bitflags 2.9.1",
"jack-sys",
"lazy_static",
"libc",
"log",
]
[[package]]
name = "jack-sys"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6013b7619b95a22b576dfb43296faa4ecbe40abbdb97dfd22ead520775fc86ab"
dependencies = [
"bitflags 1.3.2",
"lazy_static",
"libc",
"libloading",
"log",
"pkg-config",
]
[[package]]
name = "kanal"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3953adf0cd667798b396c2fa13552d6d9b3269d7dd1154c4c416442d1ff574"
dependencies = [
"futures-core",
"lock_api",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "libredox"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.9.1",
"libc",
]
[[package]]
name = "lock_api"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "miniz_oxide"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
dependencies = [
"adler2",
]
[[package]]
name = "mio"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
dependencies = [
"libc",
"wasi",
"windows-sys 0.59.0",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num_threads"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
dependencies = [
"libc",
]
[[package]]
name = "object"
version = "0.36.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
dependencies = [
"memchr",
]
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets 0.52.6",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "powerfmt"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "proc-macro2"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
dependencies = [
"bitflags 2.9.1",
]
[[package]]
name = "redox_users"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
dependencies = [
"getrandom",
"libredox",
"thiserror",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "ryu"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410"
dependencies = [
"libc",
]
[[package]]
name = "simple_logger"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c5dfa5e08767553704aa0ffd9d9794d527103c736aba9854773851fd7497eb"
dependencies = [
"colored",
"log",
"time",
"windows-sys 0.48.0",
]
[[package]]
name = "smallvec"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
[[package]]
name = "socket2"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "syn"
version = "2.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "time"
version = "0.3.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
dependencies = [
"deranged",
"itoa",
"libc",
"num-conv",
"num_threads",
"powerfmt",
"serde",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "time-macros"
version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
dependencies = [
"num-conv",
"time-core",
]
[[package]]
name = "tokio"
version = "1.45.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
dependencies = [
"backtrace",
"bytes",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
]
[[package]]
name = "tokio-macros"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "wmidi"
version = "4.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e55f35b40ad0178422d06e9ba845041baf2faf04627b91fde928d0f6a21c712"

17
audio_engine/Cargo.toml Normal file
View File

@@ -0,0 +1,17 @@
[package]
name = "audio_engine"
version = "0.1.0"
edition = "2021"
[dependencies]
dirs = "5"
hound = "3.5"
jack = "0.13"
kanal = "0.1"
log = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
simple_logger = "5"
thiserror = "1"
tokio = { version = "1", features = ["full"] }
wmidi = "4"

View File

@@ -0,0 +1,42 @@
use crate::*;
/// Hands out pre-allocated [`AudioChunk`]s so the real-time audio thread
/// never has to allocate; a background task started by [`Allocator::spawn`]
/// keeps the pool topped up.
pub struct Allocator {
    /// Receiving end of the bounded buffer pool; the matching sender is
    /// owned by the background refill task.
    channel: kanal::Receiver<Arc<AudioChunk>>,
}
impl Allocator {
    /// Build an allocator whose pool holds up to `pool_size` chunks of
    /// `buffer_size` samples each.
    ///
    /// Must be called from within a tokio runtime: it uses
    /// `tokio::runtime::Handle::current()` to spawn the refill task,
    /// which panics when no runtime is active.
    pub fn spawn(buffer_size: usize, pool_size: usize) -> Self {
        let (allocated_buffer_sender, allocated_buffer_receiver) = kanal::bounded(pool_size);
        // Pre-fill the channel with initial buffers.
        // kanal's try_send returns Ok(true) when the value was enqueued and
        // Ok(false) when the bounded channel is full, so this loop stops as
        // soon as the pool is at capacity.
        while let Ok(true) = allocated_buffer_sender.try_send(AudioChunk::allocate(buffer_size)) {}
        let allocator = Allocator {
            channel: allocated_buffer_receiver,
        };
        let allocated_buffer_sender = allocated_buffer_sender.to_async();
        // Spawn the background task that continuously allocates buffers.
        // The async `send` waits while the pool is full, so a new chunk is
        // only allocated after one is taken out; the task exits once the
        // receiving `Allocator` is dropped and `send` returns an error.
        tokio::runtime::Handle::current().spawn(async move {
            loop {
                let chunk = AudioChunk::allocate(buffer_size);
                if allocated_buffer_sender.send(chunk).await.is_err() {
                    break;
                }
            }
        });
        allocator
    }
}
impl ChunkFactory for Allocator {
    /// Take one pre-allocated chunk out of the pool without blocking.
    ///
    /// # Errors
    /// Returns `LooperError::ChunkAllocation` when the pool is currently
    /// empty or the channel has been closed.
    fn create_chunk(&mut self) -> Result<std::sync::Arc<AudioChunk>> {
        if let Ok(Some(chunk)) = self.channel.try_recv() {
            Ok(chunk)
        } else {
            Err(LooperError::ChunkAllocation(std::panic::Location::caller()))
        }
    }
}

View File

@@ -0,0 +1,876 @@
use std::sync::Arc;
use crate::*;
/// A chunk of audio data that can be linked to form longer recordings.
/// Designed for lock-free sharing between real-time and I/O threads.
#[derive(Debug)]
pub struct AudioChunk {
    /// Fixed-size audio buffer; capacity is set at allocation time and
    /// never changes.
    pub samples: Box<[f32]>,
    /// Number of valid samples written so far (≤ samples.len()).
    pub sample_count: usize,
    /// Next chunk in the linked list, or `None` for the tail.
    pub next: Option<Arc<AudioChunk>>,
}
impl AudioChunk {
    /// Create a new empty chunk with specified capacity (for buffer pool).
    ///
    /// All samples are zero-initialized and `sample_count` starts at 0.
    pub fn allocate(size: usize) -> Arc<Self> {
        Arc::new(Self {
            samples: vec![0.0; size].into_boxed_slice(),
            sample_count: 0,
            next: None,
        })
    }

    /// Consolidate a linked list of chunks into a single optimized chunk.
    ///
    /// Only the valid prefix (`sample_count`) of each chunk is copied; the
    /// result has `samples.len() == sample_count` and no `next` link.
    pub fn consolidate(this: &Arc<AudioChunk>) -> Arc<Self> {
        // First pass: total valid samples across the chain, so the
        // consolidated buffer is allocated exactly once.
        let mut total_samples = 0;
        let mut current = Some(this.clone());
        while let Some(chunk) = current {
            total_samples += chunk.sample_count;
            current = chunk.next.clone();
        }
        // Second pass: copy each chunk's valid prefix into the new buffer.
        let mut consolidated_samples = Vec::with_capacity(total_samples);
        let mut current = Some(this.clone());
        while let Some(chunk) = current {
            consolidated_samples.extend_from_slice(&chunk.samples[..chunk.sample_count]);
            current = chunk.next.clone();
        }
        Arc::new(Self {
            samples: consolidated_samples.into_boxed_slice(),
            sample_count: total_samples,
            next: None,
        })
    }

    /// Add samples to this chunk chain, allocating additional chunks as
    /// needed via the factory when the tail chunk fills up.
    ///
    /// # Errors
    /// - `LooperError::ChunkOwnership` if a chunk in the chain is shared
    ///   (another `Arc` reference exists, so `Arc::get_mut` fails).
    /// - Propagates factory errors; samples already written before the
    ///   failure remain in the chain.
    pub fn append_samples<F: ChunkFactory>(
        self: &mut Arc<Self>,
        samples: &[f32],
        chunk_factory: &mut F,
    ) -> Result<()> {
        let mut_self = Arc::get_mut(self)
            .ok_or(LooperError::ChunkOwnership(std::panic::Location::caller()))?;
        if let Some(next) = &mut mut_self.next {
            // Not the tail chunk: recurse toward the end of the chain.
            next.append_samples(samples, chunk_factory)
        } else {
            // Fill this chunk's remaining capacity first.
            let available_space = mut_self.samples.len() - mut_self.sample_count;
            let samples_to_append_to_this_chunk = samples.len().min(available_space);
            let dest = &mut mut_self.samples
                [mut_self.sample_count..mut_self.sample_count + samples_to_append_to_this_chunk];
            dest.copy_from_slice(&samples[..samples_to_append_to_this_chunk]);
            mut_self.sample_count += samples_to_append_to_this_chunk;
            // Overflow goes into a freshly created chunk; recurse in case a
            // single factory chunk is still too small for the remainder.
            if samples_to_append_to_this_chunk < samples.len() {
                let mut new_chunk = chunk_factory.create_chunk()?;
                let remaining_samples = &samples[samples_to_append_to_this_chunk..];
                new_chunk.append_samples(remaining_samples, chunk_factory)?;
                // Link the new chunk.
                mut_self.next = Some(new_chunk);
            }
            Ok(())
        }
    }

    /// Get total sample count across entire chunk chain.
    pub fn len(&self) -> usize {
        let mut total = self.sample_count;
        let mut current = &self.next;
        while let Some(chunk) = current {
            total += chunk.sample_count;
            current = &chunk.next;
        }
        total
    }

    /// `true` when the chain holds no valid samples.
    ///
    /// Companion to [`AudioChunk::len`] (addresses clippy's
    /// `len_without_is_empty` on a public `len`).
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Copy `dest.len()` samples from the chain into `dest`, starting at
    /// logical offset `start` (counted across the whole chain).
    ///
    /// # Errors
    /// Returns `LooperError::OutOfBounds` when the requested range extends
    /// past the valid samples in the chain.
    pub fn copy_samples(self: &Arc<Self>, dest: &mut [f32], start: usize) -> Result<()> {
        if start < self.sample_count {
            // Copy (at least partially) from this chunk.
            let end = start + dest.len();
            if end <= self.sample_count {
                dest.copy_from_slice(&self.samples[start..end]);
            } else if let Some(next) = self.next.as_ref() {
                // Split: take the tail of this chunk, then recurse into the
                // rest of the chain for the remainder.
                let sample_count_from_this_chunk = self.sample_count - start;
                dest[..sample_count_from_this_chunk]
                    .copy_from_slice(&self.samples[start..self.sample_count]);
                next.copy_samples(&mut dest[sample_count_from_this_chunk..], 0)?;
            } else {
                return Err(LooperError::OutOfBounds(std::panic::Location::caller()));
            }
            Ok(())
        } else if let Some(next) = &self.next {
            // Offset lies entirely beyond this chunk: skip ahead.
            next.copy_samples(dest, start - self.sample_count)
        } else {
            Err(LooperError::OutOfBounds(std::panic::Location::caller()))
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_len_single_chunk_with_samples() {
let chunk = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0, 4.0, 5.0].into_boxed_slice(),
sample_count: 5,
next: None,
});
assert_eq!(chunk.len(), 5);
}
#[test]
fn test_len_single_chunk_empty() {
let chunk = Arc::new(AudioChunk {
samples: vec![0.0, 0.0, 0.0].into_boxed_slice(),
sample_count: 0,
next: None,
});
assert_eq!(chunk.len(), 0);
}
#[test]
fn test_len_single_chunk_partial() {
let chunk = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0, 0.0, 0.0].into_boxed_slice(),
sample_count: 3, // Only first 3 samples are valid
next: None,
});
assert_eq!(chunk.len(), 3);
}
#[test]
fn test_len_linked_chunks() {
let chunk3 = Arc::new(AudioChunk {
samples: vec![7.0, 8.0, 9.0].into_boxed_slice(),
sample_count: 3,
next: None,
});
let chunk2 = Arc::new(AudioChunk {
samples: vec![4.0, 5.0].into_boxed_slice(),
sample_count: 2,
next: Some(chunk3),
});
let chunk1 = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
sample_count: 3,
next: Some(chunk2),
});
assert_eq!(chunk1.len(), 8); // 3 + 2 + 3
}
#[test]
fn test_len_chain_with_empty_chunks() {
let chunk3 = Arc::new(AudioChunk {
samples: vec![4.0, 5.0].into_boxed_slice(),
sample_count: 2,
next: None,
});
let chunk2 = Arc::new(AudioChunk {
samples: vec![0.0, 0.0].into_boxed_slice(),
sample_count: 0, // Empty chunk in middle
next: Some(chunk3),
});
let chunk1 = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
sample_count: 3,
next: Some(chunk2),
});
assert_eq!(chunk1.len(), 5); // 3 + 0 + 2
}
#[test]
fn test_len_all_empty_chunks() {
let chunk2 = Arc::new(AudioChunk {
samples: vec![0.0, 0.0].into_boxed_slice(),
sample_count: 0,
next: None,
});
let chunk1 = Arc::new(AudioChunk {
samples: vec![0.0, 0.0, 0.0].into_boxed_slice(),
sample_count: 0,
next: Some(chunk2),
});
assert_eq!(chunk1.len(), 0);
}
#[test]
fn test_len_long_chain() {
// Create a longer chain to test iteration
let mut current_chunk = Arc::new(AudioChunk {
samples: vec![10.0].into_boxed_slice(),
sample_count: 1,
next: None,
});
// Build chain backwards: 9 -> 8 -> ... -> 1 -> 0
for i in (0..10).rev() {
current_chunk = Arc::new(AudioChunk {
samples: vec![i as f32].into_boxed_slice(),
sample_count: 1,
next: Some(current_chunk),
});
}
assert_eq!(current_chunk.len(), 11); // 11 chunks, each with 1 sample
}
#[test]
fn test_allocate_creates_empty_chunk() {
let chunk = AudioChunk::allocate(1024);
assert_eq!(chunk.samples.len(), 1024);
assert_eq!(chunk.sample_count, 0);
assert!(chunk.next.is_none());
// All samples should be initialized to 0.0
assert!(chunk.samples.iter().all(|&x| x == 0.0));
}
#[test]
fn test_consolidate_single_chunk() {
let mut chunk = AudioChunk::allocate(10);
// Write some sample data
let samples = vec![1.0, 2.0, 3.0, 4.0, 5.0];
let mut factory = || panic!("Factory should not be called");
let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
assert!(result.is_ok());
let consolidated = AudioChunk::consolidate(&chunk);
assert_eq!(consolidated.samples.len(), 5);
assert_eq!(consolidated.sample_count, 5);
assert!(consolidated.next.is_none());
assert_eq!(consolidated.samples[0], 1.0);
assert_eq!(consolidated.samples[4], 5.0);
}
#[test]
fn test_consolidate_linked_chunks() {
// Create first chunk
let chunk1 = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
sample_count: 3,
next: None,
});
// Create second chunk
let chunk2 = Arc::new(AudioChunk {
samples: vec![4.0, 5.0].into_boxed_slice(),
sample_count: 2,
next: None,
});
// Create third chunk
let chunk3 = Arc::new(AudioChunk {
samples: vec![6.0, 7.0, 8.0, 9.0].into_boxed_slice(),
sample_count: 4,
next: None,
});
// Link them together
let chunk1 = Arc::new(AudioChunk {
samples: chunk1.samples.clone(),
sample_count: chunk1.sample_count,
next: Some(Arc::new(AudioChunk {
samples: chunk2.samples.clone(),
sample_count: chunk2.sample_count,
next: Some(chunk3),
})),
});
let consolidated = AudioChunk::consolidate(&chunk1);
assert_eq!(consolidated.samples.len(), 9);
assert_eq!(consolidated.sample_count, 9);
assert!(consolidated.next.is_none());
let expected = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
assert_eq!(consolidated.samples.as_ref(), expected.as_slice());
}
#[test]
fn test_consolidate_partial_chunks() {
// Test chunks where sample_count < samples.len()
let chunk1 = Arc::new(AudioChunk {
samples: vec![1.0, 2.0, 3.0, 0.0, 0.0].into_boxed_slice(),
sample_count: 3, // Only first 3 samples are valid
next: None,
});
let chunk2 = Arc::new(AudioChunk {
samples: vec![4.0, 5.0, 0.0].into_boxed_slice(),
sample_count: 2, // Only first 2 samples are valid
next: None,
});
let chunk1 = Arc::new(AudioChunk {
samples: chunk1.samples.clone(),
sample_count: chunk1.sample_count,
next: Some(chunk2),
});
let consolidated = AudioChunk::consolidate(&chunk1);
assert_eq!(consolidated.samples.len(), 5);
assert_eq!(consolidated.sample_count, 5);
let expected = vec![1.0, 2.0, 3.0, 4.0, 5.0];
assert_eq!(consolidated.samples.as_ref(), expected.as_slice());
}
#[test]
fn test_append_samples_empty() {
let mut chunk = AudioChunk::allocate(5);
let mut factory = || panic!("Factory should not be called for empty samples");
let result = AudioChunk::append_samples(&mut chunk, &[], &mut factory);
assert!(result.is_ok());
assert_eq!(chunk.sample_count, 0);
assert!(chunk.next.is_none());
}
#[test]
fn test_append_samples_fits_in_one_chunk() {
let mut chunk = AudioChunk::allocate(5);
let mut factory = || panic!("Factory should not be called when samples fit");
let samples = vec![1.0, 2.0, 3.0];
let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
assert!(result.is_ok());
assert_eq!(chunk.sample_count, 3);
assert_eq!(chunk.samples[0], 1.0);
assert_eq!(chunk.samples[1], 2.0);
assert_eq!(chunk.samples[2], 3.0);
assert!(chunk.next.is_none());
}
#[test]
fn test_append_samples_exactly_fills_chunk() {
let mut chunk = AudioChunk::allocate(3);
let mut factory = || panic!("Factory should not be called when samples exactly fit");
let samples = vec![1.0, 2.0, 3.0];
let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
assert!(result.is_ok());
assert_eq!(chunk.sample_count, 3);
assert_eq!(chunk.samples[0], 1.0);
assert_eq!(chunk.samples[1], 2.0);
assert_eq!(chunk.samples[2], 3.0);
assert!(chunk.next.is_none());
}
#[test]
fn test_append_samples_requires_new_chunk() {
    // Overflow path: excess samples spill into a chunk obtained from the factory.
    let mut chunk = AudioChunk::allocate(2);
    let mut factory = chunk_factory::mock::MockFactory::new(vec![AudioChunk::allocate(3)]);
    let samples = vec![1.0, 2.0, 3.0, 4.0]; // 4 samples, first chunk holds 2
    let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
    assert!(result.is_ok());
    // First chunk: 2 samples
    assert_eq!(chunk.sample_count, 2);
    assert_eq!(chunk.samples[0], 1.0);
    assert_eq!(chunk.samples[1], 2.0);
    // Second chunk: remaining 2 samples
    let chunk2 = chunk.next.as_ref().unwrap();
    assert_eq!(chunk2.sample_count, 2);
    assert_eq!(chunk2.samples[0], 3.0);
    assert_eq!(chunk2.samples[1], 4.0);
    assert!(chunk2.next.is_none());
}
#[test]
fn test_append_samples_multiple_chunks_cascade() {
    // A single append may consume several factory chunks when the input is
    // larger than two chunk capacities.
    let mut chunk = AudioChunk::allocate(2);
    let mut factory = chunk_factory::mock::MockFactory::new(vec![
        AudioChunk::allocate(2),
        AudioChunk::allocate(2),
    ]);
    let samples = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0]; // 6 samples, each chunk holds 2
    let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
    assert!(result.is_ok());
    // First chunk
    assert_eq!(chunk.sample_count, 2);
    assert_eq!(chunk.samples[0], 1.0);
    assert_eq!(chunk.samples[1], 2.0);
    // Second chunk
    let chunk2 = chunk.next.as_ref().unwrap();
    assert_eq!(chunk2.sample_count, 2);
    assert_eq!(chunk2.samples[0], 3.0);
    assert_eq!(chunk2.samples[1], 4.0);
    // Third chunk
    let chunk3 = chunk2.next.as_ref().unwrap();
    assert_eq!(chunk3.sample_count, 2);
    assert_eq!(chunk3.samples[0], 5.0);
    assert_eq!(chunk3.samples[1], 6.0);
    assert!(chunk3.next.is_none());
}
#[test]
fn test_append_samples_factory_failure() {
    // When the factory fails mid-append, the error propagates but samples
    // already written to the existing chunk are kept (partial write).
    let mut chunk = AudioChunk::allocate(2);
    let mut factory = || Err(LooperError::ChunkAllocation(std::panic::Location::caller()));
    let samples = vec![1.0, 2.0, 3.0]; // 3 samples, chunk only holds 2
    let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
    assert!(result.is_err());
    assert!(matches!(
        result.unwrap_err(),
        LooperError::ChunkAllocation(_)
    ));
    // First chunk should have been written before factory failure
    assert_eq!(chunk.sample_count, 2);
    assert_eq!(chunk.samples[0], 1.0);
    assert_eq!(chunk.samples[1], 2.0);
    assert!(chunk.next.is_none());
}
#[test]
fn test_append_samples_ownership_failure() {
    // Mutating a chunk whose Arc refcount is > 1 must fail with
    // ChunkOwnership and leave the chunk untouched.
    let chunk = AudioChunk::allocate(3);
    let chunk_clone = chunk.clone(); // Create another reference
    let mut chunk_original = chunk;
    let mut factory = || panic!("Factory should not be called");
    let samples = vec![1.0, 2.0];
    let result = AudioChunk::append_samples(&mut chunk_original, &samples, &mut factory);
    assert!(result.is_err());
    assert!(matches!(
        result.unwrap_err(),
        LooperError::ChunkOwnership(_)
    ));
    // Chunk should be unchanged
    assert_eq!(chunk_original.sample_count, 0);
    // Clean up the clone reference to avoid unused variable warning
    // (the explicit drop also keeps the clone alive through the call above).
    drop(chunk_clone);
}
#[test]
fn test_copy_samples_single_chunk_full_copy() {
    // Copying the whole chunk from offset 0 reproduces every sample.
    let source = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0, 4.0, 5.0].into_boxed_slice(),
        sample_count: 5,
        next: None,
    });
    let mut out = vec![0.0; 5];
    assert!(source.copy_samples(&mut out, 0).is_ok());
    assert_eq!(out, vec![1.0, 2.0, 3.0, 4.0, 5.0]);
}
#[test]
fn test_copy_samples_single_chunk_partial_copy() {
    // Copy a sub-range: destination length selects how many samples to read.
    let chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0, 4.0, 5.0].into_boxed_slice(),
        sample_count: 5,
        next: None,
    });
    let mut dest = vec![0.0; 3];
    let result = chunk.copy_samples(&mut dest, 1); // Start at index 1
    assert!(result.is_ok());
    assert_eq!(dest, vec![2.0, 3.0, 4.0]);
}
#[test]
fn test_copy_samples_single_chunk_zero_samples() {
    // Zero-length destination is a valid no-op copy.
    let chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: None,
    });
    let mut dest: Vec<f32> = vec![];
    let result = chunk.copy_samples(&mut dest, 0);
    assert!(result.is_ok());
    assert_eq!(dest, Vec::<f32>::new());
}
#[test]
fn test_copy_samples_single_chunk_out_of_bounds() {
    // Start index past the valid sample range must fail, not panic.
    let chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: None,
    });
    let mut dest = vec![0.0; 2];
    let result = chunk.copy_samples(&mut dest, 5); // Start beyond sample_count
    assert!(result.is_err());
    assert!(matches!(result.unwrap_err(), LooperError::OutOfBounds(_)));
}
#[test]
fn test_copy_samples_single_chunk_partial_out_of_bounds() {
    // Destination longer than the remaining valid samples must also fail.
    let chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: None,
    });
    let mut dest = vec![0.0; 5]; // Want 5 samples starting at index 1, but only 2 available
    let result = chunk.copy_samples(&mut dest, 1);
    assert!(result.is_err());
    assert!(matches!(result.unwrap_err(), LooperError::OutOfBounds(_)));
}
#[test]
fn test_copy_samples_linked_chunks_single_chunk_boundary() {
    // Start index equal to the first chunk's count reads entirely from the
    // linked successor chunk.
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(Arc::new(AudioChunk {
            samples: vec![4.0, 5.0, 6.0].into_boxed_slice(),
            sample_count: 3,
            next: None,
        })),
    });
    let mut dest = vec![0.0; 3];
    let result = chunk1.copy_samples(&mut dest, 3); // Start exactly at chunk boundary
    assert!(result.is_ok());
    assert_eq!(dest, vec![4.0, 5.0, 6.0]);
}
#[test]
fn test_copy_samples_linked_chunks_across_boundary() {
    // A single copy may straddle the boundary between two linked chunks.
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(Arc::new(AudioChunk {
            samples: vec![4.0, 5.0, 6.0].into_boxed_slice(),
            sample_count: 3,
            next: None,
        })),
    });
    let mut dest = vec![0.0; 4];
    let result = chunk1.copy_samples(&mut dest, 2); // Start in first chunk, cross to second
    assert!(result.is_ok());
    assert_eq!(dest, vec![3.0, 4.0, 5.0, 6.0]);
}
#[test]
fn test_copy_samples_linked_chunks_multiple_spans() {
    // A copy can traverse three chunks in one call.
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0].into_boxed_slice(),
        sample_count: 2,
        next: Some(Arc::new(AudioChunk {
            samples: vec![3.0, 4.0].into_boxed_slice(),
            sample_count: 2,
            next: Some(Arc::new(AudioChunk {
                samples: vec![5.0, 6.0, 7.0].into_boxed_slice(),
                sample_count: 3,
                next: None,
            })),
        })),
    });
    let mut dest = vec![0.0; 5];
    let result = chunk1.copy_samples(&mut dest, 1); // Start in first, span all three chunks
    assert!(result.is_ok());
    assert_eq!(dest, vec![2.0, 3.0, 4.0, 5.0, 6.0]);
}
#[test]
fn test_copy_samples_linked_chunks_out_of_bounds() {
    // Out-of-range start is detected even when it lies past the whole chain.
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(Arc::new(AudioChunk {
            samples: vec![4.0, 5.0].into_boxed_slice(),
            sample_count: 2,
            next: None,
        })),
    });
    let mut dest = vec![0.0; 3];
    let result = chunk1.copy_samples(&mut dest, 10); // Start beyond all chunks
    assert!(result.is_err());
    assert!(matches!(result.unwrap_err(), LooperError::OutOfBounds(_)));
}
#[test]
fn test_append_samples_to_partially_filled_chunk() {
    // Appending continues after existing samples; spare capacity (slots past
    // sample_count) is reused before allocating.
    let mut chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 0.0, 0.0, 0.0].into_boxed_slice(),
        sample_count: 2, // Already has 2 samples
        next: None,
    });
    let mut factory = || panic!("Factory should not be called");
    let samples = vec![3.0, 4.0];
    let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
    assert!(result.is_ok());
    assert_eq!(chunk.sample_count, 4);
    assert_eq!(chunk.samples[0], 1.0);
    assert_eq!(chunk.samples[1], 2.0);
    assert_eq!(chunk.samples[2], 3.0);
    assert_eq!(chunk.samples[3], 4.0);
    assert!(chunk.next.is_none());
}
#[test]
fn test_append_samples_to_full_chunk_creates_new() {
    // A full head chunk stays untouched; the append lands in a new chunk.
    let mut chunk = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3, // Chunk is full
        next: None,
    });
    let mut factory = chunk_factory::mock::MockFactory::new(vec![AudioChunk::allocate(3)]);
    let samples = vec![4.0, 5.0];
    let result = AudioChunk::append_samples(&mut chunk, &samples, &mut factory);
    assert!(result.is_ok());
    assert_eq!(chunk.sample_count, 3); // First chunk unchanged
    let chunk2 = chunk.next.as_ref().unwrap();
    assert_eq!(chunk2.sample_count, 2);
    assert_eq!(chunk2.samples[0], 4.0);
    assert_eq!(chunk2.samples[1], 5.0);
}
#[test]
fn test_append_samples_to_middle_of_chain() {
    // Appending to the head of a chain walks to the tail chunk and writes there.
    // Create a chain: chunk1 -> chunk2 -> chunk3
    let chunk3 = Arc::new(AudioChunk {
        samples: vec![7.0, 8.0, 0.0].into_boxed_slice(),
        sample_count: 2,
        next: None,
    });
    let chunk2 = Arc::new(AudioChunk {
        samples: vec![4.0, 5.0, 6.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(chunk3),
    });
    let mut chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(chunk2),
    });
    // Append should go to the last chunk (chunk3)
    let mut factory = || panic!("Factory should not be called when space available");
    let samples = vec![9.0];
    let result = AudioChunk::append_samples(&mut chunk1, &samples, &mut factory);
    assert!(result.is_ok());
    // Navigate to chunk3 and verify the sample was added
    let chunk2 = chunk1.next.as_ref().unwrap();
    let chunk3 = chunk2.next.as_ref().unwrap();
    assert_eq!(chunk3.sample_count, 3);
    assert_eq!(chunk3.samples[2], 9.0);
}
#[test]
fn test_append_samples_multiple_calls() {
    // Successive appends accumulate within one chunk until capacity is reached.
    let mut chunk = AudioChunk::allocate(5);
    let mut factory = || panic!("Factory should not be called");
    // First append
    let result1 = AudioChunk::append_samples(&mut chunk, &[1.0, 2.0], &mut factory);
    assert!(result1.is_ok());
    assert_eq!(chunk.sample_count, 2);
    // Second append
    let result2 = AudioChunk::append_samples(&mut chunk, &[3.0], &mut factory);
    assert!(result2.is_ok());
    assert_eq!(chunk.sample_count, 3);
    // Third append
    let result3 = AudioChunk::append_samples(&mut chunk, &[4.0, 5.0], &mut factory);
    assert!(result3.is_ok());
    assert_eq!(chunk.sample_count, 5);
    // Verify all samples
    assert_eq!(chunk.samples[0], 1.0);
    assert_eq!(chunk.samples[1], 2.0);
    assert_eq!(chunk.samples[2], 3.0);
    assert_eq!(chunk.samples[3], 4.0);
    assert_eq!(chunk.samples[4], 5.0);
}
#[test]
fn test_append_samples_chain_with_overflow() {
    // Appending to a chain first tops up the tail chunk's spare capacity,
    // then spills the rest into a freshly allocated chunk.
    // Create initial chain with some space in last chunk
    let chunk2 = Arc::new(AudioChunk {
        samples: vec![4.0, 5.0, 0.0].into_boxed_slice(),
        sample_count: 2, // Has space for 1 more
        next: None,
    });
    let mut chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(chunk2),
    });
    let mut factory = chunk_factory::mock::MockFactory::new(vec![AudioChunk::allocate(3)]);
    let samples = vec![6.0, 7.0, 8.0]; // 3 samples, but only 1 space available
    let result = AudioChunk::append_samples(&mut chunk1, &samples, &mut factory);
    assert!(result.is_ok());
    // Check that chunk2 got filled
    let chunk2 = chunk1.next.as_ref().unwrap();
    assert_eq!(chunk2.sample_count, 3);
    assert_eq!(chunk2.samples[2], 6.0);
    // Check that chunk3 was created with remaining samples
    let chunk3 = chunk2.next.as_ref().unwrap();
    assert_eq!(chunk3.sample_count, 2);
    assert_eq!(chunk3.samples[0], 7.0);
    assert_eq!(chunk3.samples[1], 8.0);
}
#[test]
fn test_consolidate_empty_chunk() {
    // Consolidating a chunk with no valid samples yields a zero-length chunk;
    // unused slot values are ignored.
    let chunk = Arc::new(AudioChunk {
        samples: vec![0.0, 0.0, 0.0].into_boxed_slice(),
        sample_count: 0, // No valid samples
        next: None,
    });
    let consolidated = AudioChunk::consolidate(&chunk);
    assert_eq!(consolidated.samples.len(), 0);
    assert_eq!(consolidated.sample_count, 0);
    assert!(consolidated.next.is_none());
}
#[test]
fn test_consolidate_chain_with_empty_chunks() {
    // Empty chunks inside a chain contribute nothing to the consolidated buffer.
    let chunk3 = Arc::new(AudioChunk {
        samples: vec![4.0, 5.0, 0.0].into_boxed_slice(),
        sample_count: 2,
        next: None,
    });
    let chunk2 = Arc::new(AudioChunk {
        samples: vec![0.0, 0.0].into_boxed_slice(),
        sample_count: 0, // Empty middle chunk
        next: Some(chunk3),
    });
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(chunk2),
    });
    let consolidated = AudioChunk::consolidate(&chunk1);
    assert_eq!(consolidated.samples.len(), 5);
    assert_eq!(consolidated.sample_count, 5);
    assert_eq!(consolidated.samples.as_ref(), &[1.0, 2.0, 3.0, 4.0, 5.0]);
}
#[test]
fn test_consolidate_all_empty_chunks() {
    // A chain of only-empty chunks consolidates to a single empty chunk.
    let chunk2 = Arc::new(AudioChunk {
        samples: vec![0.0, 0.0].into_boxed_slice(),
        sample_count: 0,
        next: None,
    });
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![0.0, 0.0, 0.0].into_boxed_slice(),
        sample_count: 0,
        next: Some(chunk2),
    });
    let consolidated = AudioChunk::consolidate(&chunk1);
    assert_eq!(consolidated.samples.len(), 0);
    assert_eq!(consolidated.sample_count, 0);
    assert!(consolidated.next.is_none());
}
#[test]
fn test_copy_samples_from_consolidated_chunk() {
    // Test that copy_samples works correctly after consolidation
    // (reads that crossed a chunk boundary before now come from one buffer).
    let chunk2 = Arc::new(AudioChunk {
        samples: vec![4.0, 5.0].into_boxed_slice(),
        sample_count: 2,
        next: None,
    });
    let chunk1 = Arc::new(AudioChunk {
        samples: vec![1.0, 2.0, 3.0].into_boxed_slice(),
        sample_count: 3,
        next: Some(chunk2),
    });
    let consolidated = AudioChunk::consolidate(&chunk1);
    let mut dest = vec![0.0; 3];
    let result = consolidated.copy_samples(&mut dest, 2);
    assert!(result.is_ok());
    assert_eq!(dest, vec![3.0, 4.0, 5.0]);
}
}

View File

@@ -0,0 +1,483 @@
use crate::*;
/// Audio data representation supporting sync offsets for column-based timing
#[derive(Debug)]
pub enum AudioData {
    /// No audio data present
    Empty,
    /// Unconsolidated linked chunks with sync offset (used during/after recording)
    Unconsolidated {
        /// Head of the linked chunk list holding the raw recording.
        chunks: Arc<AudioChunk>,
        sync_offset: usize, // samples from column start to recording start
        length: usize,      // total samples in recording
    },
    /// Consolidated single buffer, reordered for optimal playback
    Consolidated {
        buffer: Box<[f32]>, // single optimized array
    },
}
impl AudioData {
/// Create new empty audio data
pub fn new_empty() -> Self {
Self::Empty
}
/// Create new unconsolidated audio data from initial chunk
pub fn new_unconsolidated<F: ChunkFactory>(
chunk_factory: &mut F,
sync_offset: usize,
) -> Result<Self> {
Ok(Self::Unconsolidated {
chunks: chunk_factory.create_chunk()?,
sync_offset,
length: 0,
})
}
/// Get total length in samples
pub fn len(&self) -> usize {
match self {
Self::Empty => 0,
Self::Unconsolidated { length, .. } => *length,
Self::Consolidated { buffer } => buffer.len(),
}
}
/// Check if audio data is empty
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Append samples during recording (only valid for Unconsolidated state)
pub fn append_samples<F: ChunkFactory>(
&mut self,
samples: &[f32],
chunk_factory: &mut F,
) -> Result<()> {
match self {
Self::Unconsolidated { chunks, length, .. } => {
chunks.append_samples(samples, chunk_factory)?;
*length += samples.len();
Ok(())
}
_ => Err(LooperError::OutOfBounds(std::panic::Location::caller())),
}
}
/// Copy samples to output buffer with volume scaling and looping
/// logical_position is the position within the column cycle (0 = beat 1)
pub fn copy_samples_to_output(
&self,
output_slice: &mut [f32],
logical_position: usize,
volume: f32,
) -> Result<()> {
match self {
Self::Empty => {
output_slice.fill(0.0);
Ok(())
}
Self::Unconsolidated {
chunks,
sync_offset,
length,
} => self.copy_unconsolidated_samples(
chunks,
*sync_offset,
*length,
output_slice,
logical_position,
volume,
),
Self::Consolidated { buffer } => {
self.copy_consolidated_samples(buffer, output_slice, logical_position, volume)
}
}
}
/// Get underlying chunk for post-record processing
pub fn get_chunk_for_processing(&self) -> Option<(Arc<AudioChunk>, usize)> {
match self {
Self::Unconsolidated {
chunks,
sync_offset,
..
} => Some((chunks.clone(), *sync_offset)),
_ => None,
}
}
/// Replace with consolidated buffer from post-record processing
pub fn set_consolidated_buffer(&mut self, buffer: Box<[f32]>) -> Result<()> {
match self {
Self::Unconsolidated {
length: old_length, ..
} => {
if buffer.len() != *old_length {
return Err(LooperError::OutOfBounds(std::panic::Location::caller()));
}
*self = Self::Consolidated { buffer };
Ok(())
}
_ => Err(LooperError::OutOfBounds(std::panic::Location::caller())),
}
}
/// Clear all audio data
pub fn clear(&mut self) -> Result<()> {
*self = Self::Empty;
Ok(())
}
}
impl AudioData {
    /// Copy samples from unconsolidated chunks with sync offset handling.
    ///
    /// The recording physically starts `sync_offset` samples after the
    /// column start, so a logical position is mapped into the buffer as
    /// `(logical_position + sync_offset) % length`. Output longer than the
    /// recording loops back to the start.
    fn copy_unconsolidated_samples(
        &self,
        chunks: &Arc<AudioChunk>,
        sync_offset: usize,
        length: usize,
        output_slice: &mut [f32],
        mut logical_position: usize,
        volume: f32,
    ) -> Result<()> {
        // Guard: with length == 0 the modulo below would divide by zero,
        // and there is nothing to play — emit silence.
        if length == 0 {
            output_slice.fill(0.0);
            return Ok(());
        }
        let mut samples_written = 0;
        let samples_needed = output_slice.len();
        while samples_written < samples_needed {
            // Map logical position to buffer position using sync offset
            let buffer_position = (logical_position + sync_offset) % length;
            let samples_remaining_in_output = samples_needed - samples_written;
            // This is the crucial change: determine how many contiguous samples can be read from the current buffer_position
            let readable_samples_from_here = length - buffer_position;
            let samples_to_copy = samples_remaining_in_output.min(readable_samples_from_here);
            // Copy from chunks to output slice
            chunks.copy_samples(
                &mut output_slice[samples_written..samples_written + samples_to_copy],
                buffer_position,
            )?;
            // Apply volume scaling in-place
            for sample in &mut output_slice[samples_written..samples_written + samples_to_copy] {
                *sample *= volume;
            }
            samples_written += samples_to_copy;
            // Advance and wrap within the recording length (loop playback).
            logical_position = (logical_position + samples_to_copy) % length;
        }
        Ok(())
    }

    /// Copy samples from a consolidated buffer (already reordered, so no
    /// sync-offset mapping is needed) with volume scaling and looping.
    fn copy_consolidated_samples(
        &self,
        buffer: &[f32],
        output_slice: &mut [f32],
        mut logical_position: usize,
        volume: f32,
    ) -> Result<()> {
        let length = buffer.len();
        // Empty buffer: emit silence instead of looping forever below.
        if length == 0 {
            output_slice.fill(0.0);
            return Ok(());
        }
        let mut samples_written = 0;
        let samples_needed = output_slice.len();
        while samples_written < samples_needed {
            let samples_remaining = samples_needed - samples_written;
            let samples_until_loop = length - logical_position;
            let samples_to_copy = samples_remaining.min(samples_until_loop);
            // Direct copy since consolidated buffer is already reordered
            let src = &buffer[logical_position..logical_position + samples_to_copy];
            let dest = &mut output_slice[samples_written..samples_written + samples_to_copy];
            dest.copy_from_slice(src);
            // Apply volume scaling in-place
            for sample in dest {
                *sample *= volume;
            }
            samples_written += samples_to_copy;
            logical_position += samples_to_copy;
            // Handle looping
            if logical_position >= length {
                logical_position = 0;
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    // Unit tests for AudioData: state transitions, sync-offset mapping,
    // looping playback, and volume scaling.
    use super::*;
    use crate::chunk_factory::mock::MockFactory;
    #[test]
    fn test_new_empty() {
        let audio_data = AudioData::new_empty();
        assert_eq!(audio_data.len(), 0);
        assert!(audio_data.is_empty());
    }
    #[test]
    fn test_new_unconsolidated() {
        // A fresh unconsolidated value records the offset and starts at length 0.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(1024)]);
        let sync_offset = 100;
        let audio_data = AudioData::new_unconsolidated(&mut factory, sync_offset).unwrap();
        match audio_data {
            AudioData::Unconsolidated {
                sync_offset: offset,
                length,
                ..
            } => {
                assert_eq!(offset, 100);
                assert_eq!(length, 0);
            }
            _ => panic!("Expected Unconsolidated variant"),
        }
    }
    #[test]
    fn test_append_samples_unconsolidated() {
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(1024)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        let samples = vec![1.0, 2.0, 3.0, 4.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        assert_eq!(audio_data.len(), 4);
        assert!(!audio_data.is_empty());
    }
    #[test]
    fn test_append_samples_invalid_state() {
        // Appending is only legal in the Unconsolidated state.
        let mut factory = MockFactory::new(vec![]);
        let mut audio_data = AudioData::new_empty();
        let samples = vec![1.0, 2.0];
        let result = audio_data.append_samples(&samples, &mut factory);
        assert!(result.is_err());
    }
    #[test]
    fn test_copy_samples_empty() {
        // Empty data always yields silence.
        let audio_data = AudioData::new_empty();
        let mut output = vec![99.0; 4]; // Fill with non-zero to verify silence
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap();
        assert_eq!(output, vec![0.0, 0.0, 0.0, 0.0]);
    }
    #[test]
    fn test_copy_samples_unconsolidated_no_offset() {
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        // Add some test samples
        let samples = vec![1.0, 2.0, 3.0, 4.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let mut output = vec![0.0; 4];
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap();
        assert_eq!(output, vec![1.0, 2.0, 3.0, 4.0]);
    }
    #[test]
    fn test_copy_samples_unconsolidated_with_offset() {
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let sync_offset = 2;
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, sync_offset).unwrap();
        // Recording order: [A, B, C, D] but recorded starting at beat 3 (sync_offset=2)
        // So logical mapping should be: beat1=C, beat2=D, beat3=A, beat4=B
        let samples = vec![10.0, 20.0, 30.0, 40.0]; // A, B, C, D
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let mut output = vec![0.0; 4];
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap(); // Start at logical beat 1
        // Should get: [C, D, A, B] = [30.0, 40.0, 10.0, 20.0]
        assert_eq!(output, vec![30.0, 40.0, 10.0, 20.0]);
    }
    #[test]
    fn test_copy_samples_unconsolidated_with_volume() {
        // Volume is applied multiplicatively to every copied sample.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        let samples = vec![1.0, 2.0, 3.0, 4.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let mut output = vec![0.0; 4];
        audio_data
            .copy_samples_to_output(&mut output, 0, 0.5)
            .unwrap(); // 50% volume
        assert_eq!(output, vec![0.5, 1.0, 1.5, 2.0]);
    }
    #[test]
    fn test_copy_samples_unconsolidated_looping() {
        // Requesting more output than the recording length wraps around.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        let samples = vec![1.0, 2.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let mut output = vec![0.0; 6]; // Request more samples than available
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap();
        // Should loop: [1.0, 2.0, 1.0, 2.0, 1.0, 2.0]
        assert_eq!(output, vec![1.0, 2.0, 1.0, 2.0, 1.0, 2.0]);
    }
    #[test]
    fn test_copy_samples_unconsolidated_offset_with_looping() {
        // Offset mapping and looping combined.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let sync_offset = 1;
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, sync_offset).unwrap();
        // Recording: [A, B, C] starting at beat 2 (sync_offset=1)
        // Logical mapping: beat1=B, beat2=C, beat3=A
        let samples = vec![10.0, 20.0, 30.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let mut output = vec![0.0; 6]; // Request 2 full loops
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap();
        // Should get: [B, C, A, B, C, A] = [20.0, 30.0, 10.0, 20.0, 30.0, 10.0]
        assert_eq!(output, vec![20.0, 30.0, 10.0, 20.0, 30.0, 10.0]);
    }
    #[test]
    fn test_copy_samples_consolidated() {
        // Consolidated playback is a straight copy (buffer already reordered).
        // Create consolidated data directly
        let buffer = vec![1.0, 2.0, 3.0, 4.0].into_boxed_slice();
        let audio_data = AudioData::Consolidated { buffer };
        let mut output = vec![0.0; 4];
        audio_data
            .copy_samples_to_output(&mut output, 0, 1.0)
            .unwrap();
        assert_eq!(output, vec![1.0, 2.0, 3.0, 4.0]);
    }
    #[test]
    fn test_get_chunk_for_processing() {
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let sync_offset = 42;
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, sync_offset).unwrap();
        let samples = vec![1.0, 2.0, 3.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        let result = audio_data.get_chunk_for_processing();
        assert!(result.is_some());
        let (chunk, offset) = result.unwrap();
        assert_eq!(offset, 42);
        assert_eq!(chunk.len(), 3);
    }
    #[test]
    fn test_get_chunk_for_processing_wrong_state() {
        // Only the Unconsolidated state exposes its chunks.
        let audio_data = AudioData::new_empty();
        let result = audio_data.get_chunk_for_processing();
        assert!(result.is_none());
    }
    #[test]
    fn test_set_consolidated_buffer() {
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 100).unwrap();
        let samples = vec![1.0, 2.0, 3.0, 4.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        // Create consolidated buffer with same length
        let consolidated_buffer = vec![10.0, 20.0, 30.0, 40.0].into_boxed_slice();
        audio_data
            .set_consolidated_buffer(consolidated_buffer)
            .unwrap();
        // Should now be Consolidated variant
        match audio_data {
            AudioData::Consolidated { buffer } => {
                assert_eq!(buffer.len(), 4);
            }
            _ => panic!("Expected Consolidated variant"),
        }
    }
    #[test]
    fn test_set_consolidated_buffer_length_mismatch() {
        // The consolidated buffer must match the recorded length exactly.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        let samples = vec![1.0, 2.0]; // Length 2
        audio_data.append_samples(&samples, &mut factory).unwrap();
        // Try to set buffer with different length
        let wrong_length_buffer = vec![10.0, 20.0, 30.0, 40.0].into_boxed_slice(); // Length 4
        let result = audio_data.set_consolidated_buffer(wrong_length_buffer);
        assert!(result.is_err());
    }
    #[test]
    fn test_clear() {
        // Clearing drops all samples and returns to the Empty state.
        let mut factory = MockFactory::new(vec![AudioChunk::allocate(10)]);
        let mut audio_data = AudioData::new_unconsolidated(&mut factory, 0).unwrap();
        let samples = vec![1.0, 2.0, 3.0];
        audio_data.append_samples(&samples, &mut factory).unwrap();
        assert_eq!(audio_data.len(), 3);
        audio_data.clear().unwrap();
        assert_eq!(audio_data.len(), 0);
        assert!(audio_data.is_empty());
        match audio_data {
            AudioData::Empty => (), // Expected
            _ => panic!("Expected Empty variant after clear"),
        }
    }
}

25
audio_engine/src/beep.rs Normal file
View File

@@ -0,0 +1,25 @@
use crate::*;
/// Generate a 100ms sine wave beep at 1000Hz
///
/// Renders the tone into a `Vec` and appends it to a chunk obtained from
/// `chunk_factory`; errors from the factory or the append are propagated.
pub fn generate_beep<F: ChunkFactory>(
    sample_rate: u32,
    chunk_factory: &mut F,
) -> Result<Arc<AudioChunk>> {
    const FREQUENCY_HZ: f32 = 1000.0;
    const DURATION_MS: f32 = 100.0;
    // Number of samples needed to cover the beep duration at this rate.
    let sample_count = ((sample_rate as f32) * (DURATION_MS / 1000.0)) as usize;
    // Synthesize the sine wave; `t` is the time of sample `i` in seconds.
    let samples: Vec<f32> = (0..sample_count)
        .map(|i| {
            let t = i as f32 / sample_rate as f32;
            (std::f32::consts::TAU * FREQUENCY_HZ * t).sin()
        })
        .collect();
    // Create AudioChunk and fill it with samples
    let mut chunk = chunk_factory.create_chunk()?;
    chunk.append_samples(&samples, chunk_factory)?;
    Ok(chunk)
}

View File

@@ -0,0 +1,40 @@
use crate::*;
/// Creates a new chunk of audio data.
///
/// The `Send` bound allows a factory to be moved to another thread.
pub trait ChunkFactory: Send {
    /// Produce a fresh chunk; what errors are possible is
    /// implementation-defined.
    fn create_chunk(&mut self) -> Result<std::sync::Arc<AudioChunk>>;
}
/// Blanket impl: any `Send` closure returning `Result<Arc<AudioChunk>>`
/// can be used directly as a `ChunkFactory` (handy for tests and inline
/// factories).
impl<F> ChunkFactory for F
where
    F: FnMut() -> Result<std::sync::Arc<AudioChunk>> + Send,
{
    fn create_chunk(&mut self) -> Result<std::sync::Arc<AudioChunk>> {
        self()
    }
}
#[cfg(test)]
pub mod mock {
    use super::*;
    /// Test factory that hands out a pre-built list of chunks.
    ///
    /// NOTE(review): chunks are served via `Vec::pop`, i.e. in LIFO order
    /// (the *last* element of the constructor `Vec` is returned first).
    /// Current tests pass identical chunks so this is invisible — confirm
    /// before relying on ordering in new tests.
    pub struct MockFactory {
        // Remaining chunks; popped from the back on each create_chunk call.
        chunks: Vec<std::sync::Arc<AudioChunk>>,
    }
    impl MockFactory {
        /// Wrap the given chunks; one is consumed per `create_chunk` call.
        pub fn new(chunks: Vec<std::sync::Arc<AudioChunk>>) -> Self {
            Self { chunks }
        }
    }
    impl ChunkFactory for MockFactory {
        /// Pop the next prepared chunk, or fail with `ChunkAllocation`
        /// once the supply is exhausted.
        fn create_chunk(&mut self) -> Result<std::sync::Arc<AudioChunk>> {
            if let Some(chunk) = self.chunks.pop() {
                Ok(chunk)
            } else {
                Err(LooperError::ChunkAllocation(std::panic::Location::caller()))
            }
        }
    }
}

View File

@@ -0,0 +1,84 @@
use crate::*;
use tokio::sync::{broadcast, watch};
/// Watches persisted state and JACK notifications, restoring saved port
/// connections whenever new ports appear.
pub struct ConnectionManager {
    // Source of the desired connections (persisted State).
    state_watch: watch::Receiver<State>,
    // JACK events; PortRegistered triggers a reconnection pass.
    notification_rx: broadcast::Receiver<JackNotification>,
    // Dedicated helper client used only to issue connect calls.
    jack_client: jack::Client,
    // Name of the *main* client whose ports are being wired.
    jack_client_name: String,
}
impl ConnectionManager {
pub fn new(
state_watch: watch::Receiver<State>,
notification_rx: broadcast::Receiver<JackNotification>,
jack_client_name: String,
) -> Result<Self> {
let (jack_client, _status) = jack::Client::new(
&format!("{}_connector", jack_client_name),
jack::ClientOptions::NO_START_SERVER,
)
.map_err(|_| LooperError::JackConnection(std::panic::Location::caller()))?;
Ok(Self {
state_watch,
notification_rx,
jack_client,
jack_client_name,
})
}
pub async fn run(&mut self) -> Result<()> {
self.restore_connections().await;
while let Ok(notification) = self.notification_rx.recv().await {
if let JackNotification::PortRegistered { .. } = notification {
self.restore_connections().await;
}
}
Ok(())
}
async fn restore_connections(&self) {
let state = self.state_watch.borrow();
for external_port in &state.connections.midi_in {
let our_port = format!("{}:midi_in", self.jack_client_name);
let result = self
.jack_client
.connect_ports_by_name(external_port, &our_port);
if let Ok(_) = result {
log::info!("Connected {} -> {}", external_port, our_port);
}
}
for external_port in &state.connections.audio_in {
let our_port = format!("{}:audio_in", self.jack_client_name);
let result = self
.jack_client
.connect_ports_by_name(external_port, &our_port);
if let Ok(_) = result {
log::info!("Connected {} -> {}", external_port, our_port);
}
}
for external_port in &state.connections.audio_out {
let our_port = format!("{}:audio_out", self.jack_client_name);
let result = self
.jack_client
.connect_ports_by_name(&our_port, external_port);
if let Ok(_) = result {
log::info!("Connected {} -> {}", our_port, external_port);
}
}
for external_port in &state.connections.click_track_out {
let our_port = format!("{}:click_track", self.jack_client_name);
let result = self
.jack_client
.connect_ports_by_name(&our_port, external_port);
if let Ok(_) = result {
log::info!("Connected {} -> {}", our_port, external_port);
}
}
}
}

View File

@@ -0,0 +1,25 @@
/// Crate-wide error type. Each variant carries the source location recorded
/// when the error value was constructed, to aid debugging without the cost
/// of a full backtrace.
#[derive(Debug, thiserror::Error)]
pub enum LooperError {
    #[error("Failed to allocate new audio chunk")]
    ChunkAllocation(&'static std::panic::Location<'static>),
    #[error("Cannot modify chunk with multiple references")]
    ChunkOwnership(&'static std::panic::Location<'static>),
    #[error("Index out of bounds")]
    OutOfBounds(&'static std::panic::Location<'static>),
    #[error("Failed to load state")]
    StateLoad(&'static std::panic::Location<'static>),
    #[error("Failed to save state")]
    StateSave(&'static std::panic::Location<'static>),
    #[error("Failed to connect to JACK")]
    JackConnection(&'static std::panic::Location<'static>),
    #[error("Irrecoverable XRUN")]
    Xrun(&'static std::panic::Location<'static>),
}
/// Convenience alias used throughout the crate.
pub type Result<T> = std::result::Result<T, LooperError>;

154
audio_engine/src/main.rs Normal file
View File

@@ -0,0 +1,154 @@
mod allocator;
mod audio_chunk;
mod audio_data;
mod beep;
mod chunk_factory;
mod connection_manager;
mod looper_error;
mod metronome;
mod midi;
mod notification_handler;
mod persistence_manager;
mod post_record_handler;
mod process_handler;
mod state;
mod track;
use std::sync::Arc;
use allocator::Allocator;
use audio_chunk::AudioChunk;
use audio_data::AudioData;
use beep::generate_beep;
use chunk_factory::ChunkFactory;
use connection_manager::ConnectionManager;
use looper_error::LooperError;
use looper_error::Result;
use metronome::Metronome;
use notification_handler::JackNotification;
use notification_handler::NotificationHandler;
use persistence_manager::PersistenceManager;
use post_record_handler::PostRecordController;
use post_record_handler::PostRecordHandler;
use process_handler::ProcessHandler;
use state::State;
use track::Track;
use track::TrackState;
use track::TrackTiming;
/// The four JACK ports registered by the looper's processing client.
pub struct JackPorts {
    pub audio_in: jack::Port<jack::AudioIn>,         // audio capture
    pub audio_out: jack::Port<jack::AudioOut>,       // main audio output
    pub click_track_out: jack::Port<jack::AudioOut>, // metronome click output
    pub midi_in: jack::Port<jack::MidiIn>,           // MIDI control input
}
#[tokio::main]
async fn main() {
    // Logging first, so every later failure is visible.
    simple_logger::SimpleLogger::new()
        .init()
        .expect("Could not initialize logger");
    let (jack_client, ports) = setup_jack();
    // Spawn the chunk allocator for the measured sample rate.
    // NOTE(review): the meaning of the second argument (3) is defined by
    // Allocator::spawn — confirm there before changing it.
    let mut allocator = Allocator::spawn(jack_client.sample_rate(), 3);
    let beep_samples = generate_beep(jack_client.sample_rate() as u32, &mut allocator)
        .expect("Could not generate beep samples");
    let notification_handler = NotificationHandler::new();
    let mut notification_channel = notification_handler.subscribe();
    let (mut persistence_manager, state_watch) =
        PersistenceManager::new(notification_handler.subscribe());
    // Load state values for metronome configuration
    let initial_state = state_watch.borrow().clone();
    // Create post-record handler and get controller for ProcessHandler
    let (mut post_record_handler, post_record_controller) =
        PostRecordHandler::new().expect("Could not create post-record handler");
    let process_handler = ProcessHandler::new(
        ports,
        allocator,
        beep_samples,
        &initial_state,
        post_record_controller,
    )
    .expect("Could not create process handler");
    let mut connection_manager = ConnectionManager::new(
        state_watch,
        notification_handler.subscribe(),
        jack_client.name().to_string(),
    )
    .expect("Could not create connection manager");
    // Hand the handlers to JACK; the returned guard keeps the client active
    // for as long as it is in scope.
    let _active_client = jack_client
        .activate_async(notification_handler, process_handler)
        .expect("Could not activate Jack");
    // Supervision loop: whichever task finishes (or Ctrl-C / JACK shutdown
    // arrives) first ends the program.
    loop {
        tokio::select! {
            notification = notification_channel.recv() => {
                if let Ok(JackNotification::Shutdown {reason, status}) = notification {
                    log::error!("Jack shutdown: {reason} {status:?}");
                    break;
                }
            }
            result = persistence_manager.run() => {
                if let Err(e) = result {
                    log::error!("StateManager task failed: {}", e);
                }
                break;
            }
            result = connection_manager.run() => {
                if let Err(e) = result {
                    log::error!("ConnectionManager task failed: {}", e);
                }
                break;
            }
            result = post_record_handler.run() => {
                if let Err(e) = result {
                    log::error!("PostRecordHandler task failed: {}", e);
                }
                break;
            }
            _ = tokio::signal::ctrl_c() => {
                log::info!("Stopping");
                break;
            }
        }
    }
}
/// Create the JACK client (named "looper") and register its four ports.
///
/// Panics on any failure: without a working JACK connection the
/// application cannot do anything useful.
fn setup_jack() -> (jack::Client, JackPorts) {
    let (jack_client, jack_status) =
        jack::Client::new("looper", jack::ClientOptions::NO_START_SERVER)
            .expect("Could not create Jack client");
    // Any non-empty status flag (e.g. name already taken) is treated as fatal.
    if !jack_status.is_empty() {
        panic!("Could not start jack client: {jack_status:?}");
    }
    let audio_in = jack_client
        .register_port("audio_in", jack::AudioIn::default())
        .expect("Could not create audio_in port");
    let audio_out = jack_client
        .register_port("audio_out", jack::AudioOut::default())
        .expect("Could not create audio_out port");
    let click_track_out = jack_client
        .register_port("click_track", jack::AudioOut::default())
        .expect("Could not create click_track_out port");
    let midi_in = jack_client
        .register_port("midi_in", jack::MidiIn::default())
        .expect("Could not create midi_in port");
    let ports = JackPorts {
        audio_in,
        audio_out,
        click_track_out,
        midi_in,
    };
    (jack_client, ports)
}

View File

@@ -0,0 +1,368 @@
use crate::*;
/// Beat clock and click-track renderer for the RT audio thread.
pub struct Metronome {
    // Audio playback
    click_samples: Arc<AudioChunk>, // Pre-rendered click sound
    click_volume: f32,              // Gain applied to the click (0.0..=1.0 per State docs)
    // Timing state
    frames_per_beat: u32,
    frames_since_last_beat: u32, // Where we are in the current beat cycle
    last_frame_time: Option<u32>, // For xrun detection; None until first process call
}
/// Beat timing for one process-callback buffer, including any frames that
/// were skipped because of an xrun.
#[derive(Debug, Clone, PartialEq)]
pub struct BufferTiming {
    /// Beat offset (in frames from buffer start) within the current buffer (if any)
    pub beat_in_buffer: Option<u32>,
    /// Number of frames missed due to xrun (0 if no xrun)
    pub missed_frames: u32,
    /// Beat offset (in frames) within the missed frames (if any)
    pub beat_in_missed: Option<u32>,
}
impl Metronome {
    /// Create a metronome from the pre-rendered click sound and the
    /// persisted configuration (tempo and click volume).
    pub fn new(click_samples: Arc<AudioChunk>, state: &State) -> Self {
        Self {
            click_samples,
            click_volume: state.metronome.click_volume,
            frames_per_beat: state.metronome.frames_per_beat,
            frames_since_last_beat: 0,
            last_frame_time: None,
        }
    }
    /// Number of audio frames between two consecutive beats.
    pub fn frames_per_beat(&self) -> u32 {
        self.frames_per_beat
    }
    /// Process audio for the current buffer: advance the beat clock, write
    /// the click (or silence) to the click-track port, and return the beat
    /// timing so the caller can align recording/playback.
    pub fn process(
        &mut self,
        ps: &jack::ProcessScope,
        ports: &mut JackPorts,
    ) -> Result<BufferTiming> {
        let buffer_size = ps.n_frames();
        let current_frame_time = ps.last_frame_time();
        // Advance the beat clock first; afterwards `frames_since_last_beat`
        // refers to the END of this buffer (render_click relies on that).
        let timing = self.calculate_timing(current_frame_time, buffer_size)?;
        // Get output buffer for click track
        let click_output = ports.click_track_out.as_mut_slice(ps);
        self.render_click(buffer_size, &timing, click_output);
        Ok(timing)
    }
    /// Render the click (or silence) for this buffer into `click_output`.
    fn render_click(
        &mut self,
        buffer_size: u32,
        timing: &BufferTiming,
        click_output: &mut [f32],
    ) {
        let click_length = self.click_samples.sample_count as u32;
        if let Some(beat_offset) = timing.beat_in_buffer {
            // Write silence up to the beat boundary
            let silence_end = beat_offset.min(buffer_size);
            click_output[0..silence_end as _].fill(0.0);
            // Write click samples from the boundary onward
            if beat_offset < buffer_size {
                let remaining_buffer = buffer_size - beat_offset;
                let samples_to_write = remaining_buffer.min(click_length);
                // Copy click samples in bulk
                let dest = &mut click_output
                    [beat_offset as usize..beat_offset as usize + samples_to_write as usize];
                if self.click_samples.copy_samples(dest, 0).is_ok() {
                    // Apply volume scaling
                    dest.iter_mut()
                        .for_each(|sample| *sample *= self.click_volume);
                }
                // Fill the remaining buffer with silence
                click_output[(beat_offset as usize + samples_to_write as usize)..].fill(0.0);
            }
        } else {
            // No beat starts in this buffer. `calculate_timing` has already
            // advanced `frames_since_last_beat` past this buffer, so the
            // beat-cycle position at the START of the buffer is that value
            // minus the buffer size.
            //
            // Fix: the previous code computed
            // `current_frame_time - frames_since_last_beat`, mixing the
            // absolute JACK frame counter with a cycle position. The result
            // was almost always far larger than the click length, so a click
            // spanning a buffer boundary was silenced, and the subtraction
            // could underflow-panic in debug builds.
            let frames_since_beat_start = self.frames_since_last_beat.saturating_sub(buffer_size);
            if frames_since_beat_start < click_length {
                // Continue playing the click started in an earlier buffer.
                let click_start_offset = frames_since_beat_start;
                let remaining_click_samples = click_length - click_start_offset;
                let samples_to_write = buffer_size.min(remaining_click_samples);
                // Copy remaining beep samples in bulk
                let dest = &mut click_output[0..samples_to_write as _];
                if self
                    .click_samples
                    .copy_samples(dest, click_start_offset as _)
                    .is_ok()
                {
                    // Apply volume scaling
                    dest.iter_mut()
                        .for_each(|sample| *sample *= self.click_volume);
                }
                // Fill the remaining buffer with silence
                click_output[samples_to_write as _..].fill(0.0);
            } else {
                click_output.fill(0.0);
            }
        }
    }
    /// Advance the beat clock by one buffer (plus any frames lost to an
    /// xrun) and report where beats fall.
    ///
    /// # Errors
    /// Returns `LooperError::Xrun` when more than one full beat would be
    /// skipped, since beat alignment can no longer be reconstructed.
    pub fn calculate_timing(
        &mut self,
        current_frame_time: u32,
        buffer_size: u32,
    ) -> Result<BufferTiming> {
        // Detect xrun
        let (missed_samples, beat_in_missed) = if let Some(last) = self.last_frame_time {
            let expected = last.wrapping_add(buffer_size); // Handle u32 wrap
            if current_frame_time != expected {
                // We have a gap
                let missed = current_frame_time.wrapping_sub(expected);
                // Check if we missed multiple beats
                let total_samples = self.frames_since_last_beat + missed + buffer_size;
                if total_samples >= 2 * self.frames_per_beat {
                    return Err(LooperError::Xrun(std::panic::Location::caller()));
                }
                // Check if a beat occurred in the missed section
                let beat_in_missed = if self.frames_since_last_beat + missed >= self.frames_per_beat
                {
                    Some(self.frames_per_beat - self.frames_since_last_beat)
                } else {
                    None
                };
                (missed, beat_in_missed)
            } else {
                (0, None)
            }
        } else {
            // First call
            (0, None)
        };
        // Check for beat in current buffer, accounting for missed samples
        let start_position = (self.frames_since_last_beat + missed_samples) % self.frames_per_beat;
        let beat_in_buffer = if start_position + buffer_size >= self.frames_per_beat {
            Some(self.frames_per_beat - start_position)
        } else {
            None
        };
        // Update state - advance by total samples (missed + buffer)
        self.frames_since_last_beat =
            (self.frames_since_last_beat + missed_samples + buffer_size) % self.frames_per_beat;
        self.last_frame_time = Some(current_frame_time);
        Ok(BufferTiming {
            beat_in_buffer,
            missed_frames: missed_samples,
            beat_in_missed,
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Build a metronome with a 100-sample constant click and the given beat
    // length, bypassing `new()` so no `State` value is required.
    fn create_test_metronome(samples_per_beat: u32) -> Metronome {
        let beep_samples = Arc::new(AudioChunk {
            samples: vec![1.0; 100].into_boxed_slice(),
            sample_count: 100,
            next: None,
        });
        Metronome {
            click_samples: beep_samples,
            click_volume: 1.0,
            frames_per_beat: samples_per_beat,
            frames_since_last_beat: 0,
            last_frame_time: None,
        }
    }
    #[test]
    fn test_first_call_initialization() {
        let mut metronome = create_test_metronome(1000);
        // First call: no previous frame time, so no xrun and no beat yet.
        let result = metronome.calculate_timing(5000, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 128);
        assert_eq!(metronome.last_frame_time, Some(5000));
        assert_eq!(result.missed_frames, 0);
        assert_eq!(result.beat_in_missed, None);
        assert_eq!(result.beat_in_buffer, None);
    }
    #[test]
    fn test_normal_buffer_no_beat() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 1000
        metronome.calculate_timing(1000, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 128);
        // Next buffer at 1128 - no beat expected
        let result = metronome.calculate_timing(1128, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 256);
        assert_eq!(result.missed_frames, 0);
        assert_eq!(result.beat_in_missed, None);
        assert_eq!(result.beat_in_buffer, None);
    }
    #[test]
    fn test_beat_in_buffer() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 0
        metronome.calculate_timing(0, 512).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 512);
        // Next buffer: 512 -> 1024, beat at 1000 (offset 488)
        let result = metronome.calculate_timing(512, 512).unwrap();
        assert_eq!(result.missed_frames, 0);
        assert_eq!(result.beat_in_missed, None);
        assert_eq!(result.beat_in_buffer, Some(488)); // 1000 - 512
        assert_eq!(metronome.frames_since_last_beat, 24); // (512 + 512) % 1000
    }
    #[test]
    fn test_xrun_no_missed_beat() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 1000
        metronome.calculate_timing(1000, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 128);
        // Normal buffer at 1128
        metronome.calculate_timing(1128, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 256);
        // Xrun: expected 1256 but got 1428 (172 samples missed)
        let result = metronome.calculate_timing(1428, 128).unwrap();
        assert_eq!(result.missed_frames, 172);
        assert_eq!(result.beat_in_missed, None);
        assert_eq!(result.beat_in_buffer, None);
        assert_eq!(metronome.frames_since_last_beat, 556); // 256 + 172 + 128
    }
    #[test]
    fn test_xrun_with_missed_beat() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 1000
        metronome.calculate_timing(1000, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 128);
        // Normal buffer at 1128
        metronome.calculate_timing(1128, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 256);
        // Xrun: expected 1256 but got 2228 (972 samples missed)
        // We're at position 256, miss 972 samples = 1228 total
        // Beat occurs at position 1000, so beat_in_missed = 1000 - 256 = 744
        let result = metronome.calculate_timing(2228, 128).unwrap();
        assert_eq!(result.missed_frames, 972);
        assert_eq!(result.beat_in_missed, Some(744)); // 1000 - 256
        assert_eq!(result.beat_in_buffer, None);
        assert_eq!(metronome.frames_since_last_beat, 356); // (256 + 972 + 128) % 1000
    }
    #[test]
    fn test_xrun_with_missed_beat_and_upcoming_beat() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 1000
        metronome.calculate_timing(1000, 128).unwrap();
        // Normal buffer at 1128
        metronome.calculate_timing(1128, 128).unwrap();
        assert_eq!(metronome.frames_since_last_beat, 256);
        // Xrun: expected 1256 but got 2078 (822 samples missed)
        // We're at position 256, miss 822 samples = 1078 total
        // Beat occurs at position 1000, so beat_in_missed = 1000 - 256 = 744
        // After missed: position = 78, buffer = 128, no beat in buffer
        let result = metronome.calculate_timing(2078, 128).unwrap();
        assert_eq!(result.missed_frames, 822);
        assert_eq!(result.beat_in_missed, Some(744)); // 1000 - 256
        assert_eq!(result.beat_in_buffer, None); // 78 + 128 < 1000
        assert_eq!(metronome.frames_since_last_beat, 206); // (256 + 822 + 128) % 1000
    }
    #[test]
    fn test_xrun_multiple_beats_error() {
        let mut metronome = create_test_metronome(1000);
        // Initialize at time 1000
        metronome.calculate_timing(1000, 128).unwrap();
        // Normal buffer at 1128
        metronome.calculate_timing(1128, 128).unwrap();
        // Xrun: expected 1256 but got 3328 (2072 samples missed)
        // Total advancement would be 256 + 2072 + 128 = 2456 samples
        // That's more than 2 beats (2000 samples), so error
        let result = metronome.calculate_timing(3328, 128);
        assert!(result.is_err());
    }
    #[test]
    fn test_consecutive_buffers_with_beat() {
        let mut metronome = create_test_metronome(1000);
        // First buffer - initialization
        let result1 = metronome.calculate_timing(0, 512).unwrap();
        assert_eq!(result1.beat_in_buffer, None);
        assert_eq!(metronome.frames_since_last_beat, 512);
        // Second buffer - beat should occur at position 1000
        let result2 = metronome.calculate_timing(512, 512).unwrap();
        assert_eq!(result2.beat_in_buffer, Some(488)); // 1000 - 512
        assert_eq!(metronome.frames_since_last_beat, 24); // (512 + 512) % 1000
        // Third buffer - no beat
        let result3 = metronome.calculate_timing(1024, 512).unwrap();
        assert_eq!(result3.beat_in_buffer, None);
        assert_eq!(metronome.frames_since_last_beat, 536);
        // Fourth buffer - next beat at position 1000 again
        let result4 = metronome.calculate_timing(1536, 512).unwrap();
        assert_eq!(result4.beat_in_buffer, Some(464)); // 1000 - 536
        assert_eq!(metronome.frames_since_last_beat, 48); // (536 + 512) % 1000
    }
    #[test]
    fn test_u32_wrapping() {
        let mut metronome = create_test_metronome(1000);
        // Initialize near u32::MAX so the next frame time wraps around zero.
        let start_time = u32::MAX - 100;
        metronome.calculate_timing(start_time, 128).unwrap();
        // Next buffer wraps around
        let next_time = start_time.wrapping_add(128);
        let result = metronome.calculate_timing(next_time, 128).unwrap();
        assert_eq!(result.missed_frames, 0);
        assert_eq!(metronome.frames_since_last_beat, 256);
    }
}

75
audio_engine/src/midi.rs Normal file
View File

@@ -0,0 +1,75 @@
use crate::*;
/// Process incoming MIDI events and dispatch the mapped footswitch
/// commands to the `ProcessHandler`.
///
/// RT-safe: events are staged in a fixed-size array, so no allocation
/// happens on the audio thread.
pub fn process_events<F: ChunkFactory>(
    process_handler: &mut ProcessHandler<F>,
    ps: &jack::ProcessScope,
) -> Result<()> {
    // Stage raw events first: the MIDI iterator borrows `process_handler`
    // immutably, so it must be exhausted before mutably dispatching commands.
    const MAX_EVENTS: usize = 16; // Reasonable limit for one audio period
    let mut raw_events = [[0u8; 3]; MAX_EVENTS];
    let mut event_count = 0;
    for midi_event in process_handler.ports.midi_in.iter(ps) {
        // Only 3-byte channel messages matter for the CC mapping below.
        // Shorter messages (1-byte realtime clock, 2-byte program change)
        // and longer sysex data are silently skipped.
        // Fix: these previously returned `OutOfBounds`, which made the
        // process callback quit the JACK client on a stray clock byte.
        if midi_event.bytes.len() != 3 {
            continue;
        }
        // Error only on genuine overflow of the staging array.
        if event_count >= MAX_EVENTS {
            return Err(LooperError::OutOfBounds(std::panic::Location::caller()));
        }
        raw_events[event_count].copy_from_slice(midi_event.bytes);
        event_count += 1;
    }
    // Now parse and dispatch the staged events using wmidi.
    for event_bytes in &raw_events[..event_count] {
        let Ok(message) = wmidi::MidiMessage::try_from(&event_bytes[..]) else {
            // Skip malformed MIDI messages instead of panicking
            continue;
        };
        if let wmidi::MidiMessage::ControlChange(_, controller, value) = message {
            // Only process button presses (value > 0), not releases.
            if u8::from(value) == 0 {
                continue;
            }
            match u8::from(controller) {
                // Button 1: Record/Play toggle
                20 => process_handler.record_toggle()?,
                // Button 2: Play/Mute
                21 => process_handler.play_toggle()?,
                // Button 3: Auto-stop record
                22 => process_handler.record_auto_stop()?,
                // Button 5: Clear track
                24 => process_handler.clear_track()?,
                // Other CC messages - ignore for now
                _ => {}
            }
        }
        // Non-CC messages are ignored for now.
    }
    Ok(())
}

View File

@@ -0,0 +1,79 @@
/// Bridges JACK's notification callbacks onto a tokio broadcast channel so
/// async tasks can react to server events.
pub struct NotificationHandler {
    // Broadcast sender; each interested task gets its own receiver via subscribe().
    channel: tokio::sync::broadcast::Sender<JackNotification>,
}
/// JACK server events forwarded to the async side of the application.
#[derive(Clone, Debug)]
pub enum JackNotification {
    /// The JACK server shut the client down.
    Shutdown {
        status: jack::ClientStatus,
        reason: String,
    },
    /// Two ports were connected or disconnected (names already resolved).
    PortConnect {
        port_a: String,
        port_b: String,
        connected: bool,
    },
    /// A new port appeared somewhere in the JACK graph.
    PortRegistered {},
}
impl NotificationHandler {
    /// Create a handler backed by a fresh broadcast channel (capacity 16).
    pub fn new() -> Self {
        let (channel, _) = tokio::sync::broadcast::channel(16);
        Self { channel }
    }
    /// Obtain a new receiver; each subscriber sees every notification sent
    /// after it subscribed.
    pub fn subscribe(&self) -> tokio::sync::broadcast::Receiver<JackNotification> {
        self.channel.subscribe()
    }
}
/// `Default` mirrors `new()` (satisfies the `clippy::new_without_default`
/// lint and APIs that expect `Default`).
impl Default for NotificationHandler {
    fn default() -> Self {
        Self::new()
    }
}
impl jack::NotificationHandler for NotificationHandler {
    /// Forward the server shutdown event to subscribers.
    ///
    /// Fix: `broadcast::Sender::send` returns `Err` whenever there are no
    /// active receivers, which is a legitimate situation during startup or
    /// teardown — so the result is ignored instead of panicking inside the
    /// JACK callback thread (previously `.expect(...)`).
    unsafe fn shutdown(&mut self, status: jack::ClientStatus, reason: &str) {
        let reason = reason.to_string();
        let _ = self
            .channel
            .send(JackNotification::Shutdown { status, reason });
    }
    /// Forward port (dis)connection events with port IDs resolved to names.
    fn ports_connected(
        &mut self,
        client: &jack::Client,
        port_id_a: jack::PortId,
        port_id_b: jack::PortId,
        connect: bool,
    ) {
        // Convert port IDs to port names; ignore ports that vanished in the
        // meantime or whose names cannot be read.
        let Some(port_a) = client.port_by_id(port_id_a) else {
            return;
        };
        let Ok(port_a) = port_a.name() else { return };
        let Some(port_b) = client.port_by_id(port_id_b) else {
            return;
        };
        let Ok(port_b) = port_b.name() else { return };
        let notification = JackNotification::PortConnect {
            port_a,
            port_b,
            connected: connect,
        };
        // Dropped (not panicked) when nobody is subscribed.
        let _ = self.channel.send(notification);
    }
    /// Forward new-port registrations (unregistrations are ignored).
    fn port_registration(
        &mut self,
        _client: &jack::Client,
        _port_id: jack::PortId,
        register: bool,
    ) {
        if register {
            // Dropped (not panicked) when nobody is subscribed.
            let _ = self.channel.send(JackNotification::PortRegistered {});
        }
    }
}

View File

@@ -0,0 +1,132 @@
use crate::*;
use std::path::PathBuf;
use tokio::sync::{broadcast, watch};
/// Persists application state (port connections, metronome settings) to a
/// JSON file and broadcasts changes via a watch channel.
pub struct PersistenceManager {
    // In-memory copy of the persisted state.
    state: State,
    // Watch sender used to broadcast every state change.
    state_tx: watch::Sender<State>,
    // JACK notifications driving connection-state updates.
    notification_rx: broadcast::Receiver<JackNotification>,
    // Location of the JSON state file (~/.fcb_looper/state.json).
    state_file_path: PathBuf,
}
impl PersistenceManager {
pub fn new(
notification_rx: broadcast::Receiver<JackNotification>,
) -> (Self, watch::Receiver<State>) {
let state_file_path = Self::get_state_file_path();
let initial_state = Self::load_from_disk(&state_file_path).unwrap_or_default();
let (state_tx, state_rx) = watch::channel(initial_state.clone());
let manager = Self {
state: initial_state,
state_tx,
notification_rx,
state_file_path,
};
(manager, state_rx)
}
pub async fn run(&mut self) -> Result<()> {
while let Ok(notification) = self.notification_rx.recv().await {
if let JackNotification::PortConnect {
port_a,
port_b,
connected,
} = notification
{
self.handle_port_connection(port_a, port_b, connected)?;
}
}
Ok(())
}
fn handle_port_connection(
&mut self,
port_a: String,
port_b: String,
connected: bool,
) -> Result<()> {
// Determine which port is ours and which is external
let (our_port, external_port) = if port_a.starts_with("looper:") {
(port_a, port_b)
} else if port_b.starts_with("looper:") {
(port_b, port_a)
} else {
// Neither port is ours, ignore
return Ok(());
};
// Update the connections state
let our_port_name = our_port.strip_prefix("looper:").unwrap_or(&our_port);
let connections = &mut self.state.connections;
let port_list = match our_port_name {
"midi_in" => &mut connections.midi_in,
"audio_in" => &mut connections.audio_in,
"audio_out" => &mut connections.audio_out,
"click_track" => &mut connections.click_track_out,
_ => {
log::warn!("Unknown port: {}", our_port_name);
return Ok(());
}
};
if connected {
// Add connection if not already present
if !port_list.contains(&external_port) {
port_list.push(external_port.clone());
log::info!("Added connection: {} -> {}", our_port, external_port);
}
} else {
// Remove connection
port_list.retain(|p| p != &external_port);
log::info!("Removed connection: {} -> {}", our_port, external_port);
}
// Broadcast state change
if let Err(e) = self.state_tx.send(self.state.clone()) {
log::error!("Failed to broadcast state change: {}", e);
}
// Save to disk
self.save_to_disk()?;
Ok(())
}
fn get_state_file_path() -> PathBuf {
let mut path = dirs::home_dir().unwrap_or_else(|| PathBuf::from("."));
path.push(".fcb_looper");
std::fs::create_dir_all(&path).ok(); // Create directory if it doesn't exist
path.push("state.json");
path
}
fn load_from_disk(path: &PathBuf) -> Result<State> {
match std::fs::read_to_string(path) {
Ok(content) => {
let state: State = serde_json::from_str(&content)
.map_err(|_| LooperError::StateLoad(std::panic::Location::caller()))?;
log::info!("Loaded state from {:?}", path);
Ok(state)
}
Err(_) => {
log::info!("Could not load state from {:?}. Using defaults.", path);
Err(LooperError::StateLoad(std::panic::Location::caller()))
}
}
}
fn save_to_disk(&self) -> Result<()> {
let json = serde_json::to_string_pretty(&self.state)
.map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
std::fs::write(&self.state_file_path, json)
.map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
log::debug!("Saved state to {:?}", self.state_file_path);
Ok(())
}
}

View File

@@ -0,0 +1,227 @@
use crate::*;
use std::path::PathBuf;
/// Request to process a recorded chunk chain with sync offset.
#[derive(Debug)]
pub struct PostRecordRequest {
    /// Head of the recorded chunk chain.
    pub chunk_chain: Arc<AudioChunk>,
    /// Offset (in samples) of logical beat 1 within the recording.
    pub sync_offset: u32,
    /// Sample rate used when writing the WAV file.
    pub sample_rate: u32,
}
/// Response containing the consolidated (and sync-reordered) buffer.
#[derive(Debug)]
pub struct PostRecordResponse {
    pub consolidated_buffer: Box<[f32]>,
}
/// RT-side interface for post-record operations; both endpoints are the
/// non-blocking (sync) halves of bounded kanal channels.
#[derive(Debug)]
pub struct PostRecordController {
    request_sender: kanal::Sender<PostRecordRequest>,
    response_receiver: kanal::Receiver<PostRecordResponse>,
}
impl PostRecordController {
    /// Hand a finished recording to the background task (RT-safe,
    /// non-blocking). Fails when the request channel is full or closed.
    pub fn send_request(
        &self,
        chunk_chain: Arc<AudioChunk>,
        sync_offset: u32,
        sample_rate: u32,
    ) -> Result<()> {
        let request = PostRecordRequest {
            chunk_chain,
            sync_offset,
            sample_rate,
        };
        // `try_send` yields Ok(true) on success, Ok(false) when the channel
        // is full, and Err(_) when it is closed; the latter two cases map to
        // the same error.
        match self.request_sender.try_send(request) {
            Ok(true) => Ok(()),
            Ok(false) | Err(_) => {
                Err(LooperError::ChunkAllocation(std::panic::Location::caller()))
            }
        }
    }
    /// Poll for a finished consolidation result (RT-safe, non-blocking).
    pub fn try_recv_response(&self) -> Option<PostRecordResponse> {
        // Collapse "no message yet" and "channel closed" into None.
        self.response_receiver.try_recv().ok().flatten()
    }
}
/// Handles post-record processing off the RT thread: consolidation with
/// sync offset, hand-back to the RT side, and WAV saving.
pub struct PostRecordHandler {
    // Async halves of the channels shared with PostRecordController.
    request_receiver: kanal::AsyncReceiver<PostRecordRequest>,
    response_sender: kanal::AsyncSender<PostRecordResponse>,
    // Directory recordings are written to (~/.fcb_looper).
    directory: PathBuf,
}
impl PostRecordHandler {
    /// Create a new handler and return the RT-side controller.
    pub fn new() -> Result<(Self, PostRecordController)> {
        // Bounded channels in both directions; the RT side keeps the
        // non-blocking sync endpoints, this task gets the async ones.
        let (request_sender, request_receiver) = kanal::bounded(16);
        let (response_sender, response_receiver) = kanal::bounded(16);
        let request_receiver = request_receiver.to_async();
        let response_sender = response_sender.to_async();
        let controller = PostRecordController {
            request_sender,
            response_receiver,
        };
        let handler = Self {
            request_receiver,
            response_sender,
            directory: Self::create_directory()?,
        };
        Ok((handler, controller))
    }
    /// Run the async processing task until the request channel closes.
    pub async fn run(&mut self) -> Result<()> {
        while let Ok(request) = self.request_receiver.recv().await {
            if let Err(e) = self.process_request(request).await {
                log::error!("Failed to process post-record request: {}", e);
            }
        }
        Ok(())
    }
    /// Process a single request: consolidate + reorder the recording, ship
    /// it back to the RT thread, then save the original order as WAV.
    async fn process_request(&self, request: PostRecordRequest) -> Result<()> {
        log::debug!(
            "Processing post-record request for {} samples with sync_offset {}",
            request.chunk_chain.len(),
            request.sync_offset
        );
        // Step 1: Consolidate and reorder chunk chain based on sync offset
        let consolidated_buffer =
            self.consolidate_with_sync_offset(&request.chunk_chain, request.sync_offset as usize)?;
        log::debug!(
            "Consolidated and reordered {} samples",
            consolidated_buffer.len()
        );
        // Step 2: Send consolidated buffer back to RT thread immediately
        let response = PostRecordResponse {
            consolidated_buffer,
        };
        if self.response_sender.send(response).await.is_err() {
            log::warn!("Failed to send consolidated buffer to RT thread");
        }
        // Step 3: Save WAV file in background (I/O intensive).
        // Use original chunk chain for saving (not reordered).
        let consolidated_chunk = AudioChunk::consolidate(&request.chunk_chain);
        let file_path = self.get_file_path();
        match self
            .save_wav_file(&consolidated_chunk, request.sample_rate, &file_path)
            .await
        {
            Ok(_) => log::info!("Saved recording to {:?}", file_path),
            Err(e) => log::error!("Failed to save recording to {:?}: {}", file_path, e),
        }
        Ok(())
    }
    /// Consolidate the chunk chain and rotate the samples so the one at
    /// `sync_offset` (logical beat 1) ends up at index 0.
    fn consolidate_with_sync_offset(
        &self,
        chunk_chain: &Arc<AudioChunk>,
        sync_offset: usize,
    ) -> Result<Box<[f32]>> {
        let total_length = chunk_chain.len();
        if total_length == 0 {
            return Ok(Vec::new().into_boxed_slice());
        }
        // Flatten the chunk chain into one contiguous buffer.
        let mut all_samples = vec![0.0; total_length];
        chunk_chain.copy_samples(&mut all_samples, 0)?;
        // out[i] = in[(i + sync_offset) % len] is exactly a left rotation;
        // `rotate_left` does it in place, replacing the previous
        // element-by-element copy into a second Vec. A zero offset is a
        // no-op, matching the old early return.
        all_samples.rotate_left(sync_offset % total_length);
        Ok(all_samples.into_boxed_slice())
    }
    /// Save the consolidated chunk as a 32-bit-float mono WAV file.
    async fn save_wav_file(
        &self,
        chunk: &AudioChunk,
        sample_rate: u32,
        file_path: &PathBuf,
    ) -> Result<()> {
        // Run WAV writing in a blocking task to avoid stalling the runtime.
        let chunk_samples: Vec<f32> = chunk.samples[..chunk.sample_count].to_vec();
        let file_path_clone = file_path.clone();
        tokio::task::spawn_blocking(move || {
            let spec = hound::WavSpec {
                channels: 1,
                sample_rate,
                bits_per_sample: 32,
                sample_format: hound::SampleFormat::Float,
            };
            let mut writer = hound::WavWriter::create(&file_path_clone, spec)
                .map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
            // Write all samples from the chunk
            for sample in chunk_samples {
                writer
                    .write_sample(sample)
                    .map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
            }
            writer
                .finalize()
                .map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
            Ok::<(), LooperError>(())
        })
        .await
        .map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?
    }
    /// Create the save directory (`~/.fcb_looper`) and return its path.
    fn create_directory() -> Result<PathBuf> {
        let mut path = dirs::home_dir().unwrap_or_else(|| PathBuf::from("."));
        path.push(".fcb_looper");
        std::fs::create_dir_all(&path)
            .map_err(|_| LooperError::StateSave(std::panic::Location::caller()))?;
        Ok(path)
    }
    /// File path the track recording is written to.
    fn get_file_path(&self) -> PathBuf {
        self.directory.join("track.wav")
    }
}

View File

@@ -0,0 +1,248 @@
use crate::*;
// Testing constants for sync offset functionality.
// NOTE(review): hard-coded for testing; presumably these will later come
// from configuration or user input — confirm before release.
const SYNC_OFFSET_BEATS: u32 = 2; // Start recording at beat 3 (0-indexed)
const AUTO_STOP_BEATS: u32 = 4; // Record for 4 beats total
/// The realtime JACK process handler: owns the track, the metronome and
/// all ports, and coordinates recording/playback per audio buffer.
pub struct ProcessHandler<F: ChunkFactory> {
    track: Track,
    // Current playback index into the track, in samples.
    playback_position: usize,
    // Public so midi::process_events can iterate the MIDI input port.
    pub ports: JackPorts,
    chunk_factory: F,
    metronome: Metronome,
    // RT-safe channel endpoints to the background post-record task.
    post_record_controller: PostRecordController,
}
impl<F: ChunkFactory> ProcessHandler<F> {
    /// Build the process handler from its pre-allocated collaborators.
    pub fn new(
        ports: JackPorts,
        chunk_factory: F,
        beep_samples: Arc<AudioChunk>,
        state: &State,
        post_record_controller: PostRecordController,
    ) -> Result<Self> {
        Ok(Self {
            track: Track::new(),
            playback_position: 0,
            ports,
            chunk_factory,
            metronome: Metronome::new(beep_samples, state),
            post_record_controller,
        })
    }
    /// Handle record/play toggle button (Button 1).
    /// All button handlers only queue a state change; the transition is
    /// applied by the track during the next beat-aligned process call.
    pub fn record_toggle(&mut self) -> Result<()> {
        self.track.queue_record_toggle();
        Ok(())
    }
    /// Handle play/mute toggle button (Button 2).
    pub fn play_toggle(&mut self) -> Result<()> {
        self.track.queue_play_toggle();
        Ok(())
    }
    /// Handle auto-stop record button (Button 3): record AUTO_STOP_BEATS
    /// beats, with logical beat 1 placed SYNC_OFFSET_BEATS in.
    pub fn record_auto_stop(&mut self) -> Result<()> {
        let samples_per_beat = self.metronome.frames_per_beat();
        let sync_offset = SYNC_OFFSET_BEATS * samples_per_beat;
        let target_samples = AUTO_STOP_BEATS * samples_per_beat;
        self.track
            .queue_record_auto_stop(target_samples as usize, sync_offset as usize);
        Ok(())
    }
    /// Handle clear button (Button 5).
    pub fn clear_track(&mut self) -> Result<()> {
        self.track.queue_clear();
        Ok(())
    }
}
impl<F: ChunkFactory> jack::ProcessHandler for ProcessHandler<F> {
    /// Realtime audio callback. Order matters:
    /// 1. MIDI (queues state transitions on the track),
    /// 2. metronome (renders the click, yields beat timing),
    /// 3. track audio (record/playback aligned to the beat),
    /// 4. post-record hand-off and playback-position bookkeeping.
    /// Any error quits the JACK client rather than continuing in an
    /// inconsistent state.
    fn process(&mut self, client: &jack::Client, ps: &jack::ProcessScope) -> jack::Control {
        // Process MIDI first - this updates next_state on the track
        if let Err(e) = midi::process_events(self, ps) {
            log::error!("Error processing MIDI events: {}", e);
            return jack::Control::Quit;
        }
        // Process metronome and get beat timing information
        let beat_sample_index = match self.metronome.process(ps, &mut self.ports) {
            Ok(beat_index) => beat_index,
            Err(e) => {
                log::error!("Error processing metronome: {}", e);
                return jack::Control::Quit;
            }
        }
        .beat_in_buffer;
        let buffer_size = client.buffer_size() as usize;
        // Snapshot the state before track processing: position updates below
        // depend on the state at the start of the buffer.
        let state_before = self.track.current_state().clone();
        // Calculate timing information for track processing
        let timing = self.calculate_track_timing(beat_sample_index, &state_before);
        // Process track audio with calculated timing
        let should_consolidate =
            match self
                .track
                .process(ps, &mut self.ports, timing, &mut self.chunk_factory)
            {
                Ok(consolidate) => consolidate,
                Err(e) => {
                    log::error!("Error processing track: {}", e);
                    return jack::Control::Quit;
                }
            };
        // Handle post-record processing
        if let Err(e) =
            self.handle_post_record_processing(should_consolidate, client.sample_rate() as u32)
        {
            log::error!("Error handling post-record processing: {}", e);
            return jack::Control::Quit;
        }
        // Update playback position based on what happened
        self.update_playback_position(beat_sample_index, buffer_size, &state_before);
        jack::Control::Continue
    }
}
impl<F: ChunkFactory> ProcessHandler<F> {
    /// Hand finished recordings to the background post-record task and
    /// install consolidated buffers it sends back. Both channel operations
    /// are non-blocking, so this is safe on the RT thread.
    fn handle_post_record_processing(
        &mut self,
        should_consolidate: bool,
        sample_rate: u32,
    ) -> Result<()> {
        // Send audio data for processing if track indicates consolidation needed
        if should_consolidate {
            if let Some((chunk_chain, sync_offset)) = self.track.get_audio_data_for_processing() {
                self.post_record_controller.send_request(
                    chunk_chain,
                    sync_offset as u32,
                    sample_rate,
                )?;
            }
        }
        // Check for consolidation response
        if let Some(response) = self.post_record_controller.try_recv_response() {
            self.track
                .set_consolidated_buffer(response.consolidated_buffer)?;
        }
        Ok(())
    }
    /// Translate the metronome's beat position (if any) into the timing
    /// information the track needs for this buffer.
    fn calculate_track_timing(
        &self,
        beat_sample_index: Option<u32>,
        state_before: &TrackState,
    ) -> TrackTiming {
        match beat_sample_index {
            None => {
                // No beat in this buffer
                TrackTiming::NoBeat {
                    position: self.playback_position,
                }
            }
            Some(beat_index) => {
                let beat_index = beat_index as usize;
                let pre_beat_position = self.playback_position;
                let post_beat_position = self.calculate_post_beat_position(state_before);
                TrackTiming::Beat {
                    pre_beat_position,
                    post_beat_position,
                    beat_sample_index: beat_index,
                }
            }
        }
    }
    /// Calculate the correct playback position after a beat transition.
    fn calculate_post_beat_position(&self, state_before: &TrackState) -> usize {
        let state_after = self.track.next_state(); // Use next_state since transition hasn't happened yet
        match (state_before, state_after) {
            (_, TrackState::Playing) if !matches!(state_before, TrackState::Playing) => {
                // Just started playing - start from beginning
                // Note: In future Column implementation, this will be:
                // column.get_sync_position() to sync with other playing tracks
                0
            }
            (TrackState::Playing, TrackState::Playing) => {
                // Continue playing - use current position
                self.playback_position
            }
            _ => {
                // Not playing after transition - position doesn't matter
                self.playback_position
            }
        }
    }
    /// Update the playback position after track processing, accounting for
    /// a possible state transition at the beat boundary.
    fn update_playback_position(
        &mut self,
        beat_sample_index: Option<u32>,
        buffer_size: usize,
        state_before: &TrackState,
    ) {
        let state_after = self.track.current_state().clone();
        match beat_sample_index {
            None => {
                // No beat - simple position update
                if *state_before == TrackState::Playing {
                    self.advance_playback_position(buffer_size);
                }
            }
            Some(beat_index) => {
                let beat_index = beat_index as usize;
                // Handle position updates around beat boundary
                if beat_index > 0 && *state_before == TrackState::Playing {
                    // Advance position for samples before beat
                    self.advance_playback_position(beat_index);
                }
                // Check if state transition at beat affects position
                if state_after == TrackState::Playing
                    && !matches!(state_before, TrackState::Playing)
                {
                    // Started playing at beat - reset position to post-beat calculation
                    self.playback_position = self.calculate_post_beat_position(state_before);
                }
                // Advance position for samples after beat if playing
                if beat_index < buffer_size && state_after == TrackState::Playing {
                    let samples_after_beat = buffer_size - beat_index;
                    self.advance_playback_position(samples_after_beat);
                }
            }
        }
    }
    /// Advance the playback position by `samples`, wrapping at the track
    /// length (position stays 0 for an empty track).
    fn advance_playback_position(&mut self, samples: usize) {
        let track_len = self.track.len();
        if track_len == 0 {
            self.playback_position = 0;
            return;
        }
        // Single modulo replaces the previous repeated-subtraction loop;
        // the result is identical for any starting position.
        self.playback_position = (self.playback_position + samples) % track_len;
    }
}

38
audio_engine/src/state.rs Normal file
View File

@@ -0,0 +1,38 @@
use serde::{Deserialize, Serialize};
/// Top-level persisted engine configuration: saved port connections plus
/// metronome settings. Serialized/deserialized with serde; see the `Default`
/// impl for the startup values.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct State {
    /// Port connections to restore on startup.
    pub connections: ConnectionState,
    /// Metronome tempo and click volume.
    pub metronome: MetronomeState,
}
/// Saved connections for each of the engine's ports.
/// NOTE(review): entries are presumably JACK port names to connect to
/// (the crate depends on `jack`) - confirm against the connection code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConnectionState {
    /// Sources connected to the MIDI input port.
    pub midi_in: Vec<String>,
    /// Sources connected to the audio input port.
    pub audio_in: Vec<String>,
    /// Destinations connected to the main audio output port.
    pub audio_out: Vec<String>,
    /// Destinations connected to the metronome click output port.
    pub click_track_out: Vec<String>,
}
/// Metronome configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetronomeState {
    /// Number of sample frames between beats (tempo expressed in frames,
    /// so it is sample-rate dependent).
    pub frames_per_beat: u32,
    /// Click playback volume, 0.0 to 1.0.
    pub click_volume: f32, // 0.0 to 1.0
}
impl Default for State {
    /// Startup defaults: no saved connections, metronome at 120 BPM
    /// (assuming a 192 kHz sample rate) with the click at 50% volume.
    fn default() -> Self {
        let connections = ConnectionState {
            midi_in: vec![],
            audio_in: vec![],
            audio_out: vec![],
            click_track_out: vec![],
        };
        let metronome = MetronomeState {
            frames_per_beat: 96000, // 120 BPM at 192kHz sample rate
            click_volume: 0.5,      // Default 50% volume
        };
        Self {
            connections,
            metronome,
        }
    }
}

334
audio_engine/src/track.rs Normal file
View File

@@ -0,0 +1,334 @@
use crate::*;
/// Lifecycle state of a single looper track. The parenthesised labels are
/// the display names for each state.
#[derive(Debug, Clone, PartialEq)]
pub enum TrackState {
    Empty, // No audio data (---)
    Idle, // Has data, not playing (READY)
    Playing, // Currently playing (PLAY)
    Recording, // Currently recording (REC) - stopped manually
    // Recording that stops itself once enough samples are captured.
    RecordingAutoStop {
        target_samples: usize, // Auto-stop when this many samples recorded
        sync_offset: usize, // Offset in samples from column start
    },
}
/// Per-buffer timing information passed to `Track::process`, describing
/// whether a beat boundary falls inside the current audio buffer.
#[derive(Debug)]
pub enum TrackTiming {
    /// No beat in this buffer: process the whole buffer at `position`.
    NoBeat {
        position: usize,
    },
    /// A beat falls at `beat_sample_index` within the buffer: samples before
    /// it use `pre_beat_position`, samples after it use `post_beat_position`
    /// (state transitions are applied exactly at the beat).
    Beat {
        pre_beat_position: usize,
        post_beat_position: usize,
        beat_sample_index: usize,
    },
}
/// A single looper track: recorded audio plus a double-buffered state
/// (`current_state` is active now, `next_state` is applied at the next
/// beat boundary so all transitions are beat-quantised).
pub struct Track {
    // Recorded samples (chunked while recording, consolidated afterwards).
    audio_data: AudioData,
    // State driving audio processing right now.
    current_state: TrackState,
    // State queued by MIDI commands; swapped in at the next beat.
    next_state: TrackState,
    // Playback gain, clamped to 0.0..=1.0 by set_volume().
    volume: f32,
}
impl Track {
    /// Create a new, empty track at full volume.
    pub fn new() -> Self {
        Self {
            audio_data: AudioData::new_empty(),
            current_state: TrackState::Empty,
            next_state: TrackState::Empty,
            volume: 1.0,
        }
    }
    /// Main audio processing method called from ProcessHandler.
    ///
    /// Splits the buffer around an optional beat boundary, applies the queued
    /// state transition exactly at the beat, and processes each half with the
    /// state that is active for it.
    ///
    /// Returns `Ok(true)` if the track should be consolidated and saved
    /// (i.e. a recording → playing transition happened this cycle).
    pub fn process<F: ChunkFactory>(
        &mut self,
        ps: &jack::ProcessScope,
        ports: &mut JackPorts,
        timing: TrackTiming,
        chunk_factory: &mut F,
    ) -> Result<bool> {
        let input_buffer = ports.audio_in.as_slice(ps);
        let output_buffer = ports.audio_out.as_mut_slice(ps);
        let buffer_size = output_buffer.len();
        let should_consolidate = match timing {
            TrackTiming::NoBeat { position } => {
                // No beat in this buffer - process entire buffer with current state
                self.process_audio_range(
                    input_buffer,
                    output_buffer,
                    0,
                    buffer_size,
                    position,
                    chunk_factory,
                )?;
                false // No state transition possible without beat
            }
            TrackTiming::Beat {
                pre_beat_position,
                post_beat_position,
                beat_sample_index,
            } => {
                if beat_sample_index > 0 {
                    // Process samples before beat with current state
                    self.process_audio_range(
                        input_buffer,
                        output_buffer,
                        0,
                        beat_sample_index,
                        pre_beat_position,
                        chunk_factory,
                    )?;
                }
                // Apply state transition at beat boundary and check if consolidation needed
                let should_consolidate = self.apply_state_transition(chunk_factory)?;
                if beat_sample_index < buffer_size {
                    // Process samples after beat with new current state
                    self.process_audio_range(
                        input_buffer,
                        output_buffer,
                        beat_sample_index,
                        buffer_size,
                        post_beat_position,
                        chunk_factory,
                    )?;
                }
                should_consolidate
            }
        };
        Ok(should_consolidate)
    }
    /// Process audio for the half-open range `[start_index, end_index)` of the
    /// buffer using the current state: silence when Empty/Idle, capture input
    /// when Recording (with auto-stop bookkeeping for RecordingAutoStop), and
    /// looped playback when Playing.
    fn process_audio_range<F: ChunkFactory>(
        &mut self,
        input_buffer: &[f32],
        output_buffer: &mut [f32],
        start_index: usize,
        end_index: usize,
        playback_position: usize,
        chunk_factory: &mut F,
    ) -> Result<()> {
        let sample_count = end_index - start_index;
        if sample_count == 0 {
            return Ok(());
        }
        match &mut self.current_state {
            TrackState::Empty | TrackState::Idle => {
                // Output silence for this range
                output_buffer[start_index..end_index].fill(0.0);
            }
            TrackState::Recording => {
                // Record input samples (manual recording)
                let samples_to_record = &input_buffer[start_index..end_index];
                self.audio_data
                    .append_samples(samples_to_record, chunk_factory)?;
                // Output silence during recording
                output_buffer[start_index..end_index].fill(0.0);
            }
            TrackState::RecordingAutoStop { target_samples, .. } => {
                // Record input samples with auto-stop logic
                let samples_to_record = &input_buffer[start_index..end_index];
                let current_length = self.audio_data.len();
                if current_length < *target_samples {
                    // Still recording - determine how many samples to actually record
                    // (never capture past the target, even mid-buffer)
                    let samples_needed = *target_samples - current_length;
                    let samples_to_append = samples_to_record.len().min(samples_needed);
                    if samples_to_append > 0 {
                        self.audio_data.append_samples(
                            &samples_to_record[..samples_to_append],
                            chunk_factory,
                        )?;
                    }
                    // Check if we've reached target and should auto-transition
                    // (takes effect at the next beat via apply_state_transition)
                    if self.audio_data.len() >= *target_samples {
                        self.next_state = TrackState::Playing;
                    }
                }
                // Output silence during recording
                output_buffer[start_index..end_index].fill(0.0);
            }
            TrackState::Playing => {
                // Playback with looping
                self.audio_data.copy_samples_to_output(
                    &mut output_buffer[start_index..end_index],
                    playback_position,
                    self.volume,
                )?;
            }
        }
        Ok(())
    }
    /// Apply state transition from next_state to current_state.
    ///
    /// Performs any setup the new state needs (fresh recording buffers, clear
    /// on Empty, Playing → Idle downgrade when there is no audio).
    /// Returns true if track should be consolidated and saved.
    fn apply_state_transition<F: ChunkFactory>(&mut self, chunk_factory: &mut F) -> Result<bool> {
        // Check if this is a recording → playing transition (consolidation trigger)
        let should_consolidate = matches!(
            (&self.current_state, &self.next_state),
            (TrackState::Recording, TrackState::Playing)
                | (TrackState::RecordingAutoStop { .. }, TrackState::Playing)
        );
        // Handle transitions that require setup
        match (&self.current_state, &self.next_state) {
            (current_state, TrackState::Recording)
                if !matches!(current_state, TrackState::Recording) =>
            {
                // Starting manual recording - clear previous data and create new unconsolidated data
                self.audio_data = AudioData::new_unconsolidated(chunk_factory, 0)?;
            }
            (current_state, TrackState::RecordingAutoStop { sync_offset, .. })
                if !matches!(current_state, TrackState::RecordingAutoStop { .. }) =>
            {
                // Starting auto-stop recording - clear previous data and create new unconsolidated data with offset
                self.audio_data = AudioData::new_unconsolidated(chunk_factory, *sync_offset)?;
            }
            (_, TrackState::Playing) => {
                // Starting playback - check if we have audio data
                if self.audio_data.is_empty() {
                    // No audio data - transition to Idle instead
                    self.next_state = TrackState::Idle;
                }
            }
            (_, TrackState::Empty) => {
                // Clear operation - remove audio data
                self.audio_data.clear()?;
            }
            _ => {
                // Other transitions don't require special handling
            }
        }
        // Apply the state transition
        self.current_state = self.next_state.clone();
        Ok(should_consolidate)
    }
    /// Get audio data for post-record processing (returns chunk and sync offset)
    pub fn get_audio_data_for_processing(&self) -> Option<(Arc<AudioChunk>, usize)> {
        self.audio_data.get_chunk_for_processing()
    }
    /// Set consolidated buffer (for swapping in consolidated audio data)
    pub fn set_consolidated_buffer(&mut self, buffer: Box<[f32]>) -> Result<()> {
        self.audio_data.set_consolidated_buffer(buffer)
    }
    // Public accessors and commands for MIDI handling
    /// State driving audio processing right now.
    pub fn current_state(&self) -> &TrackState {
        &self.current_state
    }
    /// State queued to take effect at the next beat boundary.
    pub fn next_state(&self) -> &TrackState {
        &self.next_state
    }
    /// Number of recorded samples.
    pub fn len(&self) -> usize {
        self.audio_data.len()
    }
    /// True when the track holds no recorded samples (companion to `len`;
    /// clippy flags a public `len` without `is_empty`).
    pub fn is_empty(&self) -> bool {
        self.audio_data.is_empty()
    }
    /// Current playback gain (0.0..=1.0).
    pub fn volume(&self) -> f32 {
        self.volume
    }
    /// Set playback gain, clamped to 0.0..=1.0.
    pub fn set_volume(&mut self, volume: f32) {
        self.volume = volume.clamp(0.0, 1.0);
    }
    /// Handle record/play toggle command (sets next_state)
    pub fn queue_record_toggle(&mut self) {
        match self.current_state {
            TrackState::Empty | TrackState::Idle => {
                self.next_state = TrackState::Recording;
            }
            TrackState::Recording => {
                self.next_state = TrackState::Playing;
            }
            TrackState::RecordingAutoStop { .. } => {
                // Auto-stop recording - can't manually stop, wait for auto-transition
                self.next_state = self.current_state.clone();
            }
            TrackState::Playing => {
                self.next_state = TrackState::Idle;
            }
        }
    }
    /// Handle auto-stop record command (sets next_state).
    ///
    /// Every current state queues the same transition - a fresh (or updated)
    /// auto-stop recording with the given parameters - so no match on the
    /// current state is needed.
    pub fn queue_record_auto_stop(&mut self, target_samples: usize, sync_offset: usize) {
        self.next_state = TrackState::RecordingAutoStop {
            target_samples,
            sync_offset,
        };
    }
    /// Handle play/mute toggle command (sets next_state)
    pub fn queue_play_toggle(&mut self) {
        match self.current_state {
            TrackState::Empty => {
                // Can't play empty track
                self.next_state = TrackState::Empty;
            }
            TrackState::Idle => {
                if !self.audio_data.is_empty() {
                    self.next_state = TrackState::Playing;
                } else {
                    self.next_state = TrackState::Idle;
                }
            }
            TrackState::Recording | TrackState::RecordingAutoStop { .. } => {
                // Don't change state while recording
                self.next_state = self.current_state.clone();
            }
            TrackState::Playing => {
                self.next_state = TrackState::Idle;
            }
        }
    }
    /// Handle clear command (sets next_state)
    pub fn queue_clear(&mut self) {
        self.next_state = TrackState::Empty;
    }
}