initial public commit
commit 0b958e7852
56 changed files with 30235 additions and 0 deletions
.gitignore (vendored, Normal file, +2)
@@ -0,0 +1,2 @@
/target
.vscode
Cargo.lock (generated, Normal file, +593)
@@ -0,0 +1,593 @@
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "allocator-api2"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||
|
||||
[[package]]
|
||||
name = "base16ct"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
|
||||
|
||||
[[package]]
|
||||
name = "basic-toml"
|
||||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2db21524cad41c5591204d22d75e1970a2d1f71060214ca931dc7d5afe2c14e5"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
|
||||
|
||||
[[package]]
|
||||
name = "bitvec"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
|
||||
dependencies = [
|
||||
"funty",
|
||||
"radium",
|
||||
"serde",
|
||||
"tap",
|
||||
"wyz",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
|
||||
dependencies = [
|
||||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crypto-common"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
|
||||
dependencies = [
|
||||
"generic-array",
|
||||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
||||
dependencies = [
|
||||
"block-buffer",
|
||||
"crypto-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.3.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastrand"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
|
||||
|
||||
[[package]]
|
||||
name = "fayalite"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bitvec",
|
||||
"fayalite-proc-macros",
|
||||
"fayalite-visit-gen",
|
||||
"hashbrown",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
"paste",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"trybuild",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"fayalite-proc-macros-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros-impl"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base16ct",
|
||||
"num-bigint",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"sha2",
|
||||
"syn",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-visit-gen"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"syn",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
|
||||
|
||||
[[package]]
|
||||
name = "generic-array"
|
||||
version = "0.14.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
|
||||
dependencies = [
|
||||
"typenum",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"allocator-api2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.153"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-integer"
|
||||
version = "0.1.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
|
||||
|
||||
[[package]]
|
||||
name = "prettyplease"
|
||||
version = "0.2.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.83"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.36"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "radium"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.202"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.202"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.117"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tap"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"rustix",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.61"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.61"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trybuild"
|
||||
version = "1.0.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a9d3ba662913483d6722303f619e75ea10b7855b0f8e0d72799cf8621bb488f"
|
||||
dependencies = [
|
||||
"basic-toml",
|
||||
"glob",
|
||||
"once_cell",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
|
||||
dependencies = [
|
||||
"tap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
Cargo.toml (Normal file, +5)
@@ -0,0 +1,5 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
[workspace]
resolver = "2"
members = ["crates/*"]
LICENSE.md (Normal file, +157)
@@ -0,0 +1,157 @@
### GNU LESSER GENERAL PUBLIC LICENSE
|
||||
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc.
|
||||
<https://fsf.org/>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates the
|
||||
terms and conditions of version 3 of the GNU General Public License,
|
||||
supplemented by the additional permissions listed below.
|
||||
|
||||
#### 0. Additional Definitions.
|
||||
|
||||
As used herein, "this License" refers to version 3 of the GNU Lesser
|
||||
General Public License, and the "GNU GPL" refers to version 3 of the
|
||||
GNU General Public License.
|
||||
|
||||
"The Library" refers to a covered work governed by this License, other
|
||||
than an Application or a Combined Work as defined below.
|
||||
|
||||
An "Application" is any work that makes use of an interface provided
|
||||
by the Library, but which is not otherwise based on the Library.
|
||||
Defining a subclass of a class defined by the Library is deemed a mode
|
||||
of using an interface provided by the Library.
|
||||
|
||||
A "Combined Work" is a work produced by combining or linking an
|
||||
Application with the Library. The particular version of the Library
|
||||
with which the Combined Work was made is also called the "Linked
|
||||
Version".
|
||||
|
||||
The "Minimal Corresponding Source" for a Combined Work means the
|
||||
Corresponding Source for the Combined Work, excluding any source code
|
||||
for portions of the Combined Work that, considered in isolation, are
|
||||
based on the Application, and not on the Linked Version.
|
||||
|
||||
The "Corresponding Application Code" for a Combined Work means the
|
||||
object code and/or source code for the Application, including any data
|
||||
and utility programs needed for reproducing the Combined Work from the
|
||||
Application, but excluding the System Libraries of the Combined Work.
|
||||
|
||||
#### 1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License
|
||||
without being bound by section 3 of the GNU GPL.
|
||||
|
||||
#### 2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a
|
||||
facility refers to a function or data to be supplied by an Application
|
||||
that uses the facility (other than as an argument passed when the
|
||||
facility is invoked), then you may convey a copy of the modified
|
||||
version:
|
||||
|
||||
- a) under this License, provided that you make a good faith effort
|
||||
to ensure that, in the event an Application does not supply the
|
||||
function or data, the facility still operates, and performs
|
||||
whatever part of its purpose remains meaningful, or
|
||||
- b) under the GNU GPL, with none of the additional permissions of
|
||||
this License applicable to that copy.
|
||||
|
||||
#### 3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from a
|
||||
header file that is part of the Library. You may convey such object
|
||||
code under terms of your choice, provided that, if the incorporated
|
||||
material is not limited to numerical parameters, data structure
|
||||
layouts and accessors, or small macros, inline functions and templates
|
||||
(ten or fewer lines in length), you do both of the following:
|
||||
|
||||
- a) Give prominent notice with each copy of the object code that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
- b) Accompany the object code with a copy of the GNU GPL and this
|
||||
license document.
|
||||
|
||||
#### 4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that, taken
|
||||
together, effectively do not restrict modification of the portions of
|
||||
the Library contained in the Combined Work and reverse engineering for
|
||||
debugging such modifications, if you also do each of the following:
|
||||
|
||||
- a) Give prominent notice with each copy of the Combined Work that
|
||||
the Library is used in it and that the Library and its use are
|
||||
covered by this License.
|
||||
- b) Accompany the Combined Work with a copy of the GNU GPL and this
|
||||
license document.
|
||||
- c) For a Combined Work that displays copyright notices during
|
||||
execution, include the copyright notice for the Library among
|
||||
these notices, as well as a reference directing the user to the
|
||||
copies of the GNU GPL and this license document.
|
||||
- d) Do one of the following:
|
||||
- 0) Convey the Minimal Corresponding Source under the terms of
|
||||
this License, and the Corresponding Application Code in a form
|
||||
suitable for, and under terms that permit, the user to
|
||||
recombine or relink the Application with a modified version of
|
||||
the Linked Version to produce a modified Combined Work, in the
|
||||
manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.
|
||||
- 1) Use a suitable shared library mechanism for linking with
|
||||
the Library. A suitable mechanism is one that (a) uses at run
|
||||
time a copy of the Library already present on the user's
|
||||
computer system, and (b) will operate properly with a modified
|
||||
version of the Library that is interface-compatible with the
|
||||
Linked Version.
|
||||
- e) Provide Installation Information, but only if you would
|
||||
otherwise be required to provide such information under section 6
|
||||
of the GNU GPL, and only to the extent that such information is
|
||||
necessary to install and execute a modified version of the
|
||||
Combined Work produced by recombining or relinking the Application
|
||||
with a modified version of the Linked Version. (If you use option
|
||||
4d0, the Installation Information must accompany the Minimal
|
||||
Corresponding Source and Corresponding Application Code. If you
|
||||
use option 4d1, you must provide the Installation Information in
|
||||
the manner specified by section 6 of the GNU GPL for conveying
|
||||
Corresponding Source.)
|
||||
|
||||
#### 5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the Library
|
||||
side by side in a single library together with other library
|
||||
facilities that are not Applications and are not covered by this
|
||||
License, and convey such a combined library under terms of your
|
||||
choice, if you do both of the following:
|
||||
|
||||
- a) Accompany the combined library with a copy of the same work
|
||||
based on the Library, uncombined with any other library
|
||||
facilities, conveyed under the terms of this License.
|
||||
- b) Give prominent notice with the combined library that part of it
|
||||
is a work based on the Library, and explaining where to find the
|
||||
accompanying uncombined form of the same work.
|
||||
|
||||
#### 6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions
|
||||
of the GNU Lesser General Public License from time to time. Such new
|
||||
versions will be similar in spirit to the present version, but may
|
||||
differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library
|
||||
as you received it specifies that a certain numbered version of the
|
||||
GNU Lesser General Public License "or any later version" applies to
|
||||
it, you have the option of following the terms and conditions either
|
||||
of that published version or of any later version published by the
|
||||
Free Software Foundation. If the Library as you received it does not
|
||||
specify a version number of the GNU Lesser General Public License, you
|
||||
may choose any version of the GNU Lesser General Public License ever
|
||||
published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide
|
||||
whether future versions of the GNU Lesser General Public License shall
|
||||
apply, that proxy's public statement of acceptance of any version is
|
||||
permanent authorization for you to choose that version for the
|
||||
Library.
|
Notices.txt (Normal file, +16)
@@ -0,0 +1,16 @@
Copyright 2024 Jacob Lifshay

This file is part of Fayalite.

Fayalite is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Fayalite is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with Fayalite. If not, see <https://www.gnu.org/licenses/>.
crates/fayalite-proc-macros-impl/Cargo.toml (Normal file, +18)
@@ -0,0 +1,18 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
[package]
name = "fayalite-proc-macros-impl"
version = "0.1.0"
edition = "2021"
workspace = "../.."
license = "LGPL-3.0-or-later"

[dependencies]
base16ct = "0.2.0"
num-bigint = "0.4.4"
prettyplease = "0.2.20"
proc-macro2 = "1.0.78"
quote = "1.0.35"
sha2 = "0.10.8"
syn = { version = "2.0.53", features = ["full", "fold", "visit", "extra-traits"] }
tempfile = "3.10.1"
crates/fayalite-proc-macros-impl/build.rs (Normal file, +4)
@@ -0,0 +1,4 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
// build.rs to make cargo set env!("OUT_DIR")
fn main() {}
crates/fayalite-proc-macros-impl/src/fold.rs (Normal file, +248)
@@ -0,0 +1,248 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
pub(crate) trait DoFold<State: ?Sized + syn::fold::Fold> {
|
||||
fn do_fold(self, state: &mut State) -> Self;
|
||||
}
|
||||
|
||||
impl<T: DoFold<State>, State: ?Sized + syn::fold::Fold> DoFold<State> for Box<T> {
|
||||
fn do_fold(mut self, state: &mut State) -> Self {
|
||||
*self = T::do_fold(*self, state);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: DoFold<State>, State: ?Sized + syn::fold::Fold> DoFold<State> for Option<T> {
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
self.map(|v| T::do_fold(v, state))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: DoFold<State>, State: ?Sized + syn::fold::Fold> DoFold<State> for Vec<T> {
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
Vec::from_iter(self.into_iter().map(|v| T::do_fold(v, state)))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: DoFold<State>, P: DoFold<State>, State: ?Sized + syn::fold::Fold> DoFold<State>
|
||||
for Punctuated<T, P>
|
||||
{
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
Punctuated::from_iter(self.into_pairs().map(|v| {
|
||||
let (v, p) = v.into_tuple().do_fold(state);
|
||||
Pair::new(v, p)
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_fold_tuple {
|
||||
($($var0:ident: $T0:ident, $($var:ident: $T:ident,)*)?) => {
|
||||
$(impl_fold_tuple!($($var: $T,)*);)?
|
||||
impl_fold_tuple!(@impl $($var0: $T0, $($var: $T,)*)?);
|
||||
};
|
||||
(@impl $($var:ident: $T:ident,)*) => {
|
||||
impl<State: ?Sized + syn::fold::Fold, $($T: DoFold<State>,)*> DoFold<State> for ($($T,)*) {
|
||||
#[allow(clippy::unused_unit)]
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
let _ = state;
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.do_fold(state);)*
|
||||
($($var,)*)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_fold_tuple!(
|
||||
v0: T0,
|
||||
v1: T1,
|
||||
v2: T2,
|
||||
v3: T3,
|
||||
v4: T4,
|
||||
v5: T5,
|
||||
v6: T6,
|
||||
v7: T7,
|
||||
v8: T8,
|
||||
v9: T9,
|
||||
v10: T10,
|
||||
v11: T11,
|
||||
);
|
||||
|
||||
macro_rules! no_op_fold {
|
||||
($ty:ty) => {
|
||||
impl<State: ?Sized + syn::fold::Fold> $crate::fold::DoFold<State> for $ty {
|
||||
fn do_fold(self, _state: &mut State) -> Self {
|
||||
self
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use no_op_fold;
|
||||
|
||||
macro_rules! impl_fold {
|
||||
(
|
||||
struct $Struct:ident<$($T:ident,)*> $(where ($($where:tt)*))? {
|
||||
$($field:ident: $field_ty:ty,)*
|
||||
}
|
||||
) => {
|
||||
impl<State: ?Sized + syn::fold::Fold, $($T,)*> $crate::fold::DoFold<State> for $Struct<$($T,)*>
|
||||
where
|
||||
$($T: $crate::fold::DoFold<State>,)*
|
||||
$($where)*
|
||||
{
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
let _ = state;
|
||||
let Self {
|
||||
$($field,)*
|
||||
} = self;
|
||||
Self {
|
||||
$($field: <$field_ty as $crate::fold::DoFold<State>>::do_fold($field, state),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
(
|
||||
struct $Struct:ident<$($T:ident,)*>(
|
||||
$field0_ty:ty $(,)?
|
||||
)
|
||||
$(where ($($where:tt)*))?;
|
||||
) => {
|
||||
impl<State: ?Sized + syn::fold::Fold, $($T,)*> $crate::fold::DoFold<State> for $Struct<$($T,)*>
|
||||
where
|
||||
$($T: $crate::fold::DoFold<State>,)*
|
||||
$($where)*
|
||||
{
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
let _ = state;
|
||||
let Self(
|
||||
v0,
|
||||
) = self;
|
||||
Self(
|
||||
<$field0_ty as $crate::fold::DoFold<State>>::do_fold(v0, state),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
(
|
||||
enum $Enum:ident<$($T:ident,)*> $(where ($($where:tt)*))? {
|
||||
$($Variant:ident $({
|
||||
$($brace_field:ident: $brace_field_ty:ty,)*
|
||||
})?
|
||||
$((
|
||||
$($paren_field_ty:ty),* $(,)?
|
||||
))?,)*
|
||||
}
|
||||
) => {
|
||||
impl<State: ?Sized + syn::fold::Fold, $($T,)*> $crate::fold::DoFold<State> for $Enum<$($T,)*>
|
||||
where
|
||||
$($T: $crate::fold::DoFold<State>,)*
|
||||
$($where)*
|
||||
{
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
let _ = state;
|
||||
$crate::fold::impl_fold! {
|
||||
@enum_variants self, state => ()
|
||||
$($Variant $({
|
||||
$($brace_field: $brace_field_ty,)*
|
||||
})?
|
||||
$((
|
||||
$($paren_field_ty,)*
|
||||
))?,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
(
|
||||
@enum_variants $self:expr, $state:expr => ($($generated_arms:tt)*)
|
||||
) => {
|
||||
match $self {
|
||||
$($generated_arms)*
|
||||
}
|
||||
};
|
||||
(
|
||||
@enum_variants $self:expr, $state:expr => ($($generated_arms:tt)*)
|
||||
$Variant:ident {
|
||||
$($field:tt: $field_ty:ty,)*
|
||||
},
|
||||
$($rest:tt)*
|
||||
) => {
|
||||
$crate::fold::impl_fold! {
|
||||
@enum_variants $self, $state => (
|
||||
$($generated_arms)*
|
||||
Self::$Variant {
|
||||
$($field,)*
|
||||
} => Self::$Variant {
|
||||
$($field: <$field_ty as $crate::fold::DoFold<State>>::do_fold($field, $state),)*
|
||||
},
|
||||
)
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
(
|
||||
@enum_variants $self:expr, $state:expr => ($($generated_arms:tt)*)
|
||||
$Variant:ident(
|
||||
$field0_ty:ty $(,)?
|
||||
),
|
||||
$($rest:tt)*
|
||||
) => {
|
||||
$crate::fold::impl_fold! {
|
||||
@enum_variants $self, $state => (
|
||||
$($generated_arms)*
|
||||
Self::$Variant(v0) => Self::$Variant(
|
||||
<$field0_ty as $crate::fold::DoFold<State>>::do_fold(v0, $state),
|
||||
),
|
||||
)
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use impl_fold;
|
||||
use syn::punctuated::{Pair, Punctuated};
|
||||
|
||||
macro_rules! forward_fold {
|
||||
($ty:ty => $fn:ident) => {
|
||||
impl<State: syn::fold::Fold + ?Sized> DoFold<State> for $ty {
|
||||
fn do_fold(self, state: &mut State) -> Self {
|
||||
<State as syn::fold::Fold>::$fn(state, self)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
forward_fold!(syn::Attribute => fold_attribute);
|
||||
forward_fold!(syn::AttrStyle => fold_attr_style);
|
||||
forward_fold!(syn::Expr => fold_expr);
|
||||
forward_fold!(syn::ExprArray => fold_expr_array);
|
||||
forward_fold!(syn::ExprCall => fold_expr_call);
|
||||
forward_fold!(syn::ExprIf => fold_expr_if);
|
||||
forward_fold!(syn::ExprMatch => fold_expr_match);
|
||||
forward_fold!(syn::ExprPath => fold_expr_path);
|
||||
forward_fold!(syn::ExprStruct => fold_expr_struct);
|
||||
forward_fold!(syn::ExprTuple => fold_expr_tuple);
|
||||
forward_fold!(syn::Ident => fold_ident);
|
||||
forward_fold!(syn::Member => fold_member);
|
||||
forward_fold!(syn::Path => fold_path);
|
||||
forward_fold!(syn::Type => fold_type);
|
||||
forward_fold!(syn::TypePath => fold_type_path);
|
||||
forward_fold!(syn::WherePredicate => fold_where_predicate);
|
||||
no_op_fold!(syn::parse::Nothing);
|
||||
no_op_fold!(syn::token::Brace);
|
||||
no_op_fold!(syn::token::Bracket);
|
||||
no_op_fold!(syn::token::Paren);
|
||||
no_op_fold!(syn::Token![_]);
|
||||
no_op_fold!(syn::Token![,]);
|
||||
no_op_fold!(syn::Token![;]);
|
||||
no_op_fold!(syn::Token![:]);
|
||||
no_op_fold!(syn::Token![..]);
|
||||
no_op_fold!(syn::Token![.]);
|
||||
no_op_fold!(syn::Token![#]);
|
||||
no_op_fold!(syn::Token![=]);
|
||||
no_op_fold!(syn::Token![=>]);
|
||||
no_op_fold!(syn::Token![|]);
|
||||
no_op_fold!(syn::Token![enum]);
|
||||
no_op_fold!(syn::Token![extern]);
|
||||
no_op_fold!(syn::Token![let]);
|
||||
no_op_fold!(syn::Token![mut]);
|
||||
no_op_fold!(syn::Token![struct]);
|
||||
no_op_fold!(syn::Token![where]);
|
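Note on the fold plumbing above: DoFold only threads a caller-supplied syn::fold::Fold implementation through container types and generated struct/enum impls, with forward_fold! delegating leaf syn nodes to the matching fold_* method. A minimal sketch of such a folder, illustrative only and not part of this commit (RenameIdents is a made-up name):

struct RenameIdents;

impl syn::fold::Fold for RenameIdents {
    // Every syn::Ident reached through DoFold/forward_fold! ends up here.
    fn fold_ident(&mut self, ident: syn::Ident) -> syn::Ident {
        syn::Ident::new(&format!("__{ident}"), ident.span())
    }
}

// Any type with an impl_fold!-generated DoFold impl can then be rewritten:
// let renamed = some_hdl_attr.do_fold(&mut RenameIdents);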
crates/fayalite-proc-macros-impl/src/lib.rs (Normal file, +624)
@@ -0,0 +1,624 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![cfg_attr(test, recursion_limit = "512")]
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{quote, ToTokens};
|
||||
use std::io::{ErrorKind, Write};
|
||||
use syn::{
|
||||
bracketed, parenthesized,
|
||||
parse::{Parse, ParseStream, Parser},
|
||||
parse_quote, AttrStyle, Attribute, Error, Item, Token,
|
||||
};
|
||||
|
||||
mod fold;
|
||||
mod module;
|
||||
mod value_derive_common;
|
||||
mod value_derive_enum;
|
||||
mod value_derive_struct;
|
||||
|
||||
mod kw {
|
||||
pub(crate) use syn::token::{
|
||||
Enum as enum_, Extern as extern_, Struct as struct_, Where as where_,
|
||||
};
|
||||
|
||||
macro_rules! custom_keyword {
|
||||
($kw:ident) => {
|
||||
syn::custom_keyword!($kw);
|
||||
|
||||
impl quote::IdentFragment for $kw {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
f.write_str(stringify!($kw))
|
||||
}
|
||||
|
||||
fn span(&self) -> Option<proc_macro2::Span> {
|
||||
Some(self.span)
|
||||
}
|
||||
}
|
||||
|
||||
crate::fold::no_op_fold!($kw);
|
||||
};
|
||||
}
|
||||
|
||||
custom_keyword!(clock_domain);
|
||||
custom_keyword!(connect_inexact);
|
||||
custom_keyword!(fixed_type);
|
||||
custom_keyword!(flip);
|
||||
custom_keyword!(hdl);
|
||||
custom_keyword!(input);
|
||||
custom_keyword!(instance);
|
||||
custom_keyword!(m);
|
||||
custom_keyword!(memory);
|
||||
custom_keyword!(memory_array);
|
||||
custom_keyword!(memory_with_init);
|
||||
custom_keyword!(no_reset);
|
||||
custom_keyword!(outline_generated);
|
||||
custom_keyword!(output);
|
||||
custom_keyword!(reg_builder);
|
||||
custom_keyword!(reset);
|
||||
custom_keyword!(reset_default);
|
||||
custom_keyword!(skip);
|
||||
custom_keyword!(target);
|
||||
custom_keyword!(wire);
|
||||
}
|
||||
|
||||
type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlAttr<T> {
|
||||
pub(crate) pound_token: Pound,
|
||||
pub(crate) style: AttrStyle,
|
||||
pub(crate) bracket_token: syn::token::Bracket,
|
||||
pub(crate) hdl: kw::hdl,
|
||||
pub(crate) paren_token: Option<syn::token::Paren>,
|
||||
pub(crate) body: T,
|
||||
}
|
||||
|
||||
crate::fold::impl_fold! {
|
||||
struct HdlAttr<T,> {
|
||||
pound_token: Pound,
|
||||
style: AttrStyle,
|
||||
bracket_token: syn::token::Bracket,
|
||||
hdl: kw::hdl,
|
||||
paren_token: Option<syn::token::Paren>,
|
||||
body: T,
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl<T> HdlAttr<T> {
|
||||
pub(crate) fn split_body(self) -> (HdlAttr<()>, T) {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
(
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: (),
|
||||
},
|
||||
body,
|
||||
)
|
||||
}
|
||||
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: _,
|
||||
} = self;
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_ref(&self) -> HdlAttr<&T> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
ref body,
|
||||
} = *self;
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(self, f: F) -> Result<HdlAttr<R>, E> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
Ok(HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body)?,
|
||||
})
|
||||
}
|
||||
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body),
|
||||
}
|
||||
}
|
||||
fn to_attr(&self) -> Attribute
|
||||
where
|
||||
T: ToTokens,
|
||||
{
|
||||
parse_quote! { #self }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Default> Default for HdlAttr<T> {
|
||||
fn default() -> Self {
|
||||
T::default().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<T> for HdlAttr<T> {
|
||||
fn from(body: T) -> Self {
|
||||
HdlAttr {
|
||||
pound_token: Default::default(),
|
||||
style: AttrStyle::Outer,
|
||||
bracket_token: Default::default(),
|
||||
hdl: Default::default(),
|
||||
paren_token: Default::default(),
|
||||
body,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for HdlAttr<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pound_token.to_tokens(tokens);
|
||||
match self.style {
|
||||
AttrStyle::Inner(style) => style.to_tokens(tokens),
|
||||
AttrStyle::Outer => {}
|
||||
};
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.hdl.to_tokens(tokens);
|
||||
match self.paren_token {
|
||||
Some(paren_token) => {
|
||||
paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens))
|
||||
}
|
||||
None => {
|
||||
let body = self.body.to_token_stream();
|
||||
if !body.is_empty() {
|
||||
syn::token::Paren(self.hdl.span)
|
||||
.surround(tokens, |tokens| tokens.extend([body]));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn is_hdl_attr(attr: &Attribute) -> bool {
|
||||
attr.path().is_ident("hdl")
|
||||
}
|
||||
|
||||
impl<T: Parse> HdlAttr<T> {
|
||||
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
attrs.retain(|attr| {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
errors.finish()?;
|
||||
Ok(retval)
|
||||
}
|
||||
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
for attr in attrs {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
}
|
||||
}
|
||||
errors.finish()?;
|
||||
Ok(retval)
|
||||
}
|
||||
fn parse_attr(attr: &Attribute) -> syn::Result<Self> {
|
||||
match attr.style {
|
||||
AttrStyle::Outer => Parser::parse2(Self::parse_outer, attr.to_token_stream()),
|
||||
AttrStyle::Inner(_) => Parser::parse2(Self::parse_inner, attr.to_token_stream()),
|
||||
}
|
||||
}
|
||||
fn parse_starting_with_brackets(
|
||||
pound_token: Token![#],
|
||||
style: AttrStyle,
|
||||
input: ParseStream,
|
||||
) -> syn::Result<Self> {
|
||||
let bracket_content;
|
||||
let bracket_token = bracketed!(bracket_content in input);
|
||||
let hdl = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
let body;
|
||||
let paren_token;
|
||||
if bracket_content.is_empty() {
|
||||
body = match syn::parse2(TokenStream::default()) {
|
||||
Ok(body) => body,
|
||||
Err(_) => {
|
||||
parenthesized!(paren_content in bracket_content);
|
||||
unreachable!();
|
||||
}
|
||||
};
|
||||
paren_token = None;
|
||||
} else {
|
||||
paren_token = Some(parenthesized!(paren_content in bracket_content));
|
||||
body = paren_content.parse()?;
|
||||
}
|
||||
Ok(Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
})
|
||||
}
|
||||
fn parse_inner(input: ParseStream) -> syn::Result<Self> {
|
||||
let pound_token = input.parse()?;
|
||||
let style = AttrStyle::Inner(input.parse()?);
|
||||
Self::parse_starting_with_brackets(pound_token, style, input)
|
||||
}
|
||||
fn parse_outer(input: ParseStream) -> syn::Result<Self> {
|
||||
let pound_token = input.parse()?;
|
||||
let style = AttrStyle::Outer;
|
||||
Self::parse_starting_with_brackets(pound_token, style, input)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Errors {
|
||||
error: Option<Error>,
|
||||
finished: bool,
|
||||
}
|
||||
|
||||
impl Drop for Errors {
|
||||
fn drop(&mut self) {
|
||||
if !std::thread::panicking() {
|
||||
assert!(self.finished, "didn't run finish");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Errors {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
error: None,
|
||||
finished: false,
|
||||
}
|
||||
}
|
||||
pub(crate) fn push(&mut self, e: Error) -> &mut Self {
|
||||
match self.error {
|
||||
Some(ref mut old) => old.combine(e),
|
||||
None => self.error = Some(e),
|
||||
}
|
||||
self
|
||||
}
|
||||
pub(crate) fn push_result(&mut self, e: syn::Result<()>) -> &mut Self {
|
||||
self.ok(e);
|
||||
self
|
||||
}
|
||||
pub(crate) fn error(
|
||||
&mut self,
|
||||
tokens: impl ToTokens,
|
||||
message: impl std::fmt::Display,
|
||||
) -> &mut Self {
|
||||
self.push(Error::new_spanned(tokens, message));
|
||||
self
|
||||
}
|
||||
pub(crate) fn ok<T>(&mut self, v: syn::Result<T>) -> Option<T> {
|
||||
match v {
|
||||
Ok(v) => Some(v),
|
||||
Err(e) => {
|
||||
self.push(e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn unwrap_or_else<T>(
|
||||
&mut self,
|
||||
v: syn::Result<T>,
|
||||
fallback: impl FnOnce() -> T,
|
||||
) -> T {
|
||||
match v {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
self.push(e);
|
||||
fallback()
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn unwrap_or<T>(&mut self, v: syn::Result<T>, fallback: T) -> T {
|
||||
self.unwrap_or_else(v, || fallback)
|
||||
}
|
||||
pub(crate) fn unwrap_or_default<T: Default>(&mut self, v: syn::Result<T>) -> T {
|
||||
self.unwrap_or_else(v, T::default)
|
||||
}
|
||||
pub(crate) fn finish(&mut self) -> syn::Result<()> {
|
||||
self.finished = true;
|
||||
match self.error.take() {
|
||||
Some(e) => Err(e),
|
||||
None => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Errors {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_extra_traits_for_options {
|
||||
(
|
||||
#[no_ident_fragment]
|
||||
$enum_vis:vis enum $option_enum_name:ident {
|
||||
$($Variant:ident($key:ident),)*
|
||||
}
|
||||
) => {
|
||||
impl Copy for $option_enum_name {}
|
||||
};
|
||||
(
|
||||
$enum_vis:vis enum $option_enum_name:ident {
|
||||
$($Variant:ident($key:ident),)*
|
||||
}
|
||||
) => {
|
||||
impl Copy for $option_enum_name {}
|
||||
|
||||
impl quote::IdentFragment for $option_enum_name {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
let _ = f;
|
||||
match *self {
|
||||
$(Self::$Variant(ref v) => quote::IdentFragment::fmt(&v.0, f),)*
|
||||
}
|
||||
}
|
||||
|
||||
fn span(&self) -> Option<proc_macro2::Span> {
|
||||
match *self {
|
||||
$(Self::$Variant(ref v) => quote::IdentFragment::span(&v.0),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $option_enum_name {
|
||||
#[allow(dead_code)]
|
||||
$enum_vis fn span(&self) -> proc_macro2::Span {
|
||||
quote::IdentFragment::span(self).unwrap()
|
||||
}
|
||||
}
|
||||
};
|
||||
(
|
||||
$(#[no_ident_fragment])?
|
||||
$enum_vis:vis enum $option_enum_name:ident {
|
||||
$($Variant:ident($key:ident $(, $value:ty)?),)*
|
||||
}
|
||||
) => {};
|
||||
}
|
||||
|
||||
pub(crate) use impl_extra_traits_for_options;
|
||||
|
||||
macro_rules! options {
|
||||
(
|
||||
#[options = $options_name:ident]
|
||||
$(#[$($enum_meta:tt)*])*
|
||||
$enum_vis:vis enum $option_enum_name:ident {
|
||||
$($Variant:ident($key:ident $(, $value:ty)?),)*
|
||||
}
|
||||
) => {
|
||||
crate::options! {
|
||||
$(#[$($enum_meta)*])*
|
||||
$enum_vis enum $option_enum_name {
|
||||
$($Variant($key $(, $value)?),)*
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
$enum_vis struct $options_name {
|
||||
$($enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,)*
|
||||
}
|
||||
|
||||
crate::fold::impl_fold! {
|
||||
struct $options_name<> {
|
||||
$($key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,)*
|
||||
}
|
||||
}
|
||||
|
||||
impl syn::parse::Parse for $options_name {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
#![allow(unused_mut, unused_variables, unreachable_code)]
|
||||
let mut retval = Self::default();
|
||||
while !input.is_empty() {
|
||||
let old_input = input.fork();
|
||||
match input.parse::<$option_enum_name>()? {
|
||||
$($option_enum_name::$Variant(v) => {
|
||||
if retval.$key.replace(v).is_some() {
|
||||
return Err(old_input.error(concat!("duplicate ", stringify!($key), " option")));
|
||||
}
|
||||
})*
|
||||
}
|
||||
if input.is_empty() {
|
||||
break;
|
||||
}
|
||||
input.parse::<syn::Token![,]>()?;
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
||||
impl quote::ToTokens for $options_name {
|
||||
#[allow(unused_mut, unused_variables, unused_assignments)]
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
let mut separator: Option<syn::Token![,]> = None;
|
||||
$(if let Some(v) = &self.$key {
|
||||
separator.to_tokens(tokens);
|
||||
separator = Some(Default::default());
|
||||
v.0.to_tokens(tokens);
|
||||
$(v.1.surround(tokens, |tokens| <$value as quote::ToTokens>::to_tokens(&v.2, tokens));)?
|
||||
})*
|
||||
}
|
||||
}
|
||||
};
|
||||
(
|
||||
$(#[$($enum_meta:tt)*])*
|
||||
$enum_vis:vis enum $option_enum_name:ident {
|
||||
$($Variant:ident($key:ident $(, $value:ty)?),)*
|
||||
}
|
||||
) => {
|
||||
#[derive(Clone, Debug)]
|
||||
$enum_vis enum $option_enum_name {
|
||||
$($Variant((crate::kw::$key, $(syn::token::Paren, $value)?)),)*
|
||||
}
|
||||
|
||||
crate::impl_extra_traits_for_options! {
|
||||
$(#[$($enum_meta)*])*
|
||||
$enum_vis enum $option_enum_name {
|
||||
$($Variant($key $(, $value)?),)*
|
||||
}
|
||||
}
|
||||
|
||||
crate::fold::impl_fold! {
|
||||
enum $option_enum_name<> {
|
||||
$($Variant((crate::kw::$key, $(syn::token::Paren, $value)?)),)*
|
||||
}
|
||||
}
|
||||
|
||||
impl syn::parse::Parse for $option_enum_name {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let lookahead = input.lookahead1();
|
||||
$(
|
||||
if lookahead.peek(crate::kw::$key) {
|
||||
#[allow(unused_variables)]
|
||||
let paren_content: syn::parse::ParseBuffer;
|
||||
return Ok($option_enum_name::$Variant((
|
||||
input.parse()?,
|
||||
$(
|
||||
syn::parenthesized!(paren_content in input),
|
||||
paren_content.parse::<$value>()?,
|
||||
)?
|
||||
)));
|
||||
}
|
||||
)*
|
||||
Err(lookahead.error())
|
||||
}
|
||||
}
|
||||
|
||||
impl quote::ToTokens for $option_enum_name {
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
let _ = tokens;
|
||||
match *self {
|
||||
$($option_enum_name::$Variant(ref v) => {
|
||||
v.0.to_tokens(tokens);
|
||||
$(
|
||||
let value: &$value = &v.2;
|
||||
v.1.surround(tokens, |tokens| value.to_tokens(tokens));
|
||||
)?
|
||||
})*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use options;
|
||||
|
||||
pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream {
|
||||
let out_dir = env!("OUT_DIR");
|
||||
let mut file = tempfile::Builder::new()
|
||||
.prefix(prefix)
|
||||
.rand_bytes(6)
|
||||
.suffix(".tmp.rs")
|
||||
.tempfile_in(out_dir)
|
||||
.unwrap();
|
||||
let contents = prettyplease::unparse(&parse_quote! { #contents });
|
||||
let hash = <sha2::Sha256 as sha2::Digest>::digest(&contents);
|
||||
let hash = base16ct::HexDisplay(&hash[..5]);
|
||||
file.write_all(contents.as_bytes()).unwrap();
|
||||
let dest_file = std::path::Path::new(out_dir).join(format!("{prefix}{hash:x}.rs"));
|
||||
// don't write if it already exists so cargo doesn't try to recompile constantly.
|
||||
match file.persist_noclobber(&dest_file) {
|
||||
Err(e) if e.error.kind() == ErrorKind::AlreadyExists => {}
|
||||
e => {
|
||||
e.unwrap();
|
||||
}
|
||||
}
|
||||
eprintln!("generated {}", dest_file.display());
|
||||
let dest_file = dest_file.to_str().unwrap();
|
||||
|
||||
quote! {
|
||||
include!(#dest_file);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let options = syn::parse2::<module::ConfigOptions>(attr)?;
|
||||
let options = HdlAttr::from(options);
|
||||
let func = syn::parse2::<module::ModuleFn>(quote! { #options #item })?;
|
||||
let mut contents = func.generate();
|
||||
if options.body.outline_generated.is_some() {
|
||||
contents = outline_generated(contents, "module-");
|
||||
}
|
||||
Ok(contents)
|
||||
}
|
||||
|
||||
pub fn value_derive(item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let item = syn::parse2::<Item>(item)?;
|
||||
match item {
|
||||
Item::Enum(item) => value_derive_enum::value_derive_enum(item),
|
||||
Item::Struct(item) => value_derive_struct::value_derive_struct(item),
|
||||
_ => Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
"derive(Value) can only be used on structs or enums",
|
||||
)),
|
||||
}
|
||||
}
|
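For context, the Errors type defined above is an accumulator: push/error/ok record syn::Errors instead of returning on the first failure, and finish() combines them into one result (its Drop impl asserts finish() was called). A usage sketch, with check_all being a hypothetical helper rather than code from this commit:

fn check_all(idents: &[syn::Ident]) -> syn::Result<()> {
    let mut errors = Errors::new();
    for ident in idents {
        if ident == "m" {
            // record the problem but keep scanning the remaining idents
            errors.error(ident, "name conflicts with implicit `m`");
        }
    }
    // Err with all collected messages combined, or Ok(()) if none were pushed
    errors.finish()
}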
crates/fayalite-proc-macros-impl/src/module.rs (Normal file, +282)
@@ -0,0 +1,282 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
is_hdl_attr,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO},
|
||||
options, Errors, HdlAttr,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote,
|
||||
visit::{visit_pat, Visit},
|
||||
Attribute, Block, Error, FnArg, Ident, ItemFn, ItemStruct, ReturnType, Signature, Visibility,
|
||||
};
|
||||
|
||||
mod transform_body;
|
||||
|
||||
options! {
|
||||
#[options = ConfigOptions]
|
||||
#[no_ident_fragment]
|
||||
pub(crate) enum ConfigOption {
|
||||
OutlineGenerated(outline_generated),
|
||||
Extern(extern_),
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum ModuleIOKind {
|
||||
Input(input),
|
||||
Output(output),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Result<()> {
|
||||
if name == "m" {
|
||||
Err(Error::new_spanned(
|
||||
name,
|
||||
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct CheckNameConflictsWithModuleBuilderVisitor<'a> {
|
||||
pub(crate) errors: &'a mut Errors,
|
||||
}
|
||||
|
||||
impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> {
|
||||
// TODO: change this to only check for identifiers defining new variables
|
||||
fn visit_ident(&mut self, node: &Ident) {
|
||||
self.errors
|
||||
.push_result(check_name_conflicts_with_module_builder(node));
|
||||
}
|
||||
}
|
||||
|
||||
fn retain_struct_attrs<F: FnMut(&Attribute) -> bool>(item: &mut ItemStruct, mut f: F) {
|
||||
item.attrs.retain(&mut f);
|
||||
for field in item.fields.iter_mut() {
|
||||
field.attrs.retain(&mut f);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type ModuleIO = HdlLet<HdlLetKindIO>;
|
||||
|
||||
pub(crate) struct ModuleFn {
|
||||
attrs: Vec<Attribute>,
|
||||
config_options: HdlAttr<ConfigOptions>,
|
||||
module_kind: ModuleKind,
|
||||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
io: Vec<ModuleIO>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
||||
pub(crate) enum ModuleKind {
|
||||
Extern,
|
||||
Normal,
|
||||
}
|
||||
|
||||
impl Parse for ModuleFn {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let ItemFn {
|
||||
mut attrs,
|
||||
vis,
|
||||
mut sig,
|
||||
block,
|
||||
} = input.parse()?;
|
||||
let Signature {
|
||||
ref constness,
|
||||
ref asyncness,
|
||||
ref unsafety,
|
||||
ref abi,
|
||||
fn_token: _,
|
||||
ident: _,
|
||||
ref generics,
|
||||
paren_token: _,
|
||||
ref mut inputs,
|
||||
ref variadic,
|
||||
ref output,
|
||||
} = sig;
|
||||
let mut errors = Errors::new();
|
||||
let config_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_,
|
||||
} = config_options.body;
|
||||
let module_kind = match extern_ {
|
||||
Some(_) => ModuleKind::Extern,
|
||||
None => ModuleKind::Normal,
|
||||
};
|
||||
for fn_arg in inputs {
|
||||
match fn_arg {
|
||||
FnArg::Receiver(_) => {
|
||||
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
|
||||
}
|
||||
FnArg::Typed(fn_arg) => {
|
||||
visit_pat(
|
||||
&mut CheckNameConflictsWithModuleBuilderVisitor {
|
||||
errors: &mut errors,
|
||||
},
|
||||
&fn_arg.pat,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(constness) = constness {
|
||||
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
|
||||
}
|
||||
if let Some(asyncness) = asyncness {
|
||||
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
|
||||
}
|
||||
if let Some(unsafety) = unsafety {
|
||||
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
|
||||
}
|
||||
if let Some(abi) = abi {
|
||||
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
|
||||
}
|
||||
if !generics.params.is_empty() {
|
||||
errors.push(syn::Error::new_spanned(
|
||||
&generics.params,
|
||||
"generics are not supported yet",
|
||||
));
|
||||
}
|
||||
if let Some(variadic) = variadic {
|
||||
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
|
||||
}
|
||||
if !matches!(output, ReturnType::Default) {
|
||||
errors.push(syn::Error::new_spanned(
|
||||
output,
|
||||
"return type not allowed here",
|
||||
));
|
||||
}
|
||||
let body_results = errors.ok(transform_body::transform_body(module_kind, block));
|
||||
errors.finish()?;
|
||||
let (block, io) = body_results.unwrap();
|
||||
Ok(Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
io,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn generate(self) -> TokenStream {
|
||||
let Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
io,
|
||||
} = self;
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_: _,
|
||||
} = config_options.body;
|
||||
let mut outer_sig = sig.clone();
|
||||
let mut body_sig = sig;
|
||||
let param_names =
|
||||
Vec::from_iter(outer_sig.inputs.iter_mut().enumerate().map(|(index, arg)| {
|
||||
let FnArg::Typed(arg) = arg else {
|
||||
unreachable!("already checked");
|
||||
};
|
||||
let name = if let syn::Pat::Ident(pat) = &*arg.pat {
|
||||
pat.ident.clone()
|
||||
} else {
|
||||
format_ident!("__param{}", index)
|
||||
};
|
||||
*arg.pat = syn::Pat::Ident(syn::PatIdent {
|
||||
attrs: vec![],
|
||||
by_ref: None,
|
||||
mutability: None,
|
||||
ident: name.clone(),
|
||||
subpat: None,
|
||||
});
|
||||
name
|
||||
}));
|
||||
let module_kind_ty = match module_kind {
|
||||
ModuleKind::Extern => quote! { ::fayalite::module::ExternModule },
|
||||
ModuleKind::Normal => quote! { ::fayalite::module::NormalModule },
|
||||
};
|
||||
let fn_name = &outer_sig.ident;
|
||||
body_sig.ident = parse_quote! {__body};
|
||||
body_sig.inputs.insert(
|
||||
0,
|
||||
parse_quote! {m: &mut ::fayalite::module::ModuleBuilder<#fn_name, #module_kind_ty>},
|
||||
);
|
||||
let body_fn = ItemFn {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
sig: body_sig,
|
||||
block,
|
||||
};
|
||||
outer_sig.output =
|
||||
parse_quote! {-> ::fayalite::intern::Interned<::fayalite::module::Module<#fn_name>>};
|
||||
let io_flips = io
|
||||
.iter()
|
||||
.map(|io| match io.kind.kind {
|
||||
ModuleIOKind::Input((input,)) => quote_spanned! {input.span=>
|
||||
#[hdl(flip)]
|
||||
},
|
||||
ModuleIOKind::Output(_) => quote! {},
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let io_types = io.iter().map(|io| &io.kind.ty).collect::<Vec<_>>();
|
||||
let io_names = io.iter().map(|io| &io.name).collect::<Vec<_>>();
|
||||
let fn_name_str = fn_name.to_string();
|
||||
let block = parse_quote! {{
|
||||
#body_fn
|
||||
::fayalite::module::ModuleBuilder::run(#fn_name_str, |m| __body(m, #(#param_names,)*))
|
||||
}};
|
||||
let fixed_type = io.iter().all(|io| io.kind.ty_expr.is_none());
|
||||
let struct_options = if fixed_type {
|
||||
quote! { #[hdl(fixed_type)] }
|
||||
} else {
|
||||
quote! {}
|
||||
};
|
||||
let the_struct: ItemStruct = parse_quote! {
|
||||
#[derive(::fayalite::__std::clone::Clone,
|
||||
::fayalite::__std::hash::Hash,
|
||||
::fayalite::__std::cmp::PartialEq,
|
||||
::fayalite::__std::cmp::Eq,
|
||||
::fayalite::__std::fmt::Debug)]
|
||||
#[allow(non_camel_case_types)]
|
||||
#struct_options
|
||||
#vis struct #fn_name {
|
||||
#(
|
||||
#io_flips
|
||||
#vis #io_names: #io_types,)*
|
||||
}
|
||||
};
|
||||
let mut struct_without_hdl_attrs = the_struct.clone();
|
||||
let mut struct_without_derives = the_struct;
|
||||
retain_struct_attrs(&mut struct_without_hdl_attrs, |attr| !is_hdl_attr(attr));
|
||||
retain_struct_attrs(&mut struct_without_derives, |attr| {
|
||||
!attr.path().is_ident("derive")
|
||||
});
|
||||
let outer_fn = ItemFn {
|
||||
attrs,
|
||||
vis,
|
||||
sig: outer_sig,
|
||||
block,
|
||||
};
|
||||
let mut retval = outer_fn.into_token_stream();
|
||||
struct_without_hdl_attrs.to_tokens(&mut retval);
|
||||
retval.extend(
|
||||
crate::value_derive_struct::value_derive_struct(struct_without_derives).unwrap(),
|
||||
);
|
||||
retval
|
||||
}
|
||||
}
|
1566
crates/fayalite-proc-macros-impl/src/module/transform_body.rs
Normal file
1566
crates/fayalite-proc-macros-impl/src/module/transform_body.rs
Normal file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,530 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{module::transform_body::Visitor, options, Errors, HdlAttr};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use syn::{
|
||||
parse::Nothing,
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Attribute, Expr, ExprArray, ExprCall, ExprGroup, ExprPath, ExprStruct, ExprTuple, FieldValue,
|
||||
Ident, Index, Member, Path, PathArguments, PathSegment, Token, TypePath,
|
||||
};
|
||||
|
||||
options! {
|
||||
#[options = AggregateLiteralOptions]
|
||||
#[no_ident_fragment]
|
||||
pub(crate) enum AggregateLiteralOption {
|
||||
Struct(struct_),
|
||||
Enum(enum_),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct StructOrEnumPath {
|
||||
pub(crate) ty: TypePath,
|
||||
pub(crate) variant: Option<(TypePath, Ident)>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub(crate) struct SingleSegmentVariant {
|
||||
pub(crate) name: &'static str,
|
||||
pub(crate) make_type_path: fn(Span, &PathArguments) -> Path,
|
||||
}
|
||||
|
||||
impl StructOrEnumPath {
|
||||
pub(crate) const SINGLE_SEGMENT_VARIANTS: &'static [SingleSegmentVariant] = {
|
||||
fn make_option_type_path(span: Span, arguments: &PathArguments) -> Path {
|
||||
let arguments = if arguments.is_none() {
|
||||
quote_spanned! {span=>
|
||||
<_>
|
||||
}
|
||||
} else {
|
||||
arguments.to_token_stream()
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::__std::option::Option #arguments
|
||||
}
|
||||
}
|
||||
fn make_result_type_path(span: Span, arguments: &PathArguments) -> Path {
|
||||
let arguments = if arguments.is_none() {
|
||||
quote_spanned! {span=>
|
||||
<_, _>
|
||||
}
|
||||
} else {
|
||||
arguments.to_token_stream()
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::__std::result::Result #arguments
|
||||
}
|
||||
}
|
||||
&[
|
||||
SingleSegmentVariant {
|
||||
name: "Some",
|
||||
make_type_path: make_option_type_path,
|
||||
},
|
||||
SingleSegmentVariant {
|
||||
name: "None",
|
||||
make_type_path: make_option_type_path,
|
||||
},
|
||||
SingleSegmentVariant {
|
||||
name: "Ok",
|
||||
make_type_path: make_result_type_path,
|
||||
},
|
||||
SingleSegmentVariant {
|
||||
name: "Err",
|
||||
make_type_path: make_result_type_path,
|
||||
},
|
||||
]
|
||||
};
|
||||
pub(crate) fn new(
|
||||
errors: &mut Errors,
|
||||
path: TypePath,
|
||||
options: &AggregateLiteralOptions,
|
||||
) -> Result<Self, ()> {
|
||||
let Path {
|
||||
leading_colon,
|
||||
segments,
|
||||
} = &path.path;
|
||||
let qself_position = path.qself.as_ref().map(|qself| qself.position).unwrap_or(0);
|
||||
let variant_name = if qself_position < segments.len() {
|
||||
Some(segments.last().unwrap().ident.clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let enum_type = 'guess_enum_type: {
|
||||
if options.enum_.is_some() {
|
||||
if let Some((struct_,)) = options.struct_ {
|
||||
errors.error(
|
||||
struct_,
|
||||
"can't specify both #[hdl(enum)] and #[hdl(struct)]",
|
||||
);
|
||||
}
|
||||
break 'guess_enum_type Some(None);
|
||||
}
|
||||
if options.struct_.is_some() {
|
||||
break 'guess_enum_type None;
|
||||
}
|
||||
if path.qself.is_none() && leading_colon.is_none() && segments.len() == 1 {
|
||||
let PathSegment { ident, arguments } = &segments[0];
|
||||
for &SingleSegmentVariant {
|
||||
name,
|
||||
make_type_path,
|
||||
} in Self::SINGLE_SEGMENT_VARIANTS
|
||||
{
|
||||
if ident == name {
|
||||
break 'guess_enum_type Some(Some(TypePath {
|
||||
qself: None,
|
||||
path: make_type_path(ident.span(), arguments),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
if segments.len() == qself_position + 2
|
||||
&& segments[qself_position + 1].arguments.is_none()
|
||||
&& (path.qself.is_some()
|
||||
|| segments[qself_position].ident.to_string().as_bytes()[0]
|
||||
.is_ascii_uppercase())
|
||||
{
|
||||
let mut ty = path.clone();
|
||||
ty.path.segments.pop();
|
||||
ty.path.segments.pop_punct();
|
||||
break 'guess_enum_type Some(Some(ty));
|
||||
}
|
||||
None
|
||||
};
|
||||
if let Some(enum_type) = enum_type {
|
||||
let ty = if let Some(enum_type) = enum_type {
|
||||
enum_type
|
||||
} else {
|
||||
if qself_position >= segments.len() {
|
||||
errors.error(path, "#[hdl]: can't figure out enum's type");
|
||||
return Err(());
|
||||
}
|
||||
let mut ty = path.clone();
|
||||
ty.path.segments.pop();
|
||||
ty.path.segments.pop_punct();
|
||||
ty
|
||||
};
|
||||
let Some(variant_name) = variant_name else {
|
||||
errors.error(path, "#[hdl]: can't figure out enum's variant name");
|
||||
return Err(());
|
||||
};
|
||||
Ok(Self {
|
||||
ty,
|
||||
variant: Some((path, variant_name)),
|
||||
})
|
||||
} else {
|
||||
Ok(Self {
|
||||
ty: path,
|
||||
variant: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum BraceOrParen {
|
||||
Brace(Brace),
|
||||
Paren(Paren),
|
||||
}
|
||||
|
||||
impl BraceOrParen {
|
||||
pub(crate) fn surround(self, tokens: &mut TokenStream, f: impl FnOnce(&mut TokenStream)) {
|
||||
match self {
|
||||
BraceOrParen::Brace(v) => v.surround(tokens, f),
|
||||
BraceOrParen::Paren(v) => v.surround(tokens, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct StructOrEnumLiteralField {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) member: Member,
|
||||
pub(crate) colon_token: Option<Token![:]>,
|
||||
pub(crate) expr: Expr,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct StructOrEnumLiteral {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) path: TypePath,
|
||||
pub(crate) brace_or_paren: BraceOrParen,
|
||||
pub(crate) fields: Punctuated<StructOrEnumLiteralField, Token![,]>,
|
||||
pub(crate) dot2_token: Option<Token![..]>,
|
||||
pub(crate) rest: Option<Box<Expr>>,
|
||||
}
|
||||
|
||||
impl StructOrEnumLiteral {
|
||||
pub(crate) fn map_field_exprs(self, mut f: impl FnMut(Expr) -> Expr) -> Self {
|
||||
self.map_fields(|mut field| {
|
||||
field.expr = f(field.expr);
|
||||
field
|
||||
})
|
||||
}
|
||||
pub(crate) fn map_fields(
|
||||
self,
|
||||
mut f: impl FnMut(StructOrEnumLiteralField) -> StructOrEnumLiteralField,
|
||||
) -> Self {
|
||||
let Self {
|
||||
attrs,
|
||||
path,
|
||||
brace_or_paren,
|
||||
fields,
|
||||
dot2_token,
|
||||
rest,
|
||||
} = self;
|
||||
let fields = Punctuated::from_iter(fields.into_pairs().map(|p| {
|
||||
let (field, comma) = p.into_tuple();
|
||||
Pair::new(f(field), comma)
|
||||
}));
|
||||
Self {
|
||||
attrs,
|
||||
path,
|
||||
brace_or_paren,
|
||||
fields,
|
||||
dot2_token,
|
||||
rest,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ExprStruct> for StructOrEnumLiteral {
|
||||
fn from(value: ExprStruct) -> Self {
|
||||
let ExprStruct {
|
||||
attrs,
|
||||
qself,
|
||||
path,
|
||||
brace_token,
|
||||
fields,
|
||||
dot2_token,
|
||||
rest,
|
||||
} = value;
|
||||
Self {
|
||||
attrs,
|
||||
path: TypePath { qself, path },
|
||||
brace_or_paren: BraceOrParen::Brace(brace_token),
|
||||
fields: Punctuated::from_iter(fields.into_pairs().map(|v| {
|
||||
let (
|
||||
FieldValue {
|
||||
attrs,
|
||||
member,
|
||||
colon_token,
|
||||
expr,
|
||||
},
|
||||
comma,
|
||||
) = v.into_tuple();
|
||||
Pair::new(
|
||||
StructOrEnumLiteralField {
|
||||
attrs,
|
||||
member,
|
||||
colon_token,
|
||||
expr,
|
||||
},
|
||||
comma,
|
||||
)
|
||||
})),
|
||||
dot2_token,
|
||||
rest,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expr_to_member(expr: &Expr) -> Option<Member> {
|
||||
syn::parse2(expr.to_token_stream()).ok()
|
||||
}
|
||||
|
||||
impl ToTokens for StructOrEnumLiteral {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
attrs,
|
||||
path,
|
||||
brace_or_paren,
|
||||
fields,
|
||||
dot2_token,
|
||||
rest,
|
||||
} = self;
|
||||
tokens.append_all(attrs);
|
||||
path.to_tokens(tokens);
|
||||
brace_or_paren.surround(tokens, |tokens| {
|
||||
match brace_or_paren {
|
||||
BraceOrParen::Brace(_) => {
|
||||
for (
|
||||
StructOrEnumLiteralField {
|
||||
attrs,
|
||||
member,
|
||||
mut colon_token,
|
||||
expr,
|
||||
},
|
||||
comma,
|
||||
) in fields.pairs().map(|v| v.into_tuple())
|
||||
{
|
||||
tokens.append_all(attrs);
|
||||
if Some(member) != expr_to_member(expr).as_ref() {
|
||||
colon_token = Some(<Token![:]>::default());
|
||||
}
|
||||
member.to_tokens(tokens);
|
||||
colon_token.to_tokens(tokens);
|
||||
expr.to_tokens(tokens);
|
||||
comma.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
BraceOrParen::Paren(_) => {
|
||||
for (
|
||||
StructOrEnumLiteralField {
|
||||
attrs,
|
||||
member: _,
|
||||
colon_token: _,
|
||||
expr,
|
||||
},
|
||||
comma,
|
||||
) in fields.pairs().map(|v| v.into_tuple())
|
||||
{
|
||||
tokens.append_all(attrs);
|
||||
expr.to_tokens(tokens);
|
||||
comma.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(rest) = rest {
|
||||
dot2_token.unwrap_or_default().to_tokens(tokens);
|
||||
rest.to_tokens(tokens);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor {
|
||||
pub(crate) fn process_hdl_array(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
mut expr_array: ExprArray,
|
||||
) -> Expr {
|
||||
self.require_normal_module(hdl_attr);
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)}
|
||||
}
|
||||
pub(crate) fn process_struct_enum(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<AggregateLiteralOptions>,
|
||||
mut literal: StructOrEnumLiteral,
|
||||
) -> Expr {
|
||||
let span = hdl_attr.hdl.span;
|
||||
if let Some(rest) = literal.rest.take() {
|
||||
self.errors
|
||||
.error(rest, "#[hdl] struct functional update syntax not supported");
|
||||
}
|
||||
let mut next_var = 0usize;
|
||||
let mut new_var = || -> Ident {
|
||||
let retval = format_ident!("__v{}", next_var, span = span);
|
||||
next_var += 1;
|
||||
retval
|
||||
};
|
||||
let infallible_var = new_var();
|
||||
let retval_var = new_var();
|
||||
let mut lets = vec![];
|
||||
let mut build_steps = vec![];
|
||||
let literal = literal.map_field_exprs(|expr| {
|
||||
let field_var = new_var();
|
||||
lets.push(quote_spanned! {span=>
|
||||
let #field_var = ::fayalite::expr::ToExpr::to_expr(&#expr);
|
||||
});
|
||||
parse_quote! { #field_var }
|
||||
});
|
||||
let Ok(StructOrEnumPath { ty, variant }) =
|
||||
StructOrEnumPath::new(&mut self.errors, literal.path.clone(), &hdl_attr.body)
|
||||
else {
|
||||
return parse_quote_spanned! {span=>
|
||||
{}
|
||||
};
|
||||
};
|
||||
for StructOrEnumLiteralField {
|
||||
attrs: _,
|
||||
member,
|
||||
colon_token: _,
|
||||
expr,
|
||||
} in literal.fields.iter()
|
||||
{
|
||||
let field_fn = format_ident!("field_{}", member);
|
||||
build_steps.push(quote_spanned! {span=>
|
||||
let #retval_var = #retval_var.#field_fn(#expr);
|
||||
});
|
||||
}
|
||||
let check_literal = literal.map_field_exprs(|expr| {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::value_from_expr_type(#expr, #infallible_var)
|
||||
}
|
||||
});
|
||||
let make_expr_fn = if let Some((_variant_path, variant_ident)) = &variant {
|
||||
let variant_fn = format_ident!("variant_{}", variant_ident);
|
||||
build_steps.push(quote_spanned! {span=>
|
||||
let #retval_var = #retval_var.#variant_fn();
|
||||
});
|
||||
quote_spanned! {span=>
|
||||
::fayalite::expr::make_enum_expr
|
||||
}
|
||||
} else {
|
||||
build_steps.push(quote_spanned! {span=>
|
||||
let #retval_var = #retval_var.build();
|
||||
});
|
||||
quote_spanned! {span=>
|
||||
::fayalite::expr::make_bundle_expr
|
||||
}
|
||||
};
|
||||
let variant_or_type =
|
||||
variant.map_or_else(|| ty.clone(), |(variant_path, _variant_ident)| variant_path);
|
||||
parse_quote_spanned! {span=>
|
||||
{
|
||||
#(#lets)*
|
||||
#make_expr_fn::<#ty>(|#infallible_var| {
|
||||
let #retval_var = #check_literal;
|
||||
match #retval_var {
|
||||
#variant_or_type { .. } => #retval_var,
|
||||
#[allow(unreachable_patterns)]
|
||||
_ => match #infallible_var {},
|
||||
}
|
||||
}, |#retval_var| {
|
||||
#(#build_steps)*
|
||||
#retval_var
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_struct(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<AggregateLiteralOptions>,
|
||||
expr_struct: ExprStruct,
|
||||
) -> Expr {
|
||||
self.require_normal_module(&hdl_attr);
|
||||
self.process_struct_enum(hdl_attr, expr_struct.into())
|
||||
}
|
||||
pub(crate) fn process_hdl_tuple(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_tuple: ExprTuple,
|
||||
) -> Expr {
|
||||
self.require_normal_module(hdl_attr);
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_path(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_path: ExprPath,
|
||||
) -> Expr {
|
||||
self.require_normal_module(hdl_attr);
|
||||
parse_quote_spanned! {expr_path.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_path)
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<AggregateLiteralOptions>,
|
||||
expr_call: ExprCall,
|
||||
) -> Expr {
|
||||
self.require_normal_module(&hdl_attr);
|
||||
let ExprCall {
|
||||
attrs: mut literal_attrs,
|
||||
func,
|
||||
paren_token,
|
||||
args,
|
||||
} = expr_call;
|
||||
let mut path_expr = *func;
|
||||
let path = loop {
|
||||
break match path_expr {
|
||||
Expr::Group(ExprGroup {
|
||||
attrs,
|
||||
group_token: _,
|
||||
expr,
|
||||
}) => {
|
||||
literal_attrs.extend(attrs);
|
||||
path_expr = *expr;
|
||||
continue;
|
||||
}
|
||||
Expr::Path(ExprPath { attrs, qself, path }) => {
|
||||
literal_attrs.extend(attrs);
|
||||
TypePath { qself, path }
|
||||
}
|
||||
_ => {
|
||||
self.errors.error(&path_expr, "missing tuple struct's name");
|
||||
return parse_quote_spanned! {path_expr.span()=>
|
||||
{}
|
||||
};
|
||||
}
|
||||
};
|
||||
};
|
||||
let fields = Punctuated::from_iter(args.into_pairs().enumerate().map(|(index, p)| {
|
||||
let (expr, comma) = p.into_tuple();
|
||||
let mut index = Index::from(index);
|
||||
index.span = hdl_attr.hdl.span;
|
||||
Pair::new(
|
||||
StructOrEnumLiteralField {
|
||||
attrs: vec![],
|
||||
member: Member::Unnamed(index),
|
||||
colon_token: None,
|
||||
expr,
|
||||
},
|
||||
comma,
|
||||
)
|
||||
}));
|
||||
self.process_struct_enum(
|
||||
hdl_attr,
|
||||
StructOrEnumLiteral {
|
||||
attrs: literal_attrs,
|
||||
path,
|
||||
brace_or_paren: BraceOrParen::Paren(paren_token),
|
||||
fields,
|
||||
dot2_token: None,
|
||||
rest: None,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,625 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
fold::impl_fold,
|
||||
module::transform_body::{
|
||||
expand_aggregate_literals::{AggregateLiteralOptions, StructOrEnumPath},
|
||||
with_debug_clone_and_fold, Visitor,
|
||||
},
|
||||
Errors, HdlAttr,
|
||||
};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use syn::{
|
||||
fold::{fold_arm, fold_expr_match, fold_pat, Fold},
|
||||
parse::Nothing,
|
||||
parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Index, Member, Pat, PatIdent, PatOr,
|
||||
PatParen, PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, Token, TypePath,
|
||||
};
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatBinding<> {
|
||||
ident: Ident,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatBinding {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { ident } = self;
|
||||
ident.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatParen<P> {
|
||||
paren_token: Paren,
|
||||
pat: Box<P>,
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatParen<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { paren_token, pat } = self;
|
||||
paren_token.surround(tokens, |tokens| pat.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatOr<P> {
|
||||
leading_vert: Option<Token![|]>,
|
||||
cases: Punctuated<P, Token![|]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatOr<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
leading_vert,
|
||||
cases,
|
||||
} = self;
|
||||
leading_vert.to_tokens(tokens);
|
||||
cases.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatWild<> {
|
||||
underscore_token: Token![_],
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatWild {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { underscore_token } = self;
|
||||
underscore_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStructField<> {
|
||||
member: Member,
|
||||
colon_token: Option<Token![:]>,
|
||||
pat: MatchPatSimple,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatStructField {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
member,
|
||||
colon_token,
|
||||
pat,
|
||||
} = self;
|
||||
member.to_tokens(tokens);
|
||||
colon_token.to_tokens(tokens);
|
||||
pat.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl MatchPatStructField {
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, field_pat: FieldPat) -> Result<Self, ()> {
|
||||
let FieldPat {
|
||||
attrs: _,
|
||||
member,
|
||||
colon_token,
|
||||
pat,
|
||||
} = field_pat;
|
||||
Ok(Self {
|
||||
member,
|
||||
colon_token,
|
||||
pat: MatchPatSimple::parse(state, *pat)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStruct<> {
|
||||
resolved_path: Path,
|
||||
brace_token: Brace,
|
||||
fields: Punctuated<MatchPatStructField, Token![,]>,
|
||||
rest: Option<Token![..]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
resolved_path,
|
||||
brace_token,
|
||||
fields,
|
||||
rest,
|
||||
} = self;
|
||||
resolved_path.to_tokens(tokens);
|
||||
brace_token.surround(tokens, |tokens| {
|
||||
fields.to_tokens(tokens);
|
||||
rest.to_tokens(tokens);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum MatchPatSimple {
|
||||
Paren(MatchPatParen<MatchPatSimple>),
|
||||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
enum MatchPatSimple<> {
|
||||
Paren(MatchPatParen<MatchPatSimple>),
|
||||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatSimple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Binding(v) => v.to_tokens(tokens),
|
||||
Self::Wild(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_pat_ident_a_struct_or_enum_name(ident: &Ident) -> bool {
|
||||
ident
|
||||
.to_string()
|
||||
.starts_with(|ch: char| ch.is_ascii_uppercase())
|
||||
}
|
||||
|
||||
trait ParseMatchPat: Sized {
|
||||
fn simple(v: MatchPatSimple) -> Self;
|
||||
fn or(v: MatchPatOr<Self>) -> Self;
|
||||
fn paren(v: MatchPatParen<Self>) -> Self;
|
||||
fn struct_(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatStruct,
|
||||
struct_error_spanned: &dyn ToTokens,
|
||||
) -> Result<Self, ()>;
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result<Self, ()> {
|
||||
match pat {
|
||||
Pat::Ident(PatIdent {
|
||||
attrs: _,
|
||||
by_ref,
|
||||
mutability,
|
||||
ident,
|
||||
subpat,
|
||||
}) => {
|
||||
if let Some(by_ref) = by_ref {
|
||||
state
|
||||
.errors
|
||||
.error(by_ref, "ref not allowed in #[hdl] patterns");
|
||||
}
|
||||
if let Some(mutability) = mutability {
|
||||
state
|
||||
.errors
|
||||
.error(mutability, "mut not allowed in #[hdl] patterns");
|
||||
}
|
||||
if let Some((at_token, _)) = subpat {
|
||||
state
|
||||
.errors
|
||||
.error(at_token, "@ not allowed in #[hdl] patterns");
|
||||
}
|
||||
if is_pat_ident_a_struct_or_enum_name(&ident) {
|
||||
let ident_span = ident.span();
|
||||
let resolved_path = state.resolve_enum_struct_path(TypePath {
|
||||
qself: None,
|
||||
path: ident.clone().into(),
|
||||
})?;
|
||||
Self::struct_(
|
||||
state,
|
||||
MatchPatStruct {
|
||||
resolved_path,
|
||||
brace_token: Brace(ident_span),
|
||||
fields: Punctuated::new(),
|
||||
rest: None,
|
||||
},
|
||||
&ident,
|
||||
)
|
||||
} else {
|
||||
Ok(Self::simple(MatchPatSimple::Binding(MatchPatBinding {
|
||||
ident,
|
||||
})))
|
||||
}
|
||||
}
|
||||
Pat::Or(PatOr {
|
||||
attrs: _,
|
||||
leading_vert,
|
||||
cases,
|
||||
}) => Ok(Self::or(MatchPatOr {
|
||||
leading_vert,
|
||||
cases: cases
|
||||
.into_pairs()
|
||||
.filter_map(|pair| {
|
||||
let (pat, punct) = pair.into_tuple();
|
||||
let pat = Self::parse(state, pat).ok()?;
|
||||
Some(Pair::new(pat, punct))
|
||||
})
|
||||
.collect(),
|
||||
})),
|
||||
Pat::Paren(PatParen {
|
||||
attrs: _,
|
||||
paren_token,
|
||||
pat,
|
||||
}) => Ok(Self::paren(MatchPatParen {
|
||||
paren_token,
|
||||
pat: Box::new(Self::parse(state, *pat)?),
|
||||
})),
|
||||
Pat::Path(PatPath {
|
||||
attrs: _,
|
||||
qself,
|
||||
path,
|
||||
}) => {
|
||||
let path = TypePath { qself, path };
|
||||
let path_span = path.span();
|
||||
let resolved_path = state.resolve_enum_struct_path(path.clone())?;
|
||||
Self::struct_(
|
||||
state,
|
||||
MatchPatStruct {
|
||||
resolved_path,
|
||||
brace_token: Brace(path_span),
|
||||
fields: Punctuated::new(),
|
||||
rest: None,
|
||||
},
|
||||
&path,
|
||||
)
|
||||
}
|
||||
Pat::Struct(PatStruct {
|
||||
attrs: _,
|
||||
qself,
|
||||
path,
|
||||
brace_token,
|
||||
fields,
|
||||
rest,
|
||||
}) => {
|
||||
let fields = fields
|
||||
.into_pairs()
|
||||
.filter_map(|pair| {
|
||||
let (field_pat, punct) = pair.into_tuple();
|
||||
let field_pat = MatchPatStructField::parse(state, field_pat).ok()?;
|
||||
Some(Pair::new(field_pat, punct))
|
||||
})
|
||||
.collect();
|
||||
let path = TypePath { qself, path };
|
||||
let resolved_path = state.resolve_enum_struct_path(path.clone())?;
|
||||
Self::struct_(
|
||||
state,
|
||||
MatchPatStruct {
|
||||
resolved_path,
|
||||
brace_token,
|
||||
fields,
|
||||
rest: rest.map(
|
||||
|PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
}| dot2_token,
|
||||
),
|
||||
},
|
||||
&path,
|
||||
)
|
||||
}
|
||||
Pat::TupleStruct(PatTupleStruct {
|
||||
attrs: _,
|
||||
qself,
|
||||
path,
|
||||
paren_token,
|
||||
mut elems,
|
||||
}) => {
|
||||
let rest = if let Some(&Pat::Rest(PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
})) = elems.last()
|
||||
{
|
||||
elems.pop();
|
||||
Some(dot2_token)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let fields = elems
|
||||
.into_pairs()
|
||||
.enumerate()
|
||||
.filter_map(|(index, pair)| {
|
||||
let (pat, punct) = pair.into_tuple();
|
||||
let pat = MatchPatSimple::parse(state, pat).ok()?;
|
||||
let mut index = Index::from(index);
|
||||
index.span = state.span;
|
||||
let field = MatchPatStructField {
|
||||
member: index.into(),
|
||||
colon_token: Some(Token),
|
||||
pat,
|
||||
};
|
||||
Some(Pair::new(field, punct))
|
||||
})
|
||||
.collect();
|
||||
let path = TypePath { qself, path };
|
||||
let resolved_path = state.resolve_enum_struct_path(path.clone())?;
|
||||
Self::struct_(
|
||||
state,
|
||||
MatchPatStruct {
|
||||
resolved_path,
|
||||
brace_token: Brace {
|
||||
span: paren_token.span,
|
||||
},
|
||||
fields,
|
||||
rest,
|
||||
},
|
||||
&path,
|
||||
)
|
||||
}
|
||||
Pat::Rest(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not allowed here in #[hdl] patterns");
|
||||
Err(())
|
||||
}
|
||||
Pat::Wild(PatWild {
|
||||
attrs: _,
|
||||
underscore_token,
|
||||
}) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild {
|
||||
underscore_token,
|
||||
}))),
|
||||
Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not yet implemented in #[hdl] patterns");
|
||||
Err(())
|
||||
}
|
||||
_ => {
|
||||
state.errors.error(pat, "not allowed in #[hdl] patterns");
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ParseMatchPat for MatchPatSimple {
|
||||
fn simple(v: MatchPatSimple) -> Self {
|
||||
v
|
||||
}
|
||||
|
||||
fn or(v: MatchPatOr<Self>) -> Self {
|
||||
Self::Or(v)
|
||||
}
|
||||
|
||||
fn paren(v: MatchPatParen<Self>) -> Self {
|
||||
Self::Paren(v)
|
||||
}
|
||||
|
||||
fn struct_(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
_v: MatchPatStruct,
|
||||
struct_error_spanned: &dyn ToTokens,
|
||||
) -> Result<Self, ()> {
|
||||
state.errors.error(
|
||||
struct_error_spanned,
|
||||
"not yet implemented inside structs/enums in #[hdl] patterns",
|
||||
);
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum MatchPat {
|
||||
Simple(MatchPatSimple),
|
||||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
enum MatchPat<> {
|
||||
Simple(MatchPatSimple),
|
||||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
}
|
||||
}
|
||||
|
||||
impl ParseMatchPat for MatchPat {
|
||||
fn simple(v: MatchPatSimple) -> Self {
|
||||
Self::Simple(v)
|
||||
}
|
||||
|
||||
fn or(v: MatchPatOr<Self>) -> Self {
|
||||
Self::Or(v)
|
||||
}
|
||||
|
||||
fn paren(v: MatchPatParen<Self>) -> Self {
|
||||
Self::Paren(v)
|
||||
}
|
||||
|
||||
fn struct_(
|
||||
_state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatStruct,
|
||||
_struct_error_spanned: &dyn ToTokens,
|
||||
) -> Result<Self, ()> {
|
||||
Ok(Self::Struct(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPat {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
Self::Simple(v) => v.to_tokens(tokens),
|
||||
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Struct(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchArm<> {
|
||||
attrs: Vec<Attribute>,
|
||||
pat: MatchPat,
|
||||
fat_arrow_token: Token![=>],
|
||||
body: Box<Expr>,
|
||||
comma: Option<Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl MatchArm {
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, arm: Arm) -> Result<Self, ()> {
|
||||
let Arm {
|
||||
attrs,
|
||||
pat,
|
||||
guard,
|
||||
fat_arrow_token,
|
||||
body,
|
||||
comma,
|
||||
} = arm;
|
||||
if let Some((if_, _)) = guard {
|
||||
state
|
||||
.errors
|
||||
.error(if_, "#[hdl] match arm if clauses are not implemented");
|
||||
}
|
||||
Ok(Self {
|
||||
attrs,
|
||||
pat: MatchPat::parse(state, pat)?,
|
||||
fat_arrow_token,
|
||||
body,
|
||||
comma,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchArm {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
attrs,
|
||||
pat,
|
||||
fat_arrow_token,
|
||||
body,
|
||||
comma,
|
||||
} = self;
|
||||
tokens.append_all(attrs);
|
||||
pat.to_tokens(tokens);
|
||||
fat_arrow_token.to_tokens(tokens);
|
||||
body.to_tokens(tokens);
|
||||
comma.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
struct RewriteAsCheckMatch {
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl Fold for RewriteAsCheckMatch {
|
||||
fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat {
|
||||
i.colon_token = Some(Token));
|
||||
i
|
||||
}
|
||||
fn fold_pat(&mut self, i: Pat) -> Pat {
|
||||
match i {
|
||||
Pat::Ident(PatIdent {
|
||||
attrs,
|
||||
by_ref,
|
||||
mutability,
|
||||
ident,
|
||||
subpat: None,
|
||||
}) if is_pat_ident_a_struct_or_enum_name(&ident) => {
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#(#attrs)*
|
||||
#by_ref
|
||||
#mutability
|
||||
#ident {}
|
||||
}
|
||||
}
|
||||
_ => fold_pat(self, i),
|
||||
}
|
||||
}
|
||||
fn fold_pat_ident(&mut self, mut i: PatIdent) -> PatIdent {
|
||||
i.by_ref = Some(Token));
|
||||
i.mutability = None;
|
||||
i
|
||||
}
|
||||
fn fold_arm(&mut self, mut i: Arm) -> Arm {
|
||||
i.body = parse_quote_spanned! {self.span=>
|
||||
match __infallible {}
|
||||
};
|
||||
i.comma.get_or_insert_with(|| Token);
|
||||
fold_arm(self, i)
|
||||
}
|
||||
fn fold_expr_match(&mut self, mut i: ExprMatch) -> ExprMatch {
|
||||
i.expr = parse_quote_spanned! {self.span=>
|
||||
__match_value
|
||||
};
|
||||
fold_expr_match(self, i)
|
||||
}
|
||||
fn fold_expr(&mut self, i: Expr) -> Expr {
|
||||
// don't recurse into expressions
|
||||
i
|
||||
}
|
||||
}
|
||||
|
||||
struct HdlMatchParseState<'a> {
|
||||
errors: &'a mut Errors,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl HdlMatchParseState<'_> {
|
||||
fn resolve_enum_struct_path(&mut self, path: TypePath) -> Result<Path, ()> {
|
||||
let StructOrEnumPath { ty, variant } =
|
||||
StructOrEnumPath::new(self.errors, path, &AggregateLiteralOptions::default())?;
|
||||
Ok(if let Some((_variant_path, variant_name)) = variant {
|
||||
parse_quote_spanned! {self.span=>
|
||||
__MatchTy::<#ty>::#variant_name
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {self.span=>
|
||||
__MatchTy::<#ty>
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor {
|
||||
pub(crate) fn process_hdl_match(
|
||||
&mut self,
|
||||
_hdl_attr: HdlAttr<Nothing>,
|
||||
expr_match: ExprMatch,
|
||||
) -> Expr {
|
||||
let span = expr_match.match_token.span();
|
||||
let check_match = RewriteAsCheckMatch { span }.fold_expr_match(expr_match.clone());
|
||||
let ExprMatch {
|
||||
attrs: _,
|
||||
match_token,
|
||||
expr,
|
||||
brace_token: _,
|
||||
arms,
|
||||
} = expr_match;
|
||||
self.require_normal_module(match_token);
|
||||
let mut state = HdlMatchParseState {
|
||||
errors: &mut self.errors,
|
||||
span,
|
||||
};
|
||||
let arms = Vec::from_iter(
|
||||
arms.into_iter()
|
||||
.filter_map(|arm| MatchArm::parse(&mut state, arm).ok()),
|
||||
);
|
||||
parse_quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<V> = <<V as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in m.match_(__match_expr) {
|
||||
let (__match_variant, __scope) = ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(__match_variant);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
746
crates/fayalite-proc-macros-impl/src/value_derive_common.rs
Normal file
746
crates/fayalite-proc-macros-impl/src/value_derive_common.rs
Normal file
|
@ -0,0 +1,746 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{fold::impl_fold, kw, Errors, HdlAttr};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use std::collections::{BTreeMap, HashMap, HashSet};
|
||||
use syn::{
|
||||
fold::{fold_generics, Fold},
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::Punctuated,
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren, Where},
|
||||
Block, ConstParam, Expr, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics,
|
||||
Ident, Index, ItemImpl, Lifetime, LifetimeParam, Member, Path, Token, Type, TypeParam,
|
||||
TypePath, Visibility, WhereClause, WherePredicate,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct Bounds(pub(crate) Punctuated<WherePredicate, Token![,]>);
|
||||
|
||||
impl_fold! {
|
||||
struct Bounds<>(Punctuated<WherePredicate, Token![,]>);
|
||||
}
|
||||
|
||||
impl Parse for Bounds {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Ok(Bounds(Punctuated::parse_terminated(input)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<WhereClause>> for Bounds {
|
||||
fn from(value: Option<WhereClause>) -> Self {
|
||||
Self(value.map_or_else(Punctuated::new, |v| v.predicates))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Bounds {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.to_tokens(tokens)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedField<O> {
|
||||
pub(crate) options: HdlAttr<O>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) name: Member,
|
||||
pub(crate) ty: Type,
|
||||
}
|
||||
|
||||
impl<O> ParsedField<O> {
|
||||
pub(crate) fn var_name(&self) -> Ident {
|
||||
format_ident!("__v_{}", self.name)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_field_name(
|
||||
index: usize,
|
||||
name: Option<Ident>,
|
||||
ty_span: impl FnOnce() -> Span,
|
||||
) -> Member {
|
||||
match name {
|
||||
Some(name) => Member::Named(name),
|
||||
None => Member::Unnamed(Index {
|
||||
index: index as _,
|
||||
span: ty_span(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_field_names(fields: &Fields) -> impl Iterator<Item = Member> + '_ {
|
||||
fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| get_field_name(index, field.ident.clone(), || field.ty.span()))
|
||||
}
|
||||
|
||||
impl<O: Parse + Default> ParsedField<O> {
|
||||
pub(crate) fn parse_fields(
|
||||
errors: &mut Errors,
|
||||
fields: &mut Fields,
|
||||
in_enum: bool,
|
||||
) -> (FieldsKind, Vec<ParsedField<O>>) {
|
||||
let mut unit_fields = Punctuated::new();
|
||||
let (fields_kind, fields) = match fields {
|
||||
Fields::Named(fields) => (FieldsKind::Named(fields.brace_token), &mut fields.named),
|
||||
Fields::Unnamed(fields) => {
|
||||
(FieldsKind::Unnamed(fields.paren_token), &mut fields.unnamed)
|
||||
}
|
||||
Fields::Unit => (FieldsKind::Unit, &mut unit_fields),
|
||||
};
|
||||
let fields = fields
|
||||
.iter_mut()
|
||||
.enumerate()
|
||||
.map(|(index, field)| {
|
||||
let options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut field.attrs))
|
||||
.unwrap_or_default();
|
||||
let name = get_field_name(index, field.ident.clone(), || field.ty.span());
|
||||
if in_enum && !matches!(field.vis, Visibility::Inherited) {
|
||||
errors.error(&field.vis, "field visibility not allowed in enums");
|
||||
}
|
||||
ParsedField {
|
||||
options,
|
||||
vis: field.vis.clone(),
|
||||
name,
|
||||
ty: field.ty.clone(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
(fields_kind, fields)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum FieldsKind {
|
||||
Unit,
|
||||
Named(Brace),
|
||||
Unnamed(Paren),
|
||||
}
|
||||
|
||||
impl FieldsKind {
|
||||
pub(crate) fn into_fields_named(
|
||||
brace_token: Brace,
|
||||
fields: impl IntoIterator<Item = syn::Field>,
|
||||
) -> Fields {
|
||||
Fields::Named(FieldsNamed {
|
||||
brace_token,
|
||||
named: Punctuated::from_iter(fields),
|
||||
})
|
||||
}
|
||||
pub(crate) fn into_fields_unnamed(
|
||||
paren_token: Paren,
|
||||
fields: impl IntoIterator<Item = syn::Field>,
|
||||
) -> Fields {
|
||||
Fields::Unnamed(FieldsUnnamed {
|
||||
paren_token,
|
||||
unnamed: Punctuated::from_iter(fields),
|
||||
})
|
||||
}
|
||||
pub(crate) fn into_fields(self, fields: impl IntoIterator<Item = syn::Field>) -> Fields {
|
||||
match self {
|
||||
FieldsKind::Unit => {
|
||||
let mut fields = fields.into_iter().peekable();
|
||||
let Some(first_field) = fields.peek() else {
|
||||
return Fields::Unit;
|
||||
};
|
||||
if first_field.ident.is_some() {
|
||||
Self::into_fields_named(Default::default(), fields)
|
||||
} else {
|
||||
Self::into_fields_unnamed(Default::default(), fields)
|
||||
}
|
||||
}
|
||||
FieldsKind::Named(brace_token) => Self::into_fields_named(brace_token, fields),
|
||||
FieldsKind::Unnamed(paren_token) => Self::into_fields_unnamed(paren_token, fields),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_target(target: &Option<(kw::target, Paren, Path)>, item_ident: &Ident) -> Path {
|
||||
match target {
|
||||
Some((_, _, target)) => target.clone(),
|
||||
None => item_ident.clone().into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ValueDeriveGenerics {
|
||||
pub(crate) generics: Generics,
|
||||
pub(crate) fixed_type_generics: Generics,
|
||||
}
|
||||
|
||||
impl ValueDeriveGenerics {
|
||||
pub(crate) fn get(mut generics: Generics, where_: &Option<(Where, Paren, Bounds)>) -> Self {
|
||||
let mut fixed_type_generics = generics.clone();
|
||||
if let Some((_, _, bounds)) = where_ {
|
||||
generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.extend(bounds.0.iter().cloned());
|
||||
fixed_type_generics
|
||||
.where_clause
|
||||
.clone_from(&generics.where_clause);
|
||||
} else {
|
||||
let type_params = Vec::from_iter(generics.type_params().map(|v| v.ident.clone()));
|
||||
let predicates = &mut generics.make_where_clause().predicates;
|
||||
let fixed_type_predicates = &mut fixed_type_generics.make_where_clause().predicates;
|
||||
for type_param in type_params {
|
||||
predicates.push(parse_quote! {#type_param: ::fayalite::ty::Value});
|
||||
predicates.push(parse_quote! {<#type_param as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = #type_param>});
|
||||
fixed_type_predicates.push(parse_quote! {#type_param: ::fayalite::ty::Value});
|
||||
fixed_type_predicates.push(parse_quote! {<#type_param as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::FixedType<Value = #type_param>});
|
||||
fixed_type_predicates.push(parse_quote! {<<#type_param as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::MaskType: ::fayalite::ty::FixedType});
|
||||
}
|
||||
}
|
||||
Self {
|
||||
generics,
|
||||
fixed_type_generics,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn derive_clone_hash_eq_partialeq_for_struct<Name: ToTokens>(
|
||||
the_struct_ident: &Ident,
|
||||
generics: &Generics,
|
||||
field_names: &[Name],
|
||||
) -> TokenStream {
|
||||
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::clone::Clone for #the_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
#(#field_names: ::fayalite::__std::clone::Clone::clone(&self.#field_names),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::hash::Hash for #the_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(unused_variables)]
|
||||
fn hash<__H: ::fayalite::__std::hash::Hasher>(&self, hasher: &mut __H) {
|
||||
#(::fayalite::__std::hash::Hash::hash(&self.#field_names, hasher);)*
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::cmp::Eq for #the_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::cmp::PartialEq for #the_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(unused_variables)]
|
||||
#[allow(clippy::nonminimal_bool)]
|
||||
fn eq(&self, other: &Self) -> ::fayalite::__std::primitive::bool {
|
||||
true
|
||||
#(&& ::fayalite::__std::cmp::PartialEq::eq(&self.#field_names, &other.#field_names))*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn append_field(fields: &mut Fields, mut field: Field) -> Member {
|
||||
let ident = field.ident.clone().expect("ident is supplied");
|
||||
match fields {
|
||||
Fields::Named(FieldsNamed { named, .. }) => {
|
||||
named.push(field);
|
||||
Member::Named(ident)
|
||||
}
|
||||
Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
|
||||
field.ident = None;
|
||||
field.colon_token = None;
|
||||
let index = unnamed.len();
|
||||
unnamed.push(field);
|
||||
Member::Unnamed(index.into())
|
||||
}
|
||||
Fields::Unit => {
|
||||
*fields = Fields::Named(FieldsNamed {
|
||||
brace_token: Default::default(),
|
||||
named: Punctuated::from_iter([field]),
|
||||
});
|
||||
Member::Named(ident)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct BuilderField {
|
||||
pub(crate) names: HashSet<Member>,
|
||||
pub(crate) mapped_value: Expr,
|
||||
pub(crate) mapped_type: Type,
|
||||
pub(crate) where_clause: Option<WhereClause>,
|
||||
pub(crate) builder_field_name: Ident,
|
||||
pub(crate) type_param: Ident,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Builder {
|
||||
struct_name: Ident,
|
||||
vis: Visibility,
|
||||
fields: BTreeMap<String, BuilderField>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct BuilderWithFields {
|
||||
struct_name: Ident,
|
||||
vis: Visibility,
|
||||
phantom_type_param: Ident,
|
||||
phantom_type_field: Ident,
|
||||
fields: Vec<(String, BuilderField)>,
|
||||
}
|
||||
|
||||
impl Builder {
|
||||
pub(crate) fn new(struct_name: Ident, vis: Visibility) -> Self {
|
||||
Self {
|
||||
struct_name,
|
||||
vis,
|
||||
fields: BTreeMap::new(),
|
||||
}
|
||||
}
|
||||
pub(crate) fn insert_field(
|
||||
&mut self,
|
||||
name: Member,
|
||||
map_value: impl FnOnce(&Ident) -> Expr,
|
||||
map_type: impl FnOnce(&Ident) -> Type,
|
||||
where_clause: impl FnOnce(&Ident) -> Option<WhereClause>,
|
||||
) {
|
||||
self.fields
|
||||
.entry(name.to_token_stream().to_string())
|
||||
.or_insert_with_key(|name| {
|
||||
let builder_field_name =
|
||||
format_ident!("field_{}", name, span = self.struct_name.span());
|
||||
let type_param = format_ident!("__T_{}", name, span = self.struct_name.span());
|
||||
BuilderField {
|
||||
names: HashSet::new(),
|
||||
mapped_value: map_value(&builder_field_name),
|
||||
mapped_type: map_type(&type_param),
|
||||
where_clause: where_clause(&type_param),
|
||||
builder_field_name,
|
||||
type_param,
|
||||
}
|
||||
})
|
||||
.names
|
||||
.insert(name);
|
||||
}
|
||||
pub(crate) fn finish_filling_in_fields(self) -> BuilderWithFields {
|
||||
let Self {
|
||||
struct_name,
|
||||
vis,
|
||||
fields,
|
||||
} = self;
|
||||
let fields = Vec::from_iter(fields);
|
||||
BuilderWithFields {
|
||||
phantom_type_param: Ident::new("__Phantom", struct_name.span()),
|
||||
phantom_type_field: Ident::new("__phantom", struct_name.span()),
|
||||
struct_name,
|
||||
vis,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BuilderWithFields {
|
||||
pub(crate) fn get_field(&self, name: &Member) -> Option<(usize, &BuilderField)> {
|
||||
let index = self
|
||||
.fields
|
||||
.binary_search_by_key(&&*name.to_token_stream().to_string(), |v| &*v.0)
|
||||
.ok()?;
|
||||
Some((index, &self.fields[index].1))
|
||||
}
|
||||
pub(crate) fn ty(
|
||||
&self,
|
||||
specified_fields: impl IntoIterator<Item = (Member, Type)>,
|
||||
phantom_type: Option<&Type>,
|
||||
other_fields_are_any_type: bool,
|
||||
) -> TypePath {
|
||||
let Self {
|
||||
struct_name,
|
||||
vis: _,
|
||||
phantom_type_param,
|
||||
phantom_type_field: _,
|
||||
fields,
|
||||
} = self;
|
||||
let span = struct_name.span();
|
||||
let mut arguments =
|
||||
Vec::from_iter(fields.iter().map(|(_, BuilderField { type_param, .. })| {
|
||||
if other_fields_are_any_type {
|
||||
parse_quote_spanned! {span=>
|
||||
#type_param
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
()
|
||||
}
|
||||
}
|
||||
}));
|
||||
for (name, ty) in specified_fields {
|
||||
let Some((index, _)) = self.get_field(&name) else {
|
||||
panic!("field not found: {}", name.to_token_stream());
|
||||
};
|
||||
arguments[index] = ty;
|
||||
}
|
||||
let phantom_type_param = phantom_type.is_none().then_some(phantom_type_param);
|
||||
parse_quote_spanned! {span=>
|
||||
#struct_name::<#phantom_type_param #phantom_type #(, #arguments)*>
|
||||
}
|
||||
}
|
||||
pub(crate) fn append_generics(
|
||||
&self,
|
||||
specified_fields: impl IntoIterator<Item = Member>,
|
||||
has_phantom_type_param: bool,
|
||||
other_fields_are_any_type: bool,
|
||||
generics: &mut Generics,
|
||||
) {
|
||||
let Self {
|
||||
struct_name: _,
|
||||
vis: _,
|
||||
phantom_type_param,
|
||||
phantom_type_field: _,
|
||||
fields,
|
||||
} = self;
|
||||
if has_phantom_type_param {
|
||||
generics.params.push(GenericParam::from(TypeParam::from(
|
||||
phantom_type_param.clone(),
|
||||
)));
|
||||
}
|
||||
if !other_fields_are_any_type {
|
||||
return;
|
||||
}
|
||||
let mut type_params = Vec::from_iter(
|
||||
fields
|
||||
.iter()
|
||||
.map(|(_, BuilderField { type_param, .. })| Some(type_param)),
|
||||
);
|
||||
for name in specified_fields {
|
||||
let Some((index, _)) = self.get_field(&name) else {
|
||||
panic!("field not found: {}", name.to_token_stream());
|
||||
};
|
||||
type_params[index] = None;
|
||||
}
|
||||
generics.params.extend(
|
||||
type_params
|
||||
.into_iter()
|
||||
.filter_map(|v| Some(GenericParam::from(TypeParam::from(v?.clone())))),
|
||||
);
|
||||
}
|
||||
pub(crate) fn make_build_method(
|
||||
&self,
|
||||
build_fn_name: &Ident,
|
||||
specified_fields: impl IntoIterator<Item = (Member, Type)>,
|
||||
generics: &Generics,
|
||||
phantom_type: &Type,
|
||||
return_ty: &Type,
|
||||
mut body: Block,
|
||||
) -> ItemImpl {
|
||||
let Self {
|
||||
struct_name,
|
||||
vis,
|
||||
phantom_type_param: _,
|
||||
phantom_type_field,
|
||||
fields,
|
||||
} = self;
|
||||
let span = struct_name.span();
|
||||
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
|
||||
let (impl_generics, _type_generics, where_clause) = generics.split_for_impl();
|
||||
let empty_arg = parse_quote_spanned! {span=>
|
||||
()
|
||||
};
|
||||
let mut ty_arguments = vec![empty_arg; fields.len()];
|
||||
let empty_field_pat = quote_spanned! {span=>
|
||||
: _
|
||||
};
|
||||
let mut field_pats = vec![Some(empty_field_pat); fields.len()];
|
||||
for (name, ty) in specified_fields {
|
||||
let Some((index, _)) = self.get_field(&name) else {
|
||||
panic!("field not found: {}", name.to_token_stream());
|
||||
};
|
||||
ty_arguments[index] = ty;
|
||||
field_pats[index] = None;
|
||||
}
|
||||
body.stmts.insert(
|
||||
0,
|
||||
parse_quote_spanned! {span=>
|
||||
let Self {
|
||||
#(#field_names #field_pats,)*
|
||||
#phantom_type_field: _,
|
||||
} = self;
|
||||
},
|
||||
);
|
||||
parse_quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #struct_name<#phantom_type #(, #ty_arguments)*>
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #build_fn_name(self) -> #return_ty
|
||||
#body
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for BuilderWithFields {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
struct_name,
|
||||
vis,
|
||||
phantom_type_param,
|
||||
phantom_type_field,
|
||||
fields,
|
||||
} = self;
|
||||
let span = struct_name.span();
|
||||
let mut any_generics = Generics::default();
|
||||
self.append_generics([], true, true, &mut any_generics);
|
||||
let empty_ty = self.ty([], None, false);
|
||||
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
|
||||
let field_type_params = Vec::from_iter(fields.iter().map(|v| &v.1.type_param));
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types)]
|
||||
#[non_exhaustive]
|
||||
#vis struct #struct_name #any_generics {
|
||||
#(#field_names: #field_type_params,)*
|
||||
#phantom_type_field: ::fayalite::__std::marker::PhantomData<#phantom_type_param>,
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl<#phantom_type_param> #empty_ty {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
#(#field_names: (),)*
|
||||
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
for (field_index, (_, field)) in self.fields.iter().enumerate() {
|
||||
let initial_fields = &fields[..field_index];
|
||||
let final_fields = &fields[field_index..][1..];
|
||||
let initial_type_params =
|
||||
Vec::from_iter(initial_fields.iter().map(|v| &v.1.type_param));
|
||||
let final_type_params = Vec::from_iter(final_fields.iter().map(|v| &v.1.type_param));
|
||||
let initial_field_names =
|
||||
Vec::from_iter(initial_fields.iter().map(|v| &v.1.builder_field_name));
|
||||
let final_field_names =
|
||||
Vec::from_iter(final_fields.iter().map(|v| &v.1.builder_field_name));
|
||||
let BuilderField {
|
||||
names: _,
|
||||
mapped_value,
|
||||
mapped_type,
|
||||
where_clause,
|
||||
builder_field_name,
|
||||
type_param,
|
||||
} = field;
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
impl<#phantom_type_param #(, #initial_type_params)* #(, #final_type_params)*> #struct_name<#phantom_type_param #(, #initial_type_params)*, () #(, #final_type_params)*> {
|
||||
#vis fn #builder_field_name<#type_param>(self, #builder_field_name: #type_param) -> #struct_name<#phantom_type_param #(, #initial_type_params)*, #mapped_type #(, #final_type_params)*>
|
||||
#where_clause
|
||||
{
|
||||
let Self {
|
||||
#(#initial_field_names,)*
|
||||
#builder_field_name: (),
|
||||
#(#final_field_names,)*
|
||||
#phantom_type_field: _,
|
||||
} = self;
|
||||
let #builder_field_name = #mapped_value;
|
||||
#struct_name {
|
||||
#(#field_names,)*
|
||||
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
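/// Renames identifiers according to `map` while folding a syntax tree;
/// identifiers that are not in the map are left unchanged.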
pub(crate) struct MapIdents {
    pub(crate) map: HashMap<Ident, Ident>,
}

impl Fold for &MapIdents {
    fn fold_ident(&mut self, i: Ident) -> Ident {
        self.map.get(&i).cloned().unwrap_or(i)
    }
}

pub(crate) struct DupGenerics<M> {
    pub(crate) combined: Generics,
    pub(crate) maps: M,
}
|
||||
|
||||
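/// Appends `source` to `target`, inserting a separator from `make_punct`
/// when `target` does not already end with trailing punctuation.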
pub(crate) fn merge_punctuated<T, P: Default>(
    target: &mut Punctuated<T, P>,
    source: Punctuated<T, P>,
    make_punct: impl FnOnce() -> P,
) {
    if source.is_empty() {
        return;
    }
    if target.is_empty() {
        *target = source;
        return;
    }
    if !target.trailing_punct() {
        target.push_punct(make_punct());
    }
    target.extend(source.into_pairs());
}
|
||||
|
||||
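/// Merges the generic parameters and `where` clause of `source` into `target`.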
pub(crate) fn merge_generics(target: &mut Generics, source: Generics) {
    let Generics {
        lt_token,
        params,
        gt_token,
        where_clause,
    } = source;
    let span = lt_token.map(|v| v.span).unwrap_or_else(|| params.span());
    target.lt_token = target.lt_token.or(lt_token);
    merge_punctuated(&mut target.params, params, || Token![,](span));
    target.gt_token = target.gt_token.or(gt_token);
    if let Some(where_clause) = where_clause {
        if let Some(target_where_clause) = &mut target.where_clause {
            let WhereClause {
                where_token,
                predicates,
            } = where_clause;
            let span = where_token.span;
            target_where_clause.where_token = where_token;
            merge_punctuated(&mut target_where_clause.predicates, predicates, || {
                Token![,](span)
            });
        } else {
            target.where_clause = Some(where_clause);
        }
    }
}
|
||||
|
||||
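/// Builds `count` renamed copies of `generics` (`T` becomes `__T_0`, `__T_1`, ...)
/// together with a combined parameter list containing every copy.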
impl DupGenerics<Vec<MapIdents>> {
    pub(crate) fn new_dyn(generics: &Generics, count: usize) -> Self {
        let mut maps = Vec::from_iter((0..count).map(|_| MapIdents {
            map: HashMap::new(),
        }));
        for param in &generics.params {
            let (GenericParam::Lifetime(LifetimeParam {
                lifetime: Lifetime { ident, .. },
                ..
            })
            | GenericParam::Type(TypeParam { ident, .. })
            | GenericParam::Const(ConstParam { ident, .. })) = param;
            for (i, map_idents) in maps.iter_mut().enumerate() {
                map_idents
                    .map
                    .insert(ident.clone(), format_ident!("__{}_{}", ident, i));
            }
        }
        let mut combined = Generics::default();
        for map_idents in maps.iter() {
            merge_generics(
                &mut combined,
                fold_generics(&mut { map_idents }, generics.clone()),
            );
        }
        Self { combined, maps }
    }
}
|
||||
|
||||
impl<const COUNT: usize> DupGenerics<[MapIdents; COUNT]> {
    pub(crate) fn new(generics: &Generics) -> Self {
        let DupGenerics { combined, maps } = DupGenerics::new_dyn(generics, COUNT);
        Self {
            combined,
            maps: maps.try_into().ok().unwrap(),
        }
    }
}
|
||||
|
||||
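/// Appends `where_predicate` to `target`'s `where` clause, creating the clause
/// at `span` if it does not exist yet.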
pub(crate) fn add_where_predicate(
    target: &mut Generics,
    span: Span,
    where_predicate: WherePredicate,
) {
    let WhereClause {
        where_token: _,
        predicates,
    } = target.where_clause.get_or_insert_with(|| WhereClause {
        where_token: Token![where](span),
        predicates: Punctuated::new(),
    });
    if !predicates.empty_or_trailing() {
        predicates.push_punct(Token![,](span));
    }
    predicates.push_value(where_predicate);
}
|
||||
|
||||
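/// Emits the `::fayalite::ty::Connect` impl for `ty_ident`. When `connect_inexact`
/// is set, the left-hand and right-hand sides get independently renamed copies of
/// `generics`, bounded by `Connect` between the corresponding field types.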
pub(crate) fn make_connect_impl(
|
||||
connect_inexact: Option<(crate::kw::connect_inexact,)>,
|
||||
generics: &Generics,
|
||||
ty_ident: &Ident,
|
||||
field_types: impl IntoIterator<Item = Type>,
|
||||
) -> TokenStream {
|
||||
let span = ty_ident.span();
|
||||
let impl_generics;
|
||||
let combined_generics;
|
||||
let where_clause;
|
||||
let lhs_generics;
|
||||
let lhs_type_generics;
|
||||
let rhs_generics;
|
||||
let rhs_type_generics;
|
||||
if connect_inexact.is_some() {
|
||||
let DupGenerics {
|
||||
mut combined,
|
||||
maps: [lhs_map, rhs_map],
|
||||
} = DupGenerics::new(generics);
|
||||
for field_type in field_types {
|
||||
let lhs_type = (&lhs_map).fold_type(field_type.clone());
|
||||
let rhs_type = (&rhs_map).fold_type(field_type);
|
||||
add_where_predicate(
|
||||
&mut combined,
|
||||
span,
|
||||
parse_quote_spanned! {span=>
|
||||
#lhs_type: ::fayalite::ty::Connect<#rhs_type>
|
||||
},
|
||||
);
|
||||
}
|
||||
combined_generics = combined;
|
||||
(impl_generics, _, where_clause) = combined_generics.split_for_impl();
|
||||
lhs_generics = (&lhs_map).fold_generics(generics.clone());
|
||||
(_, lhs_type_generics, _) = lhs_generics.split_for_impl();
|
||||
rhs_generics = (&rhs_map).fold_generics(generics.clone());
|
||||
(_, rhs_type_generics, _) = rhs_generics.split_for_impl();
|
||||
} else {
|
||||
let mut generics = generics.clone();
|
||||
for field_type in field_types {
|
||||
add_where_predicate(
|
||||
&mut generics,
|
||||
span,
|
||||
parse_quote_spanned! {span=>
|
||||
#field_type: ::fayalite::ty::Connect<#field_type>
|
||||
},
|
||||
);
|
||||
}
|
||||
combined_generics = generics;
|
||||
(impl_generics, lhs_type_generics, where_clause) = combined_generics.split_for_impl();
|
||||
rhs_type_generics = lhs_type_generics.clone();
|
||||
}
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types)]
|
||||
impl #impl_generics ::fayalite::ty::Connect<#ty_ident #rhs_type_generics> for #ty_ident #lhs_type_generics
|
||||
#where_clause
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
901
crates/fayalite-proc-macros-impl/src/value_derive_enum.rs
Normal file
@ -0,0 +1,901 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
value_derive_common::{
|
||||
append_field, derive_clone_hash_eq_partialeq_for_struct, get_field_names, get_target,
|
||||
make_connect_impl, Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
|
||||
},
|
||||
value_derive_struct::{self, ParsedStruct, ParsedStructNames, StructOptions},
|
||||
Errors, HdlAttr,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use syn::{
|
||||
parse_quote, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::Brace,
|
||||
Field, FieldMutability, Fields, FieldsNamed, Generics, Ident, Index, ItemEnum, ItemStruct,
|
||||
Member, Path, Token, Type, Variant, Visibility,
|
||||
};
|
||||
|
||||
crate::options! {
|
||||
#[options = EnumOptions]
|
||||
enum EnumOption {
|
||||
OutlineGenerated(outline_generated),
|
||||
ConnectInexact(connect_inexact),
|
||||
Bounds(where_, Bounds),
|
||||
Target(target, Path),
|
||||
}
|
||||
}
|
||||
|
||||
crate::options! {
|
||||
#[options = VariantOptions]
|
||||
enum VariantOption {}
|
||||
}
|
||||
|
||||
crate::options! {
|
||||
#[options = FieldOptions]
|
||||
enum FieldOption {}
|
||||
}
|
||||
|
||||
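/// How a variant's payload is represented: no payload, a single unnamed field
/// whose type is used directly, or a synthesized struct wrapping the variant's fields.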
enum VariantValue {
|
||||
None,
|
||||
Direct {
|
||||
value_type: Type,
|
||||
},
|
||||
Struct {
|
||||
value_struct: ItemStruct,
|
||||
parsed_struct: ParsedStruct,
|
||||
},
|
||||
}
|
||||
|
||||
impl VariantValue {
|
||||
fn is_none(&self) -> bool {
|
||||
matches!(self, Self::None)
|
||||
}
|
||||
fn value_ty(&self) -> Option<Type> {
|
||||
match self {
|
||||
VariantValue::None => None,
|
||||
VariantValue::Direct { value_type } => Some(value_type.clone()),
|
||||
VariantValue::Struct { value_struct, .. } => {
|
||||
let (_, type_generics, _) = value_struct.generics.split_for_impl();
|
||||
let ident = &value_struct.ident;
|
||||
Some(parse_quote! { #ident #type_generics })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ParsedVariant {
|
||||
options: HdlAttr<VariantOptions>,
|
||||
ident: Ident,
|
||||
fields_kind: FieldsKind,
|
||||
fields: Vec<ParsedField<FieldOptions>>,
|
||||
value: VariantValue,
|
||||
}
|
||||
|
||||
impl ParsedVariant {
|
||||
fn parse(
|
||||
errors: &mut Errors,
|
||||
variant: Variant,
|
||||
enum_options: &EnumOptions,
|
||||
enum_vis: &Visibility,
|
||||
enum_ident: &Ident,
|
||||
enum_generics: &Generics,
|
||||
) -> Self {
|
||||
let target = get_target(&enum_options.target, enum_ident);
|
||||
let Variant {
|
||||
mut attrs,
|
||||
ident,
|
||||
fields,
|
||||
discriminant,
|
||||
} = variant;
|
||||
if let Some((eq, _)) = discriminant {
|
||||
errors.error(eq, "#[derive(Value)]: discriminants not allowed");
|
||||
}
|
||||
let variant_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
let (fields_kind, parsed_fields) =
|
||||
ParsedField::parse_fields(errors, &mut fields.clone(), true);
|
||||
let value = match (&fields_kind, &*parsed_fields) {
|
||||
(FieldsKind::Unit, _) => VariantValue::None,
|
||||
(
|
||||
FieldsKind::Unnamed(_),
|
||||
[ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name: Member::Unnamed(Index { index: 0, span: _ }),
|
||||
ty,
|
||||
}],
|
||||
) => {
|
||||
let FieldOptions {} = options.body;
|
||||
VariantValue::Direct {
|
||||
value_type: ty.clone(),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
let variant_value_struct_ident =
|
||||
format_ident!("__{}__{}", enum_ident, ident, span = ident.span());
|
||||
let variant_type_struct_ident =
|
||||
format_ident!("__{}__{}__Type", enum_ident, ident, span = ident.span());
|
||||
let mut value_struct_fields = fields.clone();
|
||||
let (_, type_generics, _) = enum_generics.split_for_impl();
|
||||
append_field(
|
||||
&mut value_struct_fields,
|
||||
Field {
|
||||
attrs: vec![HdlAttr::from(value_derive_struct::FieldOptions {
|
||||
flip: None,
|
||||
skip: Some(Default::default()),
|
||||
})
|
||||
.to_attr()],
|
||||
vis: enum_vis.clone(),
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(Ident::new("__phantom", ident.span())),
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::__std::marker::PhantomData<#target #type_generics>
|
||||
},
|
||||
},
|
||||
);
|
||||
let (value_struct_fields_kind, value_struct_parsed_fields) =
|
||||
ParsedField::parse_fields(errors, &mut value_struct_fields, false);
|
||||
let value_struct = ItemStruct {
|
||||
attrs: vec![parse_quote! { #[allow(non_camel_case_types)] }],
|
||||
vis: enum_vis.clone(),
|
||||
struct_token: Token![struct](ident.span()),
|
||||
ident: variant_value_struct_ident.clone(),
|
||||
generics: enum_generics.clone(),
|
||||
fields: value_struct_fields,
|
||||
semi_token: None,
|
||||
};
|
||||
VariantValue::Struct {
|
||||
value_struct,
|
||||
parsed_struct: ParsedStruct {
|
||||
options: StructOptions {
|
||||
outline_generated: None,
|
||||
fixed_type: Some(Default::default()),
|
||||
where_: Some((
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
ValueDeriveGenerics::get(
|
||||
enum_generics.clone(),
|
||||
&enum_options.where_,
|
||||
)
|
||||
.fixed_type_generics
|
||||
.where_clause
|
||||
.into(),
|
||||
)),
|
||||
target: None,
|
||||
connect_inexact: enum_options.connect_inexact,
|
||||
}
|
||||
.into(),
|
||||
vis: enum_vis.clone(),
|
||||
struct_token: Default::default(),
|
||||
generics: enum_generics.clone(),
|
||||
fields_kind: value_struct_fields_kind,
|
||||
fields: value_struct_parsed_fields,
|
||||
semi_token: None, // it will fill in the semicolon if needed
|
||||
skip_check_fields: true,
|
||||
names: ParsedStructNames {
|
||||
ident: variant_value_struct_ident.clone(),
|
||||
type_struct_debug_ident: Some(format!("{enum_ident}::{ident}::Type")),
|
||||
type_struct_ident: variant_type_struct_ident,
|
||||
match_variant_ident: None,
|
||||
builder_struct_ident: None,
|
||||
mask_match_variant_ident: None,
|
||||
mask_type_ident: None,
|
||||
mask_type_debug_ident: Some(format!(
|
||||
"AsMask<{enum_ident}::{ident}>::Type"
|
||||
)),
|
||||
mask_value_ident: None,
|
||||
mask_value_debug_ident: Some(format!("AsMask<{enum_ident}::{ident}>")),
|
||||
mask_builder_struct_ident: None,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
ParsedVariant {
|
||||
options: variant_options,
|
||||
ident,
|
||||
fields_kind,
|
||||
fields: parsed_fields,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
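/// An `ItemEnum` after the crate's `HdlAttr` options have been parsed off the
/// enum itself and each of its variants.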
struct ParsedEnum {
|
||||
options: HdlAttr<EnumOptions>,
|
||||
vis: Visibility,
|
||||
enum_token: Token![enum],
|
||||
ident: Ident,
|
||||
generics: Generics,
|
||||
brace_token: Brace,
|
||||
variants: Vec<ParsedVariant>,
|
||||
}
|
||||
|
||||
impl ParsedEnum {
|
||||
fn parse(item: ItemEnum) -> syn::Result<Self> {
|
||||
let ItemEnum {
|
||||
mut attrs,
|
||||
vis,
|
||||
enum_token,
|
||||
ident,
|
||||
generics,
|
||||
brace_token,
|
||||
variants,
|
||||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let enum_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
let variants = variants
|
||||
.into_iter()
|
||||
.map(|variant| {
|
||||
ParsedVariant::parse(
|
||||
&mut errors,
|
||||
variant,
|
||||
&enum_options.body,
|
||||
&vis,
|
||||
&ident,
|
||||
&generics,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
errors.finish()?;
|
||||
Ok(ParsedEnum {
|
||||
options: enum_options,
|
||||
vis,
|
||||
enum_token,
|
||||
ident,
|
||||
generics,
|
||||
brace_token,
|
||||
variants,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ParsedEnum {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
options,
|
||||
vis,
|
||||
enum_token,
|
||||
ident: enum_ident,
|
||||
generics: enum_generics,
|
||||
brace_token,
|
||||
variants,
|
||||
} = self;
|
||||
let EnumOptions {
|
||||
outline_generated: _,
|
||||
connect_inexact,
|
||||
where_,
|
||||
target,
|
||||
} = &options.body;
|
||||
let target = get_target(target, enum_ident);
|
||||
let ValueDeriveGenerics {
|
||||
generics: _,
|
||||
fixed_type_generics,
|
||||
} = ValueDeriveGenerics::get(enum_generics.clone(), where_);
|
||||
let (fixed_type_impl_generics, fixed_type_type_generics, fixed_type_where_clause) =
|
||||
fixed_type_generics.split_for_impl();
|
||||
let type_struct_ident = format_ident!("__{}__Type", enum_ident);
|
||||
let mut field_checks = vec![];
|
||||
let mut make_type_struct_variant_type = |variant: &ParsedVariant| {
|
||||
let VariantOptions {} = variant.options.body;
|
||||
let (value_struct, parsed_struct) = match &variant.value {
|
||||
VariantValue::None => {
|
||||
return None;
|
||||
}
|
||||
VariantValue::Direct { value_type } => {
|
||||
field_checks.push(quote_spanned! {value_type.span()=>
|
||||
__check_field::<#value_type>();
|
||||
});
|
||||
return Some(parse_quote! { <#value_type as ::fayalite::expr::ToExpr>::Type });
|
||||
}
|
||||
VariantValue::Struct {
|
||||
value_struct,
|
||||
parsed_struct,
|
||||
} => (value_struct, parsed_struct),
|
||||
};
|
||||
value_struct.to_tokens(tokens);
|
||||
parsed_struct.to_tokens(tokens);
|
||||
let mut field_names = Vec::from_iter(get_field_names(&value_struct.fields));
|
||||
derive_clone_hash_eq_partialeq_for_struct(
|
||||
&value_struct.ident,
|
||||
&fixed_type_generics,
|
||||
&field_names,
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
field_names = Vec::from_iter(
|
||||
field_names
|
||||
.into_iter()
|
||||
.zip(parsed_struct.fields.iter())
|
||||
.filter_map(|(member, field)| {
|
||||
field.options.body.skip.is_none().then_some(member)
|
||||
}),
|
||||
);
|
||||
let field_name_strs =
|
||||
Vec::from_iter(field_names.iter().map(|v| v.to_token_stream().to_string()));
|
||||
let debug_ident = format!("{enum_ident}::{}", variant.ident);
|
||||
let debug_body = match variant.fields_kind {
|
||||
FieldsKind::Unit => quote! {
|
||||
f.debug_struct(#debug_ident).finish()
|
||||
},
|
||||
FieldsKind::Named(_) => quote! {
|
||||
f.debug_struct(#debug_ident)#(.field(#field_name_strs, &self.#field_names))*.finish()
|
||||
},
|
||||
FieldsKind::Unnamed(_) => quote! {
|
||||
f.debug_tuple(#debug_ident)#(.field(&self.#field_names))*.finish()
|
||||
},
|
||||
};
|
||||
let value_struct_ident = &value_struct.ident;
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::__std::fmt::Debug for #value_struct_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
fn fmt(&self, f: &mut ::fayalite::__std::fmt::Formatter<'_>) -> ::fayalite::__std::fmt::Result {
|
||||
#debug_body
|
||||
}
|
||||
}
|
||||
}.to_tokens(tokens);
|
||||
Some(
|
||||
parse_quote! { <#value_struct_ident #fixed_type_type_generics as ::fayalite::expr::ToExpr>::Type },
|
||||
)
|
||||
};
|
||||
let type_struct_variants = Punctuated::from_iter(variants.iter().filter_map(|variant| {
|
||||
let VariantOptions {} = variant.options.body;
|
||||
Some(Field {
|
||||
attrs: vec![],
|
||||
vis: vis.clone(),
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(variant.ident.clone()),
|
||||
colon_token: None, // it will fill in the colon if needed
|
||||
ty: make_type_struct_variant_type(variant)?,
|
||||
})
|
||||
}));
|
||||
let type_struct = ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote! {#[allow(non_camel_case_types)]},
|
||||
parse_quote! {#[allow(non_snake_case)]},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: Token![struct](enum_token.span),
|
||||
ident: type_struct_ident,
|
||||
generics: fixed_type_generics.clone(),
|
||||
fields: Fields::Named(FieldsNamed {
|
||||
brace_token: *brace_token,
|
||||
named: type_struct_variants,
|
||||
}),
|
||||
semi_token: None,
|
||||
};
|
||||
let type_struct_ident = &type_struct.ident;
|
||||
let type_struct_debug_ident = format!("{enum_ident}::Type");
|
||||
type_struct.to_tokens(tokens);
|
||||
let non_empty_variant_names = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.filter(|v| !v.value.is_none())
|
||||
.map(|v| v.ident.clone()),
|
||||
);
|
||||
let non_empty_variant_name_strs =
|
||||
Vec::from_iter(non_empty_variant_names.iter().map(|v| v.to_string()));
|
||||
let debug_type_body = quote! {
|
||||
f.debug_struct(#type_struct_debug_ident)#(.field(#non_empty_variant_name_strs, &self.#non_empty_variant_names))*.finish()
|
||||
};
|
||||
derive_clone_hash_eq_partialeq_for_struct(
|
||||
type_struct_ident,
|
||||
&fixed_type_generics,
|
||||
&non_empty_variant_names,
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
let variant_names = Vec::from_iter(variants.iter().map(|v| &v.ident));
|
||||
let variant_name_strs = Vec::from_iter(variant_names.iter().map(|v| v.to_string()));
|
||||
let (variant_field_pats, variant_to_canonical_values): (Vec<_>, Vec<_>) = variants
|
||||
.iter()
|
||||
.map(|v| {
|
||||
let field_names: Vec<_> = v.fields.iter().map(|field| &field.name).collect();
|
||||
let var_names: Vec<_> = v.fields.iter().map(|field| field.var_name()).collect();
|
||||
let field_pats = quote! {
|
||||
#(#field_names: #var_names,)*
|
||||
};
|
||||
let to_canonical_value = match &v.value {
|
||||
VariantValue::None => quote! { ::fayalite::__std::option::Option::None },
|
||||
VariantValue::Direct { .. } => {
|
||||
debug_assert_eq!(var_names.len(), 1);
|
||||
quote! {
|
||||
::fayalite::__std::option::Option::Some(
|
||||
::fayalite::ty::DynValueTrait::to_canonical_dyn(#(#var_names)*),
|
||||
)
|
||||
}
|
||||
}
|
||||
VariantValue::Struct {
|
||||
value_struct,
|
||||
parsed_struct,
|
||||
} => {
|
||||
let value_struct_ident = &value_struct.ident;
|
||||
let phantom_field_name = &parsed_struct.fields.last().expect("missing phantom field").name;
|
||||
let type_generics = fixed_type_type_generics.as_turbofish();
|
||||
quote! {
|
||||
::fayalite::__std::option::Option::Some(
|
||||
::fayalite::ty::DynValueTrait::to_canonical_dyn(
|
||||
&#value_struct_ident #type_generics {
|
||||
#(#field_names: ::fayalite::__std::clone::Clone::clone(#var_names),)*
|
||||
#phantom_field_name: ::fayalite::__std::marker::PhantomData,
|
||||
},
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
(field_pats, to_canonical_value)
|
||||
})
|
||||
.unzip();
|
||||
let mut match_enum_variants = Punctuated::new();
|
||||
let mut match_enum_debug_arms = vec![];
|
||||
let mut match_enum_arms = vec![];
|
||||
let mut variant_vars = vec![];
|
||||
let mut from_canonical_type_variant_lets = vec![];
|
||||
let mut non_empty_variant_vars = vec![];
|
||||
let mut enum_type_variants = vec![];
|
||||
let mut enum_type_variants_hint = vec![];
|
||||
let match_enum_ident = format_ident!("__{}__MatchEnum", enum_ident);
|
||||
let mut builder = Builder::new(format_ident!("__{}__Builder", enum_ident), vis.clone());
|
||||
for variant in variants.iter() {
|
||||
for field in variant.fields.iter() {
|
||||
builder.insert_field(
|
||||
field.name.clone(),
|
||||
|v| {
|
||||
parse_quote_spanned! {v.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#v)
|
||||
}
|
||||
},
|
||||
|t| {
|
||||
parse_quote_spanned! {t.span()=>
|
||||
::fayalite::expr::Expr<<<#t as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::Value>
|
||||
}
|
||||
},
|
||||
|t| {
|
||||
parse_quote_spanned! {t.span()=>
|
||||
where
|
||||
#t: ::fayalite::expr::ToExpr,
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
let builder = builder.finish_filling_in_fields();
|
||||
builder.to_tokens(tokens);
|
||||
for (variant_index, variant) in variants.iter().enumerate() {
|
||||
let variant_var = format_ident!("__v_{}", variant.ident);
|
||||
let variant_name = &variant.ident;
|
||||
let variant_name_str = variant.ident.to_string();
|
||||
match_enum_variants.push(Variant {
|
||||
attrs: vec![],
|
||||
ident: variant.ident.clone(),
|
||||
fields: variant.fields_kind.into_fields(variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis,
|
||||
name,
|
||||
ty,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
Field {
|
||||
attrs: vec![],
|
||||
vis: vis.clone(),
|
||||
mutability: FieldMutability::None,
|
||||
ident: if let Member::Named(name) = name {
|
||||
Some(name.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
colon_token: None,
|
||||
ty: parse_quote! { ::fayalite::expr::Expr<#ty> },
|
||||
}
|
||||
},
|
||||
)),
|
||||
discriminant: None,
|
||||
});
|
||||
let match_enum_field_names = Vec::from_iter(variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name,
|
||||
ty: _,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
name
|
||||
},
|
||||
));
|
||||
let match_enum_field_name_strs = Vec::from_iter(variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name,
|
||||
ty: _,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
name.to_token_stream().to_string()
|
||||
},
|
||||
));
|
||||
let match_enum_debug_vars = Vec::from_iter(variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name,
|
||||
ty: _,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
format_ident!("__v_{}", name)
|
||||
},
|
||||
));
|
||||
match_enum_debug_arms.push(match variant.fields_kind {
|
||||
FieldsKind::Unit | FieldsKind::Named(_) => quote! {
|
||||
Self::#variant_name {
|
||||
#(#match_enum_field_names: ref #match_enum_debug_vars,)*
|
||||
} => f.debug_struct(#variant_name_str)
|
||||
#(.field(#match_enum_field_name_strs, #match_enum_debug_vars))*
|
||||
.finish(),
|
||||
},
|
||||
FieldsKind::Unnamed(_) => quote! {
|
||||
Self::#variant_name(
|
||||
#(ref #match_enum_debug_vars,)*
|
||||
) => f.debug_tuple(#variant_name_str)
|
||||
#(.field(#match_enum_debug_vars))*
|
||||
.finish(),
|
||||
},
|
||||
});
|
||||
if let Some(value_ty) = variant.value.value_ty() {
|
||||
from_canonical_type_variant_lets.push(quote! {
|
||||
let #variant_var = #variant_var.from_canonical_type_helper_has_value(#variant_name_str);
|
||||
});
|
||||
non_empty_variant_vars.push(variant_var.clone());
|
||||
enum_type_variants.push(quote! {
|
||||
::fayalite::enum_::VariantType {
|
||||
name: ::fayalite::intern::Intern::intern(#variant_name_str),
|
||||
ty: ::fayalite::__std::option::Option::Some(
|
||||
::fayalite::ty::DynType::canonical_dyn(&self.#variant_name),
|
||||
),
|
||||
}
|
||||
});
|
||||
enum_type_variants_hint.push(quote! {
|
||||
::fayalite::enum_::VariantType {
|
||||
name: ::fayalite::intern::Intern::intern(#variant_name_str),
|
||||
ty: ::fayalite::__std::option::Option::Some(
|
||||
::fayalite::bundle::TypeHint::<<#value_ty as ::fayalite::expr::ToExpr>::Type>::intern_dyn(),
|
||||
),
|
||||
}
|
||||
});
|
||||
} else {
|
||||
from_canonical_type_variant_lets.push(quote! {
|
||||
#variant_var.from_canonical_type_helper_no_value(#variant_name_str);
|
||||
});
|
||||
enum_type_variants.push(quote! {
|
||||
::fayalite::enum_::VariantType {
|
||||
name: ::fayalite::intern::Intern::intern(#variant_name_str),
|
||||
ty: ::fayalite::__std::option::Option::None,
|
||||
}
|
||||
});
|
||||
enum_type_variants_hint.push(quote! {
|
||||
::fayalite::enum_::VariantType {
|
||||
name: ::fayalite::intern::Intern::intern(#variant_name_str),
|
||||
ty: ::fayalite::__std::option::Option::None,
|
||||
}
|
||||
});
|
||||
}
|
||||
variant_vars.push(variant_var);
|
||||
match_enum_arms.push(match &variant.value {
|
||||
VariantValue::None => quote! {
|
||||
#variant_index => #match_enum_ident::#variant_name,
|
||||
},
|
||||
VariantValue::Direct { value_type } => quote! {
|
||||
#variant_index => #match_enum_ident::#variant_name {
|
||||
#(#match_enum_field_names)*: ::fayalite::expr::ToExpr::to_expr(
|
||||
&__variant_access.downcast_unchecked::<
|
||||
<#value_type as ::fayalite::expr::ToExpr>::Type>(),
|
||||
),
|
||||
},
|
||||
},
|
||||
VariantValue::Struct {
|
||||
value_struct: ItemStruct { ident, .. },
|
||||
..
|
||||
} => quote! {
|
||||
#variant_index => {
|
||||
let __variant_access = ::fayalite::expr::ToExpr::to_expr(
|
||||
&__variant_access.downcast_unchecked::<
|
||||
<#ident #fixed_type_type_generics as ::fayalite::expr::ToExpr>::Type,
|
||||
>(),
|
||||
);
|
||||
#match_enum_ident::#variant_name {
|
||||
#(#match_enum_field_names: (*__variant_access).#match_enum_field_names,)*
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
let builder_field_and_types = Vec::from_iter(variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name,
|
||||
ty,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
(name, ty)
|
||||
},
|
||||
));
|
||||
let builder_field_vars = Vec::from_iter(
|
||||
builder_field_and_types
|
||||
.iter()
|
||||
.map(|(name, _)| &builder.get_field(name).unwrap().1.builder_field_name),
|
||||
);
|
||||
let build_body = match &variant.value {
|
||||
VariantValue::None => parse_quote! {
|
||||
{
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::<#type_struct_ident #fixed_type_type_generics>::new_unchecked(
|
||||
::fayalite::__std::option::Option::None,
|
||||
#variant_index,
|
||||
::fayalite::ty::FixedType::fixed_type(),
|
||||
),
|
||||
)
|
||||
}
|
||||
},
|
||||
VariantValue::Direct { value_type: _ } => parse_quote! {
|
||||
{
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::<#type_struct_ident #fixed_type_type_generics>::new_unchecked(
|
||||
::fayalite::__std::option::Option::Some(#(#builder_field_vars)*.to_canonical_dyn()),
|
||||
#variant_index,
|
||||
::fayalite::ty::FixedType::fixed_type(),
|
||||
),
|
||||
)
|
||||
}
|
||||
},
|
||||
VariantValue::Struct {
|
||||
parsed_struct:
|
||||
ParsedStruct {
|
||||
names:
|
||||
ParsedStructNames {
|
||||
type_struct_ident: field_type_struct_ident,
|
||||
..
|
||||
},
|
||||
..
|
||||
},
|
||||
..
|
||||
} => parse_quote! {
|
||||
{
|
||||
let __builder = <#field_type_struct_ident #fixed_type_type_generics as ::fayalite::bundle::BundleType>::builder();
|
||||
#(let __builder = __builder.#builder_field_vars(#builder_field_vars);)*
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::<#type_struct_ident #fixed_type_type_generics>::new_unchecked(
|
||||
::fayalite::__std::option::Option::Some(__builder.build().to_canonical_dyn()),
|
||||
#variant_index,
|
||||
::fayalite::ty::FixedType::fixed_type(),
|
||||
),
|
||||
)
|
||||
}
|
||||
},
|
||||
};
|
||||
builder
|
||||
.make_build_method(
|
||||
&format_ident!("variant_{}", variant_name),
|
||||
variant.fields.iter().map(
|
||||
|ParsedField {
|
||||
options,
|
||||
vis: _,
|
||||
name,
|
||||
ty,
|
||||
}| {
|
||||
let FieldOptions {} = options.body;
|
||||
(name.clone(), parse_quote! { ::fayalite::expr::Expr<#ty> })
|
||||
},
|
||||
),
|
||||
&fixed_type_generics,
|
||||
&parse_quote! {#type_struct_ident #fixed_type_type_generics},
|
||||
&parse_quote! { ::fayalite::expr::Expr<#target #fixed_type_type_generics> },
|
||||
build_body,
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let match_enum = ItemEnum {
|
||||
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
|
||||
vis: vis.clone(),
|
||||
enum_token: *enum_token,
|
||||
ident: match_enum_ident,
|
||||
generics: fixed_type_generics.clone(),
|
||||
brace_token: *brace_token,
|
||||
variants: match_enum_variants,
|
||||
};
|
||||
let match_enum_ident = &match_enum.ident;
|
||||
match_enum.to_tokens(tokens);
|
||||
make_connect_impl(
|
||||
*connect_inexact,
|
||||
&fixed_type_generics,
|
||||
type_struct_ident,
|
||||
variants.iter().flat_map(|variant| {
|
||||
variant.fields.iter().map(|field| {
|
||||
let ty = &field.ty;
|
||||
parse_quote_spanned! {field.name.span()=>
|
||||
<#ty as ::fayalite::expr::ToExpr>::Type
|
||||
}
|
||||
})
|
||||
}),
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
let variant_count = variants.len();
|
||||
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::__std::fmt::Debug for #match_enum_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
fn fmt(&self, f: &mut ::fayalite::__std::fmt::Formatter<'_>) -> ::fayalite::__std::fmt::Result {
|
||||
match *self {
|
||||
#(#match_enum_debug_arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::ty::FixedType for #type_struct_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
fn fixed_type() -> Self {
|
||||
Self {
|
||||
#(#non_empty_variant_names: ::fayalite::ty::FixedType::fixed_type(),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn __check_field<T: ::fayalite::ty::Value>()
|
||||
where
|
||||
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
|
||||
{}
|
||||
fn __check_fields #fixed_type_impl_generics(_: #target #fixed_type_type_generics)
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
#(#field_checks)*
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::__std::fmt::Debug for #type_struct_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
fn fmt(&self, f: &mut ::fayalite::__std::fmt::Formatter<'_>) -> ::fayalite::__std::fmt::Result {
|
||||
#debug_type_body
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::ty::Type for #type_struct_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
type CanonicalType = ::fayalite::enum_::DynEnumType;
|
||||
type Value = #target #fixed_type_type_generics;
|
||||
type CanonicalValue = ::fayalite::enum_::DynEnum;
|
||||
type MaskType = ::fayalite::int::UIntType<1>;
|
||||
type MaskValue = ::fayalite::int::UInt<1>;
|
||||
type MatchVariant = #match_enum_ident #fixed_type_type_generics;
|
||||
type MatchActiveScope = ::fayalite::module::Scope;
|
||||
type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
|
||||
type MatchVariantsIter = ::fayalite::enum_::EnumMatchVariantsIter<Self>;
|
||||
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
|
||||
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
|
||||
module_builder: &mut ::fayalite::module::ModuleBuilder<IO, ::fayalite::module::NormalModule>,
|
||||
source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
|
||||
where
|
||||
<IO as ::fayalite::expr::ToExpr>::Type: ::fayalite::bundle::BundleType<Value = IO>,
|
||||
{
|
||||
module_builder.enum_match_variants_helper(this, source_location)
|
||||
}
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
::fayalite::int::UIntType::new()
|
||||
}
|
||||
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
|
||||
let variants = ::fayalite::enum_::EnumType::variants(self);
|
||||
::fayalite::enum_::DynEnumType::new(variants)
|
||||
}
|
||||
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
|
||||
::fayalite::ty::TypeEnum::EnumType(::fayalite::ty::Type::canonical(self))
|
||||
}
|
||||
#[allow(non_snake_case)]
|
||||
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
|
||||
let [#(#variant_vars),*] = *::fayalite::enum_::EnumType::variants(&t) else {
|
||||
::fayalite::__std::panic!("wrong number of variants");
|
||||
};
|
||||
#(#from_canonical_type_variant_lets)*
|
||||
Self {
|
||||
#(#non_empty_variant_names: #non_empty_variant_vars,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
#[allow(clippy::init_numbered_fields)]
|
||||
impl #fixed_type_impl_generics ::fayalite::enum_::EnumType for #type_struct_ident #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
type Builder = #empty_builder_ty;
|
||||
fn match_activate_scope(
|
||||
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
) -> (<Self as ::fayalite::ty::Type>::MatchVariant, <Self as ::fayalite::ty::Type>::MatchActiveScope) {
|
||||
let (__variant_access, __scope) = v.activate();
|
||||
(
|
||||
match ::fayalite::expr::ops::VariantAccess::variant_index(&*__variant_access) {
|
||||
#(#match_enum_arms)*
|
||||
#variant_count.. => ::fayalite::__std::panic!("invalid variant index"),
|
||||
},
|
||||
__scope,
|
||||
)
|
||||
}
|
||||
fn builder() -> <Self as ::fayalite::enum_::EnumType>::Builder {
|
||||
#empty_builder_ty::new()
|
||||
}
|
||||
fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::VariantType<::fayalite::intern::Interned<dyn ::fayalite::ty::DynCanonicalType>>]> {
|
||||
::fayalite::intern::Intern::intern(&[#(#enum_type_variants,)*][..])
|
||||
}
|
||||
fn variants_hint() -> ::fayalite::enum_::VariantsHint {
|
||||
::fayalite::enum_::VariantsHint::new([#(#enum_type_variants_hint,)*], false)
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::expr::ToExpr for #target #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
type Type = #type_struct_ident #fixed_type_type_generics;
|
||||
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
|
||||
::fayalite::ty::FixedType::fixed_type()
|
||||
}
|
||||
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::Expr::from_value(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::ty::Value for #target #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
fn to_canonical(&self) -> <<Self as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::CanonicalValue {
|
||||
let __ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
|
||||
match self {
|
||||
#(Self::#variant_names { #variant_field_pats } => {
|
||||
::fayalite::enum_::DynEnum::new_by_name(
|
||||
__ty,
|
||||
::fayalite::intern::Intern::intern(#variant_name_strs),
|
||||
#variant_to_canonical_values,
|
||||
)
|
||||
})*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #fixed_type_impl_generics ::fayalite::enum_::EnumValue for #target #fixed_type_type_generics
|
||||
#fixed_type_where_clause
|
||||
{
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
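/// Entry point for deriving `Value` on enums: parse the enum, emit the generated
/// items inside `const _: () = { ... };`, and outline them to a separate file when
/// `outline_generated` is set.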
pub(crate) fn value_derive_enum(item: ItemEnum) -> syn::Result<TokenStream> {
|
||||
let item = ParsedEnum::parse(item)?;
|
||||
let outline_generated = item.options.body.outline_generated;
|
||||
let mut contents = quote! {
|
||||
const _: () = {
|
||||
#item
|
||||
};
|
||||
};
|
||||
if outline_generated.is_some() {
|
||||
contents = crate::outline_generated(contents, "value-enum-");
|
||||
}
|
||||
Ok(contents)
|
||||
}
|
709
crates/fayalite-proc-macros-impl/src/value_derive_struct.rs
Normal file
@ -0,0 +1,709 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
value_derive_common::{
|
||||
append_field, derive_clone_hash_eq_partialeq_for_struct, get_target, make_connect_impl,
|
||||
Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
|
||||
},
|
||||
Errors, HdlAttr,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use syn::{
|
||||
parse_quote, parse_quote_spanned, spanned::Spanned, FieldMutability, Generics, Ident,
|
||||
ItemStruct, Member, Path, Token, Visibility,
|
||||
};
|
||||
|
||||
crate::options! {
|
||||
#[options = StructOptions]
|
||||
pub(crate) enum StructOption {
|
||||
OutlineGenerated(outline_generated),
|
||||
FixedType(fixed_type),
|
||||
ConnectInexact(connect_inexact),
|
||||
Bounds(where_, Bounds),
|
||||
Target(target, Path),
|
||||
}
|
||||
}
|
||||
|
||||
crate::options! {
|
||||
#[options = FieldOptions]
|
||||
pub(crate) enum FieldOption {
|
||||
Flip(flip),
|
||||
Skip(skip),
|
||||
}
|
||||
}
|
||||
|
||||
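/// Names of the items generated for a struct; `I`/`S` are `Option<Ident>`/`Option<String>`
/// while parsing and `&Ident`/`&String` once the defaults have been filled in.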
pub(crate) struct ParsedStructNames<I, S> {
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) type_struct_debug_ident: S,
|
||||
pub(crate) type_struct_ident: Ident,
|
||||
pub(crate) match_variant_ident: I,
|
||||
pub(crate) builder_struct_ident: I,
|
||||
pub(crate) mask_match_variant_ident: I,
|
||||
pub(crate) mask_type_ident: I,
|
||||
pub(crate) mask_type_debug_ident: S,
|
||||
pub(crate) mask_value_ident: I,
|
||||
pub(crate) mask_value_debug_ident: S,
|
||||
pub(crate) mask_builder_struct_ident: I,
|
||||
}
|
||||
|
||||
pub(crate) struct ParsedStruct {
|
||||
pub(crate) options: HdlAttr<StructOptions>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) struct_token: Token![struct],
|
||||
pub(crate) generics: Generics,
|
||||
pub(crate) fields_kind: FieldsKind,
|
||||
pub(crate) fields: Vec<ParsedField<FieldOptions>>,
|
||||
pub(crate) semi_token: Option<Token![;]>,
|
||||
pub(crate) skip_check_fields: bool,
|
||||
pub(crate) names: ParsedStructNames<Option<Ident>, Option<String>>,
|
||||
}
|
||||
|
||||
impl ParsedStruct {
|
||||
pub(crate) fn parse(item: &mut ItemStruct) -> syn::Result<Self> {
|
||||
let ItemStruct {
|
||||
attrs,
|
||||
vis,
|
||||
struct_token,
|
||||
ident,
|
||||
generics,
|
||||
fields,
|
||||
semi_token,
|
||||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let struct_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs))
|
||||
.unwrap_or_default();
|
||||
let (fields_kind, fields) = ParsedField::parse_fields(&mut errors, fields, false);
|
||||
errors.finish()?;
|
||||
Ok(ParsedStruct {
|
||||
options: struct_options,
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
generics: generics.clone(),
|
||||
fields_kind,
|
||||
fields,
|
||||
semi_token: *semi_token,
|
||||
skip_check_fields: false,
|
||||
names: ParsedStructNames {
|
||||
ident: ident.clone(),
|
||||
type_struct_debug_ident: None,
|
||||
type_struct_ident: format_ident!("__{}__Type", ident),
|
||||
match_variant_ident: None,
|
||||
builder_struct_ident: None,
|
||||
mask_match_variant_ident: None,
|
||||
mask_type_ident: None,
|
||||
mask_type_debug_ident: None,
|
||||
mask_value_ident: None,
|
||||
mask_value_debug_ident: None,
|
||||
mask_builder_struct_ident: None,
|
||||
},
|
||||
})
|
||||
}
|
||||
pub(crate) fn write_body(
|
||||
&self,
|
||||
target: Path,
|
||||
names: ParsedStructNames<&Ident, &String>,
|
||||
is_for_mask: bool,
|
||||
tokens: &mut TokenStream,
|
||||
) {
|
||||
let Self {
|
||||
options,
|
||||
vis,
|
||||
struct_token,
|
||||
generics,
|
||||
fields_kind,
|
||||
fields,
|
||||
semi_token,
|
||||
skip_check_fields,
|
||||
names: _,
|
||||
} = self;
|
||||
let skip_check_fields = *skip_check_fields || is_for_mask;
|
||||
let ParsedStructNames {
|
||||
ident: struct_ident,
|
||||
type_struct_debug_ident,
|
||||
type_struct_ident,
|
||||
match_variant_ident,
|
||||
builder_struct_ident,
|
||||
mask_match_variant_ident: _,
|
||||
mask_type_ident,
|
||||
mask_type_debug_ident: _,
|
||||
mask_value_ident,
|
||||
mask_value_debug_ident,
|
||||
mask_builder_struct_ident: _,
|
||||
} = names;
|
||||
let StructOptions {
|
||||
outline_generated: _,
|
||||
where_,
|
||||
target: _,
|
||||
fixed_type,
|
||||
connect_inexact,
|
||||
} = &options.body;
|
||||
let ValueDeriveGenerics {
|
||||
generics,
|
||||
fixed_type_generics,
|
||||
} = ValueDeriveGenerics::get(generics.clone(), where_);
|
||||
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
|
||||
let unskipped_fields = fields
|
||||
.iter()
|
||||
.filter(|field| field.options.body.skip.is_none());
|
||||
let _field_names = Vec::from_iter(fields.iter().map(|field| field.name.clone()));
|
||||
let unskipped_field_names =
|
||||
Vec::from_iter(unskipped_fields.clone().map(|field| field.name.clone()));
|
||||
let unskipped_field_name_strs = Vec::from_iter(
|
||||
unskipped_field_names
|
||||
.iter()
|
||||
.map(|field_name| field_name.to_token_stream().to_string()),
|
||||
);
|
||||
let unskipped_field_vars = Vec::from_iter(
|
||||
unskipped_field_names
|
||||
.iter()
|
||||
.map(|field_name| format_ident!("__v_{}", field_name)),
|
||||
);
|
||||
let unskipped_field_flips = Vec::from_iter(
|
||||
unskipped_fields
|
||||
.clone()
|
||||
.map(|field| field.options.body.flip.is_some()),
|
||||
);
|
||||
let mut any_fields_skipped = false;
|
||||
let type_fields = Vec::from_iter(fields.iter().filter_map(|field| {
|
||||
let ParsedField {
|
||||
options,
|
||||
vis,
|
||||
name,
|
||||
ty,
|
||||
} = field;
|
||||
let FieldOptions { flip: _, skip } = &options.body;
|
||||
if skip.is_some() {
|
||||
any_fields_skipped = true;
|
||||
return None;
|
||||
}
|
||||
let ty = if is_for_mask {
|
||||
parse_quote! { ::fayalite::ty::AsMask<#ty> }
|
||||
} else {
|
||||
ty.to_token_stream()
|
||||
};
|
||||
Some(syn::Field {
|
||||
attrs: vec![],
|
||||
vis: vis.clone(),
|
||||
mutability: FieldMutability::None,
|
||||
ident: match name.clone() {
|
||||
Member::Named(name) => Some(name),
|
||||
Member::Unnamed(_) => None,
|
||||
},
|
||||
colon_token: None,
|
||||
ty: parse_quote! { <#ty as ::fayalite::expr::ToExpr>::Type },
|
||||
})
|
||||
}));
|
||||
let field_types = Vec::from_iter(type_fields.iter().map(|field| field.ty.clone()));
|
||||
let match_variant_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|
||||
|(parsed_field, type_field)| {
|
||||
let field_ty = &parsed_field.ty;
|
||||
syn::Field {
|
||||
ty: parse_quote! { ::fayalite::expr::Expr<#field_ty> },
|
||||
..type_field.clone()
|
||||
}
|
||||
},
|
||||
));
|
||||
|
||||
let mask_value_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|
||||
|(parsed_field, type_field)| {
|
||||
let field_ty = &parsed_field.ty;
|
||||
syn::Field {
|
||||
ty: parse_quote! { ::fayalite::ty::AsMask<#field_ty> },
|
||||
..type_field.clone()
|
||||
}
|
||||
},
|
||||
));
|
||||
|
||||
let mut type_struct_fields = fields_kind.into_fields(type_fields);
|
||||
let mut match_variant_fields = fields_kind.into_fields(match_variant_fields);
|
||||
let mut mask_value_fields = fields_kind.into_fields(mask_value_fields);
|
||||
let phantom_data_field_name = any_fields_skipped.then(|| {
|
||||
let phantom_data_field_name = Ident::new("__phantom_data", type_struct_ident.span());
|
||||
let member = append_field(
|
||||
&mut type_struct_fields,
|
||||
syn::Field {
|
||||
attrs: vec![],
|
||||
vis: vis.clone(),
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(phantom_data_field_name.clone()),
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {type_struct_ident.span()=>
|
||||
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
|
||||
},
|
||||
},
|
||||
);
|
||||
append_field(
|
||||
&mut match_variant_fields,
|
||||
syn::Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(phantom_data_field_name.clone()),
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {type_struct_ident.span()=>
|
||||
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
|
||||
},
|
||||
},
|
||||
);
|
||||
append_field(
|
||||
&mut mask_value_fields,
|
||||
syn::Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(phantom_data_field_name),
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {type_struct_ident.span()=>
|
||||
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
|
||||
},
|
||||
},
|
||||
);
|
||||
member
|
||||
});
|
||||
let phantom_data_field_name_slice = phantom_data_field_name.as_slice();
|
||||
let type_struct = ItemStruct {
|
||||
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: type_struct_ident.clone(),
|
||||
generics: generics.clone(),
|
||||
fields: type_struct_fields,
|
||||
semi_token: *semi_token,
|
||||
};
|
||||
type_struct.to_tokens(tokens);
|
||||
let match_variant_struct = ItemStruct {
|
||||
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: match_variant_ident.clone(),
|
||||
generics: generics.clone(),
|
||||
fields: match_variant_fields,
|
||||
semi_token: *semi_token,
|
||||
};
|
||||
match_variant_struct.to_tokens(tokens);
|
||||
let mask_type_body = if is_for_mask {
|
||||
quote! {
|
||||
::fayalite::__std::clone::Clone::clone(self)
|
||||
}
|
||||
} else {
|
||||
let mask_value_struct = ItemStruct {
|
||||
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: mask_value_ident.clone(),
|
||||
generics: generics.clone(),
|
||||
fields: mask_value_fields,
|
||||
semi_token: *semi_token,
|
||||
};
|
||||
mask_value_struct.to_tokens(tokens);
|
||||
let debug_mask_value_body = match fields_kind {
|
||||
FieldsKind::Unit => quote! {
|
||||
f.debug_struct(#mask_value_debug_ident).finish()
|
||||
},
|
||||
FieldsKind::Named(_) => quote! {
|
||||
f.debug_struct(#mask_value_debug_ident)#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*.finish()
|
||||
},
|
||||
FieldsKind::Unnamed(_) => quote! {
|
||||
f.debug_tuple(#mask_value_debug_ident)#(.field(&self.#unskipped_field_names))*.finish()
|
||||
},
|
||||
};
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::fmt::Debug for #mask_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn fmt(&self, f: &mut ::fayalite::__std::fmt::Formatter<'_>) -> ::fayalite::__std::fmt::Result {
|
||||
#debug_mask_value_body
|
||||
}
|
||||
}
|
||||
}.to_tokens(tokens);
|
||||
quote! {
|
||||
#mask_type_ident {
|
||||
#(#unskipped_field_names: ::fayalite::ty::Type::mask_type(&self.#unskipped_field_names),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}
|
||||
}
|
||||
};
|
||||
let debug_type_body = match fields_kind {
|
||||
FieldsKind::Unit => quote! {
|
||||
f.debug_struct(#type_struct_debug_ident).finish()
|
||||
},
|
||||
FieldsKind::Named(_) => quote! {
|
||||
f.debug_struct(#type_struct_debug_ident)#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*.finish()
|
||||
},
|
||||
FieldsKind::Unnamed(_) => quote! {
|
||||
f.debug_tuple(#type_struct_debug_ident)#(.field(&self.#unskipped_field_names))*.finish()
|
||||
},
|
||||
};
|
||||
for the_struct_ident in [&type_struct_ident, match_variant_ident]
|
||||
.into_iter()
|
||||
.chain(is_for_mask.then_some(mask_value_ident))
|
||||
{
|
||||
derive_clone_hash_eq_partialeq_for_struct(
|
||||
the_struct_ident,
|
||||
&generics,
|
||||
&Vec::from_iter(
|
||||
unskipped_field_names
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(phantom_data_field_name.clone()),
|
||||
),
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let check_v = format_ident!("__v");
|
||||
let field_checks = Vec::from_iter(fields.iter().map(|ParsedField { ty, name, .. }| {
|
||||
quote_spanned! {ty.span()=>
|
||||
__check_field(#check_v.#name);
|
||||
}
|
||||
}));
|
||||
if fixed_type.is_some() {
|
||||
let (impl_generics, type_generics, where_clause) = fixed_type_generics.split_for_impl();
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::FixedType for #type_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn fixed_type() -> Self {
|
||||
Self {
|
||||
#(#unskipped_field_names: ::fayalite::ty::FixedType::fixed_type(),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
if !skip_check_fields {
|
||||
quote! {
|
||||
fn __check_field<T: ::fayalite::ty::Value>(_v: T)
|
||||
where
|
||||
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
|
||||
{}
|
||||
fn __check_fields #impl_generics(#check_v: #target #type_generics)
|
||||
#where_clause
|
||||
{
|
||||
#(#field_checks)*
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let mut builder = Builder::new(builder_struct_ident.clone(), vis.clone());
|
||||
for field in unskipped_fields.clone() {
|
||||
builder.insert_field(
|
||||
field.name.clone(),
|
||||
|v| {
|
||||
parse_quote_spanned! {v.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#v)
|
||||
}
|
||||
},
|
||||
|t| {
|
||||
parse_quote_spanned! {t.span()=>
|
||||
::fayalite::expr::Expr<<<#t as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::Value>
|
||||
}
|
||||
},
|
||||
|t| {
|
||||
parse_quote_spanned! {t.span()=>
|
||||
where
|
||||
#t: ::fayalite::expr::ToExpr,
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
let builder = builder.finish_filling_in_fields();
|
||||
builder.to_tokens(tokens);
|
||||
let build_type_fields =
|
||||
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
|
||||
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
|
||||
quote_spanned! {struct_ident.span()=>
|
||||
#name: ::fayalite::expr::ToExpr::ty(&#builder_field_name)
|
||||
}
|
||||
}));
|
||||
let build_expr_fields =
|
||||
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
|
||||
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
|
||||
quote_spanned! {struct_ident.span()=>
|
||||
#builder_field_name.to_canonical_dyn()
|
||||
}
|
||||
}));
|
||||
let build_specified_fields = unskipped_fields.clone().map(
|
||||
|ParsedField {
|
||||
options: _,
|
||||
vis: _,
|
||||
name,
|
||||
ty,
|
||||
}| {
|
||||
let ty = if is_for_mask {
|
||||
parse_quote_spanned! {name.span()=>
|
||||
::fayalite::expr::Expr<::fayalite::ty::AsMask<#ty>>
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {name.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
}
|
||||
};
|
||||
(name.clone(), ty)
|
||||
},
|
||||
);
|
||||
let build_body = parse_quote_spanned! {struct_ident.span()=>
|
||||
{
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::BundleLiteral::new_unchecked(
|
||||
::fayalite::intern::Intern::intern(&[#(
|
||||
#build_expr_fields,
|
||||
)*][..]),
|
||||
#type_struct_ident {
|
||||
#(#build_type_fields,)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
},
|
||||
),
|
||||
)
|
||||
}
|
||||
};
|
||||
builder
|
||||
.make_build_method(
|
||||
&Ident::new("build", struct_ident.span()),
|
||||
build_specified_fields,
|
||||
&generics,
|
||||
&parse_quote_spanned! {struct_ident.span()=>
|
||||
#type_struct_ident #type_generics
|
||||
},
|
||||
&parse_quote_spanned! {struct_ident.span()=>
|
||||
::fayalite::expr::Expr<#target #type_generics>
|
||||
},
|
||||
build_body,
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
make_connect_impl(
|
||||
*connect_inexact,
|
||||
&generics,
|
||||
&type_struct_ident,
|
||||
unskipped_fields.clone().map(|field| {
|
||||
let ty = &field.ty;
|
||||
parse_quote_spanned! {field.name.span()=>
|
||||
<#ty as ::fayalite::expr::ToExpr>::Type
|
||||
}
|
||||
}),
|
||||
)
|
||||
.to_tokens(tokens);
|
||||
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::fmt::Debug for #type_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn fmt(&self, f: &mut ::fayalite::__std::fmt::Formatter<'_>) -> ::fayalite::__std::fmt::Result {
|
||||
#debug_type_body
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #type_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type CanonicalType = ::fayalite::bundle::DynBundleType;
|
||||
type Value = #target #type_generics;
|
||||
type CanonicalValue = ::fayalite::bundle::DynBundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type MaskValue = #mask_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<#match_variant_ident #type_generics>;
|
||||
type MatchVariantsIter = ::fayalite::__std::iter::Once<<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope>;
|
||||
#[allow(unused_variables)]
|
||||
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
|
||||
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
|
||||
module_builder: &mut ::fayalite::module::ModuleBuilder<IO, ::fayalite::module::NormalModule>,
|
||||
source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
|
||||
where
|
||||
<IO as ::fayalite::expr::ToExpr>::Type: ::fayalite::bundle::BundleType<Value = IO>,
|
||||
{
|
||||
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(#match_variant_ident {
|
||||
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}))
|
||||
}
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
#mask_type_body
|
||||
}
|
||||
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
|
||||
let fields = ::fayalite::bundle::BundleType::fields(self);
|
||||
::fayalite::bundle::DynBundleType::new(fields)
|
||||
}
|
||||
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
|
||||
::fayalite::ty::TypeEnum::BundleType(::fayalite::ty::Type::canonical(self))
|
||||
}
|
||||
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
|
||||
let [#(#unskipped_field_vars),*] = *::fayalite::bundle::BundleType::fields(&t) else {
|
||||
::fayalite::__std::panic!("wrong number of fields");
|
||||
};
|
||||
Self {
|
||||
#(#unskipped_field_names: #unskipped_field_vars.from_canonical_type_helper(#unskipped_field_name_strs, #unskipped_field_flips),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::TypeWithDeref for #type_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(unused_variables)]
|
||||
fn expr_deref(this: &::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
::fayalite::intern::Interned::<_>::into_inner(::fayalite::intern::Intern::intern_sized(
|
||||
#match_variant_ident {
|
||||
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #type_struct_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Builder = #empty_builder_ty;
|
||||
fn builder() -> <Self as ::fayalite::bundle::BundleType>::Builder {
|
||||
#empty_builder_ty::new()
|
||||
}
|
||||
fn fields(&self) -> ::fayalite::intern::Interned<[::fayalite::bundle::FieldType<::fayalite::intern::Interned<dyn ::fayalite::ty::DynCanonicalType>>]> {
|
||||
::fayalite::intern::Intern::intern(&[#(
|
||||
::fayalite::bundle::FieldType {
|
||||
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
|
||||
flipped: #unskipped_field_flips,
|
||||
ty: ::fayalite::ty::DynType::canonical_dyn(&self.#unskipped_field_names),
|
||||
},
|
||||
)*][..])
|
||||
}
|
||||
fn fields_hint() -> ::fayalite::bundle::FieldsHint {
|
||||
::fayalite::bundle::FieldsHint::new([#(
|
||||
::fayalite::bundle::FieldType {
|
||||
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
|
||||
flipped: #unskipped_field_flips,
|
||||
ty: ::fayalite::bundle::TypeHint::<#field_types>::intern_dyn(),
|
||||
},
|
||||
)*], false)
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::expr::ToExpr for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #type_struct_ident #type_generics;
|
||||
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
|
||||
#type_struct_ident {
|
||||
#(#unskipped_field_names: ::fayalite::expr::ToExpr::ty(&self.#unskipped_field_names),)*
|
||||
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
|
||||
}
|
||||
}
|
||||
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::Expr::from_value(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Value for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_canonical(&self) -> <<Self as ::fayalite::expr::ToExpr>::Type as ::fayalite::ty::Type>::CanonicalValue {
|
||||
let ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
|
||||
::fayalite::bundle::DynBundle::new(ty, ::fayalite::__std::sync::Arc::new([
|
||||
#(::fayalite::ty::DynValueTrait::to_canonical_dyn(&self.#unskipped_field_names),)*
|
||||
]))
|
||||
}
|
||||
}
|
||||
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleValue for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ParsedStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let ParsedStructNames {
|
||||
ident: struct_ident,
|
||||
type_struct_debug_ident,
|
||||
type_struct_ident,
|
||||
match_variant_ident,
|
||||
builder_struct_ident,
|
||||
mask_match_variant_ident,
|
||||
mask_type_ident,
|
||||
mask_type_debug_ident,
|
||||
mask_value_ident,
|
||||
mask_value_debug_ident,
|
||||
mask_builder_struct_ident,
|
||||
} = &self.names;
|
||||
macro_rules! unwrap_or_set {
|
||||
($(let $var:ident =? $fallback_value:expr;)*) => {
|
||||
$(let $var = $var.clone().unwrap_or_else(|| $fallback_value);)*
|
||||
};
|
||||
}
|
||||
unwrap_or_set! {
|
||||
let type_struct_debug_ident =? format!("{struct_ident}::Type");
|
||||
let match_variant_ident =? format_ident!("__{}__MatchVariant", struct_ident);
|
||||
let builder_struct_ident =? format_ident!("__{}__Builder", struct_ident);
|
||||
let mask_match_variant_ident =? format_ident!("__AsMask__{}__MatchVariant", struct_ident);
|
||||
let mask_type_ident =? format_ident!("__AsMask__{}__Type", struct_ident);
|
||||
let mask_type_debug_ident =? format!("AsMask<{struct_ident}>::Type");
|
||||
let mask_value_ident =? format_ident!("__AsMask__{}", struct_ident);
|
||||
let mask_value_debug_ident =? format!("AsMask<{struct_ident}>");
|
||||
let mask_builder_struct_ident =? format_ident!("__AsMask__{}__Builder", struct_ident);
|
||||
}
|
||||
let target = get_target(&self.options.body.target, struct_ident);
|
||||
let names = ParsedStructNames {
|
||||
ident: struct_ident.clone(),
|
||||
type_struct_debug_ident: &type_struct_debug_ident,
|
||||
type_struct_ident: type_struct_ident.clone(),
|
||||
match_variant_ident: &match_variant_ident,
|
||||
builder_struct_ident: &builder_struct_ident,
|
||||
mask_match_variant_ident: &mask_match_variant_ident,
|
||||
mask_type_ident: &mask_type_ident,
|
||||
mask_type_debug_ident: &mask_type_debug_ident,
|
||||
mask_value_ident: &mask_value_ident,
|
||||
mask_value_debug_ident: &mask_value_debug_ident,
|
||||
mask_builder_struct_ident: &mask_builder_struct_ident,
|
||||
};
|
||||
self.write_body(target, names, false, tokens);
|
||||
let mask_names = ParsedStructNames {
|
||||
ident: mask_value_ident.clone(),
|
||||
type_struct_debug_ident: &mask_type_debug_ident,
|
||||
type_struct_ident: mask_type_ident.clone(),
|
||||
match_variant_ident: &mask_match_variant_ident,
|
||||
builder_struct_ident: &mask_builder_struct_ident,
|
||||
mask_match_variant_ident: &mask_match_variant_ident,
|
||||
mask_type_ident: &mask_type_ident,
|
||||
mask_type_debug_ident: &mask_type_debug_ident,
|
||||
mask_value_ident: &mask_value_ident,
|
||||
mask_value_debug_ident: &mask_value_debug_ident,
|
||||
mask_builder_struct_ident: &mask_builder_struct_ident,
|
||||
};
|
||||
self.write_body(mask_value_ident.clone().into(), mask_names, true, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn value_derive_struct(mut item: ItemStruct) -> syn::Result<TokenStream> {
|
||||
let item = ParsedStruct::parse(&mut item)?;
|
||||
let outline_generated = item.options.body.outline_generated;
|
||||
let mut contents = quote! {
|
||||
const _: () = {
|
||||
#item
|
||||
};
|
||||
};
|
||||
if outline_generated.is_some() {
|
||||
contents = crate::outline_generated(contents, "value-struct-");
|
||||
}
|
||||
Ok(contents)
|
||||
}
|
14
crates/fayalite-proc-macros/Cargo.toml
Normal file
|
@@ -0,0 +1,14 @@
|
|||
# SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
# See Notices.txt for copyright information
|
||||
[package]
|
||||
name = "fayalite-proc-macros"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
workspace = "../.."
|
||||
license = "LGPL-3.0-or-later"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
fayalite-proc-macros-impl = { version = "=0.1.0", path = "../fayalite-proc-macros-impl" }
|
20
crates/fayalite-proc-macros/src/lib.rs
Normal file
|
@@ -0,0 +1,20 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#[proc_macro_attribute]
|
||||
pub fn hdl_module(
|
||||
attr: proc_macro::TokenStream,
|
||||
item: proc_macro::TokenStream,
|
||||
) -> proc_macro::TokenStream {
|
||||
match fayalite_proc_macros_impl::module(attr.into(), item.into()) {
|
||||
Ok(retval) => retval.into(),
|
||||
Err(err) => err.into_compile_error().into(),
|
||||
}
|
||||
}
|
||||
|
||||
#[proc_macro_derive(Value, attributes(hdl))]
|
||||
pub fn value_derive(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
match fayalite_proc_macros_impl::value_derive(item.into()) {
|
||||
Ok(retval) => retval.into(),
|
||||
Err(err) => err.into_compile_error().into(),
|
||||
}
|
||||
}
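For orientation, here is a minimal sketch of how the `Value` derive exported above is intended to be used from downstream code. The struct, its fields, and the `UInt` types are illustrative assumptions rather than part of this commit; the `hdl` helper attribute is the one registered by `#[proc_macro_derive(Value, attributes(hdl))]`.

// Hypothetical usage sketch (struct, field names, and `UInt` path are assumptions):
use fayalite::int::UInt;            // assumed re-export path for an existing fayalite value type
use fayalite_proc_macros::Value;    // the derive defined in this file

#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
struct Example {
    data: UInt<8>,
    #[hdl(flip)]                    // `hdl` field options are parsed by the derive; option name assumed
    ready: UInt<1>,
}
// The expansion is wrapped in `const _: () = { ... };` so the generated type/builder/mask
// helper structs do not leak into the caller's namespace.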
|
18
crates/fayalite-visit-gen/Cargo.toml
Normal file
|
@@ -0,0 +1,18 @@
|
|||
# SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
# See Notices.txt for copyright information
|
||||
[package]
|
||||
name = "fayalite-visit-gen"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
workspace = "../.."
|
||||
license = "LGPL-3.0-or-later"
|
||||
|
||||
[dependencies]
|
||||
indexmap = { version = "2.2.6", features = ["serde"] }
|
||||
prettyplease = "0.2.20"
|
||||
proc-macro2 = "1.0.83"
|
||||
quote = "1.0.36"
|
||||
serde = { version = "1.0.202", features = ["derive"] }
|
||||
serde_json = { version = "1.0.117", features = ["preserve_order"] }
|
||||
syn = { version = "2.0.66", features = ["full", "extra-traits"] }
|
||||
thiserror = "1.0.61"
|
613
crates/fayalite-visit-gen/src/ast.rs
Normal file
|
@@ -0,0 +1,613 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use indexmap::IndexMap;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{IdentFragment, ToTokens, TokenStreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::{self, Write},
|
||||
iter::FusedIterator,
|
||||
str::FromStr,
|
||||
};
|
||||
use thiserror::Error;
|
||||
|
||||
macro_rules! impl_try_from_str {
|
||||
($ty:ty) => {
|
||||
impl TryFrom<&'_ str> for $ty {
|
||||
type Error = <Self as FromStr>::Err;
|
||||
|
||||
fn try_from(v: &str) -> Result<Self, Self::Error> {
|
||||
v.parse()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for $ty {
|
||||
type Error = <Self as FromStr>::Err;
|
||||
|
||||
fn try_from(v: String) -> Result<Self, Self::Error> {
|
||||
v.parse()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
pub struct Ident(pub String);
|
||||
|
||||
impl ToTokens for Ident {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
syn::Ident::from(self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl IdentFragment for Ident {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
pub fn is_start_char(ch: char) -> bool {
|
||||
ch == '_' || ch.is_ascii_alphabetic()
|
||||
}
|
||||
pub fn is_continue_char(ch: char) -> bool {
|
||||
ch == '_' || ch.is_ascii_alphanumeric()
|
||||
}
|
||||
pub fn is_ident(v: &str) -> bool {
|
||||
!v.is_empty()
|
||||
&& v.starts_with(Self::is_start_char)
|
||||
&& v.trim_start_matches(Self::is_continue_char).is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Ident> for Path {
|
||||
fn from(value: Ident) -> Self {
|
||||
Path(value.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Ident> for String {
|
||||
fn from(value: Ident) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Ident> for syn::Ident {
|
||||
fn from(value: Ident) -> Self {
|
||||
From::from(&value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'_ Ident> for syn::Ident {
|
||||
fn from(value: &Ident) -> Self {
|
||||
syn::Ident::new(&value.0, Span::call_site())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Error)]
|
||||
#[error("invalid identifier")]
|
||||
pub struct IdentParseError;
|
||||
|
||||
impl_try_from_str!(Ident);
|
||||
|
||||
impl FromStr for Ident {
|
||||
type Err = IdentParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
if Self::is_ident(s) {
|
||||
Ok(Self(s.into()))
|
||||
} else {
|
||||
Err(IdentParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
|
||||
pub struct Path(String);
|
||||
|
||||
impl Path {
|
||||
pub fn iter(&self) -> PathIter<'_> {
|
||||
PathIter(&self.0)
|
||||
}
|
||||
pub fn last(&self) -> Ident {
|
||||
self.iter().next_back().unwrap()
|
||||
}
|
||||
pub fn is_path(s: &str) -> bool {
|
||||
if s.is_empty() {
|
||||
false
|
||||
} else {
|
||||
s.split("::").all(Ident::is_ident)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PathIter<'a>(&'a str);
|
||||
|
||||
impl Iterator for PathIter<'_> {
|
||||
type Item = Ident;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.0.is_empty() {
|
||||
None
|
||||
} else if let Some((first, rest)) = self.0.split_once("::") {
|
||||
self.0 = rest;
|
||||
Some(Ident(first.into()))
|
||||
} else {
|
||||
let retval = self.0;
|
||||
self.0 = &self.0[..0];
|
||||
Some(Ident(retval.into()))
|
||||
}
|
||||
}
|
||||
|
||||
fn last(mut self) -> Option<Self::Item> {
|
||||
self.next_back()
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for PathIter<'_> {}
|
||||
|
||||
impl DoubleEndedIterator for PathIter<'_> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
if self.0.is_empty() {
|
||||
None
|
||||
} else if let Some((rest, last)) = self.0.rsplit_once("::") {
|
||||
self.0 = rest;
|
||||
Some(Ident(last.into()))
|
||||
} else {
|
||||
let retval = self.0;
|
||||
self.0 = &self.0[..0];
|
||||
Some(Ident(retval.into()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Path {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_separated(self.iter(), <syn::Token![::]>::default());
|
||||
}
|
||||
}
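// Illustrative sketch (not part of this commit): how `Path` parsing and iteration,
// as defined above, behave on a typical "::"-separated path.
fn _path_demo() {
    let path: Path = "expr::ops::ArrayIndex".parse().expect("valid path");
    // `iter()` walks the segments front to back; `last()` takes the final segment.
    assert_eq!(path.iter().count(), 3);
    assert_eq!(path.last(), Ident("ArrayIndex".into()));
    // Empty strings and malformed segments are rejected by `FromStr`.
    assert!("".parse::<Path>().is_err());
    assert!("foo::".parse::<Path>().is_err());
}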
|
||||
|
||||
#[derive(Clone, Debug, Error)]
|
||||
#[error("invalid path")]
|
||||
pub struct PathParseError;
|
||||
|
||||
impl From<Path> for String {
|
||||
fn from(value: Path) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl_try_from_str!(Path);
|
||||
|
||||
impl FromStr for Path {
|
||||
type Err = PathParseError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
if value.is_empty() {
|
||||
Err(PathParseError)
|
||||
} else if value.split("::").all(Ident::is_ident) {
|
||||
Ok(Self(value.into()))
|
||||
} else {
|
||||
Err(PathParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Definitions {
|
||||
pub types: std::collections::BTreeMap<Path, Definition>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Definition {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub fn_name_suffix: Option<Ident>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub generics: Option<Generics>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub fold_where: Option<WherePredicates>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub visit_where: Option<WherePredicates>,
|
||||
pub data: Data,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(tag = "$kind")]
|
||||
pub enum Data {
|
||||
ManualImpl,
|
||||
Opaque,
|
||||
Enum(Variants),
|
||||
Struct(Fields),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
pub struct FieldNameIdent {
|
||||
pub ident: Ident,
|
||||
pub is_getter: bool,
|
||||
}
|
||||
|
||||
impl FieldNameIdent {
|
||||
pub fn to_member(&self) -> Option<syn::Member> {
|
||||
let Self {
|
||||
ref ident,
|
||||
is_getter,
|
||||
} = *self;
|
||||
if is_getter {
|
||||
None
|
||||
} else {
|
||||
Some(syn::Ident::from(ident).into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for FieldNameIdent {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
ref ident,
|
||||
is_getter,
|
||||
} = *self;
|
||||
ident.to_tokens(tokens);
|
||||
if is_getter {
|
||||
syn::token::Paren::default().surround(tokens, |_| {});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FieldNameIdent> for String {
|
||||
fn from(value: FieldNameIdent) -> Self {
|
||||
let mut retval = value.ident.0;
|
||||
if value.is_getter {
|
||||
retval.push_str("()");
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Error)]
|
||||
#[error("invalid field name")]
|
||||
pub struct FieldNameParseError;
|
||||
|
||||
impl_try_from_str!(FieldNameIdent);
|
||||
|
||||
impl FromStr for FieldNameIdent {
|
||||
type Err = FieldNameParseError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let ident = value.strip_suffix("()");
|
||||
let is_getter = ident.is_some();
|
||||
let ident = ident.unwrap_or(value);
|
||||
if let Ok(ident) = ident.parse() {
|
||||
Ok(Self { ident, is_getter })
|
||||
} else {
|
||||
Err(FieldNameParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
pub struct WherePredicates(pub syn::punctuated::Punctuated<syn::WherePredicate, syn::Token![,]>);
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("invalid `where` predicates")]
|
||||
pub struct WherePredicatesParseError;
|
||||
|
||||
impl_try_from_str!(WherePredicates);
|
||||
|
||||
impl FromStr for WherePredicates {
|
||||
type Err = WherePredicatesParseError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
Ok(Self(
|
||||
syn::parse::Parser::parse_str(syn::punctuated::Punctuated::parse_terminated, value)
|
||||
.map_err(|_| WherePredicatesParseError)?,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WherePredicates> for String {
|
||||
fn from(value: WherePredicates) -> Self {
|
||||
value.0.into_token_stream().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WherePredicates> for syn::WhereClause {
|
||||
fn from(value: WherePredicates) -> Self {
|
||||
syn::WhereClause {
|
||||
where_token: Default::default(),
|
||||
predicates: value.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<syn::WhereClause> for WherePredicates {
|
||||
fn from(value: syn::WhereClause) -> Self {
|
||||
Self(value.predicates)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for WherePredicates {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum SerializedGenerics {
|
||||
Where {
|
||||
generics: String,
|
||||
#[serde(rename = "where")]
|
||||
where_predicates: WherePredicates,
|
||||
},
|
||||
NoWhere(String),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Hash)]
|
||||
#[serde(into = "SerializedGenerics", try_from = "SerializedGenerics")]
|
||||
pub struct Generics(pub syn::Generics);
|
||||
|
||||
impl ToTokens for Generics {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Generics> for SerializedGenerics {
|
||||
fn from(mut value: Generics) -> Self {
|
||||
match value.0.where_clause.take() {
|
||||
Some(where_clause) => Self::Where {
|
||||
generics: value.0.into_token_stream().to_string(),
|
||||
where_predicates: where_clause.into(),
|
||||
},
|
||||
None => Self::NoWhere(value.0.into_token_stream().to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("invalid generics")]
|
||||
pub struct GenericsParseError;
|
||||
|
||||
impl TryFrom<SerializedGenerics> for Generics {
|
||||
type Error = GenericsParseError;
|
||||
|
||||
fn try_from(value: SerializedGenerics) -> Result<Self, Self::Error> {
|
||||
let (generics, where_clause) = match value {
|
||||
SerializedGenerics::Where {
|
||||
generics,
|
||||
where_predicates,
|
||||
} => (generics, Some(where_predicates.into())),
|
||||
SerializedGenerics::NoWhere(generics) => (generics, None),
|
||||
};
|
||||
let Ok(mut generics) = syn::parse_str::<syn::Generics>(&generics) else {
|
||||
return Err(GenericsParseError);
|
||||
};
|
||||
generics.where_clause = where_clause;
|
||||
Ok(Self(generics))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
pub struct PathWithGenerics {
|
||||
pub path: Path,
|
||||
pub generics: Option<syn::AngleBracketedGenericArguments>,
|
||||
}
|
||||
|
||||
impl ToTokens for PathWithGenerics {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { path, generics } = self;
|
||||
path.to_tokens(tokens);
|
||||
if let Some(generics) = generics {
|
||||
<syn::Token![::]>::default().to_tokens(tokens);
|
||||
generics.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PathWithGenerics> for String {
|
||||
fn from(value: PathWithGenerics) -> Self {
|
||||
let PathWithGenerics { path, generics } = value;
|
||||
let mut retval = String::from(path);
|
||||
if let Some(generics) = generics {
|
||||
write!(retval, "{}", generics.to_token_stream()).unwrap();
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Error)]
|
||||
#[error("invalid path with optional generics")]
|
||||
pub struct PathWithGenericsParseError;
|
||||
|
||||
impl_try_from_str!(PathWithGenerics);
|
||||
|
||||
impl FromStr for PathWithGenerics {
|
||||
type Err = PathWithGenericsParseError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let (path, generics) = if let Some(lt_pos) = value.find('<') {
|
||||
let (path, generics) = value.split_at(lt_pos);
|
||||
let path = path.strip_suffix("::").unwrap_or(path);
|
||||
match syn::parse_str(generics) {
|
||||
Ok(generics) => (path, Some(generics)),
|
||||
Err(_) => return Err(PathWithGenericsParseError),
|
||||
}
|
||||
} else {
|
||||
(value, None)
|
||||
};
|
||||
if let Ok(path) = path.parse() {
|
||||
Ok(Self { path, generics })
|
||||
} else {
|
||||
Err(PathWithGenericsParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)]
|
||||
#[serde(into = "String", try_from = "String")]
|
||||
pub enum FieldName {
|
||||
Index(usize),
|
||||
Ident(FieldNameIdent),
|
||||
}
|
||||
|
||||
impl FieldName {
|
||||
pub fn to_member(&self) -> Option<syn::Member> {
|
||||
match self {
|
||||
&FieldName::Index(index) => Some(index.into()),
|
||||
FieldName::Ident(ident) => ident.to_member(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for FieldName {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
&FieldName::Index(index) => syn::Index::from(index).to_tokens(tokens),
|
||||
FieldName::Ident(ident) => ident.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FieldName> for String {
|
||||
fn from(value: FieldName) -> Self {
|
||||
match value {
|
||||
FieldName::Index(index) => index.to_string(),
|
||||
FieldName::Ident(ident) => ident.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_try_from_str!(FieldName);
|
||||
|
||||
impl FromStr for FieldName {
|
||||
type Err = FieldNameParseError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
if !value.is_empty()
|
||||
&& value
|
||||
.trim_start_matches(|ch: char| ch.is_ascii_digit())
|
||||
.is_empty()
|
||||
{
|
||||
if let Ok(index) = value.parse() {
|
||||
Ok(Self::Index(index))
|
||||
} else {
|
||||
Err(FieldNameParseError)
|
||||
}
|
||||
} else {
|
||||
value.parse().map(Self::Ident)
|
||||
}
|
||||
}
|
||||
}
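// Illustrative sketch (not part of this commit): the three spellings `FieldName`
// accepts, per the `FromStr` impls above.
fn _field_name_demo() {
    // All-digit names are tuple-struct indices.
    assert_eq!("0".parse::<FieldName>().unwrap(), FieldName::Index(0));
    // A trailing "()" marks a getter-style field; it round-trips through `String`.
    let getter: FieldName = "name_id()".parse().unwrap();
    assert_eq!(String::from(getter.clone()), "name_id()");
    assert!(getter.to_member().is_none()); // getters have no plain struct member
    // Ordinary named fields keep their member form.
    assert!("target".parse::<FieldName>().unwrap().to_member().is_some());
}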
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Fields {
|
||||
#[serde(
|
||||
default,
|
||||
rename = "$constructor",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub constructor: Option<PathWithGenerics>,
|
||||
#[serde(flatten)]
|
||||
pub fields: IndexMap<FieldName, Field>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
pub struct Variants {
|
||||
pub variants: IndexMap<Ident, Option<Field>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
pub enum Field {
|
||||
Opaque,
|
||||
Visible,
|
||||
RefVisible,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::ast;
|
||||
|
||||
#[test]
|
||||
fn test_serialize() {
|
||||
let definitions = ast::Definitions {
|
||||
types: FromIterator::from_iter([
|
||||
(
|
||||
ast::Path("Module".into()),
|
||||
ast::Definition {
|
||||
fn_name_suffix: None,
|
||||
generics: Some(
|
||||
ast::SerializedGenerics::Where {
|
||||
generics: "<T: BundleValue>".into(),
|
||||
where_predicates: "T::Type: BundleType<Value = T>,"
|
||||
.parse()
|
||||
.unwrap(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
),
|
||||
fold_where: None,
|
||||
visit_where: None,
|
||||
data: ast::Data::Struct(ast::Fields {
|
||||
constructor: Some("Module::new_unchecked".parse().unwrap()),
|
||||
fields: FromIterator::from_iter([(
|
||||
"name_id()".parse().unwrap(),
|
||||
ast::Field::Visible,
|
||||
)]),
|
||||
}),
|
||||
},
|
||||
),
|
||||
(
|
||||
ast::Path("NameId".into()),
|
||||
ast::Definition {
|
||||
fn_name_suffix: None,
|
||||
generics: None,
|
||||
fold_where: None,
|
||||
visit_where: None,
|
||||
data: ast::Data::Struct(ast::Fields {
|
||||
constructor: None,
|
||||
fields: FromIterator::from_iter([
|
||||
("0".try_into().unwrap(), ast::Field::Opaque),
|
||||
("1".try_into().unwrap(), ast::Field::Opaque),
|
||||
]),
|
||||
}),
|
||||
},
|
||||
),
|
||||
]),
|
||||
};
|
||||
let definitions_str = serde_json::to_string_pretty(&definitions).unwrap();
|
||||
println!("{definitions_str}");
|
||||
assert_eq!(
|
||||
definitions_str,
|
||||
r#"{
|
||||
"types": {
|
||||
"Module": {
|
||||
"generics": {
|
||||
"generics": "< T : BundleValue >",
|
||||
"where": "T :: Type : BundleType < Value = T > ,"
|
||||
},
|
||||
"data": {
|
||||
"$kind": "Struct",
|
||||
"$constructor": "Module::new_unchecked",
|
||||
"name_id()": "Visible"
|
||||
}
|
||||
},
|
||||
"NameId": {
|
||||
"data": {
|
||||
"$kind": "Struct",
|
||||
"0": "Opaque",
|
||||
"1": "Opaque"
|
||||
}
|
||||
}
|
||||
}
|
||||
}"#
|
||||
);
|
||||
}
|
||||
}
|
426
crates/fayalite-visit-gen/src/lib.rs
Normal file
|
@@ -0,0 +1,426 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote, ToTokens};
|
||||
use std::{collections::BTreeMap, fs};
|
||||
use syn::{fold::Fold, parse_quote};
|
||||
|
||||
pub mod ast;
|
||||
|
||||
fn map_camel_case_to_snake_case(s: &str) -> String {
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
enum State {
|
||||
Start,
|
||||
Lowercase,
|
||||
PushedUpper(char),
|
||||
}
|
||||
let mut state = State::Start;
|
||||
let mut retval = String::new();
|
||||
for ch in s.chars() {
|
||||
state = match ch {
|
||||
'A'..='Z' => {
|
||||
match state {
|
||||
State::Start => {}
|
||||
State::Lowercase => retval.push('_'),
|
||||
State::PushedUpper(upper) => retval.push(upper.to_ascii_lowercase()),
|
||||
}
|
||||
State::PushedUpper(ch)
|
||||
}
|
||||
_ => {
|
||||
match state {
|
||||
State::PushedUpper(upper) => {
|
||||
retval.push(upper.to_ascii_lowercase());
|
||||
}
|
||||
State::Start | State::Lowercase => {}
|
||||
}
|
||||
retval.push(ch);
|
||||
State::Lowercase
|
||||
}
|
||||
};
|
||||
}
|
||||
match state {
|
||||
State::Lowercase | State::Start => {}
|
||||
State::PushedUpper(upper) => retval.push(upper.to_ascii_lowercase()),
|
||||
}
|
||||
retval
|
||||
}
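// Illustrative sketch (not part of this commit): expected conversions, traced from
// the state machine above.
#[cfg(test)]
mod camel_case_tests {
    use super::map_camel_case_to_snake_case;

    #[test]
    fn converts_camel_case() {
        assert_eq!(map_camel_case_to_snake_case("Module"), "module");
        assert_eq!(map_camel_case_to_snake_case("NameId"), "name_id");
        // A trailing run of capitals collapses into a single lower-case word.
        assert_eq!(map_camel_case_to_snake_case("ModuleIO"), "module_io");
        // Already-snake-case input passes through unchanged.
        assert_eq!(map_camel_case_to_snake_case("name_id"), "name_id");
    }
}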
|
||||
|
||||
#[derive(Clone)]
|
||||
struct DefinitionState {
|
||||
fn_name_suffix: syn::Ident,
|
||||
generics: syn::Generics,
|
||||
fold_generics: syn::Generics,
|
||||
folder_generics: syn::Generics,
|
||||
visit_generics: syn::Generics,
|
||||
visitor_generics: syn::Generics,
|
||||
}
|
||||
|
||||
impl DefinitionState {
|
||||
fn folder_fn_name(&self) -> syn::Ident {
|
||||
format_ident!("fold_{}", self.fn_name_suffix)
|
||||
}
|
||||
fn visitor_fn_name(&self) -> syn::Ident {
|
||||
format_ident!("visit_{}", self.fn_name_suffix)
|
||||
}
|
||||
fn folder_fn(&self, path: &ast::Path) -> TokenStream {
|
||||
let folder_fn_name = self.folder_fn_name();
|
||||
let (impl_generics, type_generics, where_clause) = self.folder_generics.split_for_impl();
|
||||
quote! {
|
||||
fn #folder_fn_name #impl_generics(&mut self, v: #path #type_generics) -> Result<#path #type_generics, Self::Error> #where_clause {
|
||||
Fold::default_fold(v, self)
|
||||
}
|
||||
}
|
||||
}
|
||||
fn visitor_fn(&self, path: &ast::Path) -> TokenStream {
|
||||
let visitor_fn_name = self.visitor_fn_name();
|
||||
let (impl_generics, type_generics, where_clause) = self.visitor_generics.split_for_impl();
|
||||
quote! {
|
||||
fn #visitor_fn_name #impl_generics(&mut self, v: &#path #type_generics) -> Result<(), Self::Error> #where_clause {
|
||||
Visit::default_visit(v, self)
|
||||
}
|
||||
}
|
||||
}
|
||||
fn fold_impl(&self, path: &ast::Path, body: impl ToTokens) -> TokenStream {
|
||||
let folder_fn_name = self.folder_fn_name();
|
||||
let (_, self_type_generics, _) = self.generics.split_for_impl();
|
||||
let (trait_impl_generics, _, trait_where_clause) = self.fold_generics.split_for_impl();
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
#[allow(clippy::init_numbered_fields)]
|
||||
impl #trait_impl_generics Fold<State> for #path #self_type_generics #trait_where_clause {
|
||||
fn fold(self, state: &mut State) -> Result<Self, State::Error> {
|
||||
state.#folder_fn_name(self)
|
||||
}
|
||||
fn default_fold(self, state: &mut State) -> Result<Self, State::Error> {
|
||||
#body
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fn visit_impl(&self, path: &ast::Path, body: impl ToTokens) -> TokenStream {
|
||||
let visitor_fn_name = self.visitor_fn_name();
|
||||
let (_, self_type_generics, _) = self.generics.split_for_impl();
|
||||
let (trait_impl_generics, _, trait_where_clause) = self.visit_generics.split_for_impl();
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #trait_impl_generics Visit<State> for #path #self_type_generics #trait_where_clause {
|
||||
fn visit(&self, state: &mut State) -> Result<(), State::Error> {
|
||||
state.#visitor_fn_name(self)
|
||||
}
|
||||
fn default_visit(&self, state: &mut State) -> Result<(), State::Error> {
|
||||
#body
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct GenerateState<'a> {
|
||||
def_states: BTreeMap<&'a ast::Path, DefinitionState>,
|
||||
definitions: &'a ast::Definitions,
|
||||
}
|
||||
|
||||
struct MapStateToSelf;
|
||||
|
||||
impl syn::fold::Fold for MapStateToSelf {
|
||||
fn fold_ident(&mut self, i: syn::Ident) -> syn::Ident {
|
||||
if i == "State" {
|
||||
syn::Ident::new("Self", i.span())
|
||||
} else {
|
||||
i
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> GenerateState<'a> {
|
||||
fn make_definition_state(&mut self, path: &'a ast::Path) -> syn::Result<()> {
|
||||
let ast::Definition {
|
||||
fn_name_suffix,
|
||||
generics,
|
||||
fold_where,
|
||||
visit_where,
|
||||
data: _,
|
||||
} = self.definitions.types.get(path).ok_or_else(|| {
|
||||
syn::Error::new(
|
||||
Span::call_site(),
|
||||
format!("can't find named type: {path:?}"),
|
||||
)
|
||||
})?;
|
||||
let fn_name_suffix = fn_name_suffix
|
||||
.as_ref()
|
||||
.map(syn::Ident::from)
|
||||
.unwrap_or_else(|| format_ident!("{}", map_camel_case_to_snake_case(&path.last().0)));
|
||||
let generics = generics.clone().map(|v| v.0).unwrap_or_default();
|
||||
let mut fold_generics = generics.clone();
|
||||
let mut folder_generics = generics.clone();
|
||||
fold_generics
|
||||
.params
|
||||
.insert(0, parse_quote! {State: ?Sized + Folder});
|
||||
if let Some(fold_where) = fold_where {
|
||||
fold_generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.extend(fold_where.0.iter().cloned());
|
||||
folder_generics.make_where_clause().predicates.extend(
|
||||
fold_where
|
||||
.0
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|v| MapStateToSelf.fold_where_predicate(v)),
|
||||
);
|
||||
}
|
||||
let mut visit_generics = generics.clone();
|
||||
let mut visitor_generics = generics.clone();
|
||||
visit_generics
|
||||
.params
|
||||
.insert(0, parse_quote! {State: ?Sized + Visitor});
|
||||
if let Some(visit_where) = visit_where {
|
||||
visit_generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.extend(visit_where.0.iter().cloned());
|
||||
visitor_generics.make_where_clause().predicates.extend(
|
||||
visit_where
|
||||
.0
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|v| MapStateToSelf.fold_where_predicate(v)),
|
||||
);
|
||||
}
|
||||
self.def_states.insert(
|
||||
path,
|
||||
DefinitionState {
|
||||
fn_name_suffix,
|
||||
generics,
|
||||
fold_generics,
|
||||
folder_generics,
|
||||
visit_generics,
|
||||
visitor_generics,
|
||||
},
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
fn new(ast: &'a ast::Definitions) -> syn::Result<Self> {
|
||||
let mut retval = GenerateState {
|
||||
def_states: BTreeMap::new(),
|
||||
definitions: ast,
|
||||
};
|
||||
let ast::Definitions { types } = ast;
|
||||
for path in types.keys() {
|
||||
retval.make_definition_state(path)?;
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate(ast: &ast::Definitions) -> syn::Result<String> {
|
||||
let state = GenerateState::new(ast)?;
|
||||
let mut visitor_fns = vec![];
|
||||
let mut visit_impls = vec![];
|
||||
let mut folder_fns = vec![];
|
||||
let mut fold_impls = vec![];
|
||||
for (&def_path, def_state) in state.def_states.iter() {
|
||||
folder_fns.push(def_state.folder_fn(def_path));
|
||||
visitor_fns.push(def_state.visitor_fn(def_path));
|
||||
let fold_body;
|
||||
let visit_body;
|
||||
let ast::Definition {
|
||||
fn_name_suffix: _,
|
||||
generics: _,
|
||||
fold_where: _,
|
||||
visit_where: _,
|
||||
data,
|
||||
} = ast.types.get(def_path).unwrap();
|
||||
match data {
|
||||
ast::Data::ManualImpl => {
|
||||
continue;
|
||||
}
|
||||
ast::Data::Opaque => {
|
||||
fold_body = quote! {
|
||||
let _ = state;
|
||||
Ok(self)
|
||||
};
|
||||
visit_body = quote! {
|
||||
let _ = state;
|
||||
Ok(())
|
||||
};
|
||||
}
|
||||
ast::Data::Struct(ast::Fields {
|
||||
constructor,
|
||||
fields,
|
||||
}) => {
|
||||
let mut visit_members = vec![];
|
||||
let mut fold_members = vec![];
|
||||
for (field_name, field) in fields {
|
||||
let fold_member_name = if constructor.is_some() {
|
||||
None
|
||||
} else {
|
||||
let member = field_name.to_member();
|
||||
if member.is_none() {
|
||||
return Err(syn::Error::new(Span::call_site(), format!("struct must have `$constructor` since it contains a non-plain field: {def_path:?} {field_name:?}")));
|
||||
}
|
||||
member
|
||||
};
|
||||
let fold_member_name = fold_member_name.as_slice();
|
||||
let fold_member = match field {
|
||||
ast::Field::Opaque => {
|
||||
quote! {
|
||||
#(#fold_member_name:)* self.#field_name
|
||||
}
|
||||
}
|
||||
ast::Field::Visible => {
|
||||
visit_members.push(quote! {
|
||||
Visit::visit(&self.#field_name, state)?;
|
||||
});
|
||||
quote! {
|
||||
#(#fold_member_name:)* Fold::fold(self.#field_name, state)?
|
||||
}
|
||||
}
|
||||
ast::Field::RefVisible => {
|
||||
visit_members.push(quote! {
|
||||
Visit::visit(self.#field_name, state)?;
|
||||
});
|
||||
quote! {
|
||||
#(#fold_member_name:)* Fold::fold(self.#field_name.clone(), state)?
|
||||
}
|
||||
}
|
||||
};
|
||||
fold_members.push(fold_member);
|
||||
}
|
||||
let match_members = constructor
|
||||
.is_none()
|
||||
.then(|| {
|
||||
fields
|
||||
.keys()
|
||||
.map(|k| k.to_member())
|
||||
.collect::<Option<Vec<_>>>()
|
||||
.map(|members| {
|
||||
if members.is_empty() {
|
||||
quote! {
|
||||
let _ = state;
|
||||
let Self {} = self;
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
let Self {
|
||||
#(#members: _,)*
|
||||
} = self;
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.flatten();
|
||||
visit_body = quote! {
|
||||
#match_members
|
||||
#(#visit_members)*
|
||||
Ok(())
|
||||
};
|
||||
let fold_body_tail = if let Some(constructor) = constructor {
|
||||
quote! {
|
||||
Ok(#constructor(#(#fold_members),*))
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
Ok(Self {
|
||||
#(#fold_members,)*
|
||||
})
|
||||
}
|
||||
};
|
||||
fold_body = quote! {
|
||||
#match_members
|
||||
#fold_body_tail
|
||||
};
|
||||
}
|
||||
ast::Data::Enum(ast::Variants { variants }) => {
|
||||
let mut fold_arms = vec![];
|
||||
let mut visit_arms = vec![];
|
||||
let mut state_unused = true;
|
||||
for (variant_name, variant_field) in variants {
|
||||
let fold_arm;
|
||||
let visit_arm;
|
||||
match variant_field {
|
||||
Some(ast::Field::Visible) => {
|
||||
state_unused = false;
|
||||
fold_arm = quote! {
|
||||
Self::#variant_name(v) => Fold::fold(v, state).map(Self::#variant_name),
|
||||
};
|
||||
visit_arm = quote! {
|
||||
Self::#variant_name(v) => Visit::visit(v, state),
|
||||
};
|
||||
}
|
||||
Some(ast::Field::RefVisible) => {
|
||||
return Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
"enum variant field must not be RefVisible",
|
||||
));
|
||||
}
|
||||
Some(ast::Field::Opaque) => {
|
||||
fold_arm = quote! {
|
||||
Self::#variant_name(_) => Ok(self),
|
||||
};
|
||||
visit_arm = quote! {
|
||||
Self::#variant_name(_) => Ok(()),
|
||||
};
|
||||
}
|
||||
None => {
|
||||
fold_arm = quote! {
|
||||
Self::#variant_name => Ok(self),
|
||||
};
|
||||
visit_arm = quote! {
|
||||
Self::#variant_name => Ok(()),
|
||||
};
|
||||
}
|
||||
}
|
||||
fold_arms.push(fold_arm);
|
||||
visit_arms.push(visit_arm);
|
||||
}
|
||||
let ignore_state = state_unused.then(|| {
|
||||
quote! {
|
||||
let _ = state;
|
||||
}
|
||||
});
|
||||
visit_body = quote! {
|
||||
#ignore_state
|
||||
match self {
|
||||
#(#visit_arms)*
|
||||
}
|
||||
};
|
||||
fold_body = quote! {
|
||||
#ignore_state
|
||||
match self {
|
||||
#(#fold_arms)*
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
fold_impls.push(def_state.fold_impl(def_path, fold_body));
|
||||
visit_impls.push(def_state.visit_impl(def_path, visit_body));
|
||||
}
|
||||
Ok(prettyplease::unparse(&parse_quote! {
|
||||
pub trait Visitor {
|
||||
type Error;
|
||||
|
||||
#(#visitor_fns)*
|
||||
}
|
||||
|
||||
#(#visit_impls)*
|
||||
|
||||
pub trait Folder {
|
||||
type Error;
|
||||
|
||||
#(#folder_fns)*
|
||||
}
|
||||
|
||||
#(#fold_impls)*
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn error_at_call_site<T: std::fmt::Display>(e: T) -> syn::Error {
|
||||
syn::Error::new(Span::call_site(), e)
|
||||
}
|
||||
|
||||
pub fn parse_and_generate(path: impl AsRef<std::path::Path>) -> syn::Result<String> {
|
||||
let input = fs::read_to_string(path).map_err(error_at_call_site)?;
|
||||
let ast: ast::Definitions = serde_json::from_str(&input).map_err(error_at_call_site)?;
|
||||
generate(&ast)
|
||||
}
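For reference, the smallest useful input `parse_and_generate` accepts is a JSON object with a `types` map, in the same serialization format exercised by the test in `ast.rs`; the type name below is illustrative:

{
  "types": {
    "SourceLocation": {
      "data": { "$kind": "Opaque" }
    }
  }
}

A `"$kind": "Struct"` entry additionally lists its fields (and optionally a `"$constructor"` path), as shown in that test.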
|
24
crates/fayalite/Cargo.toml
Normal file
|
@@ -0,0 +1,24 @@
|
|||
# SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
# See Notices.txt for copyright information
|
||||
[package]
|
||||
name = "fayalite"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
workspace = "../.."
|
||||
license = "LGPL-3.0-or-later"
|
||||
|
||||
[dependencies]
|
||||
bitvec = { version = "1.0.1", features = ["serde"] }
|
||||
hashbrown = "0.14.3"
|
||||
num-bigint = "0.4.4"
|
||||
num-traits = "0.2.16"
|
||||
paste = "1.0.14"
|
||||
fayalite-proc-macros = { version = "=0.1.0", path = "../fayalite-proc-macros" }
|
||||
serde = { version = "1.0.202", features = ["derive"] }
|
||||
serde_json = "1.0.117"
|
||||
|
||||
[dev-dependencies]
|
||||
trybuild = "1.0"
|
||||
|
||||
[build-dependencies]
|
||||
fayalite-visit-gen = { version = "=0.1.0", path = "../fayalite-visit-gen" }
|
15
crates/fayalite/build.rs
Normal file
|
@@ -0,0 +1,15 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use fayalite_visit_gen::parse_and_generate;
|
||||
use std::{env, fs, path::Path};
|
||||
|
||||
fn main() {
|
||||
let path = "visit_types.json";
|
||||
println!("cargo::rerun-if-changed={path}");
|
||||
println!("cargo::rerun-if-changed=build.rs");
|
||||
let generated = parse_and_generate(path).map_err(|e| panic!("{e}")).unwrap();
|
||||
let out_dir = env::var_os("OUT_DIR").unwrap();
|
||||
let out_path = Path::new(&out_dir).join("visit.rs");
|
||||
fs::write(&out_path, generated).unwrap();
|
||||
// println!("cargo::warning=generated {}", out_path.display());
|
||||
}
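The generated `visit.rs` is presumably pulled into the main crate with an `include!`; a sketch of what that looks like (the including module is not part of this hunk, so the exact location is an assumption based on the `module::transform::visit` imports used elsewhere in this commit):

// e.g. in crates/fayalite/src/module/transform/visit.rs (assumed location)
include!(concat!(env!("OUT_DIR"), "/visit.rs"));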
|
196
crates/fayalite/src/annotations.rs
Normal file
|
@@ -0,0 +1,196 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
expr::Target,
|
||||
intern::{Intern, Interned},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
ops::Deref,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
struct CustomFirrtlAnnotationFieldsImpl {
|
||||
value: serde_json::Map<String, serde_json::Value>,
|
||||
serialized: Interned<str>,
|
||||
}
|
||||
|
||||
impl Hash for CustomFirrtlAnnotationFieldsImpl {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.serialized.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for CustomFirrtlAnnotationFieldsImpl {}
|
||||
|
||||
impl PartialEq for CustomFirrtlAnnotationFieldsImpl {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.serialized == other.serialized
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CustomFirrtlAnnotationFields(Interned<CustomFirrtlAnnotationFieldsImpl>);
|
||||
|
||||
impl fmt::Debug for CustomFirrtlAnnotationFields {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.value.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for CustomFirrtlAnnotationFields {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
serde_json::Map::<String, serde_json::Value>::deserialize(deserializer).map(Self::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for CustomFirrtlAnnotationFields {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.0.value.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for CustomFirrtlAnnotationFields {
|
||||
type Target = serde_json::Map<String, serde_json::Value>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0.value
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::Map<String, serde_json::Value>> for CustomFirrtlAnnotationFields {
|
||||
fn from(value: serde_json::Map<String, serde_json::Value>) -> Self {
|
||||
let serialized =
|
||||
serde_json::to_string(&value).expect("serialization of JSON should succeed");
|
||||
Self(Intern::intern_sized(CustomFirrtlAnnotationFieldsImpl {
|
||||
value,
|
||||
serialized: Intern::intern_owned(serialized),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NotAJsonObject(pub serde_json::Value);
|
||||
|
||||
impl fmt::Display for NotAJsonObject {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("not a JSON object")
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for NotAJsonObject {}
|
||||
|
||||
impl TryFrom<serde_json::Value> for CustomFirrtlAnnotationFields {
|
||||
type Error = NotAJsonObject;
|
||||
|
||||
fn try_from(value: serde_json::Value) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
serde_json::Value::Object(value) => Ok(value.into()),
|
||||
_ => Err(NotAJsonObject(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CustomFirrtlAnnotationFields> for serde_json::Map<String, serde_json::Value> {
|
||||
fn from(value: CustomFirrtlAnnotationFields) -> Self {
|
||||
Self::clone(&value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CustomFirrtlAnnotationFields> for serde_json::Value {
|
||||
fn from(value: CustomFirrtlAnnotationFields) -> Self {
|
||||
serde_json::Value::Object(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct CustomFirrtlAnnotation {
|
||||
pub class: Interned<str>,
|
||||
#[serde(flatten)]
|
||||
pub additional_fields: CustomFirrtlAnnotationFields,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch,
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct TargetedAnnotation {
|
||||
target: Interned<Target>,
|
||||
annotation: Annotation,
|
||||
}
|
||||
|
||||
impl TargetedAnnotation {
|
||||
#[track_caller]
|
||||
pub fn new(target: Interned<Target>, annotation: Annotation) -> Self {
|
||||
Self::assert_valid_target(target);
|
||||
Self { target, annotation }
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn assert_valid_target(target: Interned<Target>) {
|
||||
assert!(target.is_static(), "can't annotate non-static targets");
|
||||
}
|
||||
pub fn target(&self) -> Interned<Target> {
|
||||
self.target
|
||||
}
|
||||
pub fn annotation(&self) -> &Annotation {
|
||||
&self.annotation
|
||||
}
|
||||
}
|
||||
|
||||
pub trait IntoAnnotations {
|
||||
type IntoAnnotations: IntoIterator<Item = Annotation>;
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations;
|
||||
}
|
||||
|
||||
impl IntoAnnotations for Annotation {
|
||||
type IntoAnnotations = [Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[self]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for Box<Annotation> {
|
||||
type IntoAnnotations = [Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[*self]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ Annotation {
|
||||
type IntoAnnotations = [Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[self.clone()]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ mut Annotation {
|
||||
type IntoAnnotations = [Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[self.clone()]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IntoIterator<Item = Annotation>> IntoAnnotations for T {
|
||||
type IntoAnnotations = Self;
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
self
|
||||
}
|
||||
}
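A minimal sketch of how these pieces compose from user code; the module paths, the `Intern::intern` call on a string literal (used the same way elsewhere in this commit), and the annotation class string are assumptions:

// Illustrative only: build a custom FIRRTL annotation from a JSON object.
use fayalite::annotations::{Annotation, CustomFirrtlAnnotation, CustomFirrtlAnnotationFields};
use fayalite::intern::Intern;

fn custom_annotation() -> Annotation {
    let fields: CustomFirrtlAnnotationFields = serde_json::json!({
        "description": "example", // arbitrary extra fields are kept verbatim and re-serialized
    })
    .try_into()
    .expect("JSON objects convert; anything else is rejected with NotAJsonObject");
    Annotation::CustomFirrtl(CustomFirrtlAnnotation {
        class: Intern::intern("some.annotation.Class"), // class name is a placeholder
        additional_fields: fields,
    })
}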
|
729
crates/fayalite/src/array.rs
Normal file
|
@@ -0,0 +1,729 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
bundle::{BundleType, BundleValue},
|
||||
expr::{
|
||||
ops::{ArrayIndex, ArrayLiteral, ExprIndex},
|
||||
Expr, ToExpr,
|
||||
},
|
||||
intern::{Intern, Interned, InternedCompare, Memoize},
|
||||
module::{
|
||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
ModuleBuilder, NormalModule,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, DynCanonicalType,
|
||||
DynCanonicalValue, DynType, DynValueTrait, FixedType, MatchVariantWithoutScope, Type,
|
||||
TypeEnum, Value, ValueEnum,
|
||||
},
|
||||
util::{ConstBool, GenericConstBool, MakeMutSlice},
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use std::{
|
||||
any::Any,
|
||||
borrow::{Borrow, BorrowMut},
|
||||
fmt,
|
||||
hash::Hash,
|
||||
marker::PhantomData,
|
||||
ops::IndexMut,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
mod sealed {
|
||||
pub trait Sealed {}
|
||||
}
|
||||
|
||||
pub trait ValueArrayOrSlice:
|
||||
sealed::Sealed
|
||||
+ BorrowMut<[<Self as ValueArrayOrSlice>::Element]>
|
||||
+ AsRef<[<Self as ValueArrayOrSlice>::Element]>
|
||||
+ AsMut<[<Self as ValueArrayOrSlice>::Element]>
|
||||
+ Hash
|
||||
+ fmt::Debug
|
||||
+ Eq
|
||||
+ Send
|
||||
+ Sync
|
||||
+ 'static
|
||||
+ IndexMut<usize, Output = <Self as ValueArrayOrSlice>::Element>
|
||||
+ ToOwned
|
||||
+ InternedCompare
|
||||
{
|
||||
type Element: Value<Type = <Self as ValueArrayOrSlice>::ElementType>;
|
||||
type ElementType: Type<Value = <Self as ValueArrayOrSlice>::Element>;
|
||||
type LenType: 'static + Copy + Ord + fmt::Debug + Hash + Send + Sync;
|
||||
type Match: 'static
|
||||
+ Clone
|
||||
+ Eq
|
||||
+ fmt::Debug
|
||||
+ Hash
|
||||
+ Send
|
||||
+ Sync
|
||||
+ BorrowMut<[Expr<Self::Element>]>;
|
||||
type MaskVA: ValueArrayOrSlice<
|
||||
Element = <Self::ElementType as Type>::MaskValue,
|
||||
ElementType = <Self::ElementType as Type>::MaskType,
|
||||
LenType = Self::LenType,
|
||||
MaskVA = Self::MaskVA,
|
||||
> + ?Sized;
|
||||
type IsFixedLen: GenericConstBool;
|
||||
const FIXED_LEN_TYPE: Option<Self::LenType>;
|
||||
fn make_match(array: Expr<Array<Self>>) -> Self::Match;
|
||||
fn len_from_len_type(v: Self::LenType) -> usize;
|
||||
#[allow(clippy::result_unit_err)]
|
||||
fn try_len_type_from_len(v: usize) -> Result<Self::LenType, ()>;
|
||||
fn len_type(&self) -> Self::LenType;
|
||||
fn len(&self) -> usize;
|
||||
fn is_empty(&self) -> bool;
|
||||
fn iter(&self) -> std::slice::Iter<Self::Element> {
|
||||
Borrow::<[_]>::borrow(self).iter()
|
||||
}
|
||||
fn clone_to_arc(&self) -> Arc<Self>;
|
||||
fn arc_make_mut(v: &mut Arc<Self>) -> &mut Self;
|
||||
fn arc_to_arc_slice(self: Arc<Self>) -> Arc<[Self::Element]>;
|
||||
}
|
||||
|
||||
impl<T> sealed::Sealed for [T] {}
|
||||
|
||||
impl<V: Value> ValueArrayOrSlice for [V]
|
||||
where
|
||||
V::Type: Type<Value = V>,
|
||||
{
|
||||
type Element = V;
|
||||
type ElementType = V::Type;
|
||||
type LenType = usize;
|
||||
type Match = Box<[Expr<V>]>;
|
||||
type MaskVA = [<Self::ElementType as Type>::MaskValue];
|
||||
type IsFixedLen = ConstBool<false>;
|
||||
const FIXED_LEN_TYPE: Option<Self::LenType> = None;
|
||||
|
||||
fn make_match(array: Expr<Array<Self>>) -> Self::Match {
|
||||
(0..array.canonical_type().len())
|
||||
.map(|index| ArrayIndex::<V::Type>::new_unchecked(array.canonical(), index).to_expr())
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn len_from_len_type(v: Self::LenType) -> usize {
|
||||
v
|
||||
}
|
||||
|
||||
fn try_len_type_from_len(v: usize) -> Result<Self::LenType, ()> {
|
||||
Ok(v)
|
||||
}
|
||||
|
||||
fn len_type(&self) -> Self::LenType {
|
||||
self.len()
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
<[_]>::len(self)
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
<[_]>::is_empty(self)
|
||||
}
|
||||
|
||||
fn clone_to_arc(&self) -> Arc<Self> {
|
||||
Arc::from(self)
|
||||
}
|
||||
|
||||
fn arc_make_mut(v: &mut Arc<Self>) -> &mut Self {
|
||||
MakeMutSlice::make_mut_slice(v)
|
||||
}
|
||||
|
||||
fn arc_to_arc_slice(self: Arc<Self>) -> Arc<[Self::Element]> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, const N: usize> sealed::Sealed for [T; N] {}
|
||||
|
||||
impl<V: Value, const N: usize> ValueArrayOrSlice for [V; N]
|
||||
where
|
||||
V::Type: Type<Value = V>,
|
||||
{
|
||||
type Element = V;
|
||||
type ElementType = V::Type;
|
||||
type LenType = ();
|
||||
type Match = [Expr<V>; N];
|
||||
type MaskVA = [<Self::ElementType as Type>::MaskValue; N];
|
||||
type IsFixedLen = ConstBool<true>;
|
||||
const FIXED_LEN_TYPE: Option<Self::LenType> = Some(());
|
||||
|
||||
fn make_match(array: Expr<Array<Self>>) -> Self::Match {
|
||||
std::array::from_fn(|index| {
|
||||
ArrayIndex::<V::Type>::new_unchecked(array.canonical(), index).to_expr()
|
||||
})
|
||||
}
|
||||
|
||||
fn len_from_len_type(_v: Self::LenType) -> usize {
|
||||
N
|
||||
}
|
||||
|
||||
fn try_len_type_from_len(v: usize) -> Result<Self::LenType, ()> {
|
||||
if v == N {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
|
||||
fn len_type(&self) -> Self::LenType {}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
N
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
N == 0
|
||||
}
|
||||
|
||||
fn clone_to_arc(&self) -> Arc<Self> {
|
||||
Arc::new(self.clone())
|
||||
}
|
||||
|
||||
fn arc_make_mut(v: &mut Arc<Self>) -> &mut Self {
|
||||
Arc::make_mut(v)
|
||||
}
|
||||
|
||||
fn arc_to_arc_slice(self: Arc<Self>) -> Arc<[Self::Element]> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub struct ArrayType<VA: ValueArrayOrSlice + ?Sized> {
|
||||
element: VA::ElementType,
|
||||
len: VA::LenType,
|
||||
bit_width: usize,
|
||||
}
|
||||
|
||||
pub trait ArrayTypeTrait:
|
||||
Type<
|
||||
CanonicalType = ArrayType<[DynCanonicalValue]>,
|
||||
Value = Array<<Self as ArrayTypeTrait>::ValueArrayOrSlice>,
|
||||
CanonicalValue = Array<[DynCanonicalValue]>,
|
||||
MaskType = ArrayType<
|
||||
<<Self as ArrayTypeTrait>::ValueArrayOrSlice as ValueArrayOrSlice>::MaskVA,
|
||||
>,
|
||||
> + From<ArrayType<<Self as ArrayTypeTrait>::ValueArrayOrSlice>>
|
||||
+ Into<ArrayType<<Self as ArrayTypeTrait>::ValueArrayOrSlice>>
|
||||
+ BorrowMut<ArrayType<<Self as ArrayTypeTrait>::ValueArrayOrSlice>>
|
||||
+ sealed::Sealed
|
||||
+ Connect<Self>
|
||||
{
|
||||
type ValueArrayOrSlice: ValueArrayOrSlice<Element = Self::Element, ElementType = Self::ElementType>
|
||||
+ ?Sized;
|
||||
type Element: Value<Type = Self::ElementType>;
|
||||
type ElementType: Type<Value = Self::Element>;
|
||||
}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized> sealed::Sealed for ArrayType<VA> {}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized> ArrayTypeTrait for ArrayType<VA> {
|
||||
type ValueArrayOrSlice = VA;
|
||||
type Element = VA::Element;
|
||||
type ElementType = VA::ElementType;
|
||||
}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized> Clone for ArrayType<VA> {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
element: self.element.clone(),
|
||||
len: self.len,
|
||||
bit_width: self.bit_width,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized> Copy for ArrayType<VA> where VA::ElementType: Copy {}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized> ArrayType<VA> {
|
||||
pub fn element(&self) -> &VA::ElementType {
|
||||
&self.element
|
||||
}
|
||||
pub fn len(&self) -> usize {
|
||||
VA::len_from_len_type(self.len)
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bit_width
|
||||
}
|
||||
pub fn into_slice_type(self) -> ArrayType<[VA::Element]> {
|
||||
ArrayType {
|
||||
len: self.len(),
|
||||
element: self.element,
|
||||
bit_width: self.bit_width,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn new_with_len(element: VA::ElementType, len: usize) -> Self {
|
||||
Self::new_with_len_type(
|
||||
element,
|
||||
VA::try_len_type_from_len(len).expect("length should match"),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn new_with_len_type(element: VA::ElementType, len: VA::LenType) -> Self {
|
||||
let Some(bit_width) = VA::len_from_len_type(len).checked_mul(element.bit_width()) else {
|
||||
panic!("array is too big: bit-width overflowed");
|
||||
};
|
||||
ArrayType {
|
||||
element,
|
||||
len,
|
||||
bit_width,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized, State: ?Sized + Folder> Fold<State> for ArrayType<VA>
|
||||
where
|
||||
VA::ElementType: Fold<State>,
|
||||
{
|
||||
fn fold(self, state: &mut State) -> Result<Self, State::Error> {
|
||||
state.fold_array_type(self)
|
||||
}
|
||||
fn default_fold(self, state: &mut State) -> Result<Self, State::Error> {
|
||||
Ok(Self::new_with_len_type(self.element.fold(state)?, self.len))
|
||||
}
|
||||
}
|
||||
|
||||
impl<VA: ValueArrayOrSlice + ?Sized, State: ?Sized + Visitor> Visit<State> for ArrayType<VA>
|
||||
where
|
||||
VA::ElementType: Visit<State>,
|
||||
{
|
||||
fn visit(&self, state: &mut State) -> Result<(), State::Error> {
|
||||
state.visit_array_type(self)
|
||||
}
|
||||
fn default_visit(&self, state: &mut State) -> Result<(), State::Error> {
|
||||
self.element.visit(state)
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: Value, const N: usize> ArrayType<[V; N]>
|
||||
where
|
||||
V::Type: Type<Value = V>,
|
||||
{
|
||||
pub fn new_array(element: V::Type) -> Self {
|
||||
ArrayType::new_with_len_type(element, ())
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: Value, const N: usize> FixedType for ArrayType<[V; N]>
|
||||
where
|
||||
V::Type: FixedType<Value = V>,
|
||||
{
|
||||
fn fixed_type() -> Self {
|
||||
Self::new_array(FixedType::fixed_type())
|
||||
}
|
||||
}
|
||||
|
||||
impl<V: Value> ArrayType<[V]>
|
||||
where
|
||||
V::Type: Type<Value = V>,
|
||||
{
|
||||
pub fn new_slice(element: V::Type, len: usize) -> Self {
|
||||
ArrayType::new_with_len_type(element, len)
|
||||
}
|
||||
}
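// Illustrative sketch (not part of this commit): the fixed-length and slice-typed
// constructors above build the two ArrayType flavors over the same element type.
// The extra `Clone` bound exists only for this sketch.
fn _array_type_demo<V: Value>(element: V::Type)
where
    V::Type: Type<Value = V> + Clone,
{
    // Fixed length: the length lives in the const generic, so no len argument.
    let fixed: ArrayType<[V; 4]> = ArrayType::new_array(element.clone());
    // Slice typed: the length is a runtime value carried by the type object.
    let slice: ArrayType<[V]> = ArrayType::new_slice(element, 4);
    assert_eq!(fixed.len(), slice.len());
    assert_eq!(fixed.bit_width(), slice.bit_width());
}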
|
||||

impl<VA: ValueArrayOrSlice + ?Sized> Type for ArrayType<VA> {
    type CanonicalType = ArrayType<[DynCanonicalValue]>;
    type Value = Array<VA>;
    type CanonicalValue = Array<[DynCanonicalValue]>;
    type MaskType = ArrayType<VA::MaskVA>;
    type MaskValue = Array<VA::MaskVA>;
    type MatchVariant = VA::Match;
    type MatchActiveScope = ();
    type MatchVariantAndInactiveScope = MatchVariantWithoutScope<VA::Match>;
    type MatchVariantsIter = std::iter::Once<Self::MatchVariantAndInactiveScope>;

    fn match_variants<IO: BundleValue>(
        this: Expr<Self::Value>,
        module_builder: &mut ModuleBuilder<IO, NormalModule>,
        source_location: SourceLocation,
    ) -> Self::MatchVariantsIter
    where
        IO::Type: BundleType<Value = IO>,
    {
        let _ = module_builder;
        let _ = source_location;
        std::iter::once(MatchVariantWithoutScope(VA::make_match(this)))
    }

    fn mask_type(&self) -> Self::MaskType {
        #[derive(Clone, Hash, Eq, PartialEq)]
        struct ArrayMaskTypeMemoize<T: ArrayTypeTrait>(PhantomData<T>);
        impl<T: ArrayTypeTrait> Copy for ArrayMaskTypeMemoize<T> {}
        impl<T: ArrayTypeTrait> Memoize for ArrayMaskTypeMemoize<T> {
            type Input = ArrayType<T::ValueArrayOrSlice>;
            type InputOwned = ArrayType<T::ValueArrayOrSlice>;
            type Output = <ArrayType<T::ValueArrayOrSlice> as Type>::MaskType;

            fn inner(self, input: &Self::Input) -> Self::Output {
                ArrayType::new_with_len_type(input.element.mask_type(), input.len)
            }
        }
        ArrayMaskTypeMemoize::<Self>(PhantomData).get(self)
    }

    fn canonical(&self) -> Self::CanonicalType {
        ArrayType {
            element: self.element.canonical_dyn(),
            len: self.len(),
            bit_width: self.bit_width,
        }
    }

    fn source_location(&self) -> SourceLocation {
        SourceLocation::builtin()
    }

    fn type_enum(&self) -> TypeEnum {
        TypeEnum::ArrayType(self.canonical())
    }

    fn from_canonical_type(t: Self::CanonicalType) -> Self {
        Self {
            element: VA::ElementType::from_dyn_canonical_type(t.element),
            len: VA::try_len_type_from_len(t.len).expect("length should match"),
            bit_width: t.bit_width,
        }
    }

    fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
        Some(<dyn Any>::downcast_ref::<ArrayType<[DynCanonicalValue]>>(
            this,
        )?)
    }
}
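// Note on the `Type` impl above: the canonical form of any `ArrayType<VA>` is
// `ArrayType<[DynCanonicalValue]>` (element type erased, length made dynamic),
// and `mask_type()` is memoized per concrete `ArrayType` via the `Memoize`
// helper so repeated calls reuse the same computed mask type.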

impl<Lhs: ValueArrayOrSlice + ?Sized, Rhs: ValueArrayOrSlice + ?Sized> Connect<ArrayType<Rhs>>
    for ArrayType<Lhs>
{
}

impl CanonicalType for ArrayType<[DynCanonicalValue]> {
    const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::ArrayType;
}

#[derive(Debug, PartialEq, Eq, Hash)]
pub struct Array<VA: ValueArrayOrSlice + ?Sized> {
    element_ty: VA::ElementType,
    value: Arc<VA>,
}

impl<VA: ValueArrayOrSlice + ?Sized> Clone for Array<VA> {
    fn clone(&self) -> Self {
        Self {
            element_ty: self.element_ty.clone(),
            value: self.value.clone(),
        }
    }
}

impl<VA: ValueArrayOrSlice + ?Sized> ToExpr for Array<VA> {
    type Type = ArrayType<VA>;

    fn ty(&self) -> Self::Type {
        ArrayType::new_with_len_type(self.element_ty.clone(), self.value.len_type())
    }

    fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
        Expr::from_value(self)
    }
}

impl<VA: ValueArrayOrSlice + ?Sized> Value for Array<VA> {
    fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
        Array {
            element_ty: self.element_ty.canonical_dyn(),
            value: AsRef::<[_]>::as_ref(&*self.value)
                .iter()
                .map(|v| v.to_canonical_dyn())
                .collect(),
        }
    }
    fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
        #[derive(Hash, Eq, PartialEq)]
        struct ArrayToBitsMemoize<VA: ValueArrayOrSlice + ?Sized>(PhantomData<VA>);
        impl<VA: ValueArrayOrSlice + ?Sized> Clone for ArrayToBitsMemoize<VA> {
            fn clone(&self) -> Self {
                *self
            }
        }
        impl<VA: ValueArrayOrSlice + ?Sized> Copy for ArrayToBitsMemoize<VA> {}
        impl<VA: ValueArrayOrSlice + ?Sized> Memoize for ArrayToBitsMemoize<VA> {
            type Input = Array<VA>;
            type InputOwned = Array<VA>;
            type Output = Interned<BitSlice>;

            fn inner(self, input: &Self::Input) -> Self::Output {
                let mut bits = BitVec::with_capacity(input.ty().bit_width());
                for element in AsRef::<[_]>::as_ref(&*input.value).iter() {
                    bits.extend_from_bitslice(&element.to_bits());
                }
                Intern::intern_owned(bits)
            }
        }
        ArrayToBitsMemoize::<VA>(PhantomData).get(this)
    }
}

impl CanonicalValue for Array<[DynCanonicalValue]> {
    fn value_enum_impl(this: &Self) -> ValueEnum {
        ValueEnum::Array(this.clone())
    }
    fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
        Value::to_bits_impl(this)
    }
}

impl<VA: ValueArrayOrSlice + ?Sized> Array<VA> {
    pub fn element_ty(&self) -> &VA::ElementType {
        &self.element_ty
    }
    pub fn len(&self) -> usize {
        VA::len_from_len_type(self.value.len_type())
    }
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    pub fn value(&self) -> &Arc<VA> {
        &self.value
    }
    pub fn set_element(&mut self, index: usize, element: VA::Element) {
        assert_eq!(self.element_ty, element.ty());
        VA::arc_make_mut(&mut self.value)[index] = element;
    }
    pub fn new(element_ty: VA::ElementType, value: Arc<VA>) -> Self {
        for element in value.iter() {
            assert_eq!(element_ty, element.ty());
        }
        Self { element_ty, value }
    }
    pub fn into_slice(self) -> Array<[VA::Element]> {
        Array {
            element_ty: self.element_ty,
            value: self.value.arc_to_arc_slice(),
        }
    }
}
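// Construction sketch for `Array::new` above (hypothetical element values `a`,
// `b`, `c` of some value type `Elem`, with `elem_ty` as their shared type;
// none of these are defined in this file):
//
//     let array: Array<[Elem; 3]> = Array::new(elem_ty, Arc::new([a, b, c]));
//     assert_eq!(array.len(), 3);
//
// `new` asserts that every element's `ty()` equals `element_ty`, and
// `set_element` re-checks that invariant on mutation.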

impl<VA: ValueArrayOrSlice + ?Sized, T: Into<Arc<VA>>> From<T> for Array<VA>
where
    VA::ElementType: FixedType,
{
    fn from(value: T) -> Self {
        Self::new(FixedType::fixed_type(), value.into())
    }
}

impl<E: ToExpr<Type = T>, T: FixedType> ToExpr for [E] {
    type Type = ArrayType<[T::Value]>;

    fn ty(&self) -> Self::Type {
        ArrayType::new_with_len_type(FixedType::fixed_type(), self.len())
    }

    fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
        let elements = Intern::intern_owned(Vec::from_iter(
            self.iter().map(|v| v.to_expr().to_canonical_dyn()),
        ));
        ArrayLiteral::new_unchecked(elements, self.ty()).to_expr()
    }
}

impl<E: ToExpr<Type = T>, T: FixedType> ToExpr for Vec<E> {
    type Type = ArrayType<[T::Value]>;

    fn ty(&self) -> Self::Type {
        <[E]>::ty(self)
    }

    fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
        <[E]>::to_expr(self)
    }
}
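// Sketch of the slice/Vec conversions above (hypothetical values `a`, `b`, `c`
// of an element type implementing `ToExpr` whose type implements `FixedType`):
//
//     let expr = vec![a, b, c].to_expr(); // expression of a 3-element array type
//
// Each element is converted with `to_expr().to_canonical_dyn()` and collected
// into an `ArrayLiteral`; the `Vec<E>` impl simply forwards to the `[E]` impl.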

impl<E: ToExpr<Type = T>, T: FixedType> ToExpr for [E; 0] {
    type Type = ArrayType<[T::Value; 0]>;