diff --git a/.forgejo/workflows/test.yml b/.forgejo/workflows/test.yml index 71f4a3b..001168f 100644 --- a/.forgejo/workflows/test.yml +++ b/.forgejo/workflows/test.yml @@ -1,19 +1,25 @@ +# SPDX-License-Identifier: LGPL-3.0-or-later +# See Notices.txt for copyright information on: [push, pull_request] jobs: test: runs-on: debian-12 + container: + image: git.libre-chip.org/libre-chip/fayalite-deps:latest steps: - - uses: https://code.forgejo.org/actions/checkout@v3 + - uses: actions/checkout@v3 with: fetch-depth: 0 - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.79.0 - source "$HOME/.cargo/env" - echo "$PATH" >> "$GITHUB_PATH" - - uses: https://github.com/Swatinem/rust-cache@v2 + scripts/check-copyright.sh + - uses: https://git.libre-chip.org/mirrors/rust-cache@v2 with: save-if: ${{ github.ref == 'refs/heads/master' }} - run: cargo test - - run: cargo test --features=unstable-doc + - run: cargo build --tests --features=unstable-doc + - run: cargo test --doc --features=unstable-doc - run: cargo doc --features=unstable-doc + - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher + - run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out + - run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out diff --git a/.gitignore b/.gitignore index ccb5166..0655406 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ +# SPDX-License-Identifier: LGPL-3.0-or-later +# See Notices.txt for copyright information /target -.vscode \ No newline at end of file +.vscode diff --git a/Cargo.lock b/Cargo.lock index 1c17ac7..be5f3bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,18 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "ahash" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] +version = 4 [[package]] name = "allocator-api2" @@ -37,9 +25,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" @@ -56,7 +44,7 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -66,9 +54,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.52.0", ] +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + [[package]] name = "autocfg" version = "1.1.0" @@ -81,6 +81,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "basic-toml" version = "0.1.8" @@ -109,6 +115,20 @@ dependencies = [ "wyz", ] +[[package]] +name = "blake3" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", + "serde", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -118,6 +138,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "cc" +version = "1.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1" +dependencies = [ + "shlex", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -126,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" +checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" dependencies = [ "clap_builder", "clap_derive", @@ -136,9 +165,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" +checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" 
dependencies = [ "anstream", "anstyle", @@ -147,10 +176,19 @@ dependencies = [ ] [[package]] -name = "clap_derive" -version = "4.5.8" +name = "clap_complete" +version = "4.5.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "4.5.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" dependencies = [ "heck", "proc-macro2", @@ -160,9 +198,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" @@ -170,6 +208,12 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "cpufeatures" version = "0.2.12" @@ -189,6 +233,27 @@ dependencies = [ "typenum", ] +[[package]] +name = "ctor" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "derive_destructure2" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "digest" version = "0.10.7" @@ -218,7 +283,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -239,32 +304,41 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fayalite" -version = "0.2.0" +version = "0.3.0" dependencies = [ + "base64", "bitvec", + "blake3", "clap", + "clap_complete", + "ctor", "eyre", "fayalite-proc-macros", "fayalite-visit-gen", "hashbrown", + "jobslot", "num-bigint", "num-traits", + "ordered-float", + "petgraph", "serde", "serde_json", + "tempfile", "trybuild", + "vec_map", "which", ] [[package]] name = "fayalite-proc-macros" -version = "0.2.0" +version = "0.3.0" dependencies = [ "fayalite-proc-macros-impl", ] [[package]] name = "fayalite-proc-macros-impl" -version = "0.2.0" +version = "0.3.0" dependencies = [ "base16ct", "num-bigint", @@ -278,7 +352,7 @@ dependencies = [ [[package]] name = "fayalite-visit-gen" -version = "0.2.0" +version = "0.3.0" dependencies = [ "indexmap", "prettyplease", @@ -290,6 +364,18 @@ dependencies = [ "thiserror", ] +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "funty" version = "2.0.0" @@ -306,6 +392,18 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi", +] + [[package]] name = "glob" version = "0.3.1" @@ -314,12 +412,13 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ - "ahash", "allocator-api2", + "equivalent", + "foldhash", ] [[package]] @@ -334,7 +433,7 @@ version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -345,9 +444,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "2.2.6" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", "hashbrown", @@ -367,10 +466,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] -name = "libc" -version = "0.2.153" +name = "jobslot" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c" +dependencies = [ + "cfg-if", + "derive_destructure2", + "getrandom", + "libc", + "scopeguard", + "windows-sys 0.61.2", +] + +[[package]] +name = "libc" +version = "0.2.176" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "linux-raw-sys" @@ -380,11 +493,10 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -413,6 +525,29 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +[[package]] +name = "ordered-float" +version = "5.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d" +dependencies = [ + "num-traits", + "rand", + "serde", +] + +[[package]] +name = "petgraph" +version = "0.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06" +dependencies = [ + "fixedbitset", + "hashbrown", + "indexmap", + "serde", +] + [[package]] name = "prettyplease" version = "0.2.20" @@ -425,9 +560,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.83" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -441,12 +576,37 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "radium" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core", + "serde", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "serde", +] + [[package]] name = "rustix" version = "0.38.31" @@ -457,7 +617,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -466,6 +626,12 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + [[package]] name = "serde" version = "1.0.202" @@ -509,6 +675,12 @@ dependencies = [ "digest", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "strsim" version = "0.11.1" @@ -517,9 +689,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.66" +version = "2.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058" dependencies = [ "proc-macro2", "quote", @@ -541,7 +713,7 @@ dependencies = [ "cfg-if", "fastrand", "rustix", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -606,12 +778,36 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wasi" +version = "0.14.7+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "which" version = "6.0.1" @@ -655,6 +851,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.52.0" @@ -665,14 +867,24 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.52.4" +name = "windows-sys" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", + "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", @@ -681,45 +893,51 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = 
"147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winsafe" @@ -727,6 +945,12 @@ version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + [[package]] name = "wyz" version = "0.5.1" @@ -735,23 +959,3 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" dependencies = [ "tap", ] - -[[package]] -name = "zerocopy" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] diff --git a/Cargo.toml b/Cargo.toml index f9c13cb..2380ea7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,31 +5,42 @@ resolver = "2" members = ["crates/*"] [workspace.package] -version = "0.2.0" +version = "0.3.0" license = "LGPL-3.0-or-later" -edition = "2021" +edition = "2024" repository = "https://git.libre-chip.org/libre-chip/fayalite" keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"] categories = ["simulation", "development-tools", "compilers"] -rust-version = "1.79" +rust-version = "1.89.0" [workspace.dependencies] -fayalite-proc-macros = { version = "=0.2.0", path = "crates/fayalite-proc-macros" } -fayalite-proc-macros-impl = { version = "=0.2.0", path = "crates/fayalite-proc-macros-impl" } -fayalite-visit-gen = { version = "=0.2.0", path = "crates/fayalite-visit-gen" } +fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" } +fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" } +fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" } base16ct = "0.2.0" +base64 = "0.22.1" bitvec = { version = "1.0.1", features = ["serde"] } -hashbrown = "0.14.3" -indexmap = { version = "2.2.6", features = ["serde"] } -num-bigint = "0.4.4" +blake3 = { version = "1.5.4", features = ["serde"] } +clap = { version = "4.5.9", features = ["derive", "env", "string"] } +clap_complete = "4.5.58" +ctor = "0.2.8" +eyre = "0.6.12" +hashbrown = "0.15.2" +indexmap = { version = "2.5.0", features = ["serde"] } +jobslot = "0.2.23" +num-bigint = "0.4.6" num-traits = "0.2.16" +ordered-float = { version = "5.1.0", features = ["serde"] } +petgraph = "0.8.1" prettyplease = "0.2.20" proc-macro2 = "1.0.83" quote = "1.0.36" serde = { version = "1.0.202", features = ["derive"] } 
serde_json = { version = "1.0.117", features = ["preserve_order"] } sha2 = "0.10.8" -syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] } +syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] } tempfile = "3.10.1" thiserror = "1.0.61" trybuild = "1.0" +vec_map = "0.8.2" +which = "6.0.1" diff --git a/README.md b/README.md index 6e14e9f..8e7f275 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,84 @@ + # Fayalite Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/). [FIRRTL]: https://github.com/chipsalliance/firrtl-spec + +# Building the [Blinky example] for the Arty A7 100T on Linux + +[Blinky example]: crates/fayalite/examples/blinky.rs + +This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs; the sources for the container image are at https://git.libre-chip.org/libre-chip/fayalite-deps + +Steps: + +Install podman (or docker). + +Run: +```bash +podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out +``` + +To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS: + +[openFPGALoader]: https://github.com/trabucayre/openFPGALoader + +On Debian 12: +```bash +sudo apt update && sudo apt install openfpgaloader +``` + +Then program the FPGA: +```bash +sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit +``` + +This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert to the previous Flash contents when the board is power-cycled. + +To also program the Flash, so the design persists when the board is power-cycled: + +```bash +sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit +``` + +# Building the [Transmit-only UART example] for the Arty A7 100T on Linux + +[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs + +Follow the steps above for building the Blinky example, but replace `blinky` with `tx_only_uart` (the complete container command with that substitution applied is shown at the end of this section). + +View the output using [tio](https://github.com/tio/tio), which you can install on Debian using `apt`. + +Find the correct USB device: +```bash +sudo tio --list +``` + +You want the device with a name like the following (note the `if01` suffix; `if00` is presumably the JTAG port): +`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0` + +Connect to the serial port: +```bash +sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here +``` + +You'll see (repeating endlessly): +```text +Hello World from Fayalite!!! +Hello World from Fayalite!!! +Hello World from Fayalite!!! +``` + +Press Ctrl+T then `q` to exit tio.
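+For convenience, here is the complete container command with `blinky` replaced by `tx_only_uart`. It is simply the podman command from the Blinky section above with the example name and output directory swapped (the CI workflow runs the same `cargo run --example tx_only_uart` invocation):
+```bash
+podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example tx_only_uart yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/tx_only_uart-out
+```
+
+Program the board with openFPGALoader as in the Blinky section, pointing it at the bitstream generated under `target/tx_only_uart-out/`.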
+ +# Funding + +## NLnet Grants + +* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md) + +This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement № [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI). diff --git a/crates/fayalite-proc-macros-impl/Cargo.toml b/crates/fayalite-proc-macros-impl/Cargo.toml index 31c4465..d56f03d 100644 --- a/crates/fayalite-proc-macros-impl/Cargo.toml +++ b/crates/fayalite-proc-macros-impl/Cargo.toml @@ -13,11 +13,11 @@ rust-version.workspace = true version.workspace = true [dependencies] -base16ct = { workspace = true } -num-bigint = { workspace = true } -prettyplease = { workspace = true } -proc-macro2 = { workspace = true } -quote = { workspace = true } -sha2 = { workspace = true } -syn = { workspace = true } -tempfile = { workspace = true } +base16ct.workspace = true +num-bigint.workspace = true +prettyplease.workspace = true +proc-macro2.workspace = true +quote.workspace = true +sha2.workspace = true +syn.workspace = true +tempfile.workspace = true diff --git a/crates/fayalite-proc-macros-impl/src/fold.rs b/crates/fayalite-proc-macros-impl/src/fold.rs index 1e1ff42..22e7b82 100644 --- a/crates/fayalite-proc-macros-impl/src/fold.rs +++ b/crates/fayalite-proc-macros-impl/src/fold.rs @@ -220,29 +220,36 @@ forward_fold!(syn::ExprArray => fold_expr_array); forward_fold!(syn::ExprCall => fold_expr_call); forward_fold!(syn::ExprIf => fold_expr_if); forward_fold!(syn::ExprMatch => fold_expr_match); +forward_fold!(syn::ExprMethodCall => fold_expr_method_call); forward_fold!(syn::ExprPath => fold_expr_path); forward_fold!(syn::ExprRepeat => fold_expr_repeat); forward_fold!(syn::ExprStruct => fold_expr_struct); forward_fold!(syn::ExprTuple => fold_expr_tuple); +forward_fold!(syn::FieldPat => fold_field_pat); forward_fold!(syn::Ident => fold_ident); forward_fold!(syn::Member => fold_member); forward_fold!(syn::Path => fold_path); forward_fold!(syn::Type => fold_type); forward_fold!(syn::TypePath => fold_type_path); forward_fold!(syn::WherePredicate => fold_where_predicate); +no_op_fold!(proc_macro2::Span); no_op_fold!(syn::parse::Nothing); no_op_fold!(syn::token::Brace); no_op_fold!(syn::token::Bracket); +no_op_fold!(syn::token::Group); no_op_fold!(syn::token::Paren); no_op_fold!(syn::Token![_]); no_op_fold!(syn::Token![,]); no_op_fold!(syn::Token![;]); no_op_fold!(syn::Token![:]); +no_op_fold!(syn::Token![::]); no_op_fold!(syn::Token![..]); no_op_fold!(syn::Token![.]); no_op_fold!(syn::Token![#]); +no_op_fold!(syn::Token![<]); no_op_fold!(syn::Token![=]); no_op_fold!(syn::Token![=>]); +no_op_fold!(syn::Token![>]); no_op_fold!(syn::Token![|]); no_op_fold!(syn::Token![enum]); no_op_fold!(syn::Token![extern]); @@ -251,3 +258,4 @@ no_op_fold!(syn::Token![mut]); no_op_fold!(syn::Token![static]); 
no_op_fold!(syn::Token![struct]); no_op_fold!(syn::Token![where]); +no_op_fold!(usize); diff --git a/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs b/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs new file mode 100644 index 0000000..09189bd --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs @@ -0,0 +1,1203 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use crate::{ + Errors, HdlAttr, PairsIterExt, + hdl_type_common::{ + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, ParsedFieldsNamed, ParsedGenerics, + SplitForImpl, TypesParser, WrappedInConst, common_derives, get_target, + }, + kw, +}; +use proc_macro2::TokenStream; +use quote::{ToTokens, format_ident, quote_spanned}; +use syn::{ + AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed, + GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, parse_quote, + parse_quote_spanned, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::Brace, +}; + +#[derive(Clone, Debug)] +pub(crate) struct ParsedBundle { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) vis: Visibility, + pub(crate) struct_token: Token![struct], + pub(crate) ident: Ident, + pub(crate) generics: MaybeParsed, + pub(crate) fields: MaybeParsed, + pub(crate) field_flips: Vec>>, + pub(crate) mask_type_ident: Ident, + pub(crate) mask_type_match_variant_ident: Ident, + pub(crate) mask_type_sim_value_ident: Ident, + pub(crate) match_variant_ident: Ident, + pub(crate) sim_value_ident: Ident, + pub(crate) builder_ident: Ident, + pub(crate) mask_type_builder_ident: Ident, +} + +impl ParsedBundle { + fn parse_field( + errors: &mut Errors, + field: &mut Field, + index: usize, + ) -> Option> { + let Field { + attrs, + vis: _, + mutability, + ident, + colon_token, + ty, + } = field; + let ident = ident.get_or_insert_with(|| format_ident!("_{}", index, span = ty.span())); + if !matches!(mutability, FieldMutability::None) { + // FIXME: use mutability as the spanned tokens, + // blocked on https://github.com/dtolnay/syn/issues/1717 + errors.error(&ident, "field mutability is not supported"); + *mutability = FieldMutability::None; + } + *mutability = FieldMutability::None; + colon_token.get_or_insert(Token![:](ident.span())); + errors.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs)) + } + fn parse(item: ItemStruct) -> syn::Result { + let ItemStruct { + mut attrs, + vis, + struct_token, + ident, + mut generics, + fields, + semi_token, + } = item; + let mut errors = Errors::new(); + let mut options = errors + .unwrap_or_default(HdlAttr::::parse_and_take_attr( + &mut attrs, + )) + .unwrap_or_default(); + errors.ok(options.body.validate()); + let ItemOptions { + outline_generated: _, + target: _, + custom_bounds, + no_static: _, + no_runtime_generics: _, + cmp_eq: _, + } = options.body; + let mut fields = match fields { + syn::Fields::Named(fields) => fields, + syn::Fields::Unnamed(fields) => { + errors.error(&fields, "#[hdl] struct must use curly braces: {}"); + FieldsNamed { + brace_token: Brace(fields.paren_token.span), + named: fields.unnamed, + } + } + syn::Fields::Unit => { + errors.error(&fields, "#[hdl] struct must use curly braces: {}"); + FieldsNamed { + brace_token: Brace(semi_token.unwrap_or_default().span), + named: Punctuated::default(), + } + } + }; + let mut field_flips = Vec::with_capacity(fields.named.len()); + for (index, field) in fields.named.iter_mut().enumerate() { + field_flips.push(Self::parse_field(&mut 
errors, field, index)); + } + let generics = if custom_bounds.is_some() { + MaybeParsed::Unrecognized(generics) + } else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) { + MaybeParsed::Parsed(generics) + } else { + MaybeParsed::Unrecognized(generics) + }; + let fields = TypesParser::maybe_run(generics.as_ref(), fields, &mut errors); + errors.finish()?; + Ok(Self { + attrs, + options, + vis, + struct_token, + generics, + fields, + field_flips, + mask_type_ident: format_ident!("__{}__MaskType", ident), + mask_type_match_variant_ident: format_ident!("__{}__MaskType__MatchVariant", ident), + mask_type_sim_value_ident: format_ident!("__{}__MaskType__SimValue", ident), + match_variant_ident: format_ident!("__{}__MatchVariant", ident), + sim_value_ident: format_ident!("__{}__SimValue", ident), + mask_type_builder_ident: format_ident!("__{}__MaskType__Builder", ident), + builder_ident: format_ident!("__{}__Builder", ident), + ident, + }) + } +} + +#[derive(Clone, Debug)] +struct Builder { + vis: Visibility, + struct_token: Token![struct], + ident: Ident, + target: Path, + generics: Generics, + fields: FieldsNamed, +} + +#[derive(Copy, Clone, Eq, PartialEq, Debug)] +enum BuilderFieldState { + Unfilled, + Generic, + Filled, +} + +impl Builder { + fn phantom_field_name(&self) -> Ident { + format_ident!("__phantom", span = self.ident.span()) + } + fn phantom_field(&self) -> Field { + let target = &self.target; + let type_generics = self.generics.split_for_impl().1; + Field { + attrs: vec![], + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: Some(self.phantom_field_name()), + colon_token: Some(Token![:](self.ident.span())), + ty: parse_quote_spanned! {self.ident.span()=> + ::fayalite::__std::marker::PhantomData<#target #type_generics> + }, + } + } + fn builder_struct_generics( + &self, + mut get_field_state: impl FnMut(usize) -> BuilderFieldState, + ) -> Generics { + let mut retval = self.generics.clone(); + for param in retval.params.iter_mut() { + match param { + GenericParam::Lifetime(_) => {} + GenericParam::Type(param) => param.default = None, + GenericParam::Const(param) => param.default = None, + } + } + for (field_index, field) in self.fields.named.iter().enumerate() { + match get_field_state(field_index) { + BuilderFieldState::Unfilled | BuilderFieldState::Filled => continue, + BuilderFieldState::Generic => {} + } + if !retval.params.empty_or_trailing() { + retval.params.push_punct(Token![,](self.ident.span())); + } + retval.params.push_value(GenericParam::Type( + type_var_for_field_name(field.ident.as_ref().unwrap()).into(), + )); + } + retval + } + fn builder_struct_ty( + &self, + mut get_field_state: impl FnMut(usize) -> BuilderFieldState, + ) -> Type { + let mut ty_arguments: AngleBracketedGenericArguments = if self.generics.params.is_empty() { + parse_quote_spanned! {self.ident.span()=> + <> + } + } else { + let builder_type_generics = self.generics.split_for_impl().1; + parse_quote! { #builder_type_generics } + }; + for (field_index, Field { ident, ty, .. }) in self.fields.named.iter().enumerate() { + let ident = ident.as_ref().unwrap(); + if !ty_arguments.args.empty_or_trailing() { + ty_arguments.args.push_punct(Token![,](self.ident.span())); + } + ty_arguments + .args + .push_value(match get_field_state(field_index) { + BuilderFieldState::Unfilled => parse_quote_spanned! {self.ident.span()=> + ::fayalite::bundle::Unfilled<#ty> + }, + BuilderFieldState::Generic => { + let type_var = type_var_for_field_name(ident); + parse_quote_spanned! 
{self.ident.span()=> + #type_var + } + } + BuilderFieldState::Filled => parse_quote_spanned! {self.ident.span()=> + ::fayalite::expr::Expr<#ty> + }, + }); + } + let ident = &self.ident; + parse_quote_spanned! {ident.span()=> + #ident #ty_arguments + } + } +} + +fn type_var_for_field_name(ident: &Ident) -> Ident { + format_ident!("__T_{}", ident) +} + +fn field_fn_for_field_name(ident: &Ident) -> Ident { + format_ident!("field_{}", ident) +} + +impl ToTokens for Builder { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + vis, + struct_token, + ident, + target, + generics: _, + fields, + } = self; + let phantom_field_name = self.phantom_field_name(); + let builder_struct = ItemStruct { + attrs: vec![parse_quote_spanned! {ident.span()=> + #[allow(non_camel_case_types, dead_code)] + }], + vis: vis.clone(), + struct_token: *struct_token, + ident: ident.clone(), + generics: self.builder_struct_generics(|_| BuilderFieldState::Generic), + fields: Fields::Named(FieldsNamed { + brace_token: fields.brace_token, + named: Punctuated::from_iter( + [Pair::Punctuated( + self.phantom_field(), + Token![,](self.ident.span()), + )] + .into_iter() + .chain(fields.named.pairs().map_pair_value_ref(|field| { + let ident = field.ident.as_ref().unwrap(); + let type_var = type_var_for_field_name(ident); + Field { + vis: Visibility::Inherited, + ty: parse_quote_spanned! {ident.span()=> + #type_var + }, + ..field.clone() + } + })), + ), + }), + semi_token: None, + }; + builder_struct.to_tokens(tokens); + let field_idents = Vec::from_iter( + self.fields + .named + .iter() + .map(|field| field.ident.as_ref().unwrap()), + ); + for ( + field_index, + Field { + vis, + ident: field_ident, + ty, + .. + }, + ) in self.fields.named.iter().enumerate() + { + let field_ident = field_ident.as_ref().unwrap(); + let fn_ident = field_fn_for_field_name(field_ident); + let fn_generics = self.builder_struct_generics(|i| { + if i == field_index { + BuilderFieldState::Unfilled + } else { + BuilderFieldState::Generic + } + }); + let (impl_generics, _, where_clause) = fn_generics.split_for_impl(); + let unfilled_ty = self.builder_struct_ty(|i| { + if i == field_index { + BuilderFieldState::Unfilled + } else { + BuilderFieldState::Generic + } + }); + let filled_ty = self.builder_struct_ty(|i| { + if i == field_index { + BuilderFieldState::Filled + } else { + BuilderFieldState::Generic + } + }); + let pat_fields = + Vec::from_iter(self.fields.named.iter().enumerate().map(|(i, field)| { + let field_ident = field.ident.as_ref().unwrap(); + if field_index == i { + quote_spanned! {self.ident.span()=> + #field_ident: _, + } + } else { + quote_spanned! {self.ident.span()=> + #field_ident, + } + } + })); + quote_spanned! {self.ident.span()=> + #[allow(non_camel_case_types, non_snake_case, dead_code)] + impl #impl_generics #unfilled_ty + #where_clause + { + #vis fn #fn_ident( + self, + #field_ident: impl ::fayalite::expr::ToExpr, + ) -> #filled_ty { + let Self { + #phantom_field_name: _, + #(#pat_fields)* + } = self; + let #field_ident = ::fayalite::expr::ToExpr::to_expr(&#field_ident); + #ident { + #phantom_field_name: ::fayalite::__std::marker::PhantomData, + #(#field_idents,)* + } + } + } + } + .to_tokens(tokens); + } + let unfilled_generics = self.builder_struct_generics(|_| BuilderFieldState::Unfilled); + let unfilled_ty = self.builder_struct_ty(|_| BuilderFieldState::Unfilled); + let (unfilled_impl_generics, _, unfilled_where_clause) = unfilled_generics.split_for_impl(); + quote_spanned! 
{self.ident.span()=> + #[automatically_derived] + #[allow(non_camel_case_types, dead_code)] + impl #unfilled_impl_generics ::fayalite::__std::default::Default for #unfilled_ty + #unfilled_where_clause + { + fn default() -> Self { + #ident { + #phantom_field_name: ::fayalite::__std::marker::PhantomData, + #(#field_idents: ::fayalite::__std::default::Default::default(),)* + } + } + } + } + .to_tokens(tokens); + let filled_generics = self.builder_struct_generics(|_| BuilderFieldState::Filled); + let filled_ty = self.builder_struct_ty(|_| BuilderFieldState::Filled); + let (filled_impl_generics, _, filled_where_clause) = filled_generics.split_for_impl(); + let type_generics = self.generics.split_for_impl().1; + quote_spanned! {self.ident.span()=> + #[automatically_derived] + #[allow(non_camel_case_types, dead_code)] + impl #filled_impl_generics ::fayalite::expr::ToExpr for #filled_ty + #filled_where_clause + { + type Type = #target #type_generics; + fn to_expr( + &self, + ) -> ::fayalite::expr::Expr<::Type> { + let __ty = #target { + #(#field_idents: ::fayalite::expr::Expr::ty(self.#field_idents),)* + }; + let __field_values = [ + #(::fayalite::expr::Expr::canonical(self.#field_idents),)* + ]; + ::fayalite::expr::ToExpr::to_expr( + &::fayalite::expr::ops::BundleLiteral::new( + __ty, + ::fayalite::intern::Intern::intern(&__field_values[..]), + ), + ) + } + } + } + .to_tokens(tokens); + } +} + +impl ToTokens for ParsedBundle { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + vis, + struct_token, + ident, + generics, + fields, + field_flips, + mask_type_ident, + mask_type_match_variant_ident, + mask_type_sim_value_ident, + match_variant_ident, + sim_value_ident, + builder_ident, + mask_type_builder_ident, + } = self; + let span = ident.span(); + let ItemOptions { + outline_generated: _, + target, + custom_bounds: _, + no_static, + no_runtime_generics, + cmp_eq, + } = &options.body; + let target = get_target(target, ident); + let mut item_attrs = attrs.clone(); + item_attrs.push(common_derives(span)); + ItemStruct { + attrs: item_attrs, + vis: vis.clone(), + struct_token: *struct_token, + ident: ident.clone(), + generics: generics.into(), + fields: Fields::Named(fields.clone().into()), + semi_token: None, + } + .to_tokens(tokens); + let (impl_generics, type_generics, where_clause) = generics.split_for_impl(); + if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(fields), None) = + (generics, fields, no_runtime_generics) + { + generics.make_runtime_generics(tokens, vis, ident, &target, |context| { + let fields: Vec<_> = fields + .named + .iter() + .map(|ParsedField { ident, ty, .. }| { + let ident = ident.as_ref().unwrap(); + let expr = ty.make_hdl_type_expr(context); + quote_spanned! {span=> + #ident: #expr, + } + }) + .collect(); + parse_quote_spanned! {span=> + #target { + #(#fields)* + } + } + }) + } + let mut wrapped_in_const = WrappedInConst::new(tokens, span); + let tokens = wrapped_in_const.inner(); + let builder = Builder { + vis: vis.clone(), + struct_token: *struct_token, + ident: builder_ident.clone(), + target: target.clone(), + generics: generics.into(), + fields: fields.clone().into(), + }; + builder.to_tokens(tokens); + let unfilled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Unfilled); + let filled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Filled); + let mut mask_type_fields = FieldsNamed::from(fields.clone()); + for Field { ty, .. } in &mut mask_type_fields.named { + *ty = parse_quote_spanned! 
{span=> + <#ty as ::fayalite::ty::Type>::MaskType + }; + } + let mask_type_builder = Builder { + vis: vis.clone(), + struct_token: *struct_token, + ident: mask_type_builder_ident.clone(), + target: mask_type_ident.clone().into(), + generics: generics.into(), + fields: mask_type_fields.clone(), + }; + mask_type_builder.to_tokens(tokens); + let unfilled_mask_type_builder_ty = + mask_type_builder.builder_struct_ty(|_| BuilderFieldState::Unfilled); + let filled_mask_type_builder_ty = + mask_type_builder.builder_struct_ty(|_| BuilderFieldState::Filled); + ItemStruct { + attrs: vec![ + common_derives(span), + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: mask_type_ident.clone(), + generics: generics.into(), + fields: Fields::Named(mask_type_fields.clone()), + semi_token: None, + } + .to_tokens(tokens); + let mut mask_type_match_variant_fields = mask_type_fields.clone(); + for Field { ty, .. } in &mut mask_type_match_variant_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::expr::Expr<#ty> + }; + } + ItemStruct { + attrs: vec![ + common_derives(span), + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: mask_type_match_variant_ident.clone(), + generics: generics.into(), + fields: Fields::Named(mask_type_match_variant_fields), + semi_token: None, + } + .to_tokens(tokens); + let mut match_variant_fields = FieldsNamed::from(fields.clone()); + for Field { ty, .. } in &mut match_variant_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::expr::Expr<#ty> + }; + } + ItemStruct { + attrs: vec![ + common_derives(span), + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: match_variant_ident.clone(), + generics: generics.into(), + fields: Fields::Named(match_variant_fields), + semi_token: None, + } + .to_tokens(tokens); + let mut mask_type_sim_value_fields = mask_type_fields; + for Field { ty, .. } in &mut mask_type_sim_value_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }; + } + ItemStruct { + attrs: vec![ + parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }, + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: mask_type_sim_value_ident.clone(), + generics: generics.into(), + fields: Fields::Named(mask_type_sim_value_fields), + semi_token: None, + } + .to_tokens(tokens); + let mut sim_value_fields = FieldsNamed::from(fields.clone()); + for Field { ty, .. } in &mut sim_value_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }; + } + ItemStruct { + attrs: vec![ + parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }, + parse_quote_spanned! 
{span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: sim_value_ident.clone(), + generics: generics.into(), + fields: Fields::Named(sim_value_fields), + semi_token: None, + } + .to_tokens(tokens); + let this_token = Ident::new("__this", span); + let fields_token = Ident::new("__fields", span); + let self_token = Token![self](span); + let match_variant_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + let ident_str = ident.to_string(); + quote_spanned! {span=> + #ident: ::fayalite::expr::Expr::field(#this_token, #ident_str), + } + })); + let mask_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + #ident: ::fayalite::ty::Type::mask_type(&#self_token.#ident), + } + })); + let from_canonical_body_fields = + Vec::from_iter(fields.named().into_iter().enumerate().zip(field_flips).map( + |((index, field), flip)| { + let ident: &Ident = field.ident().as_ref().unwrap(); + let ident_str = ident.to_string(); + let not_flipped = flip.is_none().then(|| Token![!](span)); + quote_spanned! {span=> + #ident: { + let ::fayalite::bundle::BundleField { + name: __name, + flipped: __flipped, + ty: __ty, + } = #fields_token[#index]; + ::fayalite::__std::assert_eq!(&*__name, #ident_str); + ::fayalite::__std::assert!(#not_flipped __flipped); + ::fayalite::ty::Type::from_canonical(__ty) + }, + } + }, + )); + let fields_body_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map( + |(field, flip)| { + let ident: &Ident = field.ident().as_ref().unwrap(); + let ident_str = ident.to_string(); + let flipped = flip.is_some(); + quote_spanned! {span=> + ::fayalite::bundle::BundleField { + name: ::fayalite::intern::Intern::intern(#ident_str), + flipped: #flipped, + ty: ::fayalite::ty::Type::canonical(&#self_token.#ident), + }, + } + }, + )); + let sim_value_from_opaque_fields = + Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + #ident: v.field_from_opaque(), + } + })); + let sim_value_clone_from_opaque_fields = + Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + v.field_clone_from_opaque(&mut value.#ident); + } + })); + let sim_value_to_opaque_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + v.field(&value.#ident); + } + })); + let to_sim_value_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + #ident: ::fayalite::sim::value::SimValue::ty(&self.#ident), + } + })); + let fields_len = fields.named().into_iter().len(); + quote_spanned! 
{span=> + #[automatically_derived] + impl #impl_generics ::fayalite::ty::Type for #mask_type_ident #type_generics + #where_clause + { + type BaseType = ::fayalite::bundle::Bundle; + type MaskType = #mask_type_ident #type_generics; + type SimValue = #mask_type_sim_value_ident #type_generics; + type MatchVariant = #mask_type_match_variant_ident #type_generics; + type MatchActiveScope = (); + type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope< + ::MatchVariant, + >; + type MatchVariantsIter = ::fayalite::__std::iter::Once< + ::MatchVariantAndInactiveScope, + >; + fn match_variants( + #this_token: ::fayalite::expr::Expr, + __source_location: ::fayalite::source_location::SourceLocation, + ) -> ::MatchVariantsIter { + let __retval = #mask_type_match_variant_ident { + #(#match_variant_body_fields)* + }; + ::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval)) + } + fn mask_type(&#self_token) -> ::MaskType { + *#self_token + } + fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType { + ::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token))) + } + #[track_caller] + fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self { + let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else { + ::fayalite::__std::panic!("expected bundle"); + }; + let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle); + ::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields"); + Self { + #(#from_canonical_body_fields)* + } + } + fn source_location() -> ::fayalite::source_location::SourceLocation { + ::fayalite::source_location::SourceLocation::caller() + } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #mask_type_sim_value_ident { + #(#sim_value_from_opaque_fields)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #(#sim_value_clone_from_opaque_fields)* + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer); + #(#sim_value_to_opaque_fields)* + v.finish() + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::bundle::BundleType for #mask_type_ident #type_generics + #where_clause + { + type Builder = #unfilled_mask_type_builder_ty; + type FilledBuilder = #filled_mask_type_builder_ty; + fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> { + ::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..]) + } + } + impl #impl_generics #mask_type_ident #type_generics + #where_clause + { + #vis fn __bundle_builder() -> #unfilled_mask_type_builder_ty { + ::fayalite::__std::default::Default::default() + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::ty::TypeWithDeref for #mask_type_ident #type_generics + #where_clause + { + fn expr_deref(#this_token: &::fayalite::expr::Expr) -> &::MatchVariant { + let #this_token = 
*#this_token; + let __retval = #mask_type_match_variant_ident { + #(#match_variant_body_fields)* + }; + ::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval)) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValue for #mask_type_sim_value_ident #type_generics + #where_clause + { + type Type = #mask_type_ident #type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #mask_type_ident { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #mask_type_ident { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#mask_type_ident #type_generics> + for #mask_type_sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #mask_type_ident #type_generics, + ) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #mask_type_ident #type_generics, + ) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::ty::Type for #target #type_generics + #where_clause + { + type BaseType = ::fayalite::bundle::Bundle; + type MaskType = #mask_type_ident #type_generics; + type SimValue = #sim_value_ident #type_generics; + type MatchVariant = #match_variant_ident #type_generics; + type MatchActiveScope = (); + type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope< + ::MatchVariant, + >; + type MatchVariantsIter = ::fayalite::__std::iter::Once< + ::MatchVariantAndInactiveScope, + >; + fn match_variants( + #this_token: ::fayalite::expr::Expr, + __source_location: ::fayalite::source_location::SourceLocation, + ) -> ::MatchVariantsIter { + let __retval = #match_variant_ident { + #(#match_variant_body_fields)* + }; + ::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval)) + } + fn mask_type(&#self_token) -> ::MaskType { + #mask_type_ident { + #(#mask_type_body_fields)* + } + } + fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType { + ::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token))) + } + #[track_caller] + fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self { + let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else { + ::fayalite::__std::panic!("expected bundle"); + }; + let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle); + ::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields"); + Self { + #(#from_canonical_body_fields)* + } + } + fn source_location() -> ::fayalite::source_location::SourceLocation { + ::fayalite::source_location::SourceLocation::caller() + } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #sim_value_ident { + 
#(#sim_value_from_opaque_fields)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #(#sim_value_clone_from_opaque_fields)* + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer); + #(#sim_value_to_opaque_fields)* + v.finish() + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::bundle::BundleType for #target #type_generics + #where_clause + { + type Builder = #unfilled_builder_ty; + type FilledBuilder = #filled_builder_ty; + fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> { + ::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..]) + } + } + impl #impl_generics #target #type_generics + #where_clause + { + #vis fn __bundle_builder() -> #unfilled_builder_ty { + ::fayalite::__std::default::Default::default() + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::ty::TypeWithDeref for #target #type_generics + #where_clause + { + fn expr_deref(#this_token: &::fayalite::expr::Expr) -> &::MatchVariant { + let #this_token = *#this_token; + let __retval = #match_variant_ident { + #(#match_variant_body_fields)* + }; + ::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval)) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValue for #sim_value_ident #type_generics + #where_clause + { + type Type = #target #type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #target { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #target { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics> + for #sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + } + .to_tokens(tokens); + if let Some((cmp_eq,)) = cmp_eq { + let mut expr_where_clause = + Generics::from(generics) + .where_clause + .unwrap_or_else(|| syn::WhereClause { + where_token: Token![where](span), + predicates: Punctuated::new(), + }); + let mut sim_value_where_clause = expr_where_clause.clone(); + let mut fields_sim_value_eq = vec![]; + let mut fields_cmp_eq = vec![]; + let mut fields_cmp_ne = vec![]; + for field in fields.named() { + let field_ident = field.ident(); + let field_ty = field.ty(); + expr_where_clause + .predicates + .push(parse_quote_spanned! 
{cmp_eq.span=> + #field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty> + }); + sim_value_where_clause + .predicates + .push(parse_quote_spanned! {cmp_eq.span=> + #field_ty: ::fayalite::sim::value::SimValuePartialEq<#field_ty> + }); + fields_sim_value_eq.push(quote_spanned! {span=> + ::fayalite::sim::value::SimValuePartialEq::sim_value_eq(&__lhs.#field_ident, &__rhs.#field_ident) + }); + fields_cmp_eq.push(quote_spanned! {span=> + ::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident) + }); + fields_cmp_ne.push(quote_spanned! {span=> + ::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident) + }); + } + let sim_value_eq_body; + let cmp_eq_body; + let cmp_ne_body; + if fields_len == 0 { + sim_value_eq_body = quote_spanned! {span=> + true + }; + cmp_eq_body = quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&true) + }; + cmp_ne_body = quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&false) + }; + } else { + sim_value_eq_body = quote_spanned! {span=> + #(#fields_sim_value_eq)&&* + }; + cmp_eq_body = quote_spanned! {span=> + #(#fields_cmp_eq)&* + }; + cmp_ne_body = quote_spanned! {span=> + #(#fields_cmp_ne)|* + }; + }; + quote_spanned! {span=> + #[automatically_derived] + impl #impl_generics ::fayalite::expr::ops::ExprPartialEq for #target #type_generics + #expr_where_clause + { + fn cmp_eq( + __lhs: ::fayalite::expr::Expr, + __rhs: ::fayalite::expr::Expr, + ) -> ::fayalite::expr::Expr<::fayalite::int::Bool> { + #cmp_eq_body + } + fn cmp_ne( + __lhs: ::fayalite::expr::Expr, + __rhs: ::fayalite::expr::Expr, + ) -> ::fayalite::expr::Expr<::fayalite::int::Bool> { + #cmp_ne_body + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::SimValuePartialEq for #target #type_generics + #sim_value_where_clause + { + fn sim_value_eq( + __lhs: &::fayalite::sim::value::SimValue, + __rhs: &::fayalite::sim::value::SimValue, + ) -> bool { + #sim_value_eq_body + } + } + } + .to_tokens(tokens); + } + if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) { + let static_generics = generics.clone().for_static_type(); + let (static_impl_generics, static_type_generics, static_where_clause) = + static_generics.split_for_impl(); + let static_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + let ty = field.ty(); + quote_spanned! {span=> + #ident: <#ty as ::fayalite::ty::StaticType>::TYPE, + } + })); + let static_mask_type_body_fields = + Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + let ty = field.ty(); + quote_spanned! {span=> + #ident: <#ty as ::fayalite::ty::StaticType>::MASK_TYPE, + } + })); + let type_properties = format_ident!("__type_properties", span = span); + let type_properties_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| { + let flipped = field_flip.is_some(); + let ty = field.ty(); + quote_spanned! {span=> + let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES); + } + })); + let type_properties_mask_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| { + let flipped = field_flip.is_some(); + let ty = field.ty(); + quote_spanned! {span=> + let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::MASK_TYPE_PROPERTIES); + } + })); + quote_spanned! 
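+        // When the struct's generics were parsed and `no_static` is not set, the
+        // quote below also emits `Default` and `StaticType` impls: `TYPE` and
+        // `MASK_TYPE` are assembled from each field's `StaticType` constants, and
+        // `TYPE_PROPERTIES` / `MASK_TYPE_PROPERTIES` fold the per-field properties
+        // through `BundleTypePropertiesBuilder`, passing along the per-field flip
+        // flags captured above.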
{span=> + #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default for #mask_type_ident #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } + #[automatically_derived] + impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics + #static_where_clause + { + const TYPE: Self = Self { + #(#static_mask_type_body_fields)* + }; + const MASK_TYPE: ::MaskType = Self { + #(#static_mask_type_body_fields)* + }; + const TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = { + let #type_properties = ::fayalite::bundle::BundleTypePropertiesBuilder::new(); + #(#type_properties_mask_fields)* + #type_properties.finish() + }; + const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = { + let #type_properties = ::fayalite::bundle::BundleTypePropertiesBuilder::new(); + #(#type_properties_mask_fields)* + #type_properties.finish() + }; + } + #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default + for #target #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } + #[automatically_derived] + impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics + #static_where_clause + { + const TYPE: Self = Self { + #(#static_type_body_fields)* + }; + const MASK_TYPE: ::MaskType = #mask_type_ident { + #(#static_mask_type_body_fields)* + }; + const TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = { + let #type_properties = ::fayalite::bundle::BundleTypePropertiesBuilder::new(); + #(#type_properties_fields)* + #type_properties.finish() + }; + const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = { + let #type_properties = ::fayalite::bundle::BundleTypePropertiesBuilder::new(); + #(#type_properties_mask_fields)* + #type_properties.finish() + }; + } + } + .to_tokens(tokens); + } + } +} + +pub(crate) fn hdl_bundle(item: ItemStruct) -> syn::Result { + let item = ParsedBundle::parse(item)?; + let outline_generated = item.options.body.outline_generated; + let mut contents = item.to_token_stream(); + if outline_generated.is_some() { + contents = crate::outline_generated(contents, "hdl-bundle-"); + } + Ok(contents) +} diff --git a/crates/fayalite-proc-macros-impl/src/hdl_enum.rs b/crates/fayalite-proc-macros-impl/src/hdl_enum.rs new file mode 100644 index 0000000..47a5df1 --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/hdl_enum.rs @@ -0,0 +1,1063 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use crate::{ + Errors, HdlAttr, PairsIterExt, + hdl_type_common::{ + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, SplitForImpl, + TypesParser, WrappedInConst, common_derives, get_target, + }, + kw, +}; +use proc_macro2::TokenStream; +use quote::{ToTokens, format_ident, quote_spanned}; +use syn::{ + Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident, + ItemEnum, ItemStruct, Token, Type, Variant, Visibility, parse_quote_spanned, + punctuated::{Pair, Punctuated}, + token::{Brace, Paren}, +}; + +crate::options! { + #[options = VariantOptions] + pub(crate) enum VariantOption {} +} + +crate::options! 
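+// `ParsedVariant::parse` below accepts only unit variants or variants with exactly
+// one parenthesized field; named or multi-field variants, custom discriminants, and
+// `#[repr]` are rejected with errors. A rough sketch of an enum shape that passes
+// this parser (the enum and variant names are placeholders, and `Bool` merely
+// stands in for a field type):
+//
+//     #[hdl]
+//     enum MyEnum {
+//         A,          // unit variant
+//         B(Bool),    // exactly one parenthesized field
+//     }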
{ + #[options = FieldOptions] + pub(crate) enum FieldOption {} +} + +#[derive(Clone, Debug)] +pub(crate) struct ParsedVariantField { + pub(crate) paren_token: Paren, + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) ty: MaybeParsed, + pub(crate) comma_token: Option, +} + +#[derive(Clone, Debug)] +pub(crate) struct ParsedVariant { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) ident: Ident, + pub(crate) field: Option, +} + +impl ParsedVariant { + fn parse( + errors: &mut Errors, + variant: Variant, + generics: &MaybeParsed, + ) -> Self { + let Variant { + mut attrs, + ident, + fields, + discriminant, + } = variant; + let options = errors + .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs)) + .unwrap_or_default(); + let field = match fields { + Fields::Unnamed(FieldsUnnamed { + paren_token, + unnamed, + }) if unnamed.len() == 1 => { + let (field, comma_token) = unnamed.into_pairs().next().unwrap().into_tuple(); + let Field { + mut attrs, + vis, + mutability, + ident: _, + colon_token: _, + ty, + } = field; + let options = errors + .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs)) + .unwrap_or_default(); + if !matches!(vis, Visibility::Inherited) { + errors.error( + &vis, + "enum variant fields must not have a visibility specifier", + ); + } + if !matches!(mutability, FieldMutability::None) { + // FIXME: use mutability as the spanned tokens, + // blocked on https://github.com/dtolnay/syn/issues/1717 + errors.error(&ty, "field mutability is not supported"); + } + Some(ParsedVariantField { + paren_token, + attrs, + options, + ty: TypesParser::maybe_run(generics.as_ref(), ty, errors), + comma_token, + }) + } + Fields::Unit => None, + Fields::Unnamed(fields) if fields.unnamed.is_empty() => None, + Fields::Named(fields) if fields.named.is_empty() => None, + Fields::Unnamed(_) | Fields::Named(_) => { + errors.error( + fields, + "enum variant must either have no fields or a single parenthesized field", + ); + None + } + }; + if let Some((eq, _)) = discriminant { + errors.error(eq, "custom enum discriminants are not allowed"); + } + Self { + attrs, + options, + ident, + field, + } + } +} + +#[derive(Clone, Debug)] +pub(crate) struct ParsedEnum { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) vis: Visibility, + pub(crate) enum_token: Token![enum], + pub(crate) ident: Ident, + pub(crate) generics: MaybeParsed, + pub(crate) brace_token: Brace, + pub(crate) variants: Punctuated, + pub(crate) match_variant_ident: Ident, + pub(crate) sim_value_ident: Ident, + pub(crate) sim_builder_ident: Ident, + pub(crate) sim_builder_ty_field_ident: Ident, +} + +impl ParsedEnum { + fn parse(item: ItemEnum) -> syn::Result { + let ItemEnum { + mut attrs, + vis, + enum_token, + ident, + mut generics, + brace_token, + variants, + } = item; + let mut errors = Errors::new(); + let mut options = errors + .unwrap_or_default(HdlAttr::::parse_and_take_attr( + &mut attrs, + )) + .unwrap_or_default(); + errors.ok(options.body.validate()); + let ItemOptions { + outline_generated: _, + target: _, + custom_bounds, + no_static: _, + no_runtime_generics: _, + cmp_eq, + } = options.body; + if let Some((cmp_eq,)) = cmp_eq { + errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums"); + } + attrs.retain(|attr| { + if attr.path().is_ident("repr") { + errors.error(attr, "#[repr] is not supported on #[hdl] enums"); + false + } else { + true + } + }); + let generics = if custom_bounds.is_some() { + MaybeParsed::Unrecognized(generics) + } else if 
let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) { + MaybeParsed::Parsed(generics) + } else { + MaybeParsed::Unrecognized(generics) + }; + let variants = Punctuated::from_iter( + variants + .into_pairs() + .map_pair_value(|v| ParsedVariant::parse(&mut errors, v, &generics)), + ); + errors.finish()?; + Ok(Self { + attrs, + options, + vis, + enum_token, + generics, + brace_token, + variants, + match_variant_ident: format_ident!("__{}__MatchVariant", ident), + sim_value_ident: format_ident!("__{}__SimValue", ident), + sim_builder_ident: format_ident!("__{}__SimBuilder", ident), + sim_builder_ty_field_ident: format_ident!("__ty", span = ident.span()), + ident, + }) + } +} + +impl ToTokens for ParsedEnum { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + vis, + enum_token, + ident, + generics, + brace_token, + variants, + match_variant_ident, + sim_value_ident, + sim_builder_ident, + sim_builder_ty_field_ident, + } = self; + let span = ident.span(); + let ItemOptions { + outline_generated: _, + target, + custom_bounds: _, + no_static, + no_runtime_generics, + cmp_eq: _, // TODO: implement cmp_eq for enums + } = &options.body; + let target = get_target(target, ident); + let mut struct_attrs = attrs.clone(); + struct_attrs.push(common_derives(span)); + struct_attrs.push(parse_quote_spanned! {span=> + #[allow(non_snake_case)] + }); + let struct_fields = Punctuated::from_iter(variants.pairs().map_pair_value_ref( + |ParsedVariant { + attrs: _, + options, + ident, + field, + }| { + let VariantOptions {} = options.body; + let colon_token; + let ty = if let Some(ParsedVariantField { + paren_token, + attrs: _, + options, + ty, + comma_token: _, + }) = field + { + let FieldOptions {} = options.body; + colon_token = Token![:](paren_token.span.open()); + ty.clone().into() + } else { + colon_token = Token![:](span); + parse_quote_spanned! {span=> + () + } + }; + Field { + attrs: vec![], + vis: vis.clone(), + mutability: FieldMutability::None, + ident: Some(ident.clone()), + colon_token: Some(colon_token), + ty, + } + }, + )); + ItemStruct { + attrs: struct_attrs, + vis: vis.clone(), + struct_token: Token![struct](enum_token.span), + ident: ident.clone(), + generics: generics.into(), + fields: if struct_fields.is_empty() { + Fields::Unit + } else { + Fields::Named(FieldsNamed { + brace_token: *brace_token, + named: struct_fields, + }) + }, + semi_token: None, + } + .to_tokens(tokens); + let (impl_generics, type_generics, where_clause) = generics.split_for_impl(); + if let (MaybeParsed::Parsed(generics), None) = (generics, no_runtime_generics) { + generics.make_runtime_generics(tokens, vis, ident, &target, |context| { + let fields: Vec<_> = variants + .iter() + .map(|ParsedVariant { ident, field, .. }| { + if let Some(ParsedVariantField { + ty: MaybeParsed::Parsed(ty), + .. + }) = field + { + let expr = ty.make_hdl_type_expr(context); + quote_spanned! {span=> + #ident: #expr, + } + } else { + quote_spanned! {span=> + #ident: (), + } + } + }) + .collect(); + parse_quote_spanned! {span=> + #target { + #(#fields)* + } + } + }) + } + let mut wrapped_in_const = WrappedInConst::new(tokens, span); + let tokens = wrapped_in_const.inner(); + { + let mut wrapped_in_const = WrappedInConst::new(tokens, span); + let tokens = wrapped_in_const.inner(); + let mut enum_attrs = attrs.clone(); + enum_attrs.push(parse_quote_spanned! 
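+        // Besides the value-level struct emitted above, the expansion below re-emits
+        // the user's enum (wrapped in a const and marked `#[allow(dead_code)]`) and
+        // generates three companion items: the `__*__MatchVariant` enum whose
+        // variants carry `Expr<...>` payloads, the `__*__SimBuilder` struct holding
+        // the runtime type, and the `__*__SimValue` enum used for simulation values.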
{span=> + #[allow(dead_code)] + }); + ItemEnum { + attrs: enum_attrs, + vis: vis.clone(), + enum_token: *enum_token, + ident: ident.clone(), + generics: generics.into(), + brace_token: *brace_token, + variants: Punctuated::from_iter(variants.pairs().map_pair_value_ref( + |ParsedVariant { + attrs, + options: _, + ident, + field, + }| Variant { + attrs: attrs.clone(), + ident: ident.clone(), + fields: match field { + Some(ParsedVariantField { + paren_token, + attrs, + options: _, + ty, + comma_token, + }) => Fields::Unnamed(FieldsUnnamed { + paren_token: *paren_token, + unnamed: Punctuated::from_iter([Pair::new( + Field { + attrs: attrs.clone(), + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: ty.clone().into(), + }, + *comma_token, + )]), + }), + None => Fields::Unit, + }, + discriminant: None, + }, + )), + } + .to_tokens(tokens); + } + let mut enum_attrs = attrs.clone(); + enum_attrs.push(parse_quote_spanned! {span=> + #[allow(dead_code, non_camel_case_types)] + }); + ItemEnum { + attrs: enum_attrs, + vis: vis.clone(), + enum_token: *enum_token, + ident: match_variant_ident.clone(), + generics: generics.into(), + brace_token: *brace_token, + variants: Punctuated::from_iter(variants.pairs().map_pair_value_ref( + |ParsedVariant { + attrs, + options: _, + ident, + field, + }| Variant { + attrs: attrs.clone(), + ident: ident.clone(), + fields: match field { + Some(ParsedVariantField { + paren_token, + attrs, + options: _, + ty, + comma_token, + }) => Fields::Unnamed(FieldsUnnamed { + paren_token: *paren_token, + unnamed: Punctuated::from_iter([Pair::new( + Field { + attrs: attrs.clone(), + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: parse_quote_spanned! {span=> + ::fayalite::expr::Expr<#ty> + }, + }, + *comma_token, + )]), + }), + None => Fields::Unit, + }, + discriminant: None, + }, + )), + } + .to_tokens(tokens); + let mut struct_attrs = attrs.clone(); + struct_attrs.push(parse_quote_spanned! {span=> + #[allow(dead_code, non_camel_case_types)] + }); + ItemStruct { + attrs: struct_attrs, + vis: vis.clone(), + struct_token: Token![struct](enum_token.span), + ident: sim_builder_ident.clone(), + generics: generics.into(), + fields: FieldsNamed { + brace_token: *brace_token, + named: Punctuated::from_iter([Field { + attrs: vec![], + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: Some(sim_builder_ty_field_ident.clone()), + colon_token: Some(Token![:](span)), + ty: parse_quote_spanned! {span=> + #target #type_generics + }, + }]), + } + .into(), + semi_token: None, + } + .to_tokens(tokens); + let mut enum_attrs = attrs.clone(); + enum_attrs.push(parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }); + enum_attrs.push(parse_quote_spanned! {span=> + #[allow(dead_code, non_camel_case_types)] + }); + let sim_value_has_unknown_variant = !variants.len().is_power_of_two(); + let sim_value_unknown_variant_name = sim_value_has_unknown_variant.then(|| { + let mut name = String::new(); + let unknown = "Unknown"; + loop { + let orig_len = name.len(); + name.push_str(unknown); + if variants.iter().all(|v| v.ident != name) { + break Ident::new(&name, span); + } + name.truncate(orig_len); + name.push('_'); + } + }); + let sim_value_unknown_variant = + sim_value_unknown_variant_name + .as_ref() + .map(|unknown_variant_name| { + Pair::End(parse_quote_spanned! 
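+        // The `__*__SimValue` enum only gets this extra variant when the number of
+        // user variants is not a power of two, so that discriminant encodings with
+        // no matching declared variant can still be represented (for example, 3
+        // variants need it, 4 do not). The variant name starts as `Unknown` and has
+        // `_` appended until it no longer collides with a user variant.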
{span=> + #unknown_variant_name(::fayalite::enum_::UnknownVariantSimValue) + }) + }); + ItemEnum { + attrs: enum_attrs, + vis: vis.clone(), + enum_token: *enum_token, + ident: sim_value_ident.clone(), + generics: generics.into(), + brace_token: *brace_token, + variants: Punctuated::from_iter( + variants + .pairs() + .map_pair_value_ref( + |ParsedVariant { + attrs, + options: _, + ident, + field, + }| Variant { + attrs: attrs.clone(), + ident: ident.clone(), + fields: match field { + Some(ParsedVariantField { + paren_token, + attrs, + options: _, + ty, + comma_token, + }) => Fields::Unnamed(FieldsUnnamed { + paren_token: *paren_token, + unnamed: Punctuated::from_iter([ + Pair::new( + Field { + attrs: attrs.clone(), + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }, + }, + Some(comma_token.unwrap_or(Token![,](ident.span()))), + ), + Pair::new( + Field { + attrs: vec![], + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: parse_quote_spanned! {span=> + ::fayalite::enum_::EnumPaddingSimValue + }, + }, + None, + ), + ]), + }), + None => Fields::Unnamed(parse_quote_spanned! {span=> + (::fayalite::enum_::EnumPaddingSimValue) + }), + }, + discriminant: None, + }, + ) + .chain(sim_value_unknown_variant), + ), + } + .to_tokens(tokens); + let self_token = Token![self](span); + for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() { + if let Some(ParsedVariantField { ty, .. }) = field { + quote_spanned! {span=> + impl #impl_generics #target #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident<__V: ::fayalite::expr::ToExpr>( + #self_token, + v: __V, + ) -> ::fayalite::expr::Expr { + ::fayalite::expr::ToExpr::to_expr( + &::fayalite::expr::ops::EnumLiteral::new_by_index( + #self_token, + #index, + ::fayalite::__std::option::Option::Some( + ::fayalite::expr::Expr::canonical( + ::fayalite::expr::ToExpr::to_expr(&v), + ), + ), + ), + ) + } + } + impl #impl_generics #sim_builder_ident #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident<__V: ::fayalite::sim::value::ToSimValueWithType<#ty>>( + #self_token, + v: __V, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + let v = ::fayalite::sim::value::ToSimValueWithType::into_sim_value_with_type( + v, + #self_token.#sim_builder_ty_field_ident.#ident, + ); + ::fayalite::sim::value::SimValue::from_value( + #self_token.#sim_builder_ty_field_ident, + #sim_value_ident::#ident(v, ::fayalite::enum_::EnumPaddingSimValue::new()), + ) + } + } + } + } else { + quote_spanned! 
{span=> + impl #impl_generics #target #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident(#self_token) -> ::fayalite::expr::Expr { + ::fayalite::expr::ToExpr::to_expr( + &::fayalite::expr::ops::EnumLiteral::new_by_index( + #self_token, + #index, + ::fayalite::__std::option::Option::None, + ), + ) + } + } + impl #impl_generics #sim_builder_ident #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident(#self_token) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value( + #self_token.#sim_builder_ty_field_ident, + #sim_value_ident::#ident(::fayalite::enum_::EnumPaddingSimValue::new()), + ) + } + } + } + } + .to_tokens(tokens); + } + let variants_token = Ident::new("variants", span); + let from_canonical_body_fields = Vec::from_iter(variants.iter().enumerate().map( + |(index, ParsedVariant { ident, field, .. })| { + let ident_str = ident.to_string(); + let val = if field.is_some() { + let missing_value_msg = format!("expected variant {ident} to have a field"); + quote_spanned! {span=> + ::fayalite::ty::Type::from_canonical(ty.expect(#missing_value_msg)) + } + } else { + quote_spanned! {span=> + ::fayalite::__std::assert!(ty.is_none()); + } + }; + quote_spanned! {span=> + #ident: { + let ::fayalite::enum_::EnumVariant { + name, + ty, + } = #variants_token[#index]; + ::fayalite::__std::assert_eq!(&*name, #ident_str); + #val + }, + } + }, + )); + let variant_access_token = Ident::new("variant_access", span); + let match_active_scope_match_arms = Vec::from_iter(variants.iter().enumerate().map( + |(index, ParsedVariant { ident, field, .. })| { + if field.is_some() { + quote_spanned! {span=> + #index => #match_variant_ident::#ident( + ::fayalite::expr::ToExpr::to_expr( + &::fayalite::expr::ops::VariantAccess::new_by_index( + #variant_access_token.base(), + #variant_access_token.variant_index(), + ), + ), + ), + } + } else { + quote_spanned! {span=> + #index => #match_variant_ident::#ident, + } + } + }, + )); + let variants_body_variants = Vec::from_iter(variants.iter().map( + |ParsedVariant { + attrs: _, + options, + ident, + field, + }| { + let VariantOptions {} = options.body; + let ident_str = ident.to_string(); + match field { + Some(ParsedVariantField { options, .. }) => { + let FieldOptions {} = options.body; + quote_spanned! {span=> + ::fayalite::enum_::EnumVariant { + name: ::fayalite::intern::Intern::intern(#ident_str), + ty: ::fayalite::__std::option::Option::Some( + ::fayalite::ty::Type::canonical(&#self_token.#ident), + ), + }, + } + } + None => quote_spanned! {span=> + ::fayalite::enum_::EnumVariant { + name: ::fayalite::intern::Intern::intern(#ident_str), + ty: ::fayalite::__std::option::Option::None, + }, + }, + } + }, + )); + let sim_value_from_opaque_unknown_match_arm = if let Some(sim_value_unknown_variant_name) = + &sim_value_unknown_variant_name + { + quote_spanned! {span=> + _ => #sim_value_ident::#sim_value_unknown_variant_name(v.unknown_variant_from_opaque()), + } + } else { + quote_spanned! {span=> + _ => ::fayalite::__std::unreachable!(), + } + }; + let sim_value_from_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #index => { + let (field, padding) = v.variant_with_field_from_opaque(); + #sim_value_ident::#ident(field, padding) + } + } + } else { + quote_spanned! 
{span=> + #index => #sim_value_ident::#ident( + v.variant_no_field_from_opaque(), + ), + } + } + }, + ) + .chain([sim_value_from_opaque_unknown_match_arm]), + ); + let sim_value_clone_from_opaque_unknown_match_arm = + if let Some(sim_value_unknown_variant_name) = &sim_value_unknown_variant_name { + quote_spanned! {span=> + _ => if let #sim_value_ident::#sim_value_unknown_variant_name(value) = value { + v.unknown_variant_clone_from_opaque(value); + } else { + *value = #sim_value_ident::#sim_value_unknown_variant_name( + v.unknown_variant_from_opaque(), + ); + }, + } + } else { + quote_spanned! {span=> + _ => ::fayalite::__std::unreachable!(), + } + }; + let sim_value_clone_from_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #index => if let #sim_value_ident::#ident(field, padding) = value { + v.variant_with_field_clone_from_opaque(field, padding); + } else { + let (field, padding) = v.variant_with_field_from_opaque(); + *value = #sim_value_ident::#ident(field, padding); + }, + } + } else { + quote_spanned! {span=> + #index => if let #sim_value_ident::#ident(padding) = value { + v.variant_no_field_clone_from_opaque(padding); + } else { + *value = #sim_value_ident::#ident( + v.variant_no_field_from_opaque(), + ); + }, + } + } + }, + ) + .chain([sim_value_clone_from_opaque_unknown_match_arm]), + ); + let sim_value_to_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #sim_value_ident::#ident(field, padding) => { + v.variant_with_field_to_opaque(#index, field, padding) + } + } + } else { + quote_spanned! {span=> + #sim_value_ident::#ident(padding) => { + v.variant_no_field_to_opaque(#index, padding) + } + } + } + }, + ) + .chain(sim_value_unknown_variant_name.as_ref().map( + |sim_value_unknown_variant_name| { + quote_spanned! {span=> + #sim_value_ident::#sim_value_unknown_variant_name(value) => { + v.unknown_variant_to_opaque(value) + } + } + }, + )), + ); + let variants_len = variants.len(); + quote_spanned! 
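+        // The expansion below implements `Type` for the target enum, with `Enum` as
+        // the base type and `Bool` as the mask type; the sim-value conversions
+        // dispatch on the runtime discriminant using the match arms assembled above.
+        // It also implements `EnumType` (variant list plus `match_activate_scope`),
+        // `ToSimValueWithType` for the generated sim-value enum, and `From<target>`
+        // for the sim builder.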
{span=> + #[automatically_derived] + impl #impl_generics ::fayalite::ty::Type for #target #type_generics + #where_clause + { + type BaseType = ::fayalite::enum_::Enum; + type MaskType = ::fayalite::int::Bool; + type SimValue = #sim_value_ident #type_generics; + type MatchVariant = #match_variant_ident #type_generics; + type MatchActiveScope = ::fayalite::module::Scope; + type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope; + type MatchVariantsIter = ::fayalite::enum_::EnumMatchVariantsIter; + + fn match_variants( + this: ::fayalite::expr::Expr, + source_location: ::fayalite::source_location::SourceLocation, + ) -> ::MatchVariantsIter { + ::fayalite::module::enum_match_variants_helper(this, source_location) + } + fn mask_type(&#self_token) -> ::MaskType { + ::fayalite::int::Bool + } + fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType { + ::fayalite::ty::CanonicalType::Enum(::fayalite::enum_::Enum::new(::fayalite::enum_::EnumType::variants(#self_token))) + } + #[track_caller] + #[allow(non_snake_case)] + fn from_canonical(canonical_type: ::fayalite::ty::CanonicalType) -> Self { + let ::fayalite::ty::CanonicalType::Enum(enum_) = canonical_type else { + ::fayalite::__std::panic!("expected enum"); + }; + let #variants_token = ::fayalite::enum_::EnumType::variants(&enum_); + ::fayalite::__std::assert_eq!(#variants_token.len(), #variants_len, "enum has wrong number of variants"); + Self { + #(#from_canonical_body_fields)* + } + } + fn source_location() -> ::fayalite::source_location::SourceLocation { + ::fayalite::source_location::SourceLocation::caller() + } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque); + match v.discriminant() { + #(#sim_value_from_opaque_match_arms)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque); + match v.discriminant() { + #(#sim_value_clone_from_opaque_match_arms)* + } + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + let v = ::fayalite::enum_::EnumSimValueToOpaque::new(*self, writer); + match value { + #(#sim_value_to_opaque_match_arms)* + } + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::enum_::EnumType for #target #type_generics + #where_clause + { + type SimBuilder = #sim_builder_ident #type_generics; + fn match_activate_scope( + v: ::MatchVariantAndInactiveScope, + ) -> (::MatchVariant, ::MatchActiveScope) { + let (#variant_access_token, scope) = v.activate(); + ( + match #variant_access_token.variant_index() { + #(#match_active_scope_match_arms)* + #variants_len.. 
=> ::fayalite::__std::panic!("invalid variant index"), + }, + scope, + ) + } + fn variants(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::enum_::EnumVariant]> { + ::fayalite::intern::Intern::intern(&[ + #(#variants_body_variants)* + ][..]) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics> + for #sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::__std::convert::From<#target #type_generics> + for #sim_builder_ident #type_generics + #where_clause + { + fn from(#sim_builder_ty_field_ident: #target #type_generics) -> Self { + Self { #sim_builder_ty_field_ident } + } + } + } + .to_tokens(tokens); + if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) { + let static_generics = generics.clone().for_static_type(); + let (static_impl_generics, static_type_generics, static_where_clause) = + static_generics.split_for_impl(); + let static_type_body_variants = + Vec::from_iter(variants.iter().map(|ParsedVariant { ident, field, .. }| { + if field.is_some() { + quote_spanned! {span=> + #ident: ::fayalite::ty::StaticType::TYPE, + } + } else { + quote_spanned! {span=> + #ident: (), + } + } + })); + let type_properties = format_ident!("__type_properties", span = span); + let type_properties_variants = + Vec::from_iter(variants.iter().map(|ParsedVariant { field, .. }| { + let variant = if let Some(ParsedVariantField { ty, .. }) = field { + quote_spanned! {span=> + ::fayalite::__std::option::Option::Some( + <#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES, + ) + } + } else { + quote_spanned! {span=> + ::fayalite::__std::option::Option::None + } + }; + quote_spanned! {span=> + let #type_properties = #type_properties.variant(#variant); + } + })); + quote_spanned! 
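+            // As with bundles, when generics were parsed and `no_static` is not set,
+            // the quote below adds `Default` and `StaticType` impls (folding
+            // per-variant properties through `EnumTypePropertiesBuilder`, with
+            // `Bool`'s properties as the mask type) plus a `ToSimValue` impl for the
+            // generated sim-value enum.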
{span=> + #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default + for #target #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } + #[automatically_derived] + impl #static_impl_generics ::fayalite::ty::StaticType + for #target #static_type_generics + #static_where_clause + { + const TYPE: Self = Self { + #(#static_type_body_variants)* + }; + const MASK_TYPE: ::MaskType = + ::fayalite::int::Bool; + const TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = { + let #type_properties = ::fayalite::enum_::EnumTypePropertiesBuilder::new(); + #(#type_properties_variants)* + #type_properties.finish() + }; + const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = + <::fayalite::int::Bool as ::fayalite::ty::StaticType>::TYPE_PROPERTIES; + } + #[automatically_derived] + impl #static_impl_generics ::fayalite::sim::value::ToSimValue + for #sim_value_ident #static_type_generics + #static_where_clause + { + type Type = #target #static_type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + ::fayalite::sim::value::SimValue::from_value( + ::fayalite::ty::StaticType::TYPE, + ::fayalite::__std::clone::Clone::clone(self), + ) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + ::fayalite::sim::value::SimValue::from_value( + ::fayalite::ty::StaticType::TYPE, + self, + ) + } + } + } + .to_tokens(tokens); + } + } +} + +pub(crate) fn hdl_enum(item: ItemEnum) -> syn::Result { + let item = ParsedEnum::parse(item)?; + let outline_generated = item.options.body.outline_generated; + let mut contents = item.to_token_stream(); + if outline_generated.is_some() { + contents = crate::outline_generated(contents, "hdl-enum-"); + } + Ok(contents) +} diff --git a/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs b/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs new file mode 100644 index 0000000..d4a035b --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs @@ -0,0 +1,139 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use crate::{ + Errors, HdlAttr, + hdl_type_common::{ + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, TypesParser, + get_target, + }, + kw, +}; +use proc_macro2::TokenStream; +use quote::ToTokens; +use syn::{Attribute, Generics, Ident, ItemType, Token, Type, Visibility, parse_quote_spanned}; + +#[derive(Clone, Debug)] +pub(crate) struct ParsedTypeAlias { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) vis: Visibility, + pub(crate) type_token: Token![type], + pub(crate) ident: Ident, + pub(crate) generics: MaybeParsed, + pub(crate) eq_token: Token![=], + pub(crate) ty: MaybeParsed, + pub(crate) semi_token: Token![;], +} + +impl ParsedTypeAlias { + fn parse(item: ItemType) -> syn::Result { + let ItemType { + mut attrs, + vis, + type_token, + ident, + mut generics, + eq_token, + ty, + semi_token, + } = item; + let mut errors = Errors::new(); + let mut options = errors + .unwrap_or_default(HdlAttr::::parse_and_take_attr( + &mut attrs, + )) + .unwrap_or_default(); + errors.ok(options.body.validate()); + let ItemOptions { + outline_generated: _, + target: _, + custom_bounds, + no_static, + no_runtime_generics: _, + cmp_eq, + } = options.body; + if let Some((no_static,)) = no_static { + errors.error(no_static, "no_static is not valid on type aliases"); + } + if let Some((cmp_eq,)) = cmp_eq { + errors.error(cmp_eq, "cmp_eq is not 
valid on type aliases"); + } + let generics = if custom_bounds.is_some() { + MaybeParsed::Unrecognized(generics) + } else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) { + MaybeParsed::Parsed(generics) + } else { + MaybeParsed::Unrecognized(generics) + }; + let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors); + errors.finish()?; + Ok(Self { + attrs, + options, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + }) + } +} + +impl ToTokens for ParsedTypeAlias { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } = self; + let ItemOptions { + outline_generated: _, + target, + custom_bounds: _, + no_static: _, + no_runtime_generics, + cmp_eq: _, + } = &options.body; + let target = get_target(target, ident); + let mut type_attrs = attrs.clone(); + type_attrs.push(parse_quote_spanned! {ident.span()=> + #[allow(type_alias_bounds)] + }); + ItemType { + attrs: type_attrs, + vis: vis.clone(), + type_token: *type_token, + ident: ident.clone(), + generics: generics.into(), + eq_token: *eq_token, + ty: Box::new(ty.clone().into()), + semi_token: *semi_token, + } + .to_tokens(tokens); + if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) = + (generics, ty, no_runtime_generics) + { + generics.make_runtime_generics(tokens, vis, ident, &target, |context| { + ty.make_hdl_type_expr(context) + }) + } + } +} + +pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result { + let item = ParsedTypeAlias::parse(item)?; + let outline_generated = item.options.body.outline_generated; + let mut contents = item.to_token_stream(); + if outline_generated.is_some() { + contents = crate::outline_generated(contents, "hdl-type-alias-"); + } + Ok(contents) +} diff --git a/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs b/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs new file mode 100644 index 0000000..1206f11 --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs @@ -0,0 +1,4388 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use crate::{Errors, HdlAttr, PairsIterExt, fold::impl_fold, kw}; +use proc_macro2::{Span, TokenStream}; +use quote::{ToTokens, format_ident, quote_spanned}; +use std::{collections::HashMap, fmt, mem}; +use syn::{ + AngleBracketedGenericArguments, Attribute, Block, ConstParam, Expr, ExprBlock, ExprGroup, + ExprIndex, ExprParen, ExprPath, ExprTuple, Field, FieldMutability, Fields, FieldsNamed, + FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, ImplGenerics, Index, ItemStruct, + Path, PathArguments, PathSegment, PredicateType, QSelf, Stmt, Token, Turbofish, Type, + TypeGenerics, TypeGroup, TypeParam, TypeParen, TypePath, TypeTuple, Visibility, WhereClause, + WherePredicate, + parse::{Parse, ParseStream}, + parse_quote, parse_quote_spanned, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::{Brace, Bracket, Paren}, +}; + +crate::options! { + #[options = ItemOptions] + pub(crate) enum ItemOption { + OutlineGenerated(outline_generated), + Target(target, Path), + CustomBounds(custom_bounds), + NoStatic(no_static), + NoRuntimeGenerics(no_runtime_generics), + CmpEq(cmp_eq), + } +} + +impl ItemOptions { + pub(crate) fn validate(&mut self) -> syn::Result<()> { + if let Self { + custom_bounds: Some((custom_bounds,)), + no_static: None, + .. 
+ } = self + { + self.no_static = Some((kw::no_static(custom_bounds.span),)); + } + Ok(()) + } +} + +pub(crate) struct WrappedInConst<'a> { + outer: &'a mut TokenStream, + span: Span, + inner: TokenStream, +} + +impl<'a> WrappedInConst<'a> { + pub(crate) fn new(outer: &'a mut TokenStream, span: Span) -> Self { + Self { + outer, + span, + inner: TokenStream::new(), + } + } + pub(crate) fn inner(&mut self) -> &mut TokenStream { + &mut self.inner + } +} + +impl Drop for WrappedInConst<'_> { + fn drop(&mut self) { + let inner = &self.inner; + quote_spanned! {self.span=> + #[allow(clippy::type_complexity)] + const _: () = { + #inner + }; + } + .to_tokens(self.outer); + } +} + +pub(crate) fn get_target(target: &Option<(kw::target, Paren, Path)>, item_ident: &Ident) -> Path { + match target { + Some((_, _, target)) => target.clone(), + None => item_ident.clone().into(), + } +} + +pub(crate) fn common_derives(span: Span) -> Attribute { + parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::cmp::Eq, + ::fayalite::__std::cmp::PartialEq, + ::fayalite::__std::hash::Hash, + ::fayalite::__std::marker::Copy, + ::fayalite::__std::clone::Clone, + )] + } +} + +crate::options! { + #[options = LifetimeParamOptions] + enum LifetimeParamOption {} +} + +crate::options! { + #[options = TypeParamOptions] + pub(crate) enum TypeParamOption {} +} + +crate::options! { + #[options = ConstParamOptions] + pub(crate) enum ConstParamOption {} +} + +macro_rules! parse_failed { + ($parser:ident, $tokens:expr, $message:expr) => {{ + $parser.errors().error($tokens, $message); + return Err(ParseFailed); + }}; +} + +#[derive(Copy, Clone, Debug)] +pub(crate) enum ExprDelimiter { + Group(syn::token::Group), + Brace(Brace), + Paren(Paren), +} + +impl_fold! { + enum ExprDelimiter<> { + Group(syn::token::Group), + Brace(Brace), + Paren(Paren), + } +} + +impl ExprDelimiter { + pub(crate) fn surround(self, tokens: &mut TokenStream, f: F) { + match self { + ExprDelimiter::Group(v) => v.surround(tokens, f), + ExprDelimiter::Brace(v) => v.surround(tokens, f), + ExprDelimiter::Paren(v) => v.surround(tokens, f), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedExprDelimited { + pub(crate) delim: ExprDelimiter, + pub(crate) expr: Box, +} + +impl_fold! { + struct ParsedExprDelimited<> { + delim: ExprDelimiter, + expr: Box, + } +} + +impl From for Expr { + fn from(value: ParsedExprDelimited) -> Self { + let ParsedExprDelimited { delim, expr } = value; + let expr = expr.into(); + match delim { + ExprDelimiter::Group(group_token) => Expr::Group(ExprGroup { + attrs: vec![], + group_token, + expr: Box::new(expr), + }), + ExprDelimiter::Brace(brace_token) => Expr::Block(ExprBlock { + attrs: vec![], + label: None, + block: Block { + brace_token, + stmts: vec![Stmt::Expr(expr, None)], + }, + }), + ExprDelimiter::Paren(paren_token) => Expr::Paren(ExprParen { + attrs: vec![], + paren_token, + expr: Box::new(expr), + }), + } + } +} + +impl ToTokens for ParsedExprDelimited { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { delim, expr } = self; + delim.surround(tokens, |tokens| expr.to_tokens(tokens)); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedExprNamedParamConst { + pub(crate) ident: Ident, + pub(crate) param_index: usize, +} + +impl_fold! 
{ + struct ParsedExprNamedParamConst<> { + ident: Ident, + param_index: usize, + } +} + +impl ToTokens for ParsedExprNamedParamConst { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + ident, + param_index: _, + } = self; + ident.to_tokens(tokens); + } +} + +impl From for Expr { + fn from(value: ParsedExprNamedParamConst) -> Self { + Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: value.ident.into(), + }) + } +} + +#[derive(Clone, Debug)] +pub(crate) enum ParsedExpr { + Delimited(ParsedExprDelimited), + NamedParamConst(ParsedExprNamedParamConst), + Other(Box), +} + +impl ParsedExpr { + pub(crate) fn named_param_const(mut self: &Self) -> Option<&ParsedExprNamedParamConst> { + loop { + match self { + ParsedExpr::Delimited(ParsedExprDelimited { expr, .. }) => self = &**expr, + ParsedExpr::NamedParamConst(retval) => return Some(retval), + ParsedExpr::Other(_) => return None, + } + } + } +} + +impl_fold! { + enum ParsedExpr<> { + Delimited(ParsedExprDelimited), + NamedParamConst(ParsedExprNamedParamConst), + Other(Box), + } +} + +impl ToTokens for ParsedExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + ParsedExpr::Delimited(v) => v.to_tokens(tokens), + ParsedExpr::NamedParamConst(v) => v.to_tokens(tokens), + ParsedExpr::Other(v) => v.to_tokens(tokens), + } + } +} + +impl From for Expr { + fn from(value: ParsedExpr) -> Self { + match value { + ParsedExpr::Delimited(expr) => expr.into(), + ParsedExpr::NamedParamConst(expr) => expr.into(), + ParsedExpr::Other(expr) => *expr, + } + } +} + +impl From> for Expr { + fn from(value: Box) -> Self { + (*value).into() + } +} + +impl ParseTypes for ParsedExpr { + fn parse_types(input: &mut Expr, parser: &mut TypesParser<'_>) -> Result { + match input { + Expr::Block(ExprBlock { + attrs, + label: None, + block: Block { brace_token, stmts }, + }) if attrs.is_empty() && stmts.len() == 1 => { + if let Stmt::Expr(expr, None) = &mut stmts[0] { + return Ok(ParsedExpr::Delimited(ParsedExprDelimited { + delim: ExprDelimiter::Brace(*brace_token), + expr: Box::new(parser.parse(expr)?), + })); + } + } + Expr::Group(ExprGroup { + attrs, + group_token, + expr, + }) if attrs.is_empty() => { + return Ok(ParsedExpr::Delimited(ParsedExprDelimited { + delim: ExprDelimiter::Group(*group_token), + expr: parser.parse(expr)?, + })); + } + Expr::Paren(ExprParen { + attrs, + paren_token, + expr, + }) if attrs.is_empty() => { + return Ok(ParsedExpr::Delimited(ParsedExprDelimited { + delim: ExprDelimiter::Paren(*paren_token), + expr: parser.parse(expr)?, + })); + } + Expr::Path(ExprPath { + attrs, + qself: None, + path, + }) if attrs.is_empty() => { + if let Some((param_index, ident)) = parser.get_named_param(path) { + return Ok(Self::NamedParamConst( + match parser.generics().params[param_index] { + ParsedGenericParam::Const(_) => ParsedExprNamedParamConst { + ident: ident.clone(), + param_index, + }, + ParsedGenericParam::Type(ParsedTypeParam { ref ident, .. }) + | ParsedGenericParam::SizeType(ParsedSizeTypeParam { + ref ident, .. + }) => { + parser + .errors + .error(ident, "type provided when a constant was expected"); + return Err(ParseFailed); + } + }, + )); + } + } + _ => {} + } + Ok(ParsedExpr::Other(Box::new(input.clone()))) + } +} + +#[derive(Copy, Clone, Debug)] +pub(crate) enum TypeDelimiter { + Group(syn::token::Group), + Paren(Paren), +} + +impl_fold! 
{ + enum TypeDelimiter<> { + Group(syn::token::Group), + Paren(Paren), + } +} + +impl TypeDelimiter { + pub(crate) fn surround(self, tokens: &mut TokenStream, f: F) { + match self { + TypeDelimiter::Group(v) => v.surround(tokens, f), + TypeDelimiter::Paren(v) => v.surround(tokens, f), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeDelimited { + pub(crate) delim: TypeDelimiter, + pub(crate) elem: Box, +} + +impl_fold! { + struct ParsedTypeDelimited<> { + delim: TypeDelimiter, + elem: Box, + } +} + +impl From for Type { + fn from(value: ParsedTypeDelimited) -> Self { + let ParsedTypeDelimited { delim, elem } = value; + let elem = Box::new(elem.into()); + match delim { + TypeDelimiter::Group(group_token) => Type::Group(TypeGroup { group_token, elem }), + TypeDelimiter::Paren(paren_token) => Type::Paren(TypeParen { paren_token, elem }), + } + } +} + +impl ToTokens for ParsedTypeDelimited { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { delim, elem } = self; + delim.surround(tokens, |tokens| elem.to_tokens(tokens)); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeTuple { + pub(crate) paren_token: Paren, + pub(crate) elems: Punctuated, +} + +impl_fold! { + struct ParsedTypeTuple<> { + paren_token: Paren, + elems: Punctuated, + } +} + +impl From for Type { + fn from(value: ParsedTypeTuple) -> Self { + let ParsedTypeTuple { paren_token, elems } = value; + Type::Tuple(TypeTuple { + paren_token, + elems: Punctuated::from_iter(elems.into_pairs().map_pair_value(Into::into)), + }) + } +} + +impl ToTokens for ParsedTypeTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { paren_token, elems } = self; + paren_token.surround(tokens, |tokens| { + elems.to_tokens(tokens); + if elems.len() == 1 && !elems.trailing_punct() { + // trailing comma needed to distinguish from just parenthesis + Token![,](paren_token.span.close()).to_tokens(tokens); + } + }) + } +} + +impl ParseTypes for ParsedTypeTuple { + fn parse_types(ty: &mut TypeTuple, parser: &mut TypesParser<'_>) -> Result { + Ok(Self { + paren_token: ty.paren_token, + elems: parser.parse(&mut ty.elems)?, + }) + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ParsedGenericArgument { + Type(ParsedType), + Const(ParsedExpr), +} + +impl_fold! { + enum ParsedGenericArgument<> { + Type(ParsedType), + Const(ParsedExpr), + } +} + +impl ToTokens for ParsedGenericArgument { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Type(ty) => ty.to_tokens(tokens), + Self::Const(expr) => expr.to_tokens(tokens), + } + } +} + +impl ParseTypes for ParsedGenericArgument { + fn parse_types( + arg: &mut GenericArgument, + parser: &mut TypesParser<'_>, + ) -> Result { + match arg { + GenericArgument::Type(ty) => { + { + let mut ty = &*ty; + while let Type::Group(TypeGroup { elem, .. 
}) = ty { + ty = &**elem; + } + if let Type::Path(TypePath { qself: None, path }) = ty { + if let Some((param_index, ident)) = parser.get_named_param(path) { + match parser.generics.params[param_index] { + ParsedGenericParam::Type(_) | ParsedGenericParam::SizeType(_) => {} + ParsedGenericParam::Const(_) => { + return Ok(Self::Const(ParsedExpr::NamedParamConst( + ParsedExprNamedParamConst { + ident: ident.clone(), + param_index, + }, + ))); + } + } + } + } + } + Ok(Self::Type(parser.parse(ty)?)) + } + GenericArgument::Const(expr) => Ok(Self::Const(parser.parse(expr)?)), + _ => parse_failed!(parser, arg, "expected type or const generic argument"), + } + } +} + +impl From for GenericArgument { + fn from(value: ParsedGenericArgument) -> Self { + match value { + ParsedGenericArgument::Type(ty) => Self::Type(ty.into()), + ParsedGenericArgument::Const(expr) => Self::Const(expr.into()), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedGenericArguments { + pub(crate) colon2_token: Option, + pub(crate) lt_token: Token![<], + pub(crate) args: Punctuated, + pub(crate) gt_token: Token![>], +} + +impl_fold! { + struct ParsedGenericArguments<> { + colon2_token: Option, + lt_token: Token![<], + args: Punctuated, + gt_token: Token![>], + } +} + +impl ToTokens for ParsedGenericArguments { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + colon2_token, + lt_token, + args, + gt_token, + } = self; + colon2_token.to_tokens(tokens); + lt_token.to_tokens(tokens); + args.to_tokens(tokens); + gt_token.to_tokens(tokens); + } +} + +impl ParseTypes for ParsedGenericArguments { + fn parse_types( + args: &mut AngleBracketedGenericArguments, + parser: &mut TypesParser<'_>, + ) -> Result { + let AngleBracketedGenericArguments { + colon2_token, + lt_token, + ref mut args, + gt_token, + } = *args; + Ok(Self { + colon2_token, + lt_token, + args: parser.parse(args)?, + gt_token, + }) + } +} + +impl From for AngleBracketedGenericArguments { + fn from(value: ParsedGenericArguments) -> Self { + let ParsedGenericArguments { + colon2_token, + lt_token, + args, + gt_token, + } = value; + AngleBracketedGenericArguments { + colon2_token, + lt_token, + args: Punctuated::from_iter(args.into_pairs().map_pair_value(Into::into)), + gt_token, + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeNamed { + pub(crate) path: Path, + pub(crate) args: Option, +} + +impl_fold! { + struct ParsedTypeNamed<> { + path: Path, + args: Option, + } +} + +impl From for Type { + fn from(value: ParsedTypeNamed) -> Self { + let ParsedTypeNamed { path, args } = value; + Type::Path(TypePath { + qself: None, + path: Path { + leading_colon: path.leading_colon, + segments: { + let mut segments = path.segments.clone(); + if let Some(args) = args { + segments.last_mut().unwrap().arguments = + PathArguments::AngleBracketed(args.into()); + } + segments + }, + }, + }) + } +} + +impl ToTokens for ParsedTypeNamed { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { path, args } = self; + path.to_tokens(tokens); + args.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeNamedParamType { + pub(crate) ident: Ident, + pub(crate) param_index: usize, +} + +impl_fold! 
{ + struct ParsedTypeNamedParamType<> { + ident: Ident, + param_index: usize, + } +} + +impl ToTokens for ParsedTypeNamedParamType { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + ident, + param_index: _, + } = self; + ident.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeNamedParamSizeType { + pub(crate) ident: Ident, + pub(crate) param_index: usize, +} + +impl_fold! { + struct ParsedTypeNamedParamSizeType<> { + ident: Ident, + param_index: usize, + } +} + +impl ToTokens for ParsedTypeNamedParamSizeType { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + ident, + param_index: _, + } = self; + ident.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ParsedTypeNamedParam { + Type(ParsedTypeNamedParamType), + SizeType(ParsedTypeNamedParamSizeType), +} + +impl_fold! { + enum ParsedTypeNamedParam<> { + Type(ParsedTypeNamedParamType), + SizeType(ParsedTypeNamedParamSizeType), + } +} + +impl ParsedTypeNamedParam { + pub(crate) fn into_ident(self) -> Ident { + match self { + Self::Type(v) => v.ident, + Self::SizeType(v) => v.ident, + } + } + #[allow(dead_code)] + pub(crate) fn ident(&self) -> &Ident { + match self { + Self::Type(v) => &v.ident, + Self::SizeType(v) => &v.ident, + } + } + #[allow(dead_code)] + pub(crate) fn param_index(&self) -> usize { + match self { + Self::Type(v) => v.param_index, + Self::SizeType(v) => v.param_index, + } + } +} + +impl From for Type { + fn from(value: ParsedTypeNamedParam) -> Self { + Type::Path(TypePath { + qself: None, + path: value.into_ident().into(), + }) + } +} + +impl ToTokens for ParsedTypeNamedParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Type(v) => v.to_tokens(tokens), + Self::SizeType(v) => v.to_tokens(tokens), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeConstUsize { + pub(crate) const_usize: known_items::ConstUsize, + pub(crate) lt_token: Token![<], + pub(crate) value: ParsedExpr, + pub(crate) gt_token: Token![>], +} + +impl_fold! 
{ + struct ParsedTypeConstUsize<> { + const_usize: known_items::ConstUsize, + lt_token: Token![<], + value: ParsedExpr, + gt_token: Token![>], + } +} + +impl From for Path { + fn from(value: ParsedTypeConstUsize) -> Self { + let ParsedTypeConstUsize { + const_usize, + lt_token, + value, + gt_token, + } = value; + let path = const_usize.path; + let args = Punctuated::from_iter([GenericArgument::Const(value.into())]); + let args = AngleBracketedGenericArguments { + colon2_token: Some(Token![::](lt_token.span)), + lt_token, + args, + gt_token, + }; + let mut segments = path.segments.clone(); + segments.last_mut().unwrap().arguments = PathArguments::AngleBracketed(args); + Path { + leading_colon: path.leading_colon, + segments, + } + } +} + +impl From for Type { + fn from(value: ParsedTypeConstUsize) -> Self { + Type::Path(TypePath { + qself: None, + path: value.into(), + }) + } +} + +impl MakeHdlTypeExpr for ParsedTypeConstUsize { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + if let Some(named_param_const) = self.value.named_param_const() { + named_param_const.make_hdl_type_expr(context) + } else { + Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: self.clone().into(), + }) + } + } +} + +impl ToTokens for ParsedTypeConstUsize { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + const_usize, + lt_token, + value, + gt_token, + } = self; + const_usize.to_tokens(tokens); + lt_token.to_tokens(tokens); + value.to_tokens(tokens); + gt_token.to_tokens(tokens); + } +} + +impl ParsedTypeConstUsize { + fn try_from_named( + named: ParsedTypeNamed, + parser: &mut TypesParser<'_>, + ) -> Result, ParseFailed> { + let ParsedTypeNamed { path, args } = named; + let const_usize = match known_items::ConstUsize::parse_path(path) { + Ok(const_usize) => const_usize, + Err(path) => return Ok(Err(ParsedTypeNamed { path, args })), + }; + let Some(ParsedGenericArguments { + colon2_token: _, + lt_token, + args, + gt_token, + }) = args + else { + parser + .errors() + .error(const_usize, "ConstUsize type is missing generic arguments"); + return Err(ParseFailed); + }; + let args_len = args.len(); + let mut args = args.into_iter(); + let (Some(value), None) = (args.next(), args.next()) else { + parser.errors().error( + const_usize, + format_args!("ConstUsize type takes 1 generic argument, but got {args_len}"), + ); + return Err(ParseFailed); + }; + let ParsedGenericArgument::Const(value) = value else { + parser.errors().error(value, "expected a constant"); + return Err(ParseFailed); + }; + Ok(Ok(Self { + const_usize, + lt_token, + value, + gt_token, + })) + } +} + +macro_rules! make_parsed_type_or_const { + ( + @count_arg($min:ident, $max:ident) + #[type(default = |$type_arg_default_span:ident| $type_arg_default:expr)] + $type_arg:ident: $type_arg_ty:ty, + ) => { + let $max = $max + 1; + }; + ( + @count_arg($min:ident, $max:ident) + #[type] + $type_arg:ident: $type_arg_ty:ty, + ) => { + let $min = $min + 1; + let $max = $max + 1; + }; + ( + @default_fn + #[type(default = |$type_arg_default_span:ident| $type_arg_default:expr)] + $type_arg:ident: $type_arg_ty:ty, + ) => { + |$type_arg_default_span| $type_arg_default + }; + ( + @default_fn + #[type] + $type_arg:ident: $type_arg_ty:ty, + ) => { + |_| unreachable!() + }; + ( + @parse_arg($is_const:ident, $parser:ident, $span:expr) + #[type$((default = |$type_arg_default_span:ident| $type_arg_default:expr))?] + $type_arg:ident: $type_arg_ty:ty, + $(#[separator] + $separator:ident: $separator_ty:ty,)? 
+ ) => { + $(let $separator;)? + let $type_arg = Box::new(if let Some($type_arg) = $type_arg { + let ($type_arg, _punct) = $type_arg.into_tuple(); + $($separator = _punct;)? + let ParsedGenericArgument::Type($type_arg) = $type_arg else { + $parser.errors().error($type_arg, "expected a type"); + return Err(ParseFailed); + }; + $type_arg + } else { + $($separator = None;)? + let default_fn = make_parsed_type_or_const! { + @default_fn + #[type$((default = |$type_arg_default_span| $type_arg_default))?] + $type_arg: $type_arg_ty, + }; + default_fn($span) + }); + $(let $separator = $separator.unwrap_or_else(|| { + let mut $separator = <$separator_ty>::default(); + $separator.span = $span; + $separator + });)? + }; + ( + @parse_arg($is_const:ident, $parser:ident, $span:expr) + #[const] + $const_arg:ident: $const_arg_ty:ty, + #[type$((default = |$type_arg_default_span:ident| $type_arg_default:expr))?] + $type_arg:ident: $type_arg_ty:ty, + $(#[separator] + $separator:ident: $separator_ty:ty,)? + ) => { + $(let $separator;)? + let ($type_arg, $const_arg) = if let Some($type_arg) = $type_arg { + let ($type_arg, _punct) = $type_arg.into_tuple(); + $($separator = _punct;)? + if $is_const { + let ParsedGenericArgument::Const($const_arg) = $type_arg else { + $parser.errors().error($type_arg, "expected a constant"); + return Err(ParseFailed); + }; + let $type_arg = Box::new(ParsedType::ConstUsize(ParsedTypeConstUsize { + const_usize: known_items::ConstUsize($span), + lt_token: Token![<]($span), + value: $const_arg.clone(), + gt_token: Token![>]($span), + })); + ($type_arg, Some(Box::new($const_arg))) + } else { + let ParsedGenericArgument::Type($type_arg) = $type_arg else { + $parser.errors().error($type_arg, "expected a type"); + return Err(ParseFailed); + }; + (Box::new($type_arg), None) + } + } else { + $($separator = None;)? + let default_fn = make_parsed_type_or_const! { + @default_fn + #[type$((default = |$type_arg_default_span| $type_arg_default))?] + $type_arg: $type_arg_ty, + }; + (Box::new(default_fn($span)), None) + }; + $(let $separator = $separator.unwrap_or_else(|| { + let mut $separator = <$separator_ty>::default(); + $separator.span = $span; + $separator + });)? + }; + ( + $(#[$struct_meta:meta])* + $vis:vis struct $struct_name:ident { + $const_name_field:ident: known_items::$const_name:ident, + $type_name_field:ident: known_items::$type_name:ident, + $lt_token:ident: $lt_token_ty:ty, + $( + $( + #[const] + $const_arg:ident: $const_arg_ty:ty, + )? + #[type$((default = |$type_arg_default_span:ident| $type_arg_default:expr))?] + $type_arg:ident: $type_arg_ty:ty, + $( + #[separator] + $separator:ident: $separator_ty:ty, + )? + )* + $gt_token:ident: $gt_token_ty:ty, + } + ) => { + $(#[$struct_meta])* + $vis struct $struct_name { + $vis $const_name_field: known_items::$const_name, + $vis $type_name_field: known_items::$type_name, + $vis $lt_token: $lt_token_ty, + $( + $($vis $const_arg: $const_arg_ty,)? + $vis $type_arg: $type_arg_ty, + $($vis $separator: $separator_ty,)? + )* + $vis $gt_token: $gt_token_ty, + } + + impl_fold! { + struct $struct_name<> { + $const_name_field: known_items::$const_name, + $type_name_field: known_items::$type_name, + $lt_token: $lt_token_ty, + $( + $($const_arg: $const_arg_ty,)? + $type_arg: $type_arg_ty, + $($separator: $separator_ty,)? 
+ )* + $gt_token: $gt_token_ty, + } + } + + impl $struct_name { + $vis fn try_from_named( + named: ParsedTypeNamed, + parser: &mut TypesParser<'_>, + ) -> Result, ParseFailed> { + let ParsedTypeNamed { path, args } = named; + let $const_name_field; + let $type_name_field; + let parsed_path = known_items::$const_name::parse_path(path); + let parsed_path = parsed_path.map_err(known_items::$type_name::parse_path); + let is_const = match parsed_path { + Ok(const_path) => { + $type_name_field = known_items::$type_name(const_path.span); + $const_name_field = const_path; + true + } + Err(Ok(type_path)) => { + $const_name_field = known_items::$const_name(type_path.span); + $type_name_field = type_path; + false + } + Err(Err(path)) => return Ok(Err(ParsedTypeNamed { path, args })), + }; + let min_expected_args = 0; + let max_expected_args = 0; + $(make_parsed_type_or_const! { + @count_arg(min_expected_args, max_expected_args) + #[type$((default = |$type_arg_default_span| $type_arg_default))?] + $type_arg: $type_arg_ty, + })* + let ParsedGenericArguments { + colon2_token: _, + $lt_token, + args, + $gt_token, + } = if let Some(args) = args { + args + } else { + if min_expected_args > 0 { + parser + .errors() + .error($const_name_field, "type requires generic arguments"); + return Err(ParseFailed); + } + ParsedGenericArguments { + colon2_token: None, + lt_token: Token![<]($const_name_field.span), + args: Punctuated::new(), + gt_token: Token![>]($const_name_field.span), + } + }; + let args_len = args.len(); + if args_len < min_expected_args { + parser.errors().error( + $const_name_field, + format_args!("wrong number of generic arguments supplied: got {args_len}, expected at least {min_expected_args}"), + ); + return Err(ParseFailed); + } + if args_len > max_expected_args { + parser.errors().error( + &$const_name_field, + format_args!("wrong number of generic arguments supplied: got {args_len}, expected at most {max_expected_args}"), + ); + } + let mut args = args.into_pairs(); + $(let $type_arg = args.next();)* + $(make_parsed_type_or_const! { + @parse_arg(is_const, parser, $const_name_field.span) + $( + #[const] + $const_arg: $const_arg_ty, + )? + #[type$((default = |$type_arg_default_span| $type_arg_default))?] + $type_arg: $type_arg_ty, + $( + #[separator] + $separator: $separator_ty, + )? + })* + Ok(Ok(Self { + $const_name_field, + $type_name_field, + $lt_token, + $( + $($const_arg,)? + $type_arg, + $($separator,)? + )* + $gt_token, + })) + } + #[allow(dead_code)] + $vis fn is_const(&self) -> bool { + matches!(self, Self { + $($($const_arg: Some(_),)?)* + .. + }) + } + } + + impl From<$struct_name> for Type { + fn from(value: $struct_name) -> Type { + let $struct_name { + $const_name_field, + $type_name_field, + $lt_token, + $( + $($const_arg,)? + $type_arg, + $($separator,)? + )* + $gt_token, + } = value; + let (path, args) = if let ($($(Some($const_arg),)?)*) = ($($($const_arg,)?)*) { + let path = $const_name_field.path; + let mut args = Punctuated::new(); + $( + args.push(( + ($(|| GenericArgument::Const($const_arg.into()),)? + || GenericArgument::Type($type_arg.into()), + ).0)()); + $(args.push_punct($separator);)? + )* + (path, args) + } else { + let path = $type_name_field.path; + let mut args = Punctuated::new(); + $( + args.push(GenericArgument::Type($type_arg.into())); + $(args.push_punct($separator);)? 
+ )* + (path, args) + }; + let args = AngleBracketedGenericArguments { + colon2_token: Some(Token![::]($lt_token.span)), + $lt_token, + args, + $gt_token, + }; + let mut segments = path.segments.clone(); + segments.last_mut().unwrap().arguments = PathArguments::AngleBracketed(args); + Type::Path(TypePath { + qself: None, + path: Path { + leading_colon: path.leading_colon, + segments, + }, + }) + } + } + + impl MakeHdlTypeExpr for $struct_name { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + let $struct_name { + $const_name_field, + $type_name_field: _, + $lt_token: _, + $( + $($const_arg: _,)? + $type_arg, + $($separator: _,)? + )* + $gt_token: _, + } = self; + let span = $const_name_field.span; + if context.is_const { + return parse_quote_spanned! {span=> + ::fayalite::ty::StaticType::TYPE + }; + } + let mut retval = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: $const_name_field.path.clone(), + }); + $( + retval = Expr::Index(ExprIndex { + attrs: vec![], + expr: Box::new(retval), + bracket_token: Bracket(span), + index: Box::new($type_arg.make_hdl_type_expr(context)), + }); + )* + retval + } + } + + impl ToTokens for $struct_name { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + $const_name_field, + $type_name_field, + $lt_token, + $( + $($const_arg,)? + $type_arg, + $($separator,)? + )* + $gt_token, + } = self; + if let ($($(Some($const_arg),)?)*) = ($($($const_arg,)?)*) { + $const_name_field.to_tokens(tokens); + $lt_token.to_tokens(tokens); + $( + if (( + $(|| { + $const_arg.to_tokens(tokens); + false + },)? + || true, + ).0)() { + $type_arg.to_tokens(tokens); + } + $($separator.to_tokens(tokens);)? + )* + $gt_token.to_tokens(tokens); + } else { + $type_name_field.to_tokens(tokens); + $lt_token.to_tokens(tokens); + $( + $type_arg.to_tokens(tokens); + $($separator.to_tokens(tokens);)? + )* + $gt_token.to_tokens(tokens); + } + } + } + }; +} + +make_parsed_type_or_const! { + #[derive(Debug, Clone)] + pub(crate) struct ParsedTypeArray { + array: known_items::Array, + array_type: known_items::ArrayType, + lt_token: Token![<], + #[type(default = |span| ParsedType::CanonicalType(known_items::CanonicalType(span)))] + element: Box, + #[separator] + comma_token: Token![,], + #[const] + const_len: Option>, + #[type(default = |span| ParsedType::DynSize(known_items::DynSize(span)))] + type_len: Box, + gt_token: Token![>], + } +} + +make_parsed_type_or_const! { + #[derive(Debug, Clone)] + pub(crate) struct ParsedTypeUInt { + uint: known_items::UInt, + uint_type: known_items::UIntType, + lt_token: Token![<], + #[const] + const_width: Option>, + #[type(default = |span| ParsedType::DynSize(known_items::DynSize(span)))] + type_width: Box, + gt_token: Token![>], + } +} + +make_parsed_type_or_const! { + #[derive(Debug, Clone)] + pub(crate) struct ParsedTypeSInt { + uint: known_items::SInt, + uint_type: known_items::SIntType, + lt_token: Token![<], + #[const] + const_width: Option>, + #[type(default = |span| ParsedType::DynSize(known_items::DynSize(span)))] + type_width: Box, + gt_token: Token![>], + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypePhantomData { + pub(crate) phantom_data: known_items::PhantomData, + pub(crate) lt_token: Token![<], + pub(crate) ty: Box, + pub(crate) gt_token: Token![>], +} + +impl_fold! 
{ + struct ParsedTypePhantomData<> { + phantom_data: known_items::PhantomData, + lt_token: Token![<], + ty: Box, + gt_token: Token![>], + } +} + +impl ParsedTypePhantomData { + pub(crate) fn try_from_named( + named: ParsedTypeNamed, + parser: &mut TypesParser<'_>, + ) -> Result, ParseFailed> { + let ParsedTypeNamed { path, args } = named; + let parsed_path = known_items::PhantomData::parse_path(path); + let phantom_data = match parsed_path { + Ok(phantom_data) => phantom_data, + Err(path) => return Ok(Err(ParsedTypeNamed { path, args })), + }; + let Some(ParsedGenericArguments { + colon2_token: _, + lt_token, + args, + gt_token, + }) = args + else { + parser + .errors() + .error(phantom_data, "PhantomData requires generic arguments"); + return Err(ParseFailed); + }; + let args_len = args.len(); + if args_len != 1 { + parser.errors().error( + phantom_data, + format_args!( + "wrong number of generic arguments supplied: got {args_len}, expected 1" + ), + ); + return Err(ParseFailed); + } + let ty = args.into_iter().next().unwrap(); + let ParsedGenericArgument::Type(ty) = ty else { + parser.errors().error(ty, "expected a type"); + return Err(ParseFailed); + }; + Ok(Ok(Self { + phantom_data, + lt_token, + ty: Box::new(ty), + gt_token, + })) + } +} + +impl From for Type { + fn from(value: ParsedTypePhantomData) -> Type { + let ParsedTypePhantomData { + phantom_data, + lt_token, + ty, + gt_token, + } = value; + let path = phantom_data.path; + let mut args = Punctuated::new(); + args.push(GenericArgument::Type(ty.into())); + let args = AngleBracketedGenericArguments { + colon2_token: Some(Token![::](lt_token.span)), + lt_token, + args, + gt_token, + }; + let mut segments = path.segments; + segments.last_mut().unwrap().arguments = PathArguments::AngleBracketed(args); + Type::Path(TypePath { + qself: None, + path: Path { + leading_colon: path.leading_colon, + segments, + }, + }) + } +} + +impl MakeHdlTypeExpr for ParsedTypePhantomData { + fn make_hdl_type_expr(&self, _context: &MakeHdlTypeExprContext) -> Expr { + let ParsedTypePhantomData { + phantom_data, + lt_token: _, + ty: _, + gt_token: _, + } = self; + Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: phantom_data.path.clone(), + }) + } +} + +impl ToTokens for ParsedTypePhantomData { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + phantom_data, + lt_token, + ty, + gt_token, + } = self; + phantom_data.to_tokens(tokens); + lt_token.to_tokens(tokens); + ty.to_tokens(tokens); + gt_token.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ParsedType { + Delimited(ParsedTypeDelimited), + Named(ParsedTypeNamed), + NamedParam(ParsedTypeNamedParam), + Tuple(ParsedTypeTuple), + ConstUsize(ParsedTypeConstUsize), + PhantomData(ParsedTypePhantomData), + Array(ParsedTypeArray), + UInt(ParsedTypeUInt), + SInt(ParsedTypeSInt), + CanonicalType(known_items::CanonicalType), + DynSize(known_items::DynSize), +} + +impl_fold! 
{ + enum ParsedType<> { + Delimited(ParsedTypeDelimited), + Named(ParsedTypeNamed), + NamedParam(ParsedTypeNamedParam), + Tuple(ParsedTypeTuple), + ConstUsize(ParsedTypeConstUsize), + PhantomData(ParsedTypePhantomData), + Array(ParsedTypeArray), + UInt(ParsedTypeUInt), + SInt(ParsedTypeSInt), + CanonicalType(known_items::CanonicalType), + DynSize(known_items::DynSize), + } +} + +impl From for Type { + fn from(value: ParsedType) -> Self { + match value { + ParsedType::Delimited(v) => v.into(), + ParsedType::Named(v) => v.into(), + ParsedType::NamedParam(v) => v.into(), + ParsedType::Tuple(v) => v.into(), + ParsedType::ConstUsize(v) => v.into(), + ParsedType::PhantomData(v) => v.into(), + ParsedType::Array(v) => v.into(), + ParsedType::UInt(v) => v.into(), + ParsedType::SInt(v) => v.into(), + ParsedType::CanonicalType(v) => v.into(), + ParsedType::DynSize(v) => v.into(), + } + } +} + +impl From for Type { + fn from(value: known_items::CanonicalType) -> Self { + Self::Path(TypePath { + qself: None, + path: value.path, + }) + } +} + +impl From for Type { + fn from(value: known_items::DynSize) -> Self { + Self::Path(TypePath { + qself: None, + path: value.path, + }) + } +} + +impl From> for Type { + fn from(value: MaybeParsed) -> Self { + match value { + MaybeParsed::Unrecognized(value) => value, + MaybeParsed::Parsed(value) => value.into(), + } + } +} + +impl From> for Type { + fn from(value: Box) -> Self { + (*value).into() + } +} + +impl ToTokens for ParsedType { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + ParsedType::Delimited(ty) => ty.to_tokens(tokens), + ParsedType::NamedParam(ty) => ty.to_tokens(tokens), + ParsedType::Named(ty) => ty.to_tokens(tokens), + ParsedType::Tuple(ty) => ty.to_tokens(tokens), + ParsedType::ConstUsize(ty) => ty.to_tokens(tokens), + ParsedType::PhantomData(ty) => ty.to_tokens(tokens), + ParsedType::Array(ty) => ty.to_tokens(tokens), + ParsedType::UInt(ty) => ty.to_tokens(tokens), + ParsedType::SInt(ty) => ty.to_tokens(tokens), + ParsedType::CanonicalType(ty) => ty.to_tokens(tokens), + ParsedType::DynSize(ty) => ty.to_tokens(tokens), + } + } +} + +impl ParsedType { + #[allow(dead_code)] + pub(crate) fn unwrap_delimiters(mut self) -> Self { + loop { + match self { + ParsedType::Delimited(ty) => self = *ty.elem, + _ => return self, + } + } + } + #[allow(dead_code)] + pub(crate) fn unwrap_delimiters_ref(mut self: &Self) -> &Self { + loop { + match self { + ParsedType::Delimited(ty) => self = &*ty.elem, + _ => return self, + } + } + } + #[allow(dead_code)] + pub(crate) fn unwrap_delimiters_mut(mut self: &mut Self) -> &mut Self { + loop { + match self { + ParsedType::Delimited(ty) => self = &mut *ty.elem, + _ => return self, + } + } + } +} + +impl ParseTypes for ParsedType { + fn parse_types(path: &mut Path, parser: &mut TypesParser<'_>) -> Result { + let Path { + leading_colon, + ref mut segments, + } = *path; + if segments.is_empty() { + parse_failed!(parser, path, "path must not be empty"); + } + let mut args = None; + let segments = Punctuated::from_iter(segments.pairs_mut().map_pair_value_mut(|segment| { + let PathSegment { ident, arguments } = segment; + if args.is_some() { + parser + .errors() + .error(&ident, "associated types/consts are not yet implemented"); + } + args = match arguments { + PathArguments::None => None, + PathArguments::AngleBracketed(args) => parser.parse(args).ok(), + PathArguments::Parenthesized(_) => { + parser + .errors() + .error(&segment, "function traits are not allowed"); + None + } + }; + 
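+            // note (editor comment): only the final path segment's generic arguments
+            // survive in `args`; arguments seen on an earlier segment were already
+            // reported above as unsupported associated types/consts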
PathSegment::from(segment.ident.clone()) + })); + let named = ParsedTypeNamed { + path: Path { + leading_colon, + segments, + }, + args, + }; + if let Some((param_index, ident)) = parser.get_named_param(&named.path) { + return Ok(Self::NamedParam( + match parser.generics().params[param_index] { + ParsedGenericParam::Type(_) => { + ParsedTypeNamedParam::Type(ParsedTypeNamedParamType { + ident: ident.clone(), + param_index, + }) + } + ParsedGenericParam::SizeType(_) => { + ParsedTypeNamedParam::SizeType(ParsedTypeNamedParamSizeType { + ident: ident.clone(), + param_index, + }) + } + ParsedGenericParam::Const(ParsedConstParam { ref ident, .. }) => { + parser + .errors + .error(ident, "constant provided when a type was expected"); + return Err(ParseFailed); + } + }, + )); + } + let named = match ParsedTypeArray::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::Array(v)), + Err(named) => named, + }; + let named = match ParsedTypeConstUsize::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::ConstUsize(v)), + Err(named) => named, + }; + let named = match ParsedTypePhantomData::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::PhantomData(v)), + Err(named) => named, + }; + let named = match ParsedTypeUInt::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::UInt(v)), + Err(named) => named, + }; + let named = match ParsedTypeSInt::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::SInt(v)), + Err(named) => named, + }; + let named = match known_items::CanonicalType::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::CanonicalType(v)), + Err(named) => named, + }; + let named = match known_items::DynSize::try_from_named(named, parser)? { + Ok(v) => return Ok(Self::DynSize(v)), + Err(named) => named, + }; + Ok(Self::Named(named)) + } +} + +impl ParseTypes for ParsedType { + fn parse_types(ty: &mut TypePath, parser: &mut TypesParser<'_>) -> Result { + let TypePath { qself, path } = ty; + if let Some(_qself) = qself { + // TODO: implement + parse_failed!( + parser, + ty, + "associated types/consts are not yet implemented" + ); + } else { + parser.parse(path) + } + } +} + +impl ParseTypes for ParsedType { + fn parse_types(ty: &mut Type, parser: &mut TypesParser<'_>) -> Result { + Ok(match ty { + Type::Array(_) => parse_failed!( + parser, + ty, + "Rust array types are not allowed here, \ + use Array or ArrayType instead" + ), + Type::BareFn(_) => parse_failed!(parser, ty, "fn() types are not allowed here"), + Type::Group(TypeGroup { group_token, elem }) => Self::Delimited(ParsedTypeDelimited { + delim: TypeDelimiter::Group(*group_token), + elem: parser.parse(elem)?, + }), + Type::ImplTrait(_) => { + parse_failed!(parser, ty, "impl Trait types are not allowed here") + } + Type::Infer(_) => parse_failed!(parser, ty, "inferred types are not allowed here"), + Type::Macro(_) => parse_failed!(parser, ty, "macro types are not allowed here"), + Type::Never(_) => parse_failed!(parser, ty, "the never type is not allowed here"), + Type::Paren(TypeParen { paren_token, elem }) => Self::Delimited(ParsedTypeDelimited { + delim: TypeDelimiter::Paren(*paren_token), + elem: parser.parse(elem)?, + }), + Type::Path(ty) => parser.parse(ty)?, + Type::Ptr(_) => parse_failed!(parser, ty, "pointer types are not allowed here"), + Type::Reference(_) => parse_failed!(parser, ty, "reference types are not allowed here"), + Type::Slice(_) => parse_failed!(parser, ty, "slice types are not allowed here"), + Type::TraitObject(_) => { + parse_failed!(parser, ty, "dyn Trait types are 
not allowed here") + } + Type::Tuple(ty) => Self::Tuple(parser.parse(ty)?), + Type::Verbatim(_) => parse_failed!(parser, ty, "unknown type kind"), + _ => parse_failed!(parser, ty, "unknown type kind"), + }) + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ParsedConstGenericType { + Usize(known_items::usize), +} + +impl_fold! { + enum ParsedConstGenericType<> { + Usize(known_items::usize), + } +} + +impl From for Type { + fn from(value: ParsedConstGenericType) -> Self { + match value { + ParsedConstGenericType::Usize(v) => parse_quote! { #v }, + } + } +} + +impl From> for Type { + fn from(value: MaybeParsed) -> Self { + match value { + MaybeParsed::Unrecognized(value) => value, + MaybeParsed::Parsed(value) => value.into(), + } + } +} + +impl From> for Type { + fn from(value: Box) -> Self { + (*value).into() + } +} + +impl ToTokens for ParsedConstGenericType { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + ParsedConstGenericType::Usize(ty) => ty.to_tokens(tokens), + } + } +} + +impl ParseTypes for ParsedConstGenericType { + fn parse_types(path: &mut Path, parser: &mut TypesParser<'_>) -> Result { + let Path { + leading_colon, + ref mut segments, + } = *path; + if segments.is_empty() { + parse_failed!(parser, path, "path must not be empty"); + } + let mut args = None; + let segments = Punctuated::from_iter(segments.pairs_mut().map_pair_value_mut(|segment| { + let PathSegment { ident, arguments } = segment; + if args.is_some() { + parser + .errors() + .error(&ident, "associated types/consts are not yet implemented"); + } + args = match arguments { + PathArguments::None => None, + PathArguments::AngleBracketed(args) => parser.parse(args).ok(), + PathArguments::Parenthesized(_) => { + parser + .errors() + .error(&segment, "function traits are not allowed"); + None + } + }; + PathSegment::from(segment.ident.clone()) + })); + let named = ParsedTypeNamed { + path: Path { + leading_colon, + segments, + }, + args, + }; + let named = match known_items::usize::try_from_named(named, parser)? 
{ + Ok(v) => return Ok(Self::Usize(v)), + Err(named) => named, + }; + parser.errors.error( + named, + "const parameter types other than `usize` are not yet implemented", + ); + Err(ParseFailed) + } +} + +impl ParseTypes for ParsedConstGenericType { + fn parse_types(ty: &mut TypePath, parser: &mut TypesParser<'_>) -> Result { + let TypePath { qself, path } = ty; + if let Some(_qself) = qself { + parse_failed!( + parser, + ty, + "associated types/consts are not yet implemented" + ); + } else { + parser.parse(path) + } + } +} + +impl ParseTypes for ParsedConstGenericType { + fn parse_types(ty: &mut Type, parser: &mut TypesParser<'_>) -> Result { + Ok(match ty { + Type::Path(ty) => parser.parse(ty)?, + _ => parse_failed!(parser, ty, "unsupported const generic type"), + }) + } +} + +pub(crate) struct ParseFailed; + +impl fmt::Display for ParseFailed { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("unknown parse failure") + } +} + +pub(crate) struct TypesParser<'a> { + generics: &'a ParsedGenerics, + /// used when parsing generic parameter defaults so + /// earlier parameter defaults can't refer to later parameters + cur_param_index: Option, + errors: &'a mut Errors, +} + +impl<'a> TypesParser<'a> { + pub(crate) fn run_with_errors, I>( + generics: &ParsedGenerics, + input: &mut I, + errors: &mut Errors, + ) -> Result { + TypesParser { + generics, + cur_param_index: None, + errors, + } + .parse(input) + } + pub(crate) fn maybe_run, I, G>( + generics: MaybeParsed<&ParsedGenerics, G>, + mut input: I, + errors: &mut Errors, + ) -> MaybeParsed { + let MaybeParsed::Parsed(generics) = generics else { + return MaybeParsed::Unrecognized(input); + }; + match Self::run_with_errors(generics, &mut input, errors) { + Ok(v) => MaybeParsed::Parsed(v), + Err(ParseFailed {}) => MaybeParsed::Unrecognized(input), + } + } + pub(crate) fn generics(&self) -> &'a ParsedGenerics { + self.generics + } + pub(crate) fn errors(&mut self) -> &mut Errors { + self.errors + } + pub(crate) fn parse, I>(&mut self, input: &mut I) -> Result { + T::parse_types(input, self) + } + pub(crate) fn get_named_param<'b>(&mut self, path: &'b Path) -> Option<(usize, &'b Ident)> { + let ident = path.get_ident()?; + let param_index = *self.generics().param_name_to_index_map.get(ident)?; + if self + .cur_param_index + .is_some_and(|cur_param_index| param_index >= cur_param_index) + { + self.errors + .error(path, "cannot use forward declared identifier here"); + } + Some((param_index, ident)) + } +} + +pub(crate) trait ParseTypes: Sized { + fn parse_types(input: &mut I, parser: &mut TypesParser<'_>) -> Result; +} + +impl, I> ParseTypes> for Box { + fn parse_types(input: &mut Box, parser: &mut TypesParser<'_>) -> Result { + Ok(Box::new(parser.parse(&mut **input)?)) + } +} + +impl, I, P: Clone> ParseTypes> for Punctuated { + fn parse_types( + input: &mut Punctuated, + parser: &mut TypesParser<'_>, + ) -> Result { + let retval = Punctuated::from_iter( + input + .pairs_mut() + .filter_map_pair_value_mut(|input| parser.parse(input).ok()), + ); + Ok(retval) + } +} + +#[derive(Debug, Clone)] +pub(crate) enum UnparsedGenericParam { + Type { + attrs: Vec, + options: HdlAttr, + ident: Ident, + colon_token: Token![:], + bounds: ParsedBounds, + mask_type_bounds: ParsedTypeBounds, + }, + Const { + attrs: Vec, + options: HdlAttr, + const_token: Token![const], + ident: Ident, + colon_token: Token![:], + ty: ParsedConstGenericType, + bounds: Option, + }, +} + +pub(crate) mod known_items { + use proc_macro2::{Ident, Span, 
TokenStream}; + use quote::ToTokens; + use syn::{ + Path, PathArguments, PathSegment, Token, + parse::{Parse, ParseStream}, + }; + + macro_rules! impl_known_item_body { + ($known_item:ident) => { + #[derive(Clone, Debug)] + #[allow(dead_code, non_camel_case_types)] + pub(crate) struct $known_item { + pub(crate) path: Path, + pub(crate) span: Span, + } + + #[allow(non_snake_case, dead_code)] + pub(crate) fn $known_item(span: Span) -> $known_item { + let segments = $known_item::PATH_SEGMENTS[0].iter() + .copied() + .map(|seg| PathSegment::from(Ident::new(seg, span))) + .collect(); + $known_item { + path: Path { + leading_colon: Some(Token![::](span)), + segments, + }, + span, + } + } + + impl ToTokens for $known_item { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + } + } + + impl $known_item { + pub(crate) fn parse_path(path: Path) -> Result { + if let Some(ident) = path.get_ident() { + if ident == stringify!($known_item) { + return Ok(Self { span: ident.span(), path }); + } + } + for &path_segments in Self::PATH_SEGMENTS.iter() { + if path.segments.len() == path_segments.len() + && path + .segments + .iter() + .zip(path_segments) + .all(|(seg, expected)| { + matches!(seg.arguments, PathArguments::None) + && seg.ident == *expected + }) + { + return Ok(Self { span: path.segments.last().unwrap().ident.span(), path }); + } + } + Err(path) + } + #[allow(dead_code)] + pub(crate) fn parse_path_with_arguments(mut path: Path) -> Result<(Self, PathArguments), Path> { + let Some(last_segment) = path.segments.last_mut() else { + return Err(path); + }; + let arguments = std::mem::replace(&mut last_segment.arguments, PathArguments::None); + match Self::parse_path(path) { + Ok(retval) => Ok((retval, arguments)), + Err(mut path) => { + path.segments.last_mut().unwrap().arguments = arguments; + Err(path) + } + } + } + } + + impl Parse for $known_item { + fn parse(input: ParseStream) -> syn::Result { + Self::parse_path(Path::parse_mod_style(input)?).map_err(|path| { + syn::Error::new_spanned( + path, + concat!("expected ", stringify!($known_item)), + ) + }) + } + } + + crate::fold::no_op_fold!($known_item); + + impl $known_item { + #[allow(dead_code)] + pub(crate) fn try_from_named( + named: super::ParsedTypeNamed, + parser: &mut super::TypesParser<'_>, + ) -> Result, super::ParseFailed> { + let super::ParsedTypeNamed { path, args } = named; + let path = match Self::parse_path(path) { + Ok(path) => path, + Err(path) => return Ok(Err(super::ParsedTypeNamed { path, args })), + }; + if let Some(args) = args.filter(|args| !args.args.is_empty()) { + parser.errors().error( + &path, + format_args!("wrong number of generic arguments supplied: got {}, expected at most 0", args.args.len()), + ); + } + Ok(Ok(path)) + } + } + }; + } + + macro_rules! impl_known_item { + ($(#[alias = $(::$alias:ident)+])* [$(::$seg:ident)+] ::$known_item:ident) => { + impl_known_item_body!($known_item); + + impl $known_item { + pub(crate) const PATH_SEGMENTS: &'static [&'static [&'static str]] = &[ + &[ + $(stringify!($seg),)+ + stringify!($known_item), + ], + $(&[ + $(stringify!($alias),)+ + ],)* + ]; + } + }; + ($(#[alias = $(::$alias:ident)+])* $([$(::$head:ident)*])? ::$next:ident $(::$tail:ident)+) => { + impl_known_item!($(#[alias = $(::$alias)+])* [$($(::$head)*)? 
::$next] $(::$tail)+); + }; + } + + impl_known_item!(::fayalite::array::Array); + impl_known_item!(::fayalite::array::ArrayType); + impl_known_item!(::fayalite::bundle::BundleType); + impl_known_item!(::fayalite::enum_::EnumType); + impl_known_item!(::fayalite::int::BoolOrIntType); + impl_known_item!(::fayalite::int::DynSize); + impl_known_item!(::fayalite::int::IntType); + impl_known_item!(::fayalite::int::KnownSize); + impl_known_item!(::fayalite::int::SInt); + impl_known_item!(::fayalite::int::SIntType); + impl_known_item!(::fayalite::int::Size); + impl_known_item!(::fayalite::int::UInt); + impl_known_item!(::fayalite::int::UIntType); + impl_known_item!(::fayalite::reset::ResetType); + impl_known_item!(::fayalite::ty::CanonicalType); + impl_known_item!(::fayalite::ty::StaticType); + impl_known_item!(::fayalite::ty::Type); + impl_known_item!(::fayalite::ty::Type::MaskType); + impl_known_item!(::fayalite::util::ConstUsize); + impl_known_item!( + #[alias = ::std::primitive::usize] + #[alias = ::core::primitive::usize] + ::fayalite::__std::primitive::usize + ); + impl_known_item!( + #[alias = ::std::marker::PhantomData] + #[alias = ::core::marker::PhantomData] + ::fayalite::__std::marker::PhantomData + ); +} + +macro_rules! impl_bounds { + ( + #[struct = $struct_type:ident] + $vis:vis enum $enum_type:ident { + $( + $Variant:ident, + )* + $( + #[unknown] + $Unknown:ident, + )? + } + ) => { + #[derive(Clone, Debug)] + $vis enum $enum_type { + $($Variant(known_items::$Variant),)* + $($Unknown(syn::TypeParamBound),)? + } + + $(impl From for $enum_type { + fn from(v: known_items::$Variant) -> Self { + Self::$Variant(v) + } + })* + + impl ToTokens for $enum_type { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + $(Self::$Variant(v) => v.to_tokens(tokens),)* + $(Self::$Unknown(v) => v.to_tokens(tokens),)? + } + } + } + + impl $enum_type { + $vis fn parse_path(path: Path) -> Result { + #![allow(unreachable_code)] + $(let path = match known_items::$Variant::parse_path(path) { + Ok(v) => return Ok(Self::$Variant(v)), + Err(path) => path, + };)* + $(return Ok(Self::$Unknown(syn::TraitBound { + paren_token: None, + modifier: syn::TraitBoundModifier::None, + lifetimes: None, + path, + }.into()));)? + Err(path) + } + $vis fn parse_type_param_bound(mut type_param_bound: syn::TypeParamBound) -> Result { + #![allow(unreachable_code)] + if let syn::TypeParamBound::Trait(mut trait_bound) = type_param_bound { + if let syn::TraitBound { + paren_token: _, + modifier: syn::TraitBoundModifier::None, + lifetimes: None, + path: _, + } = trait_bound { + match Self::parse_path(trait_bound.path) { + Ok(retval) => return Ok(retval), + Err(path) => trait_bound.path = path, + } + } + type_param_bound = trait_bound.into(); + } + $(return Ok(Self::$Unknown(type_param_bound));)? + Err(type_param_bound) + } + } + + impl Parse for $enum_type { + fn parse(input: ParseStream) -> syn::Result { + Self::parse_type_param_bound(input.parse()?) + .map_err(|type_param_bound| syn::Error::new_spanned( + type_param_bound, + format_args!("expected one of: {}", [$(stringify!($Variant)),*].join(", ")), + )) + } + } + + #[derive(Clone, Debug, Default)] + #[allow(non_snake_case)] + $vis struct $struct_type { + $($vis $Variant: Option,)* + $($vis $Unknown: Vec,)? 
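+            // editor comment: the generated bounds struct behaves like a set, holding
+            // at most one of each recognized bound plus every unrecognized bound in
+            // order of appearance (later duplicates of a recognized bound overwrite
+            // earlier ones via `Extend`)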
+ } + + impl ToTokens for $struct_type { + #[allow(unused_mut, unused_variables, unused_assignments)] + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut separator = None; + $(if let Some(v) = &self.$Variant { + separator.to_tokens(tokens); + separator = Some(::default()); + v.to_tokens(tokens); + })* + $(for v in &self.$Unknown { + separator.to_tokens(tokens); + separator = Some(::default()); + v.to_tokens(tokens); + })* + } + } + + const _: () = { + #[derive(Clone, Debug)] + #[allow(non_snake_case)] + $vis struct Iter { + $($Variant: Option,)* + $($Unknown: std::vec::IntoIter,)? + } + + impl IntoIterator for $struct_type { + type Item = $enum_type; + type IntoIter = Iter; + + fn into_iter(self) -> Self::IntoIter { + Iter { + $($Variant: self.$Variant,)* + $($Unknown: self.$Unknown.into_iter(),)? + } + } + } + + impl Iterator for Iter { + type Item = $enum_type; + + fn next(&mut self) -> Option { + $( + if let Some(value) = self.$Variant.take() { + return Some($enum_type::$Variant(value)); + } + )* + $( + if let Some(value) = self.$Unknown.next() { + return Some($enum_type::$Unknown(value)); + } + )? + None + } + + #[allow(unused_mut, unused_variables)] + fn fold B>(mut self, mut init: B, mut f: F) -> B { + $( + if let Some(value) = self.$Variant.take() { + init = f(init, $enum_type::$Variant(value)); + } + )* + $( + if let Some(value) = self.$Unknown.next() { + init = f(init, $enum_type::$Unknown(value)); + } + )? + init + } + } + }; + + impl Extend<$enum_type> for $struct_type { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|v| match v { + $($enum_type::$Variant(v) => { + self.$Variant = Some(v); + })* + $($enum_type::$Unknown(v) => { + self.$Unknown.push(v); + })? + }); + } + } + + impl FromIterator<$enum_type> for $struct_type { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } + } + + impl Extend<$struct_type> for $struct_type { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|v| { + $(if let Some(v) = v.$Variant { + self.$Variant = Some(v); + })* + $(self.$Unknown.extend(v.$Unknown);)* + }); + } + } + + impl FromIterator<$struct_type> for $struct_type { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } + } + + impl Parse for $struct_type { + fn parse(input: ParseStream) -> syn::Result { + let mut retval = Self::default(); + while !input.is_empty() { + retval.extend([input.parse::<$enum_type>()?]); + if input.is_empty() { + break; + } + input.parse::()?; + } + Ok(retval) + } + } + + impl $struct_type { + #[allow(dead_code)] + $vis fn add_implied_bounds(&mut self) { + let orig_bounds = self.clone(); + self.extend( + self.clone() + .into_iter() + .map($enum_type::implied_bounds), + ); + self.extend([orig_bounds]); // keep spans of explicitly provided bounds + } + } + }; +} + +impl_bounds! { + #[struct = ParsedBounds] + pub(crate) enum ParsedBound { + BoolOrIntType, + BundleType, + EnumType, + IntType, + KnownSize, + ResetType, + Size, + StaticType, + Type, + #[unknown] + Unknown, + } +} + +impl_bounds! 
{ + #[struct = ParsedTypeBounds] + pub(crate) enum ParsedTypeBound { + BoolOrIntType, + BundleType, + EnumType, + IntType, + ResetType, + StaticType, + Type, + #[unknown] + Unknown, + } +} + +impl From for ParsedBound { + fn from(value: ParsedTypeBound) -> Self { + match value { + ParsedTypeBound::BoolOrIntType(v) => ParsedBound::BoolOrIntType(v), + ParsedTypeBound::BundleType(v) => ParsedBound::BundleType(v), + ParsedTypeBound::EnumType(v) => ParsedBound::EnumType(v), + ParsedTypeBound::IntType(v) => ParsedBound::IntType(v), + ParsedTypeBound::ResetType(v) => ParsedBound::ResetType(v), + ParsedTypeBound::StaticType(v) => ParsedBound::StaticType(v), + ParsedTypeBound::Type(v) => ParsedBound::Type(v), + ParsedTypeBound::Unknown(v) => ParsedBound::Unknown(v), + } + } +} + +impl From for ParsedBounds { + fn from(value: ParsedTypeBounds) -> Self { + let ParsedTypeBounds { + BoolOrIntType, + BundleType, + EnumType, + IntType, + ResetType, + StaticType, + Type, + Unknown, + } = value; + Self { + BoolOrIntType, + BundleType, + EnumType, + IntType, + KnownSize: None, + ResetType, + Size: None, + StaticType, + Type, + Unknown, + } + } +} + +impl ParsedTypeBound { + fn implied_bounds(self) -> ParsedTypeBounds { + let span = self.span(); + match self { + Self::BoolOrIntType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::BundleType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::EnumType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::IntType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::BoolOrIntType(known_items::BoolOrIntType(span)), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::ResetType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::StaticType(known_items::StaticType(span)), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::StaticType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::Type(known_items::Type(span)), + ]), + Self::Type(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::from(v)]), + Self::Unknown(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::Unknown(v)]), + } + } +} + +impl_bounds! 
{
+    #[struct = ParsedSizeTypeBounds]
+    pub(crate) enum ParsedSizeTypeBound {
+        KnownSize,
+        Size,
+    }
+}
+
+impl From<ParsedSizeTypeBound> for ParsedBound {
+    fn from(value: ParsedSizeTypeBound) -> Self {
+        match value {
+            ParsedSizeTypeBound::KnownSize(v) => ParsedBound::KnownSize(v),
+            ParsedSizeTypeBound::Size(v) => ParsedBound::Size(v),
+        }
+    }
+}
+
+impl From<ParsedSizeTypeBounds> for ParsedBounds {
+    fn from(value: ParsedSizeTypeBounds) -> Self {
+        let ParsedSizeTypeBounds { KnownSize, Size } = value;
+        Self {
+            BoolOrIntType: None,
+            BundleType: None,
+            EnumType: None,
+            IntType: None,
+            KnownSize,
+            ResetType: None,
+            Size,
+            StaticType: None,
+            Type: None,
+            Unknown: vec![],
+        }
+    }
+}
+
+impl ParsedSizeTypeBound {
+    fn implied_bounds(self) -> ParsedSizeTypeBounds {
+        let span = self.span();
+        match self {
+            Self::KnownSize(v) => ParsedSizeTypeBounds::from_iter([
+                ParsedSizeTypeBound::from(v),
+                ParsedSizeTypeBound::Size(known_items::Size(span)),
+            ]),
+            Self::Size(v) => ParsedSizeTypeBounds::from_iter([ParsedSizeTypeBound::from(v)]),
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum ParsedBoundsCategory {
+    Type(ParsedTypeBounds),
+    SizeType(ParsedSizeTypeBounds),
+}
+
+impl ParsedBounds {
+    fn categorize(self, errors: &mut Errors, span: Span) -> ParsedBoundsCategory {
+        let mut type_bounds = None;
+        let mut size_type_bounds = None;
+        let mut unknown_bounds = vec![];
+        self.into_iter().for_each(|bound| match bound.categorize() {
+            ParsedBoundCategory::Type(bound) => {
+                type_bounds
+                    .get_or_insert_with(ParsedTypeBounds::default)
+                    .extend([bound]);
+            }
+            ParsedBoundCategory::SizeType(bound) => {
+                size_type_bounds
+                    .get_or_insert_with(ParsedSizeTypeBounds::default)
+                    .extend([bound]);
+            }
+            ParsedBoundCategory::Unknown(bound) => unknown_bounds.push(bound),
+        });
+        match (type_bounds, size_type_bounds, unknown_bounds.is_empty()) {
+            (None, None, true) => ParsedBoundsCategory::Type(ParsedTypeBounds {
+                Type: Some(known_items::Type(span)),
+                ..Default::default()
+            }),
+            (None, None, false) => {
+                errors.error(
+                    unknown_bounds.remove(0),
+                    "unknown bounds: must use at least one known bound (such as `Type`) with any unknown bounds",
+                );
+                ParsedBoundsCategory::Type(ParsedTypeBounds {
+                    Unknown: unknown_bounds,
+                    ..Default::default()
+                })
+            }
+            (None, Some(bounds), true) => ParsedBoundsCategory::SizeType(bounds),
+            (None, Some(bounds), false) => {
+                // TODO: implement
+                errors.error(
+                    unknown_bounds.remove(0),
+                    "unknown bounds with `Size` bounds are not implemented",
+                );
+                ParsedBoundsCategory::SizeType(bounds)
+            }
+            (Some(bounds), None, _) => ParsedBoundsCategory::Type(ParsedTypeBounds {
+                Unknown: unknown_bounds,
+                ..bounds
+            }),
+            (Some(type_bounds), Some(size_type_bounds), _) => {
+                errors.error(
+                    size_type_bounds
+                        .Size
+                        .unwrap_or_else(|| known_items::Size(span)),
+                    "conflicting bounds: can't use `Size` bounds with `Type` bounds",
+                );
+                ParsedBoundsCategory::Type(type_bounds)
+            }
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum ParsedBoundCategory {
+    Type(ParsedTypeBound),
+    SizeType(ParsedSizeTypeBound),
+    Unknown(syn::TypeParamBound),
+}
+
+impl ParsedBound {
+    fn categorize(self) -> ParsedBoundCategory {
+        match self {
+            Self::BoolOrIntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::BoolOrIntType(v)),
+            Self::BundleType(v) => ParsedBoundCategory::Type(ParsedTypeBound::BundleType(v)),
+            Self::EnumType(v) => ParsedBoundCategory::Type(ParsedTypeBound::EnumType(v)),
+            Self::IntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::IntType(v)),
+            Self::KnownSize(v) =>
ParsedBoundCategory::SizeType(ParsedSizeTypeBound::KnownSize(v)), + Self::ResetType(v) => ParsedBoundCategory::Type(ParsedTypeBound::ResetType(v)), + Self::Size(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::Size(v)), + Self::StaticType(v) => ParsedBoundCategory::Type(ParsedTypeBound::StaticType(v)), + Self::Type(v) => ParsedBoundCategory::Type(ParsedTypeBound::Type(v)), + Self::Unknown(v) => ParsedBoundCategory::Unknown(v), + } + } + fn implied_bounds(self) -> ParsedBounds { + match self.categorize() { + ParsedBoundCategory::Type(v) => v.implied_bounds().into(), + ParsedBoundCategory::SizeType(v) => v.implied_bounds().into(), + ParsedBoundCategory::Unknown(v) => ParsedBounds::from_iter([ParsedBound::Unknown(v)]), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedTypeParam { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) ident: Ident, + pub(crate) colon_token: Token![:], + pub(crate) bounds: ParsedTypeBounds, + pub(crate) default: Option<(Token![=], ParsedType)>, +} + +impl ToTokens for ParsedTypeParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + ident, + colon_token, + bounds, + default, + } = self; + let TypeParamOptions {} = options.body; + for attr in attrs { + attr.to_tokens(tokens); + } + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + bounds.to_tokens(tokens); + if let Some((eq, ty)) = default { + eq.to_tokens(tokens); + ty.to_tokens(tokens); + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedSizeTypeParam { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) ident: Ident, + pub(crate) colon_token: Token![:], + pub(crate) bounds: ParsedSizeTypeBounds, + pub(crate) default: Option<(Token![=], ParsedType)>, +} + +impl ToTokens for ParsedSizeTypeParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + ident, + colon_token, + bounds, + default, + } = self; + let TypeParamOptions {} = options.body; + for attr in attrs { + attr.to_tokens(tokens); + } + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + bounds.to_tokens(tokens); + if let Some((eq, ty)) = default { + eq.to_tokens(tokens); + ty.to_tokens(tokens); + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedConstParamWhereBounds { + pub(crate) const_usize: known_items::ConstUsize, + pub(crate) lt_token: Token![<], + pub(crate) ident: Ident, + pub(crate) gt_token: Token![>], + pub(crate) colon_token: Token![:], + pub(crate) bounds: ParsedSizeTypeBounds, +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedConstParam { + pub(crate) attrs: Vec, + pub(crate) options: HdlAttr, + pub(crate) const_token: Token![const], + pub(crate) ident: Ident, + pub(crate) colon_token: Token![:], + pub(crate) ty: ParsedConstGenericType, + pub(crate) bounds: ParsedConstParamWhereBounds, +} + +impl ToTokens for ParsedConstParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + attrs, + options, + const_token, + ident, + colon_token, + ty, + bounds: _, + } = self; + let ConstParamOptions {} = options.body; + for attr in attrs { + attr.to_tokens(tokens); + } + const_token.to_tokens(tokens); + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + ty.to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ParsedGenericParam { + Type(ParsedTypeParam), + SizeType(ParsedSizeTypeParam), + Const(ParsedConstParam), +} + +impl ToTokens for ParsedGenericParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + 
ParsedGenericParam::Type(v) => v.to_tokens(tokens), + ParsedGenericParam::SizeType(v) => v.to_tokens(tokens), + ParsedGenericParam::Const(v) => v.to_tokens(tokens), + } + } +} + +impl ParsedGenericParam { + pub(crate) fn ident(&self) -> &Ident { + match self { + ParsedGenericParam::Type(v) => &v.ident, + ParsedGenericParam::SizeType(v) => &v.ident, + ParsedGenericParam::Const(v) => &v.ident, + } + } +} + +#[derive(Debug, Clone, Default)] +pub(crate) struct ParsedGenerics { + pub(crate) lt_token: Option, + pub(crate) params: Punctuated, + pub(crate) gt_token: Option]>, + pub(crate) param_name_to_index_map: HashMap, +} + +impl ParsedGenerics { + pub(crate) fn for_static_type(mut self) -> ParsedGenerics { + for param in self.params.iter_mut() { + match param { + ParsedGenericParam::Type(ParsedTypeParam { ident, bounds, .. }) => { + bounds + .StaticType + .get_or_insert_with(|| known_items::StaticType(ident.span())); + } + ParsedGenericParam::SizeType(ParsedSizeTypeParam { ident, bounds, .. }) => { + bounds + .KnownSize + .get_or_insert_with(|| known_items::KnownSize(ident.span())); + } + ParsedGenericParam::Const(ParsedConstParam { ident, bounds, .. }) => { + bounds + .bounds + .KnownSize + .get_or_insert_with(|| known_items::KnownSize(ident.span())); + } + } + } + self + } + pub(crate) fn to_generics(&self, param_count: Option) -> Generics { + let param_count = param_count.unwrap_or(self.params.len()); + if param_count == 0 || self.params.is_empty() { + return Generics::default(); + } + let where_clause = ParsedGenericsWhereClause { + generics: self, + param_count, + } + .to_token_stream(); + let where_clause = if where_clause.is_empty() { + None + } else { + Some(parse_quote! { #where_clause }) + }; + Generics { + lt_token: self.lt_token, + params: self + .params + .pairs() + .take(param_count) + .map_pair_value_ref(|param| parse_quote! { #param }) + .collect(), + gt_token: self.gt_token, + where_clause, + } + } + pub(crate) fn generics_accumulation_type( + &self, + vis: Visibility, + ident: Ident, + param_count: usize, + ) -> ItemStruct { + let span = ident.span(); + ItemStruct { + attrs: vec![ + common_derives(span), + parse_quote_spanned! {span=> + #[allow(non_camel_case_types)] + }, + ], + vis, + struct_token: Token![struct](span), + ident: format_ident!("__{}__GenericsAccumulation{}", ident, param_count), + generics: self.to_generics(Some(param_count)), + fields: if self.params.is_empty() || param_count == 0 { + Fields::Unnamed(parse_quote_spanned! {span=> + (()) + }) + } else { + FieldsUnnamed { + paren_token: Paren(span), + unnamed: self + .params + .pairs() + .take(param_count) + .map_pair_value_ref(|param| match param { + ParsedGenericParam::Type(param) => { + let ident = ¶m.ident; + parse_quote! { #ident } + } + ParsedGenericParam::SizeType(param) => { + let ident = ¶m.ident; + parse_quote_spanned! {span=> + <#ident as ::fayalite::int::Size>::SizeType + } + } + ParsedGenericParam::Const(param) => { + let ident = ¶m.ident; + parse_quote_spanned! {span=> + <::fayalite::util::ConstUsize<#ident> + as ::fayalite::int::Size>::SizeType + } + } + }) + .collect(), + } + .into() + }, + semi_token: Some(Token![;](span)), + } + } + pub(crate) fn make_runtime_generics( + &self, + tokens: &mut TokenStream, + vis: &Visibility, + ident: &Ident, + target: &Path, + mut target_expr: impl FnMut(&MakeHdlTypeExprContext) -> Expr, + ) { + if self.params.is_empty() { + let target_expr = target_expr(&MakeHdlTypeExprContext { + named_param_values: vec![], + is_const: true, + }); + quote_spanned! 
{ident.span()=> + #[allow(non_upper_case_globals, dead_code)] + #vis const #ident: #target = #target_expr; + } + .to_tokens(tokens); + return; + } + let generics_accumulation_types = + Vec::from_iter((0..self.params.len()).map(|param_count| { + self.generics_accumulation_type(vis.clone(), ident.clone(), param_count) + })); + generics_accumulation_types[0].to_tokens(tokens); + let generics_accumulation_ident_0 = &generics_accumulation_types[0].ident; + quote_spanned! {ident.span()=> + #[allow(non_upper_case_globals, dead_code)] + #vis const #ident: #generics_accumulation_ident_0 = #generics_accumulation_ident_0(()); + } + .to_tokens(tokens); + let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span()); + let tokens = wrapped_in_const.inner(); + for i in &generics_accumulation_types[1..] { + i.to_tokens(tokens) + } + let final_generics = Generics::from(self); + let self_members: Vec = (0..self.params.len()) + .map(|index| { + let mut member = Index::from(index); + member.span = ident.span(); + parse_quote_spanned! {ident.span()=> + self.#member + } + }) + .collect(); + let param_token = Ident::new("__param", ident.span()); + for (param_count, (generics_accumulation_type, next_param)) in generics_accumulation_types + .iter() + .zip(&self.params) + .enumerate() + { + let cur_generics = &generics_accumulation_type.generics; + let cur_target = &generics_accumulation_type.ident; + let next_param_count = param_count + 1; + let is_last = next_param_count == self.params.len(); + let next_target = if is_last { + target.clone() + } else { + generics_accumulation_types[next_param_count] + .ident + .clone() + .into() + }; + let make_next_target_args = |next_arg: Option| { + let generics = if is_last { + &final_generics + } else { + &generics_accumulation_types[next_param_count].generics + }; + let type_generics = generics.split_for_impl().1; + let turbofish = type_generics.as_turbofish(); + let mut retval: AngleBracketedGenericArguments = parse_quote! { #turbofish }; + if let Some(next_arg) = next_arg { + *retval.args.last_mut().unwrap() = next_arg; + } + retval + }; + let (cur_impl_generics, cur_type_generics, cur_where_clause) = + cur_generics.split_for_impl(); + match next_param { + ParsedGenericParam::Type(ParsedTypeParam { + ident: param_ident, + default, + .. + }) => { + let next_generics = if is_last { + &final_generics + } else { + &generics_accumulation_types[next_param_count].generics + }; + let (_next_impl_generics, next_type_generics, _next_where_clause) = + next_generics.split_for_impl(); + let next_turbofish = next_type_generics.as_turbofish(); + let mut param: Expr = parse_quote_spanned! {ident.span()=> + #param_token + }; + let mut generics = next_generics.clone(); + let mut index_type = param_ident.clone(); + if let Some((_, default)) = default { + index_type = format_ident!("__Param", span = ident.span()); + generics.params.push(parse_quote! { #index_type }); + generics.make_where_clause().predicates.push(parse_quote_spanned! {ident.span()=> + #index_type: ::fayalite::ty::TypeOrDefault<#default, Type = #param_ident> + }); + let default_expr = default.make_hdl_type_expr(&MakeHdlTypeExprContext { + named_param_values: self_members[..param_count].to_vec(), + is_const: false, + }); + param = parse_quote_spanned! 
{ident.span()=> + ::fayalite::ty::TypeOrDefault::get(#param_token, || #default_expr) + }; + let context = MakeHdlTypeExprContext { + named_param_values: self_members[..param_count] + .iter() + .cloned() + .chain([default_expr]) + .collect(), + is_const: false, + }; + let output_expr = { + let next_turbofish = next_type_generics.as_turbofish(); + if is_last { + target_expr(&context) + } else { + let args = &context.named_param_values[..next_param_count]; + parse_quote_spanned! {ident.span()=> + #next_target #next_turbofish(#(#args),*) + } + } + }; + let next_target_args = make_next_target_args(Some(GenericArgument::Type( + default.clone().into(), + ))); + quote_spanned! {ident.span()=> + impl #cur_impl_generics ::fayalite::ty::FillInDefaultedGenerics + for #cur_target #cur_type_generics + #cur_where_clause + { + type Type = #next_target #next_target_args; + fn fill_in_defaulted_generics( + self, + ) -> ::Type { + #output_expr + } + } + } + .to_tokens(tokens); + } + let (impl_generics, _type_generics, where_clause) = generics.split_for_impl(); + let context = MakeHdlTypeExprContext { + named_param_values: self_members[..param_count] + .iter() + .cloned() + .chain([param]) + .collect(), + is_const: false, + }; + let output_expr = if is_last { + target_expr(&context) + } else { + let args = &context.named_param_values[..next_param_count]; + parse_quote_spanned! {ident.span()=> + #next_target #next_turbofish(#(#args),*) + } + }; + quote_spanned! {ident.span()=> + #[allow(non_upper_case_globals)] + #[automatically_derived] + impl #impl_generics ::fayalite::__std::ops::Index<#index_type> + for #cur_target #cur_type_generics + #where_clause + { + type Output = #next_target #next_type_generics; + + fn index(&self, #param_token: #index_type) -> &Self::Output { + ::fayalite::intern::Interned::into_inner( + ::fayalite::intern::Intern::intern_sized(#output_expr), + ) + } + } + } + .to_tokens(tokens); + } + ParsedGenericParam::SizeType(ParsedSizeTypeParam { + ident: param_ident, + bounds, + default, + .. + }) => { + { + let context = MakeHdlTypeExprContext { + named_param_values: self_members[..param_count] + .iter() + .cloned() + .chain([parse_quote_spanned! {ident.span()=> + #param_token + }]) + .collect(), + is_const: false, + }; + let next_target_args = + make_next_target_args(Some(parse_quote_spanned! {ident.span()=> + <#param_ident as ::fayalite::int::SizeType>::Size + })); + let output_expr = if is_last { + target_expr(&context) + } else { + let args = &context.named_param_values[..next_param_count]; + parse_quote_spanned! {ident.span()=> + #next_target #next_target_args(#(#args),*) + } + }; + let mut next_generics = cur_generics.clone(); + next_generics + .params + .push(parse_quote_spanned! {ident.span()=> + #param_ident: ::fayalite::int::SizeType + }); + next_generics.make_where_clause().predicates.push( + parse_quote_spanned! {ident.span()=> + <#param_ident as ::fayalite::int::SizeType>::Size: #bounds + }, + ); + let (next_impl_generics, _next_type_generics, next_where_clause) = + next_generics.split_for_impl(); + quote_spanned! 
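+                        // editor comment on the generated code below: indexing the current
+                        // accumulation value with the size-type parameter produces the next
+                        // accumulation stage (or the final target type when this is the
+                        // last parameter)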
{ident.span()=> + #[allow(non_upper_case_globals)] + #[automatically_derived] + impl #next_impl_generics ::fayalite::__std::ops::Index<#param_ident> + for #cur_target #cur_type_generics + #next_where_clause + { + type Output = #next_target #next_target_args; + + fn index(&self, #param_token: #param_ident) -> &Self::Output { + ::fayalite::intern::Interned::into_inner( + ::fayalite::intern::Intern::intern_sized(#output_expr), + ) + } + } + } + .to_tokens(tokens); + } + if let Some((_, default)) = default { + let _ = default; + todo!(); + } + } + ParsedGenericParam::Const(ParsedConstParam { + attrs: _, + options: _, + const_token, + ident: param_ident, + colon_token, + ty: ParsedConstGenericType::Usize(ty), + bounds: ParsedConstParamWhereBounds { bounds, .. }, + }) => { + let context = MakeHdlTypeExprContext { + named_param_values: self_members[..param_count] + .iter() + .cloned() + .chain([parse_quote_spanned! {ident.span()=> + #param_token + }]) + .collect(), + is_const: false, + }; + let next_target_args = + make_next_target_args(Some(parse_quote! { #param_ident })); + let output_expr = if is_last { + target_expr(&context) + } else { + let args = &context.named_param_values[..next_param_count]; + parse_quote_spanned! {ident.span()=> + #next_target #next_target_args(#(#args),*) + } + }; + let mut next_generics = cur_generics.clone(); + next_generics + .params + .push(parse_quote! { #const_token #param_ident #colon_token #ty }); + next_generics + .params + .push(parse_quote_spanned! {ident.span()=> + __Param: ::fayalite::int::SizeType< + Size = ::fayalite::util::ConstUsize<#param_ident>, + > + }); + next_generics.make_where_clause().predicates.push( + parse_quote_spanned! {ident.span()=> + ::fayalite::util::ConstUsize<#param_ident>: ::fayalite::int::Size + #bounds + }, + ); + let (next_impl_generics, _next_type_generics, next_where_clause) = + next_generics.split_for_impl(); + quote_spanned! 
{ident.span()=> + #[allow(non_upper_case_globals)] + #[automatically_derived] + impl #next_impl_generics ::fayalite::__std::ops::Index<__Param> + for #cur_target #cur_type_generics + #next_where_clause + { + type Output = #next_target #next_target_args; + + fn index(&self, #param_token: __Param) -> &Self::Output { + ::fayalite::intern::Interned::into_inner( + ::fayalite::intern::Intern::intern_sized(#output_expr), + ) + } + } + } + .to_tokens(tokens); + } + }; + } + } + pub(crate) fn parse<'a>(generics: &'a mut Generics) -> syn::Result { + let Generics { + lt_token, + params: input_params, + gt_token, + where_clause, + } = generics; + let mut errors = Errors::new(); + let mut predicates: Vec = Vec::with_capacity(input_params.len()); + struct LateParsedParam<'a> { + default: Option<(Token![=], &'a mut Type)>, + const_param_type: Option<&'a mut Type>, + } + let mut late_parsed_params: Vec> = + Vec::with_capacity(input_params.len()); + let mut unparsed_params: Punctuated = Punctuated::new(); + for input_param in input_params.pairs_mut() { + let (input_param, punct) = input_param.into_tuple(); + let (unparsed_param, late_parsed_param) = match input_param { + GenericParam::Lifetime(param) => { + errors.unwrap_or_default( + HdlAttr::::parse_and_take_attr( + &mut param.attrs, + ), + ); + errors.error(param, "lifetime generics are not supported by #[hdl]"); + continue; + } + GenericParam::Type(TypeParam { + attrs, + ident, + colon_token, + bounds, + eq_token, + default, + }) => { + let span = ident.span(); + let options = errors + .unwrap_or_default( + HdlAttr::::parse_and_take_attr(attrs), + ) + .unwrap_or_default(); + let colon_token = colon_token.unwrap_or_else(|| Token![:](span)); + if !bounds.is_empty() { + predicates.push(WherePredicate::Type(PredicateType { + lifetimes: None, + bounded_ty: parse_quote! { #ident }, + colon_token, + bounds: bounds.clone(), + })); + } + ( + UnparsedGenericParam::Type { + attrs: attrs.clone(), + options, + ident: ident.clone(), + colon_token, + bounds: ParsedBounds::default(), + mask_type_bounds: ParsedTypeBounds::default(), + }, + LateParsedParam { + default: default + .as_mut() + .map(|v| (eq_token.unwrap_or_else(|| Token![=](span)), v)), + const_param_type: None, + }, + ) + } + GenericParam::Const(ConstParam { + attrs, + const_token, + ident, + colon_token, + ty, + eq_token, + default, + }) => { + let options = errors + .unwrap_or_default( + HdlAttr::::parse_and_take_attr(attrs), + ) + .unwrap_or_default(); + if let Some(default) = default { + let _ = eq_token; + errors.error( + default, + "const generics' default values are not yet implemented", + ); + } + ( + UnparsedGenericParam::Const { + attrs: attrs.clone(), + options, + const_token: *const_token, + ident: ident.clone(), + colon_token: *colon_token, + ty: ParsedConstGenericType::Usize(known_items::usize(ident.span())), + bounds: None, + }, + LateParsedParam { + default: None, + const_param_type: Some(ty), + }, + ) + } + }; + late_parsed_params.push(late_parsed_param); + unparsed_params.extend([Pair::new(unparsed_param, punct.cloned())]); + } + let param_name_to_index_map: HashMap = unparsed_params + .iter() + .enumerate() + .map(|(index, param)| { + let (UnparsedGenericParam::Type { ident, .. } + | UnparsedGenericParam::Const { ident, .. 
}) = param; + (ident.clone(), index) + }) + .collect(); + if let Some(where_clause) = where_clause { + predicates.extend(where_clause.predicates.iter().cloned()); + } + for predicate in predicates { + let WherePredicate::Type(PredicateType { + lifetimes: None, + bounded_ty: + Type::Path(TypePath { + qself, + path: bounded_ty, + }), + colon_token, + bounds: unparsed_bounds, + }) = predicate + else { + errors.error(predicate, "unsupported where predicate kind"); + continue; + }; + if let Some(qself) = &qself { + if let QSelf { + lt_token: _, + ty: base_ty, + position: 3, + as_token: Some(_), + gt_token: _, + } = qself + { + if bounded_ty.segments.len() == 4 && unparsed_bounds.len() == 1 { + if let ( + Ok(_), + Type::Path(TypePath { + qself: None, + path: base_ty, + }), + ) = ( + known_items::MaskType::parse_path(bounded_ty.clone()), + &**base_ty, + ) { + let Some(&index) = base_ty + .get_ident() + .and_then(|base_ty| param_name_to_index_map.get(base_ty)) + else { + errors.error( + TypePath { + qself: Some(qself.clone()), + path: bounded_ty, + }, + "unsupported where predicate kind", + ); + continue; + }; + let parsed_bounds = match &mut unparsed_params[index] { + UnparsedGenericParam::Type { + mask_type_bounds, .. + } => mask_type_bounds, + UnparsedGenericParam::Const { ident, .. } => { + errors.error( + bounded_ty, + format_args!( + "expected type, found const parameter `{ident}`" + ), + ); + continue; + } + }; + parsed_bounds.extend(errors.ok(syn::parse2::( + unparsed_bounds.to_token_stream(), + ))); + continue; + } + } + } + errors.error( + TypePath { + qself: Some(qself.clone()), + path: bounded_ty, + }, + "unsupported where predicate kind", + ); + continue; + } + if let Ok(( + const_usize, + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token: _, + lt_token, + args, + gt_token, + }), + )) = known_items::ConstUsize::parse_path_with_arguments(bounded_ty.clone()) + { + if args.len() != 1 { + errors.error(const_usize, "ConstUsize must have one argument"); + continue; + } + let GenericArgument::Type(Type::Path(TypePath { + qself: None, + path: arg, + })) = &args[0] + else { + errors.error( + const_usize, + "the only supported ConstUsize argument is a const generic parameter", + ); + continue; + }; + let arg = arg.get_ident(); + let Some((arg, &index)) = + arg.and_then(|arg| Some((arg, param_name_to_index_map.get(arg)?))) + else { + errors.error( + const_usize, + "the only supported ConstUsize argument is a const generic parameter", + ); + continue; + }; + let parsed_bounds = match &mut unparsed_params[index] { + UnparsedGenericParam::Const { bounds, .. } => bounds, + UnparsedGenericParam::Type { ident, .. } => { + errors.error( + bounded_ty, + format_args!("expected const generic parameter, found type `{ident}`"), + ); + continue; + } + }; + parsed_bounds + .get_or_insert_with(|| ParsedConstParamWhereBounds { + const_usize, + lt_token, + ident: arg.clone(), + gt_token, + colon_token, + bounds: ParsedSizeTypeBounds::default(), + }) + .bounds + .extend(errors.ok(syn::parse2::( + unparsed_bounds.to_token_stream(), + ))); + continue; + } + let Some(&index) = bounded_ty + .get_ident() + .and_then(|bounded_ty| param_name_to_index_map.get(bounded_ty)) + else { + errors.error( + bounded_ty, + "where predicate bounded type must be one of the generic type \ + parameters or `ConstUsize`", + ); + continue; + }; + let parsed_bounds = match &mut unparsed_params[index] { + UnparsedGenericParam::Type { bounds, .. } => bounds, + UnparsedGenericParam::Const { ident, .. 
} => { + errors.error( + bounded_ty, + format_args!("expected type, found const parameter `{ident}`"), + ); + continue; + } + }; + parsed_bounds.extend(errors.ok(syn::parse2::( + unparsed_bounds.to_token_stream(), + ))); + } + let params = + Punctuated::from_iter(unparsed_params.into_pairs().map_pair_value( + |param| match param { + UnparsedGenericParam::Type { + attrs, + options, + ident, + colon_token, + mut bounds, + mask_type_bounds, + } => { + for bound in mask_type_bounds { + bounds + .Type + .get_or_insert_with(|| known_items::Type(bound.span())); + match bound { + ParsedTypeBound::BoolOrIntType(_) + | ParsedTypeBound::BundleType(_) + | ParsedTypeBound::EnumType(_) + | ParsedTypeBound::IntType(_) + | ParsedTypeBound::ResetType(_) => { + errors.error(bound, "bounds on mask types are not implemented"); + } + ParsedTypeBound::StaticType(bound) => { + if bounds.StaticType.is_none() { + errors.error( + bound, + "StaticType bound on mask type without corresponding \ + StaticType bound on original type is not implemented", + ); + } + } + ParsedTypeBound::Type(_) => {} + ParsedTypeBound::Unknown(_) => { + errors.error( + bound, + "unknown bounds on mask types are not implemented", + ); + } + } + } + bounds.add_implied_bounds(); + match bounds.categorize(&mut errors, ident.span()) { + ParsedBoundsCategory::Type(bounds) => { + ParsedGenericParam::Type(ParsedTypeParam { + attrs, + options, + ident, + colon_token, + bounds, + default: None, + }) + } + ParsedBoundsCategory::SizeType(bounds) => { + ParsedGenericParam::SizeType(ParsedSizeTypeParam { + attrs, + options, + ident, + colon_token, + bounds, + default: None, + }) + } + } + } + UnparsedGenericParam::Const { + attrs, + options, + const_token, + ident, + colon_token, + ty, + bounds, + } => { + let span = ident.span(); + let mut bounds = bounds.unwrap_or_else(|| ParsedConstParamWhereBounds { + const_usize: known_items::ConstUsize(span), + lt_token: Token![<](span), + ident: ident.clone(), + gt_token: Token![>](span), + colon_token: Token![:](span), + bounds: ParsedSizeTypeBounds { + KnownSize: None, + Size: Some(known_items::Size(span)), + }, + }); + bounds + .bounds + .Size + .get_or_insert_with(|| known_items::Size(span)); + bounds.bounds.add_implied_bounds(); + ParsedGenericParam::Const(ParsedConstParam { + bounds, + attrs, + options, + const_token, + ident, + colon_token, + ty, + }) + } + }, + )); + let mut retval = Self { + lt_token: *lt_token, + params, + gt_token: *gt_token, + param_name_to_index_map, + }; + for ( + cur_param_index, + LateParsedParam { + default, + const_param_type, + }, + ) in late_parsed_params.into_iter().enumerate() + { + let mut parser = TypesParser { + generics: &retval, + cur_param_index: Some(cur_param_index), + errors: &mut errors, + }; + let parsed_default = default.and_then(|(eq, ty)| { + let ty = parser.parse(ty).ok()?; + Some((eq, ty)) + }); + let parsed_const_param_type = const_param_type.and_then(|ty| parser.parse(ty).ok()); + match &mut retval.params[cur_param_index] { + ParsedGenericParam::Type(ParsedTypeParam { default, .. }) + | ParsedGenericParam::SizeType(ParsedSizeTypeParam { default, .. 
}) => { + *default = parsed_default; + } + ParsedGenericParam::Const(ParsedConstParam { + attrs: _, + options: _, + const_token: _, + ident: _, + colon_token: _, + ty, + bounds: _, + }) => { + if let Some(parsed_const_param_type) = parsed_const_param_type { + *ty = parsed_const_param_type; + } + } + } + } + errors.finish()?; + Ok(retval) + } +} + +impl ToTokens for ParsedGenerics { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + lt_token, + params, + gt_token, + param_name_to_index_map: _, + } = self; + if params.is_empty() { + return; + } + lt_token.unwrap_or_default().to_tokens(tokens); + params.to_tokens(tokens); + gt_token.unwrap_or_default().to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedGenericsImplGenerics<'a> { + generics: &'a ParsedGenerics, + param_count: usize, +} + +impl ToTokens for ParsedGenericsImplGenerics<'_> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + generics, + param_count, + } = *self; + let ParsedGenerics { + lt_token, + params, + gt_token, + param_name_to_index_map: _, + } = generics; + if params.is_empty() || param_count == 0 { + return; + } + lt_token.unwrap_or_default().to_tokens(tokens); + for param in params.pairs().take(param_count) { + let (param, punct) = param.into_tuple(); + match param { + ParsedGenericParam::Type(ParsedTypeParam { + attrs: _, + options, + ident, + colon_token, + bounds, + default: _, + }) => { + let TypeParamOptions {} = options.body; + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + bounds.to_tokens(tokens); + } + ParsedGenericParam::SizeType(ParsedSizeTypeParam { + attrs: _, + options, + ident, + colon_token, + bounds, + default: _, + }) => { + let TypeParamOptions {} = options.body; + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + bounds.to_tokens(tokens); + } + ParsedGenericParam::Const(ParsedConstParam { + attrs: _, + options, + const_token, + ident, + colon_token, + ty, + bounds: _, + }) => { + let ConstParamOptions {} = options.body; + const_token.to_tokens(tokens); + ident.to_tokens(tokens); + colon_token.to_tokens(tokens); + ty.to_tokens(tokens); + } + } + punct.to_tokens(tokens); + } + gt_token.unwrap_or_default().to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedGenericsTurbofish<'a> { + generics: &'a ParsedGenerics, + param_count: usize, +} + +impl ToTokens for ParsedGenericsTurbofish<'_> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + generics, + param_count, + } = *self; + if generics.params.is_empty() || param_count == 0 { + return; + } + Token![::](generics.lt_token.unwrap_or_default().span).to_tokens(tokens); + ParsedGenericsTypeGenerics { + generics, + param_count, + } + .to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedGenericsTypeGenerics<'a> { + generics: &'a ParsedGenerics, + param_count: usize, +} + +impl ToTokens for ParsedGenericsTypeGenerics<'_> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + generics, + param_count, + } = *self; + let ParsedGenerics { + lt_token, + params, + gt_token, + param_name_to_index_map: _, + } = generics; + if params.is_empty() || param_count == 0 { + return; + } + lt_token.unwrap_or_default().to_tokens(tokens); + for param in params.pairs().take(param_count) { + let (param, punct) = param.into_tuple(); + param.ident().to_tokens(tokens); + punct.to_tokens(tokens); + } + gt_token.unwrap_or_default().to_tokens(tokens); + } +} + +#[derive(Debug, Clone)] +#[allow(dead_code)] +pub(crate) struct 
ParsedGenericsWhereClause<'a> { + generics: &'a ParsedGenerics, + param_count: usize, +} + +impl ToTokens for ParsedGenericsWhereClause<'_> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut need_where_token = true; + let mut where_token = |span: Span| -> Option { + mem::replace(&mut need_where_token, false).then(|| Token![where](span)) + }; + for param in self.generics.params.pairs().take(self.param_count) { + let (param, comma_token) = param.into_tuple(); + match param { + ParsedGenericParam::Type(ParsedTypeParam { + ident, + colon_token, + bounds, + .. + }) => { + if let Some(static_type) = &bounds.StaticType { + where_token(static_type.span).to_tokens(tokens); + quote_spanned! {static_type.span=> + <#ident as ::fayalite::ty::Type>::MaskType #colon_token #static_type #comma_token + } + .to_tokens(tokens); + } + } + ParsedGenericParam::SizeType(_) => {} + ParsedGenericParam::Const(ParsedConstParam { + ident: _, + bounds: + ParsedConstParamWhereBounds { + const_usize, + lt_token, + ident, + gt_token, + colon_token, + bounds, + }, + .. + }) => { + where_token(ident.span()).to_tokens(tokens); + const_usize.to_tokens(tokens); + lt_token.to_tokens(tokens); + ident.to_tokens(tokens); + gt_token.to_tokens(tokens); + colon_token.to_tokens(tokens); + bounds.to_tokens(tokens); + comma_token.to_tokens(tokens); + } + } + } + } +} + +#[allow(dead_code)] +pub(crate) trait AsTurbofish { + type Turbofish<'a>: 'a + ToTokens + Clone + fmt::Debug + where + Self: 'a; + fn as_turbofish(&self) -> Self::Turbofish<'_>; +} + +impl AsTurbofish for TypeGenerics<'_> { + type Turbofish<'a> + = Turbofish<'a> + where + Self: 'a; + + fn as_turbofish(&self) -> Self::Turbofish<'_> { + TypeGenerics::as_turbofish(self) + } +} + +impl AsTurbofish for ParsedGenericsTypeGenerics<'_> { + type Turbofish<'a> + = ParsedGenericsTurbofish<'a> + where + Self: 'a; + + fn as_turbofish(&self) -> Self::Turbofish<'_> { + let Self { + generics, + param_count, + } = *self; + ParsedGenericsTurbofish { + generics, + param_count, + } + } +} + +pub(crate) trait SplitForImpl { + type ImplGenerics<'a>: 'a + ToTokens + Clone + fmt::Debug + where + Self: 'a; + type TypeGenerics<'a>: 'a + AsTurbofish + ToTokens + Clone + fmt::Debug + where + Self: 'a; + type WhereClause<'a>: 'a + ToTokens + Clone + fmt::Debug + where + Self: 'a; + fn split_for_impl( + &self, + ) -> ( + Self::ImplGenerics<'_>, + Self::TypeGenerics<'_>, + Self::WhereClause<'_>, + ); +} + +impl SplitForImpl for Generics { + type ImplGenerics<'a> = ImplGenerics<'a>; + type TypeGenerics<'a> = TypeGenerics<'a>; + type WhereClause<'a> = Option<&'a WhereClause>; + fn split_for_impl( + &self, + ) -> ( + Self::ImplGenerics<'_>, + Self::TypeGenerics<'_>, + Self::WhereClause<'_>, + ) { + Generics::split_for_impl(self) + } +} + +impl SplitForImpl for ParsedGenerics { + type ImplGenerics<'a> + = ParsedGenericsImplGenerics<'a> + where + Self: 'a; + + type TypeGenerics<'a> + = ParsedGenericsTypeGenerics<'a> + where + Self: 'a; + + type WhereClause<'a> + = ParsedGenericsWhereClause<'a> + where + Self: 'a; + + fn split_for_impl( + &self, + ) -> ( + Self::ImplGenerics<'_>, + Self::TypeGenerics<'_>, + Self::WhereClause<'_>, + ) { + ( + ParsedGenericsImplGenerics { + generics: self, + param_count: self.params.len(), + }, + ParsedGenericsTypeGenerics { + generics: self, + param_count: self.params.len(), + }, + ParsedGenericsWhereClause { + generics: self, + param_count: self.params.len(), + }, + ) + } +} + +impl From<&'_ ParsedGenerics> for Generics { + fn from(value: &ParsedGenerics) -> Self 
{ + value.to_generics(None) + } +} + +impl From for Generics { + fn from(value: ParsedGenerics) -> Self { + From::<&ParsedGenerics>::from(&value) + } +} + +#[derive(Debug, Copy, Clone)] +pub(crate) enum MaybeParsed { + Unrecognized(U), + Parsed(P), +} + +impl MaybeParsed { + pub(crate) fn as_ref(&self) -> MaybeParsed<&P, &U> { + match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v), + } + } + #[allow(dead_code)] + pub(crate) fn as_mut(&mut self) -> MaybeParsed<&mut P, &mut U> { + match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v), + } + } + pub(crate) fn map PR, UF: FnOnce(U) -> UR, PR, UR>( + self, + pf: PF, + uf: UF, + ) -> MaybeParsed { + match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(uf(v)), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(pf(v)), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct MaybeParsedIter(pub(crate) MaybeParsed); + +impl Iterator for MaybeParsedIter { + type Item = MaybeParsed; + + fn next(&mut self) -> Option { + match &mut self.0 { + MaybeParsed::Unrecognized(i) => i.next().map(MaybeParsed::Unrecognized), + MaybeParsed::Parsed(i) => i.next().map(MaybeParsed::Parsed), + } + } + + fn size_hint(&self) -> (usize, Option) { + match &self.0 { + MaybeParsed::Unrecognized(i) => i.size_hint(), + MaybeParsed::Parsed(i) => i.size_hint(), + } + } +} + +impl ExactSizeIterator for MaybeParsedIter {} + +impl DoubleEndedIterator for MaybeParsedIter { + fn next_back(&mut self) -> Option { + match &mut self.0 { + MaybeParsed::Unrecognized(i) => i.next_back().map(MaybeParsed::Unrecognized), + MaybeParsed::Parsed(i) => i.next_back().map(MaybeParsed::Parsed), + } + } +} + +impl IntoIterator for MaybeParsed { + type Item = MaybeParsed; + type IntoIter = MaybeParsedIter; + + fn into_iter(self) -> Self::IntoIter { + MaybeParsedIter(match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v.into_iter()), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v.into_iter()), + }) + } +} + +impl MaybeParsed { + #[allow(dead_code)] + pub(crate) fn iter<'a>( + &'a self, + ) -> MaybeParsedIter<<&'a P as IntoIterator>::IntoIter, <&'a U as IntoIterator>::IntoIter> + where + &'a P: IntoIterator, + &'a U: IntoIterator, + { + self.into_iter() + } + #[allow(dead_code)] + pub(crate) fn iter_mut<'a>( + &'a mut self, + ) -> MaybeParsedIter<<&'a mut P as IntoIterator>::IntoIter, <&'a mut U as IntoIterator>::IntoIter> + where + &'a mut P: IntoIterator, + &'a mut U: IntoIterator, + { + self.into_iter() + } +} + +impl<'a, P, U> IntoIterator for &'a MaybeParsed +where + &'a P: IntoIterator, + &'a U: IntoIterator, +{ + type Item = MaybeParsed<<&'a P as IntoIterator>::Item, <&'a U as IntoIterator>::Item>; + type IntoIter = + MaybeParsedIter<<&'a P as IntoIterator>::IntoIter, <&'a U as IntoIterator>::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + MaybeParsedIter(match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v.into_iter()), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v.into_iter()), + }) + } +} + +impl<'a, P, U> IntoIterator for &'a mut MaybeParsed +where + &'a mut P: IntoIterator, + &'a mut U: IntoIterator, +{ + type Item = MaybeParsed<<&'a mut P as IntoIterator>::Item, <&'a mut U as IntoIterator>::Item>; + type IntoIter = MaybeParsedIter< + <&'a mut P as IntoIterator>::IntoIter, + <&'a mut U as IntoIterator>::IntoIter, + >; + + fn into_iter(self) -> Self::IntoIter { + 
MaybeParsedIter(match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v.into_iter()), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v.into_iter()), + }) + } +} + +impl From> for Generics { + fn from(value: MaybeParsed) -> Self { + match value { + MaybeParsed::Unrecognized(value) => value, + MaybeParsed::Parsed(value) => value.into(), + } + } +} + +impl From<&'_ MaybeParsed> for Generics { + fn from(value: &MaybeParsed) -> Self { + match value { + MaybeParsed::Unrecognized(value) => value.clone(), + MaybeParsed::Parsed(value) => value.into(), + } + } +} + +impl ToTokens for MaybeParsed { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + MaybeParsed::Unrecognized(v) => v.to_tokens(tokens), + MaybeParsed::Parsed(v) => v.to_tokens(tokens), + } + } + + fn to_token_stream(&self) -> TokenStream { + match self { + MaybeParsed::Unrecognized(v) => v.to_token_stream(), + MaybeParsed::Parsed(v) => v.to_token_stream(), + } + } + + fn into_token_stream(self) -> TokenStream { + match self { + MaybeParsed::Unrecognized(v) => v.into_token_stream(), + MaybeParsed::Parsed(v) => v.into_token_stream(), + } + } +} + +impl AsTurbofish for MaybeParsed { + type Turbofish<'a> + = MaybeParsed, U::Turbofish<'a>> + where + Self: 'a; + + fn as_turbofish(&self) -> Self::Turbofish<'_> { + match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(v.as_turbofish()), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(v.as_turbofish()), + } + } +} + +impl SplitForImpl for MaybeParsed { + type ImplGenerics<'a> + = MaybeParsed, U::ImplGenerics<'a>> + where + Self: 'a; + type TypeGenerics<'a> + = MaybeParsed, U::TypeGenerics<'a>> + where + Self: 'a; + type WhereClause<'a> + = MaybeParsed, U::WhereClause<'a>> + where + Self: 'a; + + fn split_for_impl( + &self, + ) -> ( + Self::ImplGenerics<'_>, + Self::TypeGenerics<'_>, + Self::WhereClause<'_>, + ) { + match self { + MaybeParsed::Unrecognized(v) => { + let (i, t, w) = v.split_for_impl(); + ( + MaybeParsed::Unrecognized(i), + MaybeParsed::Unrecognized(t), + MaybeParsed::Unrecognized(w), + ) + } + MaybeParsed::Parsed(v) => { + let (i, t, w) = v.split_for_impl(); + ( + MaybeParsed::Parsed(i), + MaybeParsed::Parsed(t), + MaybeParsed::Parsed(w), + ) + } + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedField { + pub(crate) attrs: Vec, + pub(crate) vis: Visibility, + pub(crate) ident: Option, + pub(crate) colon_token: Option, + pub(crate) ty: ParsedType, +} + +impl ParseTypes for ParsedField { + fn parse_types(input: &mut Field, parser: &mut TypesParser<'_>) -> Result { + let Field { + attrs, + vis, + mutability, + ident, + colon_token, + ty, + } = input; + if !matches!(mutability, FieldMutability::None) { + // FIXME: use mutability as the spanned tokens, + // blocked on https://github.com/dtolnay/syn/issues/1717 + parser + .errors() + .error(&ident, "field mutability is not supported"); + *mutability = FieldMutability::None; + } + *mutability = FieldMutability::None; + Ok(Self { + attrs: attrs.clone(), + vis: vis.clone(), + ident: ident.clone(), + colon_token: *colon_token, + ty: parser.parse(ty)?, + }) + } +} + +impl From for Field { + fn from(value: ParsedField) -> Self { + Self { + attrs: value.attrs, + vis: value.vis, + mutability: FieldMutability::None, + ident: value.ident, + colon_token: value.colon_token, + ty: value.ty.into(), + } + } +} + +impl<'a> MaybeParsed<&'a ParsedField, &'a Field> { + pub(crate) fn ident(self) -> &'a Option { + match self { + MaybeParsed::Unrecognized(v) => &v.ident, + 
MaybeParsed::Parsed(v) => &v.ident, + } + } + pub(crate) fn ty(self) -> MaybeParsed<&'a ParsedType, &'a Type> { + match self { + MaybeParsed::Unrecognized(v) => MaybeParsed::Unrecognized(&v.ty), + MaybeParsed::Parsed(v) => MaybeParsed::Parsed(&v.ty), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ParsedFieldsNamed { + pub(crate) brace_token: Brace, + pub(crate) named: Punctuated, +} + +impl ParseTypes for ParsedFieldsNamed { + fn parse_types( + input: &mut FieldsNamed, + parser: &mut TypesParser<'_>, + ) -> Result { + let FieldsNamed { brace_token, named } = input; + Ok(Self { + brace_token: *brace_token, + named: parser.parse(named)?, + }) + } +} + +impl From for FieldsNamed { + fn from(value: ParsedFieldsNamed) -> Self { + Self { + brace_token: value.brace_token, + named: value + .named + .into_pairs() + .map_pair_value(Into::into) + .collect(), + } + } +} + +impl MaybeParsed { + pub(crate) fn named( + &self, + ) -> MaybeParsed<&Punctuated, &Punctuated> { + self.as_ref().map(|v| &v.named, |v| &v.named) + } +} + +impl From> for FieldsNamed { + fn from(value: MaybeParsed) -> Self { + match value { + MaybeParsed::Unrecognized(value) => value, + MaybeParsed::Parsed(value) => value.into(), + } + } +} + +#[derive(Debug)] +pub(crate) struct MakeHdlTypeExprContext { + pub(crate) named_param_values: Vec, + pub(crate) is_const: bool, +} + +pub(crate) trait MakeHdlTypeExpr { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr; +} + +impl MakeHdlTypeExpr for ParsedType { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + match self { + Self::Delimited(v) => v.make_hdl_type_expr(context), + Self::Named(v) => v.make_hdl_type_expr(context), + Self::NamedParam(v) => v.make_hdl_type_expr(context), + Self::Tuple(v) => v.make_hdl_type_expr(context), + Self::ConstUsize(v) => v.make_hdl_type_expr(context), + Self::PhantomData(v) => v.make_hdl_type_expr(context), + Self::Array(v) => v.make_hdl_type_expr(context), + Self::UInt(v) => v.make_hdl_type_expr(context), + Self::SInt(v) => v.make_hdl_type_expr(context), + Self::CanonicalType(v) => v.make_hdl_type_expr(context), + Self::DynSize(v) => v.make_hdl_type_expr(context), + } + } +} + +impl MakeHdlTypeExpr for known_items::CanonicalType { + fn make_hdl_type_expr(&self, _context: &MakeHdlTypeExprContext) -> Expr { + Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: self.path.clone(), + }) + } +} + +impl MakeHdlTypeExpr for known_items::DynSize { + fn make_hdl_type_expr(&self, _context: &MakeHdlTypeExprContext) -> Expr { + Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: self.path.clone(), + }) + } +} + +impl MakeHdlTypeExpr for ParsedExprDelimited { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + self.expr.make_hdl_type_expr(context) + } +} + +impl MakeHdlTypeExpr for ParsedExprNamedParamConst { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + context.named_param_values[self.param_index].clone() + } +} + +impl MakeHdlTypeExpr for ParsedExpr { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + match self { + ParsedExpr::Delimited(expr) => expr.make_hdl_type_expr(context), + ParsedExpr::NamedParamConst(expr) => expr.make_hdl_type_expr(context), + ParsedExpr::Other(expr) => { + let span = expr.span(); + let const_usize = known_items::ConstUsize(span); + parse_quote_spanned! 
{expr.span()=> + #const_usize::<{ #expr }> + } + } + } + } +} + +impl MakeHdlTypeExpr for ParsedTypeDelimited { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + self.elem.make_hdl_type_expr(context) + } +} + +impl MakeHdlTypeExpr for ParsedTypeNamed { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + let Self { path, args } = self; + let span = path + .segments + .last() + .map(|v| v.ident.span()) + .unwrap_or_else(Span::call_site); + if context.is_const { + return parse_quote_spanned! {span=> + ::fayalite::ty::StaticType::TYPE + }; + } + let mut retval = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: path.clone(), + }); + if let Some(ParsedGenericArguments { lt_token, args, .. }) = args { + for arg in args { + retval = Expr::Index(ExprIndex { + attrs: vec![], + expr: Box::new(retval), + bracket_token: Bracket(lt_token.span), + index: Box::new(arg.make_hdl_type_expr(context)), + }); + } + } + Expr::Call(syn::ExprCall { + attrs: vec![], + func: parse_quote_spanned! {span=> + ::fayalite::ty::FillInDefaultedGenerics::fill_in_defaulted_generics + }, + paren_token: Paren(span), + args: Punctuated::from_iter([retval]), + }) + } +} + +impl MakeHdlTypeExpr for ParsedGenericArgument { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + match self { + ParsedGenericArgument::Type(v) => v.make_hdl_type_expr(context), + ParsedGenericArgument::Const(v) => v.make_hdl_type_expr(context), + } + } +} + +impl MakeHdlTypeExpr for ParsedTypeNamedParam { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + match self { + Self::Type(v) => v.make_hdl_type_expr(context), + Self::SizeType(v) => v.make_hdl_type_expr(context), + } + } +} + +impl MakeHdlTypeExpr for ParsedTypeNamedParamType { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + context.named_param_values[self.param_index].clone() + } +} + +impl MakeHdlTypeExpr for ParsedTypeNamedParamSizeType { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + context.named_param_values[self.param_index].clone() + } +} + +impl MakeHdlTypeExpr for ParsedTypeTuple { + fn make_hdl_type_expr(&self, context: &MakeHdlTypeExprContext) -> Expr { + let Self { paren_token, elems } = self; + Expr::Tuple(ExprTuple { + attrs: vec![], + paren_token: *paren_token, + elems: elems + .pairs() + .map_pair_value_ref(|v| v.make_hdl_type_expr(context)) + .collect(), + }) + } +} diff --git a/crates/fayalite-proc-macros-impl/src/lib.rs b/crates/fayalite-proc-macros-impl/src/lib.rs index 920a27b..13336fa 100644 --- a/crates/fayalite-proc-macros-impl/src/lib.rs +++ b/crates/fayalite-proc-macros-impl/src/lib.rs @@ -2,26 +2,46 @@ // See Notices.txt for copyright information #![cfg_attr(test, recursion_limit = "512")] use proc_macro2::{Span, TokenStream}; -use quote::{quote, ToTokens}; -use std::io::{ErrorKind, Write}; +use quote::{ToTokens, quote}; +use std::{ + collections::{HashMap, hash_map::Entry}, + io::{ErrorKind, Write}, +}; use syn::{ - bracketed, parenthesized, + AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed, + ext::IdentExt, + parenthesized, parse::{Parse, ParseStream, Parser}, parse_quote, - punctuated::Pair, - AttrStyle, Attribute, Error, Item, Token, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::{Bracket, Paren}, }; mod fold; +mod hdl_bundle; +mod hdl_enum; +mod hdl_type_alias; +mod hdl_type_common; mod module; -mod value_derive_common; -mod 
value_derive_enum; -mod value_derive_struct; +mod process_cfg; + +pub(crate) trait CustomToken: + Copy + + Spanned + + ToTokens + + std::fmt::Debug + + Eq + + std::hash::Hash + + Default + + quote::IdentFragment + + Parse +{ + const IDENT_STR: &'static str; +} mod kw { - pub(crate) use syn::token::{ - Enum as enum_, Extern as extern_, Static as static_, Struct as struct_, Where as where_, - }; + pub(crate) use syn::token::Extern as extern_; macro_rules! custom_keyword { ($kw:ident) => { @@ -38,13 +58,27 @@ mod kw { } crate::fold::no_op_fold!($kw); + + impl crate::CustomToken for $kw { + const IDENT_STR: &'static str = stringify!($kw); + } }; } + custom_keyword!(__evaluated_cfgs); + custom_keyword!(add_platform_io); + custom_keyword!(all); + custom_keyword!(any); + custom_keyword!(cfg); + custom_keyword!(cfg_attr); custom_keyword!(clock_domain); + custom_keyword!(cmp_eq); custom_keyword!(connect_inexact); + custom_keyword!(custom_bounds); custom_keyword!(flip); custom_keyword!(hdl); + custom_keyword!(hdl_module); + custom_keyword!(incomplete_wire); custom_keyword!(input); custom_keyword!(instance); custom_keyword!(m); @@ -52,11 +86,14 @@ mod kw { custom_keyword!(memory_array); custom_keyword!(memory_with_init); custom_keyword!(no_reset); + custom_keyword!(no_runtime_generics); + custom_keyword!(no_static); + custom_keyword!(not); custom_keyword!(outline_generated); custom_keyword!(output); custom_keyword!(reg_builder); custom_keyword!(reset); - custom_keyword!(reset_default); + custom_keyword!(sim); custom_keyword!(skip); custom_keyword!(target); custom_keyword!(wire); @@ -65,34 +102,34 @@ mod kw { type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676 #[derive(Clone, Debug)] -pub(crate) struct HdlAttr { +pub(crate) struct HdlAttr { pub(crate) pound_token: Pound, pub(crate) style: AttrStyle, pub(crate) bracket_token: syn::token::Bracket, - pub(crate) hdl: kw::hdl, + pub(crate) kw: KW, pub(crate) paren_token: Option, pub(crate) body: T, } crate::fold::impl_fold! 
{ - struct HdlAttr { + struct HdlAttr { pound_token: Pound, style: AttrStyle, bracket_token: syn::token::Bracket, - hdl: kw::hdl, + kw: KW, paren_token: Option, body: T, } } #[allow(dead_code)] -impl HdlAttr { - pub(crate) fn split_body(self) -> (HdlAttr<()>, T) { +impl HdlAttr { + pub(crate) fn split_body(self) -> (HdlAttr<(), KW>, T) { let Self { pound_token, style, bracket_token, - hdl, + kw, paren_token, body, } = self; @@ -101,19 +138,19 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw, paren_token, body: (), }, body, ) } - pub(crate) fn replace_body(self, body: T2) -> HdlAttr { + pub(crate) fn replace_body(self, body: T2) -> HdlAttr { let Self { pound_token, style, bracket_token, - hdl, + kw, paren_token, body: _, } = self; @@ -121,17 +158,20 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw, paren_token, body, } } - pub(crate) fn as_ref(&self) -> HdlAttr<&T> { + pub(crate) fn as_ref(&self) -> HdlAttr<&T, KW> + where + KW: Clone, + { let Self { pound_token, style, bracket_token, - hdl, + ref kw, paren_token, ref body, } = *self; @@ -139,17 +179,20 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw: kw.clone(), paren_token, body, } } - pub(crate) fn try_map Result>(self, f: F) -> Result, E> { + pub(crate) fn try_map Result>( + self, + f: F, + ) -> Result, E> { let Self { pound_token, style, bracket_token, - hdl, + kw, paren_token, body, } = self; @@ -157,17 +200,17 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw, paren_token, body: f(body)?, }) } - pub(crate) fn map R>(self, f: F) -> HdlAttr { + pub(crate) fn map R>(self, f: F) -> HdlAttr { let Self { pound_token, style, bracket_token, - hdl, + kw, paren_token, body, } = self; @@ -175,7 +218,7 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw, paren_token, body: f(body), } @@ -183,31 +226,32 @@ impl HdlAttr { fn to_attr(&self) -> Attribute where T: ToTokens, + KW: ToTokens, { parse_quote! 
{ #self } } } -impl Default for HdlAttr { +impl Default for HdlAttr { fn default() -> Self { T::default().into() } } -impl From for HdlAttr { +impl From for HdlAttr { fn from(body: T) -> Self { HdlAttr { pound_token: Default::default(), style: AttrStyle::Outer, bracket_token: Default::default(), - hdl: Default::default(), + kw: Default::default(), paren_token: Default::default(), body, } } } -impl ToTokens for HdlAttr { +impl ToTokens for HdlAttr { fn to_tokens(&self, tokens: &mut TokenStream) { self.pound_token.to_tokens(tokens); match self.style { @@ -215,7 +259,7 @@ impl ToTokens for HdlAttr { AttrStyle::Outer => {} }; self.bracket_token.surround(tokens, |tokens| { - self.hdl.to_tokens(tokens); + self.kw.to_tokens(tokens); match self.paren_token { Some(paren_token) => { paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens)) @@ -223,7 +267,7 @@ impl ToTokens for HdlAttr { None => { let body = self.body.to_token_stream(); if !body.is_empty() { - syn::token::Paren(self.hdl.span) + syn::token::Paren(self.kw.span()) .surround(tokens, |tokens| tokens.extend([body])); } } @@ -232,18 +276,24 @@ impl ToTokens for HdlAttr { } } -fn is_hdl_attr(attr: &Attribute) -> bool { - attr.path().is_ident("hdl") +fn is_hdl_attr(attr: &Attribute) -> bool { + attr.path().is_ident(KW::IDENT_STR) } -impl HdlAttr { - fn parse_and_take_attr(attrs: &mut Vec) -> syn::Result> { +impl HdlAttr { + fn parse_and_take_attr(attrs: &mut Vec) -> syn::Result> + where + KW: ToTokens, + { let mut retval = None; let mut errors = Errors::new(); attrs.retain(|attr| { - if is_hdl_attr(attr) { + if let Ok(kw) = syn::parse2::(attr.path().to_token_stream()) { if retval.is_some() { - errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute")); + errors.push(Error::new_spanned( + attr, + format_args!("more than one #[{}] attribute", kw.to_token_stream()), + )); } errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v))); false @@ -254,13 +304,19 @@ impl HdlAttr { errors.finish()?; Ok(retval) } - fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result> { + fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result> + where + KW: ToTokens, + { let mut retval = None; let mut errors = Errors::new(); for attr in attrs { - if is_hdl_attr(attr) { + if let Ok(kw) = syn::parse2::(attr.path().to_token_stream()) { if retval.is_some() { - errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute")); + errors.push(Error::new_spanned( + attr, + format_args!("more than one #[{}] attribute", kw.to_token_stream()), + )); } errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v))); } @@ -281,7 +337,7 @@ impl HdlAttr { ) -> syn::Result { let bracket_content; let bracket_token = bracketed!(bracket_content in input); - let hdl = bracket_content.parse()?; + let kw = bracket_content.parse()?; let paren_content; let body; let paren_token; @@ -302,7 +358,7 @@ impl HdlAttr { pound_token, style, bracket_token, - hdl, + kw, paren_token, body, }) @@ -519,6 +575,26 @@ macro_rules! 
impl_extra_traits_for_options { ) => { impl Copy for $option_enum_name {} + impl PartialEq for $option_enum_name { + fn eq(&self, other: &Self) -> bool { + self.cmp(other).is_eq() + } + } + + impl Eq for $option_enum_name {} + + impl PartialOrd for $option_enum_name { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } + } + + impl Ord for $option_enum_name { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.variant().cmp(&other.variant()) + } + } + impl quote::IdentFragment for $option_enum_name { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let _ = f; @@ -554,6 +630,66 @@ pub(crate) use impl_extra_traits_for_options; macro_rules! options { ( #[options = $options_name:ident] + $($tt:tt)* + ) => { + crate::options! { + #[options = $options_name, punct = syn::Token![,], allow_duplicates = false] + $($tt)* + } + }; + ( + #[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = true] + $(#[$($enum_meta:tt)*])* + $enum_vis:vis enum $option_enum_name:ident { + $($Variant:ident($key:ident $(, $value:ty)?),)* + } + ) => { + crate::options! { + #[options = $options_name, punct = $Punct, allow_duplicates = (true)] + $(#[$($enum_meta)*])* + $enum_vis enum $option_enum_name { + $($Variant($key $(, $value)?),)* + } + } + + impl Extend<$option_enum_name> for $options_name { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|v| match v { + $($option_enum_name::$Variant(v) => { + self.$key = Some(v); + })* + }); + } + } + + impl FromIterator<$option_enum_name> for $options_name { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } + } + + impl Extend<$options_name> for $options_name { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|v| { + $(if let Some(v) = v.$key { + self.$key = Some(v); + })* + }); + } + } + + impl FromIterator<$options_name> for $options_name { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } + } + }; + ( + #[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = $allow_duplicates:expr] $(#[$($enum_meta:tt)*])* $enum_vis:vis enum $option_enum_name:ident { $($Variant:ident($key:ident $(, $value:ty)?),)* @@ -567,8 +703,11 @@ macro_rules! options { } #[derive(Clone, Debug, Default)] + #[allow(non_snake_case)] $enum_vis struct $options_name { - $($enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,)* + $( + $enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>, + )* } crate::fold::impl_fold! { @@ -577,6 +716,43 @@ macro_rules! options { } } + const _: () = { + #[derive(Clone, Debug)] + $enum_vis struct Iter($enum_vis $options_name); + + impl IntoIterator for $options_name { + type Item = $option_enum_name; + type IntoIter = Iter; + + fn into_iter(self) -> Self::IntoIter { + Iter(self) + } + } + + impl Iterator for Iter { + type Item = $option_enum_name; + + fn next(&mut self) -> Option { + $( + if let Some(value) = self.0.$key.take() { + return Some($option_enum_name::$Variant(value)); + } + )* + None + } + + #[allow(unused_mut, unused_variables)] + fn fold B>(mut self, mut init: B, mut f: F) -> B { + $( + if let Some(value) = self.0.$key.take() { + init = f(init, $option_enum_name::$Variant(value)); + } + )* + init + } + } + }; + impl syn::parse::Parse for $options_name { fn parse(input: syn::parse::ParseStream) -> syn::Result { #![allow(unused_mut, unused_variables, unreachable_code)] @@ -585,7 +761,7 @@ macro_rules! 
options { let old_input = input.fork(); match input.parse::<$option_enum_name>()? { $($option_enum_name::$Variant(v) => { - if retval.$key.replace(v).is_some() { + if retval.$key.replace(v).is_some() && !$allow_duplicates { return Err(old_input.error(concat!("duplicate ", stringify!($key), " option"))); } })* @@ -593,7 +769,7 @@ macro_rules! options { if input.is_empty() { break; } - input.parse::()?; + input.parse::<$Punct>()?; } Ok(retval) } @@ -602,7 +778,7 @@ macro_rules! options { impl quote::ToTokens for $options_name { #[allow(unused_mut, unused_variables, unused_assignments)] fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - let mut separator: Option = None; + let mut separator: Option<$Punct> = None; $(if let Some(v) = &self.$key { separator.to_tokens(tokens); separator = Some(Default::default()); @@ -673,9 +849,24 @@ macro_rules! options { } } } + + impl $option_enum_name { + #[allow(dead_code)] + fn variant(&self) -> usize { + #[repr(usize)] + enum Variant { + $($Variant,)* + __Last, // so it doesn't complain about zero-variant enums + } + match *self { + $(Self::$Variant(..) => Variant::$Variant as usize,)* + } + } + } }; } +use crate::hdl_type_alias::hdl_type_alias_impl; pub(crate) use options; pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream { @@ -686,6 +877,15 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr .suffix(".tmp.rs") .tempfile_in(out_dir) .unwrap(); + struct PrintOnPanic<'a>(&'a TokenStream); + impl Drop for PrintOnPanic<'_> { + fn drop(&mut self) { + if std::thread::panicking() { + println!("{}", self.0); + } + } + } + let _print_on_panic = PrintOnPanic(&contents); let contents = prettyplease::unparse(&parse_quote! { #contents }); let hash = ::digest(&contents); let hash = base16ct::HexDisplay(&hash[..5]); @@ -706,25 +906,372 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr } } -pub fn module(attr: TokenStream, item: TokenStream) -> syn::Result { - let options = syn::parse2::(attr)?; - let options = HdlAttr::from(options); - let func = syn::parse2::(quote! 
{ #options #item })?; +fn hdl_module_impl(item: ItemFn) -> syn::Result { + let func = module::ModuleFn::parse_from_fn(item)?; + let options = func.config_options(); let mut contents = func.generate(); - if options.body.outline_generated.is_some() { + if options.outline_generated.is_some() { contents = outline_generated(contents, "module-"); } Ok(contents) } -pub fn value_derive(item: TokenStream) -> syn::Result { - let item = syn::parse2::(item)?; +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) enum CfgExpr { + Option { + ident: Ident, + value: Option<(Token![=], LitStr)>, + }, + All { + all: kw::all, + paren: Paren, + exprs: Punctuated, + }, + Any { + any: kw::any, + paren: Paren, + exprs: Punctuated, + }, + Not { + not: kw::not, + paren: Paren, + expr: Box, + trailing_comma: Option, + }, +} + +impl Parse for CfgExpr { + fn parse(input: ParseStream) -> syn::Result { + match input.cursor().ident() { + Some((_, cursor)) if cursor.eof() => { + return Ok(CfgExpr::Option { + ident: input.call(Ident::parse_any)?, + value: None, + }); + } + _ => {} + } + if input.peek(Ident::peek_any) && input.peek2(Token![=]) { + return Ok(CfgExpr::Option { + ident: input.call(Ident::parse_any)?, + value: Some((input.parse()?, input.parse()?)), + }); + } + let contents; + if input.peek(kw::all) { + Ok(CfgExpr::All { + all: input.parse()?, + paren: parenthesized!(contents in input), + exprs: contents.call(Punctuated::parse_terminated)?, + }) + } else if input.peek(kw::any) { + Ok(CfgExpr::Any { + any: input.parse()?, + paren: parenthesized!(contents in input), + exprs: contents.call(Punctuated::parse_terminated)?, + }) + } else if input.peek(kw::not) { + Ok(CfgExpr::Not { + not: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + trailing_comma: contents.parse()?, + }) + } else { + Err(input.error("expected cfg-pattern")) + } + } +} + +impl ToTokens for CfgExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + CfgExpr::Option { ident, value } => { + ident.to_tokens(tokens); + if let Some((eq, value)) = value { + eq.to_tokens(tokens); + value.to_tokens(tokens); + } + } + CfgExpr::All { all, paren, exprs } => { + all.to_tokens(tokens); + paren.surround(tokens, |tokens| exprs.to_tokens(tokens)); + } + CfgExpr::Any { any, paren, exprs } => { + any.to_tokens(tokens); + paren.surround(tokens, |tokens| exprs.to_tokens(tokens)); + } + CfgExpr::Not { + not, + paren, + expr, + trailing_comma, + } => { + not.to_tokens(tokens); + paren.surround(tokens, |tokens| { + expr.to_tokens(tokens); + trailing_comma.to_tokens(tokens); + }); + } + } + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct Cfg { + cfg: kw::cfg, + paren: Paren, + expr: CfgExpr, + trailing_comma: Option, +} + +impl Cfg { + fn parse_meta(meta: &Meta) -> syn::Result { + syn::parse2(meta.to_token_stream()) + } +} + +impl ToTokens for Cfg { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + cfg, + paren, + expr, + trailing_comma, + } = self; + cfg.to_tokens(tokens); + paren.surround(tokens, |tokens| { + expr.to_tokens(tokens); + trailing_comma.to_tokens(tokens); + }); + } +} + +impl Parse for Cfg { + fn parse(input: ParseStream) -> syn::Result { + let contents; + Ok(Self { + cfg: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + trailing_comma: contents.parse()?, + }) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct CfgAttr { + cfg_attr: kw::cfg_attr, + paren: Paren, + expr: CfgExpr, + comma: 
Token![,], + attrs: Punctuated, +} + +impl CfgAttr { + pub(crate) fn to_cfg(&self) -> Cfg { + Cfg { + cfg: kw::cfg(self.cfg_attr.span), + paren: self.paren, + expr: self.expr.clone(), + trailing_comma: None, + } + } + fn parse_meta(meta: &Meta) -> syn::Result { + syn::parse2(meta.to_token_stream()) + } +} + +impl Parse for CfgAttr { + fn parse(input: ParseStream) -> syn::Result { + let contents; + Ok(Self { + cfg_attr: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + comma: contents.parse()?, + attrs: contents.call(Punctuated::parse_terminated)?, + }) + } +} + +pub(crate) struct CfgAndValue { + cfg: Cfg, + eq_token: Token![=], + value: LitBool, +} + +impl Parse for CfgAndValue { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + cfg: input.parse()?, + eq_token: input.parse()?, + value: input.parse()?, + }) + } +} + +pub(crate) struct Cfgs { + pub(crate) bracket: Bracket, + pub(crate) cfgs_map: HashMap, + pub(crate) cfgs_list: Vec, +} + +impl Default for Cfgs { + fn default() -> Self { + Self { + bracket: Default::default(), + cfgs_map: Default::default(), + cfgs_list: Default::default(), + } + } +} + +impl Cfgs { + fn insert_cfg(&mut self, cfg: Cfg, value: T) { + match self.cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + self.cfgs_list.push(entry.key().clone()); + entry.insert(value); + } + } + } +} + +impl Parse for Cfgs { + fn parse(input: ParseStream) -> syn::Result { + let contents; + let bracket = bracketed!(contents in input); + let mut cfgs_map = HashMap::new(); + let mut cfgs_list = Vec::new(); + for CfgAndValue { + cfg, + eq_token, + value, + } in contents.call(Punctuated::::parse_terminated)? + { + let _ = eq_token; + match cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + cfgs_list.push(entry.key().clone()); + entry.insert(value.value); + } + } + } + Ok(Self { + bracket, + cfgs_map, + cfgs_list, + }) + } +} + +impl Parse for Cfgs<()> { + fn parse(input: ParseStream) -> syn::Result { + let contents; + let bracket = bracketed!(contents in input); + let mut cfgs_map = HashMap::new(); + let mut cfgs_list = Vec::new(); + for cfg in contents.call(Punctuated::::parse_terminated)? 
{ + match cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + cfgs_list.push(entry.key().clone()); + entry.insert(()); + } + } + } + Ok(Self { + bracket, + cfgs_map, + cfgs_list, + }) + } +} + +impl ToTokens for Cfgs<()> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + bracket, + cfgs_map: _, + cfgs_list, + } = self; + bracket.surround(tokens, |tokens| { + for cfg in cfgs_list { + cfg.to_tokens(tokens); + ::default().to_tokens(tokens); + } + }); + } +} + +fn hdl_main( + kw: impl CustomToken, + attr: TokenStream, + item: TokenStream, +) -> syn::Result { + fn parse_evaluated_cfgs_attr( + input: ParseStream, + parse_inner: impl FnOnce(ParseStream) -> syn::Result, + ) -> syn::Result { + let _: Token![#] = input.parse()?; + let bracket_content; + bracketed!(bracket_content in input); + let _: kw::__evaluated_cfgs = bracket_content.parse()?; + let paren_content; + parenthesized!(paren_content in bracket_content); + parse_inner(&paren_content) + } + let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2( + |input: ParseStream| { + let peek = input.fork(); + if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() { + let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::::parse)?; + Ok((Some(evaluated_cfgs), input.parse()?)) + } else { + Ok((None, input.parse()?)) + } + }, + item, + )?; + let cfgs = if let Some(cfgs) = evaluated_cfgs { + cfgs + } else { + let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?; + if cfgs.cfgs_list.is_empty() { + Cfgs::default() + } else { + return Ok(quote! { + ::fayalite::__cfg_expansion_helper! { + [] + #cfgs + {#[::fayalite::#kw(#attr)]} { #item } + } + }); + } + }; + let item = syn::parse2(quote! { #[#kw(#attr)] #item })?; + let Some(item) = process_cfg::process_cfgs(item, cfgs)? 
else { + return Ok(TokenStream::new()); + }; match item { - Item::Enum(item) => value_derive_enum::value_derive_enum(item), - Item::Struct(item) => value_derive_struct::value_derive_struct(item), + Item::Enum(item) => hdl_enum::hdl_enum(item), + Item::Struct(item) => hdl_bundle::hdl_bundle(item), + Item::Fn(item) => hdl_module_impl(item), + Item::Type(item) => hdl_type_alias_impl(item), _ => Err(syn::Error::new( Span::call_site(), - "derive(Value) can only be used on structs or enums", + "top-level #[hdl] can only be used on structs, enums, type aliases, or functions", )), } } + +pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result { + hdl_main(kw::hdl_module::default(), attr, item) +} + +pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result { + hdl_main(kw::hdl::default(), attr, item) +} diff --git a/crates/fayalite-proc-macros-impl/src/module.rs b/crates/fayalite-proc-macros-impl/src/module.rs index ff5c1e5..5628ff9 100644 --- a/crates/fayalite-proc-macros-impl/src/module.rs +++ b/crates/fayalite-proc-macros-impl/src/module.rs @@ -1,19 +1,20 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - is_hdl_attr, - module::transform_body::{HdlLet, HdlLetKindIO}, - options, Errors, HdlAttr, PairsIterExt, + Errors, HdlAttr, PairsIterExt, + hdl_type_common::{ParsedGenerics, SplitForImpl}, + kw, + module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO}, + options, }; use proc_macro2::TokenStream; -use quote::{format_ident, quote, quote_spanned, ToTokens}; +use quote::{ToTokens, format_ident, quote, quote_spanned}; use std::collections::HashSet; use syn::{ - parse::{Parse, ParseStream}, - parse_quote, - visit::{visit_pat, Visit}, Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct, LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate, + parse_quote, + visit::{Visit, visit_pat}, }; mod transform_body; @@ -38,7 +39,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res if name == "m" { Err(Error::new_spanned( name, - "name conflicts with implicit `m: &mut ModuleBuilder<_>`", + "name conflicts with implicit `m: &ModuleBuilder`", )) } else { Ok(()) @@ -57,26 +58,39 @@ impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> { } } -fn retain_struct_attrs bool>(item: &mut ItemStruct, mut f: F) { - item.attrs.retain(&mut f); - for field in item.fields.iter_mut() { - field.attrs.retain(&mut f); - } -} - pub(crate) type ModuleIO = HdlLet; -pub(crate) struct ModuleFn { +struct ModuleFnModule { attrs: Vec, - config_options: HdlAttr, + config_options: HdlAttr, module_kind: ModuleKind, vis: Visibility, sig: Signature, block: Box, - io: Vec, - struct_generics: Generics, + struct_generics: Option, + the_struct: TokenStream, } +enum ModuleFnImpl { + Module(ModuleFnModule), + Fn { + attrs: Vec, + config_options: HdlAttr, + vis: Visibility, + sig: Signature, + block: Box, + }, +} + +options! 
{ + pub(crate) enum HdlOrHdlModule { + Hdl(hdl), + HdlModule(hdl_module), + } +} + +pub(crate) struct ModuleFn(ModuleFnImpl); + #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] pub(crate) enum ModuleKind { Extern, @@ -96,14 +110,25 @@ impl Visit<'_> for ContainsSkippedIdent<'_> { } } -impl Parse for ModuleFn { - fn parse(input: ParseStream) -> syn::Result { +impl ModuleFn { + pub(crate) fn config_options(&self) -> ConfigOptions { + let (ModuleFnImpl::Module(ModuleFnModule { + config_options: HdlAttr { body, .. }, + .. + }) + | ModuleFnImpl::Fn { + config_options: HdlAttr { body, .. }, + .. + }) = &self.0; + body.clone() + } + pub(crate) fn parse_from_fn(item: ItemFn) -> syn::Result { let ItemFn { mut attrs, vis, mut sig, block, - } = input.parse()?; + } = item; let Signature { ref constness, ref asyncness, @@ -118,43 +143,60 @@ impl Parse for ModuleFn { ref output, } = sig; let mut errors = Errors::new(); - let config_options = errors - .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs)) - .unwrap_or_default(); + let Some(mut config_options) = + errors.unwrap_or_default( + HdlAttr::::parse_and_take_attr(&mut attrs), + ) + else { + errors.error(sig.ident, "missing #[hdl] or #[hdl_module] attribute"); + errors.finish()?; + unreachable!(); + }; let ConfigOptions { outline_generated: _, extern_, } = config_options.body; - let module_kind = match extern_ { - Some(_) => ModuleKind::Extern, - None => ModuleKind::Normal, + let module_kind = match (config_options.kw, extern_) { + (HdlOrHdlModule::Hdl(_), None) => None, + (HdlOrHdlModule::Hdl(_), Some(extern2)) => { + config_options.body.extern_ = None; + errors.error( + extern2.0, + "extern can only be used as #[hdl_module(extern)]", + ); + None + } + (HdlOrHdlModule::HdlModule(_), None) => Some(ModuleKind::Normal), + (HdlOrHdlModule::HdlModule(_), Some(_)) => Some(ModuleKind::Extern), }; - for fn_arg in inputs { - match fn_arg { - FnArg::Receiver(_) => { - errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here")); - } - FnArg::Typed(fn_arg) => { - visit_pat( - &mut CheckNameConflictsWithModuleBuilderVisitor { - errors: &mut errors, - }, - &fn_arg.pat, - ); + if let HdlOrHdlModule::HdlModule(_) = config_options.kw { + for fn_arg in inputs { + match fn_arg { + FnArg::Receiver(_) => { + errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here")); + } + FnArg::Typed(fn_arg) => { + visit_pat( + &mut CheckNameConflictsWithModuleBuilderVisitor { + errors: &mut errors, + }, + &fn_arg.pat, + ); + } } } - } - if let Some(constness) = constness { - errors.push(syn::Error::new_spanned(constness, "const not allowed here")); - } - if let Some(asyncness) = asyncness { - errors.push(syn::Error::new_spanned(asyncness, "async not allowed here")); - } - if let Some(unsafety) = unsafety { - errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here")); - } - if let Some(abi) = abi { - errors.push(syn::Error::new_spanned(abi, "extern not allowed here")); + if let Some(constness) = constness { + errors.push(syn::Error::new_spanned(constness, "const not allowed here")); + } + if let Some(asyncness) = asyncness { + errors.push(syn::Error::new_spanned(asyncness, "async not allowed here")); + } + if let Some(unsafety) = unsafety { + errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here")); + } + if let Some(abi) = abi { + errors.push(syn::Error::new_spanned(abi, "extern not allowed here")); + } } let mut skipped_idents = HashSet::new(); let struct_generic_params = generics @@ -162,14 +204,17 @@ impl Parse for 
ModuleFn { .pairs_mut() .filter_map_pair_value_mut(|v| match v { GenericParam::Lifetime(LifetimeParam { attrs, .. }) => { - errors - .unwrap_or_default(HdlAttr::::parse_and_take_attr(attrs)); + errors.unwrap_or_default( + HdlAttr::::parse_and_take_attr(attrs), + ); None } GenericParam::Type(TypeParam { attrs, ident, .. }) | GenericParam::Const(ConstParam { attrs, ident, .. }) => { if errors - .unwrap_or_default(HdlAttr::::parse_and_take_attr(attrs)) + .unwrap_or_default( + HdlAttr::::parse_and_take_attr(attrs), + ) .is_some() { skipped_idents.insert(ident.clone()); @@ -183,6 +228,7 @@ impl Parse for ModuleFn { let struct_where_clause = generics .where_clause .as_mut() + .filter(|_| matches!(config_options.kw, HdlOrHdlModule::HdlModule(_))) .map(|where_clause| WhereClause { where_token: where_clause.where_token, predicates: where_clause @@ -205,49 +251,173 @@ impl Parse for ModuleFn { }) .collect(), }); - let struct_generics = Generics { - lt_token: generics.lt_token, - params: struct_generic_params, - gt_token: generics.gt_token, - where_clause: struct_where_clause, + let struct_generics = if let HdlOrHdlModule::HdlModule(_) = config_options.kw { + let mut struct_generics = Generics { + lt_token: generics.lt_token, + params: struct_generic_params, + gt_token: generics.gt_token, + where_clause: struct_where_clause, + }; + if let Some(variadic) = variadic { + errors.push(syn::Error::new_spanned(variadic, "... not allowed here")); + } + if !matches!(output, ReturnType::Default) { + errors.push(syn::Error::new_spanned( + output, + "return type not allowed here", + )); + } + errors.ok(ParsedGenerics::parse(&mut struct_generics)) + } else { + Some(ParsedGenerics::default()) }; - if let Some(variadic) = variadic { - errors.push(syn::Error::new_spanned(variadic, "... not allowed here")); - } - if !matches!(output, ReturnType::Default) { - errors.push(syn::Error::new_spanned( - output, - "return type not allowed here", - )); - } - let body_results = errors.ok(transform_body::transform_body(module_kind, block)); + let body_results = struct_generics.as_ref().and_then(|struct_generics| { + errors.ok(transform_body::transform_body( + module_kind, + block, + struct_generics, + )) + }); errors.finish()?; + let struct_generics = struct_generics.unwrap(); let (block, io) = body_results.unwrap(); - Ok(Self { + let config_options = match config_options { + HdlAttr { + pound_token, + style, + bracket_token, + kw: HdlOrHdlModule::Hdl((kw,)), + paren_token, + body, + } => { + debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty())); + return Ok(Self(ModuleFnImpl::Fn { + attrs, + config_options: HdlAttr { + pound_token, + style, + bracket_token, + kw, + paren_token, + body, + }, + vis, + sig, + block, + })); + } + HdlAttr { + pound_token, + style, + bracket_token, + kw: HdlOrHdlModule::HdlModule((kw,)), + paren_token, + body, + } => HdlAttr { + pound_token, + style, + bracket_token, + kw, + paren_token, + body, + }, + }; + let io = match io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => io, + ModuleIOOrAddPlatformIO::AddPlatformIO => { + return Ok(Self(ModuleFnImpl::Module(ModuleFnModule { + attrs, + config_options, + module_kind: module_kind.unwrap(), + vis, + sig, + block, + struct_generics: None, + the_struct: TokenStream::new(), + }))); + } + }; + let (_struct_impl_generics, _struct_type_generics, struct_where_clause) = + struct_generics.split_for_impl(); + let struct_where_clause: Option = parse_quote! 
{ #struct_where_clause }; + if let Some(struct_where_clause) = &struct_where_clause { + sig.generics + .where_clause + .get_or_insert_with(|| WhereClause { + where_token: struct_where_clause.where_token, + predicates: Default::default(), + }) + .predicates + .extend(struct_where_clause.predicates.clone()); + } + let fn_name = &sig.ident; + let io_flips = io + .iter() + .map(|io| match io.kind.kind { + ModuleIOKind::Input((input,)) => quote_spanned! {input.span=> + #[hdl(flip)] + }, + ModuleIOKind::Output(_) => quote! {}, + }) + .collect::>(); + let io_types = io.iter().map(|io| &io.kind.ty).collect::>(); + let io_names = io.iter().map(|io| &io.name).collect::>(); + let the_struct: ItemStruct = parse_quote! { + #[allow(non_camel_case_types)] + #[hdl(no_runtime_generics, no_static)] + #vis struct #fn_name #struct_generics #struct_where_clause { + #( + #io_flips + #vis #io_names: #io_types,)* + } + }; + let the_struct = crate::hdl_bundle::hdl_bundle(the_struct)?; + Ok(Self(ModuleFnImpl::Module(ModuleFnModule { attrs, config_options, - module_kind, + module_kind: module_kind.unwrap(), vis, sig, block, - io, - struct_generics, - }) + struct_generics: Some(struct_generics), + the_struct, + }))) } } impl ModuleFn { pub(crate) fn generate(self) -> TokenStream { - let Self { + let ModuleFnModule { attrs, config_options, module_kind, vis, sig, - block, - io, + mut block, struct_generics, - } = self; + the_struct, + } = match self.0 { + ModuleFnImpl::Module(v) => v, + ModuleFnImpl::Fn { + attrs, + config_options, + vis, + sig, + block, + } => { + let ConfigOptions { + outline_generated: _, + extern_: _, + } = config_options.body; + return ItemFn { + attrs, + vis, + sig, + block, + } + .into_token_stream(); + } + }; let ConfigOptions { outline_generated: _, extern_: _, @@ -273,18 +443,28 @@ impl ModuleFn { }); name })); - let module_kind_ty = match module_kind { - ModuleKind::Extern => quote! { ::fayalite::module::ExternModule }, - ModuleKind::Normal => quote! { ::fayalite::module::NormalModule }, + let module_kind_value = match module_kind { + ModuleKind::Extern => quote! { ::fayalite::module::ModuleKind::Extern }, + ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal }, }; let fn_name = &outer_sig.ident; - let (_struct_impl_generics, struct_type_generics, struct_where_clause) = - struct_generics.split_for_impl(); - let struct_ty = quote! {#fn_name #struct_type_generics}; + let struct_ty = match struct_generics { + Some(struct_generics) => { + let (_struct_impl_generics, struct_type_generics, _struct_where_clause) = + struct_generics.split_for_impl(); + quote! {#fn_name #struct_type_generics} + } + None => quote! {::fayalite::bundle::Bundle}, + }; body_sig.ident = parse_quote! {__body}; - body_sig.inputs.insert( + body_sig + .inputs + .insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder }); + block.stmts.insert( 0, - parse_quote! {m: &mut ::fayalite::module::ModuleBuilder<#struct_ty, #module_kind_ty>}, + parse_quote! { + let _ = m; + }, ); let body_fn = ItemFn { attrs: vec![], @@ -294,50 +474,26 @@ impl ModuleFn { }; outer_sig.output = parse_quote! {-> ::fayalite::intern::Interned<::fayalite::module::Module<#struct_ty>>}; - let io_flips = io - .iter() - .map(|io| match io.kind.kind { - ModuleIOKind::Input((input,)) => quote_spanned! {input.span=> - #[hdl(flip)] - }, - ModuleIOKind::Output(_) => quote! 
{}, - }) - .collect::>(); - let io_types = io.iter().map(|io| &io.kind.ty).collect::>(); - let io_names = io.iter().map(|io| &io.name).collect::>(); let fn_name_str = fn_name.to_string(); let (_, body_type_generics, _) = body_fn.sig.generics.split_for_impl(); let body_turbofish_type_generics = body_type_generics.as_turbofish(); - let block = parse_quote! {{ - #body_fn - ::fayalite::module::ModuleBuilder::run(#fn_name_str, |m| __body #body_turbofish_type_generics(m, #(#param_names,)*)) - }}; - let static_type = io.iter().all(|io| io.kind.ty_expr.is_none()); - let struct_options = if static_type { - quote! { #[hdl(static)] } + let body_lambda = if param_names.is_empty() { + quote! { + __body #body_turbofish_type_generics + } } else { - quote! {} - }; - let the_struct: ItemStruct = parse_quote! { - #[derive(::fayalite::__std::clone::Clone, - ::fayalite::__std::hash::Hash, - ::fayalite::__std::cmp::PartialEq, - ::fayalite::__std::cmp::Eq, - ::fayalite::__std::fmt::Debug)] - #[allow(non_camel_case_types)] - #struct_options - #vis struct #fn_name #struct_generics #struct_where_clause { - #( - #io_flips - #vis #io_names: #io_types,)* + quote! { + |m| __body #body_turbofish_type_generics(m, #(#param_names,)*) } }; - let mut struct_without_hdl_attrs = the_struct.clone(); - let mut struct_without_derives = the_struct; - retain_struct_attrs(&mut struct_without_hdl_attrs, |attr| !is_hdl_attr(attr)); - retain_struct_attrs(&mut struct_without_derives, |attr| { - !attr.path().is_ident("derive") - }); + let block = parse_quote! {{ + #body_fn + ::fayalite::module::ModuleBuilder::run( + #fn_name_str, + #module_kind_value, + #body_lambda, + ) + }}; let outer_fn = ItemFn { attrs, vis, @@ -345,10 +501,7 @@ impl ModuleFn { block, }; let mut retval = outer_fn.into_token_stream(); - struct_without_hdl_attrs.to_tokens(&mut retval); - retval.extend( - crate::value_derive_struct::value_derive_struct(struct_without_derives).unwrap(), - ); + retval.extend(the_struct); retval } } diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body.rs index d2b3e77..7b41f5e 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body.rs @@ -1,36 +1,49 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - fold::{impl_fold, DoFold}, + Errors, HdlAttr, + fold::{DoFold, impl_fold}, + hdl_type_common::{ + ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, known_items, + }, is_hdl_attr, kw, - module::{check_name_conflicts_with_module_builder, ModuleIO, ModuleIOKind, ModuleKind}, - options, Errors, HdlAttr, + module::{ModuleIO, ModuleIOKind, ModuleKind, check_name_conflicts_with_module_builder}, + options, }; -use num_bigint::{BigInt, Sign}; +use num_bigint::BigInt; use proc_macro2::{Span, TokenStream}; -use quote::{quote, quote_spanned, ToTokens}; +use quote::{ToTokens, quote, quote_spanned}; use std::{borrow::Borrow, convert::Infallible}; use syn::{ - fold::{fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt, Fold}, + Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary, + GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp, + fold::{Fold, fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt}, parenthesized, - parse::{Nothing, Parse, ParseStream}, + parse::{Parse, ParseStream}, parse_quote, parse_quote_spanned, spanned::Spanned, 
token::Paren, - Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary, - GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp, }; mod expand_aggregate_literals; mod expand_match; +options! { + #[options = ExprOptions] + pub(crate) enum ExprOption { + Sim(sim), + } +} + options! { pub(crate) enum LetFnKind { Input(input), Output(output), + AddPlatformIO(add_platform_io), Instance(instance), RegBuilder(reg_builder), Wire(wire), + IncompleteWire(incomplete_wire), Memory(memory), MemoryArray(memory_array), MemoryWithInit(memory_with_init), @@ -87,9 +100,9 @@ macro_rules! with_debug_clone_and_fold { pub(crate) use with_debug_clone_and_fold; with_debug_clone_and_fold! { - pub(crate) struct HdlLetKindIO { + pub(crate) struct HdlLetKindIO { pub(crate) colon_token: Token![:], - pub(crate) ty: Box, + pub(crate) ty: Box, pub(crate) m: kw::m, pub(crate) dot_token: Token![.], pub(crate) kind: Kind, @@ -98,6 +111,32 @@ with_debug_clone_and_fold! { } } +impl, I> ParseTypes> for HdlLetKindIO { + fn parse_types( + input: &mut HdlLetKindIO, + parser: &mut TypesParser<'_>, + ) -> Result { + let HdlLetKindIO { + colon_token, + ty, + m, + dot_token, + kind, + paren, + ty_expr, + } = input; + Ok(Self { + colon_token: *colon_token, + ty: ParseTypes::parse_types(ty, parser)?, + m: *m, + dot_token: *dot_token, + kind: kind.clone(), + paren: *paren, + ty_expr: ty_expr.clone(), + }) + } +} + pub(crate) fn parse_single_fn_arg(input: ParseStream) -> syn::Result> { let retval = input.parse()?; let _: Option = input.parse()?; @@ -145,17 +184,19 @@ impl HdlLetKindToTokens for HdlLetKindIO { #[derive(Clone, Debug)] pub(crate) struct HdlLetKindInstance { - pub(crate) m: kw::m, - pub(crate) dot_token: Token![.], pub(crate) instance: kw::instance, pub(crate) paren: Paren, pub(crate) module: Box, } +impl ParseTypes for HdlLetKindInstance { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + impl_fold! { struct HdlLetKindInstance<> { - m: kw::m, - dot_token: Token![.], instance: kw::instance, paren: Paren, module: Box, @@ -167,19 +208,58 @@ impl HdlLetKindToTokens for HdlLetKindInstance { fn expr_to_tokens(&self, tokens: &mut TokenStream) { let Self { - m, - dot_token, instance, paren, module, } = self; - m.to_tokens(tokens); - dot_token.to_tokens(tokens); instance.to_tokens(tokens); paren.surround(tokens, |tokens| module.to_tokens(tokens)); } } +#[derive(Clone, Debug)] +pub(crate) struct HdlLetKindAddPlatformIO { + pub(crate) m: kw::m, + pub(crate) dot_token: Token![.], + pub(crate) add_platform_io: kw::add_platform_io, + pub(crate) paren: Paren, + pub(crate) platform_io_builder: Box, +} + +impl ParseTypes for HdlLetKindAddPlatformIO { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + +impl_fold! 
{ + struct HdlLetKindAddPlatformIO<> { + m: kw::m, + dot_token: Token![.], + add_platform_io: kw::add_platform_io, + paren: Paren, + platform_io_builder: Box, + } +} + +impl HdlLetKindToTokens for HdlLetKindAddPlatformIO { + fn ty_to_tokens(&self, _tokens: &mut TokenStream) {} + + fn expr_to_tokens(&self, tokens: &mut TokenStream) { + let Self { + m, + dot_token, + add_platform_io, + paren, + platform_io_builder, + } = self; + m.to_tokens(tokens); + dot_token.to_tokens(tokens); + add_platform_io.to_tokens(tokens); + paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens)); + } +} + #[derive(Clone, Debug)] pub(crate) struct RegBuilderClockDomain { pub(crate) dot_token: Token![.], @@ -237,11 +317,6 @@ pub(crate) enum RegBuilderReset { paren: Paren, init_expr: Box, }, - ResetDefault { - dot_token: Token![.], - reset_default: kw::reset_default, - paren: Paren, - }, } impl_fold! { @@ -258,11 +333,6 @@ impl_fold! { paren: Paren, init_expr: Box, }, - ResetDefault { - dot_token: Token![.], - reset_default: kw::reset_default, - paren: Paren, - }, } } @@ -284,11 +354,6 @@ impl Parse for RegBuilderReset { paren: parenthesized!(paren_contents in input), init_expr: paren_contents.call(parse_single_fn_arg)?, }), - RegBuilderMethod::ResetDefault(reset_default) => Ok(Self::ResetDefault { - dot_token, - reset_default, - paren: parenthesized!(paren_contents in input), - }), } } } @@ -316,15 +381,6 @@ impl ToTokens for RegBuilderReset { reset.to_tokens(tokens); paren.surround(tokens, |tokens| init_expr.to_tokens(tokens)); } - RegBuilderReset::ResetDefault { - dot_token, - reset_default, - paren, - } => { - dot_token.to_tokens(tokens); - reset_default.to_tokens(tokens); - paren.surround(tokens, |_| {}); - } } } } @@ -373,27 +429,27 @@ make_builder_method_enum! { NoReset(no_reset), #[cond = need_reset] Reset(reset), - #[cond = need_reset] - ResetDefault(reset_default), } } #[derive(Clone, Debug)] pub(crate) struct HdlLetKindRegBuilder { pub(crate) ty: Option<(Token![:], Box)>, - pub(crate) m: kw::m, - pub(crate) dot_token: Token![.], pub(crate) reg_builder: kw::reg_builder, pub(crate) reg_builder_paren: Paren, pub(crate) clock_domain: Option, pub(crate) reset: RegBuilderReset, } +impl ParseTypes for HdlLetKindRegBuilder { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + impl_fold! 
{ struct HdlLetKindRegBuilder<> { ty: Option<(Token![:], Box)>, - m: kw::m, - dot_token: Token![.], reg_builder: kw::reg_builder, reg_builder_paren: Paren, clock_domain: Option, @@ -406,32 +462,26 @@ impl HdlLetKindRegBuilder { input: ParseStream, parsed_ty: Option<(Token![:], Box)>, _after_ty: Token![=], - m: kw::m, - dot_token: Token![.], + m_dot: Option<(kw::m, Token![.])>, reg_builder: kw::reg_builder, ) -> syn::Result { + check_empty_m_dot(m_dot, reg_builder)?; let _reg_builder_paren_inner; let reg_builder_paren = parenthesized!(_reg_builder_paren_inner in input); let mut clock_domain = None; match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, true)?.1 { RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?), - RegBuilderMethod::NoReset(_) - | RegBuilderMethod::Reset(_) - | RegBuilderMethod::ResetDefault(_) => {} + RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => {} } let reset = input.parse()?; if clock_domain.is_none() { match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, false)?.1 { RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?), - RegBuilderMethod::NoReset(_) - | RegBuilderMethod::Reset(_) - | RegBuilderMethod::ResetDefault(_) => unreachable!(), + RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => unreachable!(), } } Ok(Self { ty: parsed_ty, - m, - dot_token, reg_builder, reg_builder_paren, clock_domain, @@ -451,15 +501,11 @@ impl HdlLetKindToTokens for HdlLetKindRegBuilder { fn expr_to_tokens(&self, tokens: &mut TokenStream) { let Self { ty: _, - m, - dot_token, reg_builder, reg_builder_paren, clock_domain, reset, } = self; - m.to_tokens(tokens); - dot_token.to_tokens(tokens); reg_builder.to_tokens(tokens); reg_builder_paren.surround(tokens, |_tokens| {}); clock_domain.to_tokens(tokens); @@ -470,18 +516,20 @@ impl HdlLetKindToTokens for HdlLetKindRegBuilder { #[derive(Clone, Debug)] pub(crate) struct HdlLetKindWire { pub(crate) ty: Option<(Token![:], Box)>, - pub(crate) m: kw::m, - pub(crate) dot_token: Token![.], pub(crate) wire: kw::wire, pub(crate) paren: Paren, pub(crate) ty_expr: Option>, } +impl ParseTypes for HdlLetKindWire { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + impl_fold! { struct HdlLetKindWire<> { ty: Option<(Token![:], Box)>, - m: kw::m, - dot_token: Token![.], wire: kw::wire, paren: Paren, ty_expr: Option>, @@ -499,19 +547,50 @@ impl HdlLetKindToTokens for HdlLetKindWire { fn expr_to_tokens(&self, tokens: &mut TokenStream) { let Self { ty: _, - m, - dot_token, wire, paren, ty_expr, } = self; - m.to_tokens(tokens); - dot_token.to_tokens(tokens); wire.to_tokens(tokens); paren.surround(tokens, |tokens| ty_expr.to_tokens(tokens)); } } +options! { + pub(crate) enum LetFnKindIncomplete { + IncompleteWire(incomplete_wire), + } +} + +#[derive(Clone, Debug)] +pub(crate) struct HdlLetKindIncomplete { + pub(crate) kind: LetFnKindIncomplete, + pub(crate) paren: Paren, +} + +impl ParseTypes for HdlLetKindIncomplete { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + +impl_fold! { + struct HdlLetKindIncomplete<> { + kind: LetFnKindIncomplete, + paren: Paren, + } +} + +impl HdlLetKindToTokens for HdlLetKindIncomplete { + fn ty_to_tokens(&self, _tokens: &mut TokenStream) {} + + fn expr_to_tokens(&self, tokens: &mut TokenStream) { + let Self { kind, paren } = self; + kind.to_tokens(tokens); + paren.surround(tokens, |_| {}); + } +} + options! 
{ pub(crate) enum MemoryFnName { Memory(memory), @@ -627,16 +706,18 @@ impl ToTokens for MemoryFn { #[derive(Clone, Debug)] pub(crate) struct HdlLetKindMemory { pub(crate) ty: Option<(Token![:], Box)>, - pub(crate) m: kw::m, - pub(crate) dot_token: Token![.], pub(crate) memory_fn: MemoryFn, } +impl ParseTypes for HdlLetKindMemory { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + impl_fold! { struct HdlLetKindMemory<> { ty: Option<(Token![:], Box)>, - m: kw::m, - dot_token: Token![.], memory_fn: MemoryFn, } } @@ -650,14 +731,7 @@ impl HdlLetKindToTokens for HdlLetKindMemory { } fn expr_to_tokens(&self, tokens: &mut TokenStream) { - let Self { - ty: _, - m, - dot_token, - memory_fn, - } = self; - m.to_tokens(tokens); - dot_token.to_tokens(tokens); + let Self { ty: _, memory_fn } = self; memory_fn.to_tokens(tokens); } } @@ -667,22 +741,22 @@ impl HdlLetKindMemory { input: ParseStream, parsed_ty: Option<(Token![:], Box)>, _after_ty: Token![=], - m: kw::m, - dot_token: Token![.], + m_dot: Option<(kw::m, Token![.])>, memory_fn_name: MemoryFnName, ) -> syn::Result { + check_empty_m_dot(m_dot, memory_fn_name)?; Ok(Self { ty: parsed_ty, - m, - dot_token, memory_fn: MemoryFn::parse_rest(input, memory_fn_name)?, }) } } #[derive(Clone, Debug)] -pub(crate) enum HdlLetKind { - IO(HdlLetKindIO), +pub(crate) enum HdlLetKind { + IO(HdlLetKindIO), + AddPlatformIO(HdlLetKindAddPlatformIO), + Incomplete(HdlLetKindIncomplete), Instance(HdlLetKindInstance), RegBuilder(HdlLetKindRegBuilder), Wire(HdlLetKindWire), @@ -690,8 +764,10 @@ pub(crate) enum HdlLetKind { } impl_fold! { - enum HdlLetKind<> { - IO(HdlLetKindIO), + enum HdlLetKind { + IO(HdlLetKindIO), + AddPlatformIO(HdlLetKindAddPlatformIO), + Incomplete(HdlLetKindIncomplete), Instance(HdlLetKindInstance), RegBuilder(HdlLetKindRegBuilder), Wire(HdlLetKindWire), @@ -699,6 +775,33 @@ impl_fold! 
{ } } +impl, I> ParseTypes> for HdlLetKind { + fn parse_types( + input: &mut HdlLetKind, + parser: &mut TypesParser<'_>, + ) -> Result { + match input { + HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO), + HdlLetKind::AddPlatformIO(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO) + } + HdlLetKind::Incomplete(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete) + } + HdlLetKind::Instance(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::Instance) + } + HdlLetKind::RegBuilder(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::RegBuilder) + } + HdlLetKind::Wire(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::Wire), + HdlLetKind::Memory(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::Memory) + } + } + } +} + fn parsed_ty_or_err( parsed_ty: Option<(Token![:], Box)>, after_ty: Token![=], @@ -710,15 +813,15 @@ fn parsed_ty_or_err( } } -impl HdlLetKindIO { +impl HdlLetKindIO { fn rest_of_parse( input: ParseStream, parsed_ty: Option<(Token![:], Box)>, after_ty: Token![=], - m: kw::m, - dot_token: Token![.], + m_dot: Option<(kw::m, Token![.])>, kind: ModuleIOKind, ) -> syn::Result { + let (m, dot_token) = unwrap_m_dot(m_dot, kind)?; let (colon_token, ty) = parsed_ty_or_err(parsed_ty, after_ty)?; let paren_contents; Ok(Self { @@ -733,7 +836,36 @@ impl HdlLetKindIO { } } -impl HdlLetKindParse for HdlLetKind { +fn check_empty_m_dot(m_dot: Option<(kw::m, Token![.])>, kind: impl ToTokens) -> syn::Result<()> { + if let Some((m, dot_token)) = m_dot { + Err(Error::new_spanned( + quote! {#m #dot_token #kind}, + format_args!( + "{} is a free function, not a method of ModuleBuilder: try removing the `m.`", + kind.to_token_stream() + ), + )) + } else { + Ok(()) + } +} + +fn unwrap_m_dot( + m_dot: Option<(kw::m, Token![.])>, + kind: impl ToTokens, +) -> syn::Result<(kw::m, Token![.])> { + m_dot.ok_or_else(|| { + Error::new_spanned( + &kind, + format_args!( + "{} is a ModuleBuilder method, not a free function: try prefixing it with `m.`", + kind.to_token_stream() + ), + ) + }) +} + +impl HdlLetKindParse for HdlLetKind { type ParsedTy = Option<(Token![:], Box)>; fn parse_ty(input: ParseStream) -> syn::Result { @@ -753,16 +885,20 @@ impl HdlLetKindParse for HdlLetKind { after_ty: Token![=], input: ParseStream, ) -> syn::Result { - let m = input.parse()?; - let dot_token = input.parse()?; + let m_dot = if input.peek(kw::m) && input.peek2(Token![.]) { + let m = input.parse()?; + let dot_token = input.parse()?; + Some((m, dot_token)) + } else { + None + }; let kind: LetFnKind = input.parse()?; match kind { LetFnKind::Input(input_token) => HdlLetKindIO::rest_of_parse( input, parsed_ty, after_ty, - m, - dot_token, + m_dot, ModuleIOKind::Input(input_token), ) .map(Self::IO), @@ -770,11 +906,27 @@ impl HdlLetKindParse for HdlLetKind { input, parsed_ty, after_ty, - m, - dot_token, + m_dot, ModuleIOKind::Output(output), ) .map(Self::IO), + LetFnKind::AddPlatformIO((add_platform_io,)) => { + if let Some(parsed_ty) = parsed_ty { + return Err(Error::new_spanned( + parsed_ty.1, + "type annotation not allowed for instance", + )); + } + let (m, dot_token) = unwrap_m_dot(m_dot, kind)?; + let paren_contents; + Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO { + m, + dot_token, + add_platform_io, + paren: parenthesized!(paren_contents in input), + platform_io_builder: paren_contents.call(parse_single_fn_arg)?, + })) + } LetFnKind::Instance((instance,)) => { if 
let Some(parsed_ty) = parsed_ty { return Err(Error::new_spanned( @@ -782,41 +934,47 @@ impl HdlLetKindParse for HdlLetKind { "type annotation not allowed for instance", )); } + check_empty_m_dot(m_dot, kind)?; let paren_contents; Ok(Self::Instance(HdlLetKindInstance { - m, - dot_token, instance, paren: parenthesized!(paren_contents in input), module: paren_contents.call(parse_single_fn_arg)?, })) } - LetFnKind::RegBuilder((reg_builder,)) => HdlLetKindRegBuilder::rest_of_parse( - input, - parsed_ty, - after_ty, - m, - dot_token, - reg_builder, - ) - .map(Self::RegBuilder), + LetFnKind::RegBuilder((reg_builder,)) => { + HdlLetKindRegBuilder::rest_of_parse(input, parsed_ty, after_ty, m_dot, reg_builder) + .map(Self::RegBuilder) + } LetFnKind::Wire((wire,)) => { + check_empty_m_dot(m_dot, wire)?; let paren_contents; Ok(Self::Wire(HdlLetKindWire { ty: parsed_ty, - m, - dot_token, wire, paren: parenthesized!(paren_contents in input), ty_expr: paren_contents.call(parse_optional_fn_arg)?, })) } + LetFnKind::IncompleteWire(incomplete_wire) => { + if let Some(parsed_ty) = parsed_ty { + return Err(Error::new_spanned( + parsed_ty.1, + "type annotation not allowed for incomplete_wire", + )); + } + check_empty_m_dot(m_dot, kind)?; + let _paren_contents; + Ok(Self::Incomplete(HdlLetKindIncomplete { + kind: LetFnKindIncomplete::IncompleteWire(incomplete_wire), + paren: parenthesized!(_paren_contents in input), + })) + } LetFnKind::Memory(fn_name) => HdlLetKindMemory::rest_of_parse( input, parsed_ty, after_ty, - m, - dot_token, + m_dot, MemoryFnName::Memory(fn_name), ) .map(Self::Memory), @@ -824,8 +982,7 @@ impl HdlLetKindParse for HdlLetKind { input, parsed_ty, after_ty, - m, - dot_token, + m_dot, MemoryFnName::MemoryArray(fn_name), ) .map(Self::Memory), @@ -833,8 +990,7 @@ impl HdlLetKindParse for HdlLetKind { input, parsed_ty, after_ty, - m, - dot_token, + m_dot, MemoryFnName::MemoryWithInit(fn_name), ) .map(Self::Memory), @@ -846,6 +1002,8 @@ impl HdlLetKindToTokens for HdlLetKind { fn ty_to_tokens(&self, tokens: &mut TokenStream) { match self { HdlLetKind::IO(v) => v.ty_to_tokens(tokens), + HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens), + HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens), HdlLetKind::Instance(v) => v.ty_to_tokens(tokens), HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens), HdlLetKind::Wire(v) => v.ty_to_tokens(tokens), @@ -856,6 +1014,8 @@ impl HdlLetKindToTokens for HdlLetKind { fn expr_to_tokens(&self, tokens: &mut TokenStream) { match self { HdlLetKind::IO(v) => v.expr_to_tokens(tokens), + HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens), + HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens), HdlLetKind::Instance(v) => v.expr_to_tokens(tokens), HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens), HdlLetKind::Wire(v) => v.expr_to_tokens(tokens), @@ -868,7 +1028,7 @@ with_debug_clone_and_fold! { #[allow(dead_code)] pub(crate) struct HdlLet { pub(crate) attrs: Vec, - pub(crate) hdl_attr: HdlAttr, + pub(crate) hdl_attr: HdlAttr, pub(crate) let_token: Token![let], pub(crate) mut_token: Option, pub(crate) name: Ident, @@ -878,6 +1038,34 @@ with_debug_clone_and_fold! 
{ } } +impl, I> ParseTypes> for HdlLet { + fn parse_types( + input: &mut HdlLet, + parser: &mut TypesParser<'_>, + ) -> Result { + let HdlLet { + attrs, + hdl_attr, + let_token, + mut_token, + name, + eq_token, + kind, + semi_token, + } = input; + Ok(Self { + attrs: attrs.clone(), + hdl_attr: hdl_attr.clone(), + let_token: *let_token, + mut_token: *mut_token, + name: name.clone(), + eq_token: *eq_token, + kind: T::parse_types(kind, parser)?, + semi_token: *semi_token, + }) + } +} + impl HdlLet { pub(crate) fn try_map( self, @@ -997,7 +1185,7 @@ fn parse_quote_let_pat>( } } -fn wrap_ty_with_expr(ty: impl ToTokens) -> Type { +pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type { parse_quote_spanned! {ty.span()=> ::fayalite::expr::Expr<#ty> } @@ -1006,7 +1194,7 @@ fn wrap_ty_with_expr(ty: impl ToTokens) -> Type { fn unwrap_or_static_type(expr: Option, span: Span) -> TokenStream { expr.map(ToTokens::into_token_stream).unwrap_or_else(|| { quote_spanned! {span=> - ::fayalite::ty::StaticType::static_type() + ::fayalite::ty::StaticType::TYPE } }) } @@ -1026,30 +1214,42 @@ impl ToTokens for ImplicitName { } } -struct Visitor { - module_kind: ModuleKind, +struct Visitor<'a> { + module_kind: Option, errors: Errors, - io: Vec, + io: ModuleIOOrAddPlatformIO, block_depth: usize, + parsed_generics: &'a ParsedGenerics, } -impl Visitor { - fn take_hdl_attr(&mut self, attrs: &mut Vec) -> Option> { +impl Visitor<'_> { + fn take_hdl_attr( + &mut self, + attrs: &mut Vec, + ) -> Option> { self.errors.unwrap_or( HdlAttr::parse_and_take_attr(attrs), Some(syn::parse2::(quote! {}).unwrap().into()), ) } - fn require_normal_module(&mut self, spanned: impl ToTokens) { + fn require_normal_module_or_fn(&mut self, spanned: impl ToTokens) { match self.module_kind { - ModuleKind::Extern => { + Some(ModuleKind::Extern) => { self.errors .error(spanned, "not allowed in #[hdl_module(extern)]"); } - ModuleKind::Normal => {} + Some(ModuleKind::Normal) | None => {} } } - fn process_hdl_if(&mut self, hdl_attr: HdlAttr, expr_if: ExprIf) -> Expr { + fn require_module(&mut self, spanned: impl ToTokens) { + match self.module_kind { + None => { + self.errors.error(spanned, "not allowed in #[hdl] fn"); + } + Some(_) => {} + } + } + fn process_hdl_if(&mut self, hdl_attr: HdlAttr, expr_if: ExprIf) -> Expr { let ExprIf { attrs, if_token, @@ -1057,10 +1257,10 @@ impl Visitor { then_branch, else_branch, } = expr_if; - self.require_normal_module(if_token); - let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr { - Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if), - expr => expr, + let (else_token, else_expr) = else_branch.unzip(); + let else_expr = else_expr.map(|else_expr| match *else_expr { + Expr::If(expr_if) => Box::new(self.process_hdl_if(hdl_attr.clone(), expr_if)), + _ => else_expr, }); if let Expr::Let(ExprLet { attrs: let_attrs, @@ -1082,11 +1282,23 @@ impl Visitor { }, ); } - if let Some(else_expr) = else_expr { + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + ExprIf { + attrs, + if_token, + cond: parse_quote_spanned! {if_token.span=> + *::fayalite::sim::value::SimValue::<::fayalite::int::Bool>::value(&::fayalite::sim::value::ToSimValue::into_sim_value(#cond)) + }, + then_branch, + else_branch: else_token.zip(else_expr), + } + .into() + } else if let Some(else_expr) = else_expr { parse_quote_spanned! 
{if_token.span=> #(#attrs)* { - let mut __scope = m.if_(#cond); + let mut __scope = ::fayalite::module::if_(#cond); let _: () = #then_branch; let mut __scope = __scope.else_(); let _: () = #else_expr; @@ -1096,7 +1308,7 @@ impl Visitor { parse_quote_spanned! {if_token.span=> #(#attrs)* { - let mut __scope = m.if_(#cond); + let mut __scope = ::fayalite::module::if_(#cond); let _: () = #then_branch; } } @@ -1122,11 +1334,12 @@ impl Visitor { .to_tokens(expr); }); let mut attrs = hdl_let.attrs.clone(); + self.require_module(kind); match self.module_kind { - ModuleKind::Extern => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=> + Some(ModuleKind::Extern) => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=> #[allow(unused_variables)] }), - ModuleKind::Normal => {} + Some(ModuleKind::Normal) | None => {} } let let_stmt = Local { attrs, @@ -1144,7 +1357,81 @@ impl Visitor { }), semi_token: hdl_let.semi_token, }; - self.io.push(hdl_let); + match &mut self.io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let), + ModuleIOOrAddPlatformIO::AddPlatformIO => { + self.errors.error( + kind, + "can't have other inputs/outputs in a module using m.add_platform_io()", + ); + } + } + let_stmt + } + fn process_hdl_let_add_platform_io( + &mut self, + hdl_let: HdlLet, + ) -> Local { + let HdlLet { + mut attrs, + hdl_attr: _, + let_token, + mut_token, + ref name, + eq_token, + kind: + HdlLetKindAddPlatformIO { + m, + dot_token, + add_platform_io, + paren, + platform_io_builder, + }, + semi_token, + } = hdl_let; + let mut expr = quote! {#m #dot_token #add_platform_io}; + paren.surround(&mut expr, |expr| { + let name_str = ImplicitName { + name, + span: name.span(), + }; + quote_spanned! {name.span()=> + #name_str, #platform_io_builder + } + .to_tokens(expr); + }); + self.require_module(add_platform_io); + attrs.push(parse_quote_spanned! {let_token.span=> + #[allow(unused_variables)] + }); + let let_stmt = Local { + attrs, + let_token, + pat: parse_quote! { #mut_token #name }, + init: Some(LocalInit { + eq_token, + expr: parse_quote! { #expr }, + diverge: None, + }), + semi_token, + }; + match &mut self.io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => { + for io in io { + self.errors.error( + io.kind.kind, + "can't have other inputs/outputs in a module using m.add_platform_io()", + ); + } + } + ModuleIOOrAddPlatformIO::AddPlatformIO => { + self.errors.error( + add_platform_io, + "can't use m.add_platform_io() more than once in a single module", + ); + } + } + self.io = ModuleIOOrAddPlatformIO::AddPlatformIO; let_stmt } fn process_hdl_let_instance(&mut self, hdl_let: HdlLet) -> Local { @@ -1157,16 +1444,14 @@ impl Visitor { eq_token, kind: HdlLetKindInstance { - m, - dot_token, instance, paren, module, }, semi_token, } = hdl_let; - self.require_normal_module(instance); - let mut expr = quote! {#m #dot_token #instance}; + self.require_normal_module_or_fn(instance); + let mut expr = instance.to_token_stream(); paren.surround(&mut expr, |expr| { let name_str = ImplicitName { name: &name, @@ -1191,11 +1476,9 @@ impl Visitor { } fn process_hdl_let_reg_builder(&mut self, hdl_let: HdlLet) -> Local { let name = &hdl_let.name; - let m = hdl_let.kind.m; - let dot = hdl_let.kind.dot_token; let reg_builder = hdl_let.kind.reg_builder; - self.require_normal_module(reg_builder); - let mut expr = quote! 
{#m #dot #reg_builder}; + self.require_normal_module_or_fn(reg_builder); + let mut expr = reg_builder.to_token_stream(); hdl_let.kind.reg_builder_paren.surround(&mut expr, |expr| { let name_str = ImplicitName { name, @@ -1219,7 +1502,7 @@ impl Visitor { no_reset.to_tokens(&mut expr); paren.surround(&mut expr, |expr| ty_expr.to_tokens(expr)); } - RegBuilderReset::Reset { .. } | RegBuilderReset::ResetDefault { .. } => { + RegBuilderReset::Reset { .. } => { hdl_let.kind.reset.to_tokens(&mut expr); } } @@ -1244,12 +1527,10 @@ impl Visitor { } fn process_hdl_let_wire(&mut self, hdl_let: HdlLet) -> Local { let name = &hdl_let.name; - let m = hdl_let.kind.m; - let dot = hdl_let.kind.dot_token; let wire = hdl_let.kind.wire; - self.require_normal_module(wire); + self.require_normal_module_or_fn(wire); let ty_expr = unwrap_or_static_type(hdl_let.kind.ty_expr.as_ref(), wire.span()); - let mut expr = quote! {#m #dot #wire}; + let mut expr = wire.to_token_stream(); hdl_let.kind.paren.surround(&mut expr, |expr| { let name_str = ImplicitName { name, @@ -1277,20 +1558,46 @@ impl Visitor { semi_token: hdl_let.semi_token, } } + fn process_hdl_let_incomplete(&mut self, hdl_let: HdlLet) -> Local { + let name = &hdl_let.name; + let kind = hdl_let.kind.kind; + self.require_normal_module_or_fn(kind); + let mut expr = kind.to_token_stream(); + hdl_let.kind.paren.surround(&mut expr, |expr| { + ImplicitName { + name, + span: name.span(), + } + .to_tokens(expr); + }); + let mut_token = &hdl_let.mut_token; + Local { + attrs: hdl_let.attrs.clone(), + let_token: hdl_let.let_token, + pat: parse_quote! { #mut_token #name }, + init: Some(LocalInit { + eq_token: hdl_let.eq_token, + expr: parse_quote! { #expr }, + diverge: None, + }), + semi_token: hdl_let.semi_token, + } + } fn process_hdl_let_memory(&mut self, hdl_let: HdlLet) -> Local { let name = &hdl_let.name; - let m = hdl_let.kind.m; - let dot = hdl_let.kind.dot_token; let memory_fn = hdl_let.kind.memory_fn; let memory_fn_name = memory_fn.name(); - self.require_normal_module(memory_fn_name); - let mut expr = quote! {#m #dot #memory_fn_name}; + self.require_normal_module_or_fn(memory_fn_name); + let mut expr = memory_fn_name.to_token_stream(); let (paren, arg) = match memory_fn { MemoryFn::Memory { memory, paren, ty_expr, - } => (paren, unwrap_or_static_type(ty_expr.as_ref(), memory.span())), + } => ( + paren, + unwrap_or_static_type(ty_expr.as_ref(), memory.span()), + ), MemoryFn::MemoryArray { memory_array, paren, @@ -1345,6 +1652,8 @@ impl Visitor { } the_match! { IO => process_hdl_let_io, + AddPlatformIO => process_hdl_let_add_platform_io, + Incomplete => process_hdl_let_incomplete, Instance => process_hdl_let_instance, RegBuilder => process_hdl_let_reg_builder, Wire => process_hdl_let_wire, @@ -1377,16 +1686,17 @@ impl Visitor { let value: BigInt = self .errors .ok(base10_digits.parse().map_err(|e| Error::new(span, e)))?; - let (negative, bytes) = match value.sign() { - Sign::Minus => (true, value.magnitude().to_bytes_le()), - Sign::NoSign => (false, vec![]), - Sign::Plus => (false, value.magnitude().to_bytes_le()), + let bytes = value.to_signed_bytes_le(); + let path = if signed { + known_items::SInt(span).path + } else { + known_items::UInt(span).path }; Some(parse_quote_spanned! 
{span=> - ::fayalite::int::make_int_literal::<#signed, #width>( - #negative, + <#path<#width> as ::fayalite::int::BoolOrIntType>::le_bytes_to_expr_wrapping( &[#(#bytes,)*], - ).to_int_expr() + #width, + ) }) } fn process_literal(&mut self, literal: ExprLit) -> Expr { @@ -1439,7 +1749,7 @@ impl Visitor { } } -fn empty_let() -> Local { +pub(crate) fn empty_let() -> Local { Local { attrs: vec![], let_token: Default::default(), @@ -1449,7 +1759,7 @@ fn empty_let() -> Local { } } -impl Fold for Visitor { +impl Fold for Visitor<'_> { fn fold_item(&mut self, item: Item) -> Item { // don't process item interiors item @@ -1461,7 +1771,7 @@ impl Fold for Visitor { } fn fold_attribute(&mut self, attr: Attribute) -> Attribute { - if is_hdl_attr(&attr) { + if is_hdl_attr::(&attr) { self.errors .error(&attr, "#[hdl] attribute not supported here"); } @@ -1521,26 +1831,52 @@ impl Fold for Visitor { Repeat => process_hdl_repeat, Struct => process_hdl_struct, Tuple => process_hdl_tuple, + MethodCall => process_hdl_method_call, Call => process_hdl_call, - Path => process_hdl_path, } } } - fn fold_local(&mut self, let_stmt: Local) -> Local { + fn fold_local(&mut self, mut let_stmt: Local) -> Local { match self .errors - .ok(HdlAttr::::parse_and_leave_attr(&let_stmt.attrs)) - { + .ok(HdlAttr::::parse_and_leave_attr( + &let_stmt.attrs, + )) { None => return empty_let(), Some(None) => return fold_local(self, let_stmt), Some(Some(HdlAttr { .. })) => {} }; - let hdl_let = syn::parse2::(let_stmt.into_token_stream()); + let mut pat = &let_stmt.pat; + if let Pat::Type(pat_type) = pat { + pat = &pat_type.pat; + } + let Pat::Ident(syn::PatIdent { + attrs: _, + by_ref: None, + mutability: _, + ident: _, + subpat: None, + }) = pat + else { + let hdl_attr = + HdlAttr::::parse_and_take_attr(&mut let_stmt.attrs) + .ok() + .flatten() + .expect("already checked above"); + let let_stmt = fold_local(self, let_stmt); + return self.process_hdl_let_pat(hdl_attr, let_stmt); + }; + let hdl_let = syn::parse2::>>(let_stmt.into_token_stream()); let Some(hdl_let) = self.errors.ok(hdl_let) else { return empty_let(); }; - let hdl_let = hdl_let.do_fold(self); + let mut hdl_let = hdl_let.do_fold(self); + let Ok(hdl_let) = + TypesParser::run_with_errors(self.parsed_generics, &mut hdl_let, &mut self.errors) + else { + return empty_let(); + }; self.process_hdl_let(hdl_let) } @@ -1560,15 +1896,22 @@ impl Fold for Visitor { } } +pub(crate) enum ModuleIOOrAddPlatformIO { + ModuleIO(Vec), + AddPlatformIO, +} + pub(crate) fn transform_body( - module_kind: ModuleKind, + module_kind: Option, mut body: Box, -) -> syn::Result<(Box, Vec)> { + parsed_generics: &ParsedGenerics, +) -> syn::Result<(Box, ModuleIOOrAddPlatformIO)> { let mut visitor = Visitor { module_kind, errors: Errors::new(), - io: vec![], + io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]), block_depth: 0, + parsed_generics, }; *body = syn::fold::fold_block(&mut visitor, *body); visitor.errors.finish()?; diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs index bfa4a51..1aabb19 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs @@ -1,540 +1,273 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use crate::{module::transform_body::Visitor, options, Errors, HdlAttr, 
PairsIterExt}; -use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt}; + +use crate::{ + HdlAttr, kw, + module::transform_body::{ + ExprOptions, Visitor, + expand_match::{EnumPath, parse_enum_path}, + }, +}; +use quote::{format_ident, quote_spanned}; +use std::mem; use syn::{ - parse::Nothing, - parse_quote, parse_quote_spanned, - punctuated::{Pair, Punctuated}, - spanned::Spanned, - token::{Brace, Paren}, - Attribute, Expr, ExprArray, ExprCall, ExprGroup, ExprPath, ExprRepeat, ExprStruct, ExprTuple, - FieldValue, Ident, Index, Member, Path, PathArguments, PathSegment, Token, TypePath, + Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, + ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned, + punctuated::Punctuated, spanned::Spanned, token::Paren, }; -options! { - #[options = AggregateLiteralOptions] - #[no_ident_fragment] - pub(crate) enum AggregateLiteralOption { - Struct(struct_), - Enum(enum_), - } -} - -#[derive(Clone, Debug)] -pub(crate) struct StructOrEnumPath { - pub(crate) ty: TypePath, - pub(crate) variant: Option<(TypePath, Ident)>, -} - -#[derive(Debug, Copy, Clone)] -pub(crate) struct SingleSegmentVariant { - pub(crate) name: &'static str, - pub(crate) make_type_path: fn(Span, &PathArguments) -> Path, -} - -impl StructOrEnumPath { - pub(crate) const SINGLE_SEGMENT_VARIANTS: &'static [SingleSegmentVariant] = { - fn make_option_type_path(span: Span, arguments: &PathArguments) -> Path { - let arguments = if arguments.is_none() { - quote_spanned! {span=> - <_> - } - } else { - arguments.to_token_stream() - }; - parse_quote_spanned! {span=> - ::fayalite::__std::option::Option #arguments - } - } - fn make_result_type_path(span: Span, arguments: &PathArguments) -> Path { - let arguments = if arguments.is_none() { - quote_spanned! {span=> - <_, _> - } - } else { - arguments.to_token_stream() - }; - parse_quote_spanned! 
{span=> - ::fayalite::__std::result::Result #arguments - } - } - &[ - SingleSegmentVariant { - name: "Some", - make_type_path: make_option_type_path, - }, - SingleSegmentVariant { - name: "None", - make_type_path: make_option_type_path, - }, - SingleSegmentVariant { - name: "Ok", - make_type_path: make_result_type_path, - }, - SingleSegmentVariant { - name: "Err", - make_type_path: make_result_type_path, - }, - ] - }; - pub(crate) fn new( - errors: &mut Errors, - path: TypePath, - options: &AggregateLiteralOptions, - ) -> Result { - let Path { - leading_colon, - segments, - } = &path.path; - let qself_position = path.qself.as_ref().map(|qself| qself.position).unwrap_or(0); - let variant_name = if qself_position < segments.len() { - Some(segments.last().unwrap().ident.clone()) - } else { - None - }; - let enum_type = 'guess_enum_type: { - if options.enum_.is_some() { - if let Some((struct_,)) = options.struct_ { - errors.error( - struct_, - "can't specify both #[hdl(enum)] and #[hdl(struct)]", - ); - } - break 'guess_enum_type Some(None); - } - if options.struct_.is_some() { - break 'guess_enum_type None; - } - if path.qself.is_none() && leading_colon.is_none() && segments.len() == 1 { - let PathSegment { ident, arguments } = &segments[0]; - for &SingleSegmentVariant { - name, - make_type_path, - } in Self::SINGLE_SEGMENT_VARIANTS - { - if ident == name { - break 'guess_enum_type Some(Some(TypePath { - qself: None, - path: make_type_path(ident.span(), arguments), - })); - } - } - } - if segments.len() == qself_position + 2 - && segments[qself_position + 1].arguments.is_none() - && (path.qself.is_some() - || segments[qself_position].ident.to_string().as_bytes()[0] - .is_ascii_uppercase()) - { - let mut ty = path.clone(); - ty.path.segments.pop(); - ty.path.segments.pop_punct(); - break 'guess_enum_type Some(Some(ty)); - } - None - }; - if let Some(enum_type) = enum_type { - let ty = if let Some(enum_type) = enum_type { - enum_type - } else { - if qself_position >= segments.len() { - errors.error(path, "#[hdl]: can't figure out enum's type"); - return Err(()); - } - let mut ty = path.clone(); - ty.path.segments.pop(); - ty.path.segments.pop_punct(); - ty - }; - let Some(variant_name) = variant_name else { - errors.error(path, "#[hdl]: can't figure out enum's variant name"); - return Err(()); - }; - Ok(Self { - ty, - variant: Some((path, variant_name)), - }) - } else { - Ok(Self { - ty: path, - variant: None, - }) - } - } -} - -#[derive(Copy, Clone, Debug)] -pub(crate) enum BraceOrParen { - Brace(Brace), - Paren(Paren), -} - -impl BraceOrParen { - pub(crate) fn surround(self, tokens: &mut TokenStream, f: impl FnOnce(&mut TokenStream)) { - match self { - BraceOrParen::Brace(v) => v.surround(tokens, f), - BraceOrParen::Paren(v) => v.surround(tokens, f), - } - } -} - -#[derive(Debug, Clone)] -pub(crate) struct StructOrEnumLiteralField { - pub(crate) attrs: Vec, - pub(crate) member: Member, - pub(crate) colon_token: Option, - pub(crate) expr: Expr, -} - -#[derive(Debug, Clone)] -pub(crate) struct StructOrEnumLiteral { - pub(crate) attrs: Vec, - pub(crate) path: TypePath, - pub(crate) brace_or_paren: BraceOrParen, - pub(crate) fields: Punctuated, - pub(crate) dot2_token: Option, - pub(crate) rest: Option>, -} - -impl StructOrEnumLiteral { - pub(crate) fn map_field_exprs(self, mut f: impl FnMut(Expr) -> Expr) -> Self { - self.map_fields(|mut field| { - field.expr = f(field.expr); - field - }) - } - pub(crate) fn map_fields( - self, - f: impl FnMut(StructOrEnumLiteralField) -> 
StructOrEnumLiteralField, - ) -> Self { - let Self { - attrs, - path, - brace_or_paren, - fields, - dot2_token, - rest, - } = self; - let fields = fields.into_pairs().map_pair_value(f).collect(); - Self { - attrs, - path, - brace_or_paren, - fields, - dot2_token, - rest, - } - } -} - -impl From for StructOrEnumLiteral { - fn from(value: ExprStruct) -> Self { - let ExprStruct { - attrs, - qself, - path, - brace_token, - fields, - dot2_token, - rest, - } = value; - Self { - attrs, - path: TypePath { qself, path }, - brace_or_paren: BraceOrParen::Brace(brace_token), - fields: fields - .into_pairs() - .map_pair_value( - |FieldValue { - attrs, - member, - colon_token, - expr, - }| StructOrEnumLiteralField { - attrs, - member, - colon_token, - expr, - }, - ) - .collect(), - dot2_token, - rest, - } - } -} - -fn expr_to_member(expr: &Expr) -> Option { - syn::parse2(expr.to_token_stream()).ok() -} - -impl ToTokens for StructOrEnumLiteral { - fn to_tokens(&self, tokens: &mut TokenStream) { - let Self { - attrs, - path, - brace_or_paren, - fields, - dot2_token, - rest, - } = self; - tokens.append_all(attrs); - path.to_tokens(tokens); - brace_or_paren.surround(tokens, |tokens| { - match brace_or_paren { - BraceOrParen::Brace(_) => { - for ( - StructOrEnumLiteralField { - attrs, - member, - mut colon_token, - expr, - }, - comma, - ) in fields.pairs().map(|v| v.into_tuple()) - { - tokens.append_all(attrs); - if Some(member) != expr_to_member(expr).as_ref() { - colon_token = Some(::default()); - } - member.to_tokens(tokens); - colon_token.to_tokens(tokens); - expr.to_tokens(tokens); - comma.to_tokens(tokens); - } - } - BraceOrParen::Paren(_) => { - for ( - StructOrEnumLiteralField { - attrs, - member: _, - colon_token: _, - expr, - }, - comma, - ) in fields.pairs().map(|v| v.into_tuple()) - { - tokens.append_all(attrs); - expr.to_tokens(tokens); - comma.to_tokens(tokens); - } - } - } - if let Some(rest) = rest { - dot2_token.unwrap_or_default().to_tokens(tokens); - rest.to_tokens(tokens); - } - }); - } -} - -impl Visitor { +impl Visitor<'_> { pub(crate) fn process_hdl_array( &mut self, - hdl_attr: HdlAttr, + hdl_attr: HdlAttr, mut expr_array: ExprArray, ) -> Expr { - self.require_normal_module(hdl_attr); - for elem in &mut expr_array.elems { - *elem = parse_quote_spanned! {elem.span()=> - ::fayalite::expr::ToExpr::to_expr(&(#elem)) - }; + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + if sim.is_some() { + for elem in &mut expr_array.elems { + *elem = parse_quote_spanned! {elem.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem)) + }; + } + parse_quote_spanned! {span=> + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array) + } + } else { + for elem in &mut expr_array.elems { + *elem = parse_quote_spanned! {elem.span()=> + ::fayalite::expr::ToExpr::to_expr(&(#elem)) + }; + } + parse_quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&#expr_array) + } } - parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)} } pub(crate) fn process_hdl_repeat( &mut self, - hdl_attr: HdlAttr, + hdl_attr: HdlAttr, mut expr_repeat: ExprRepeat, ) -> Expr { - self.require_normal_module(hdl_attr); let repeated_value = &expr_repeat.expr; - *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> - ::fayalite::expr::ToExpr::to_expr(&(#repeated_value)) - }; - parse_quote! 
{::fayalite::expr::ToExpr::to_expr(&#expr_repeat)} - } - pub(crate) fn process_struct_enum( - &mut self, - hdl_attr: HdlAttr, - mut literal: StructOrEnumLiteral, - ) -> Expr { - let span = hdl_attr.hdl.span; - if let Some(rest) = literal.rest.take() { - self.errors - .error(rest, "#[hdl] struct functional update syntax not supported"); - } - let mut next_var = 0usize; - let mut new_var = || -> Ident { - let retval = format_ident!("__v{}", next_var, span = span); - next_var += 1; - retval - }; - let infallible_var = new_var(); - let retval_var = new_var(); - let mut lets = vec![]; - let mut build_steps = vec![]; - let literal = literal.map_field_exprs(|expr| { - let field_var = new_var(); - lets.push(quote_spanned! {span=> - let #field_var = ::fayalite::expr::ToExpr::to_expr(&#expr); - }); - parse_quote! { #field_var } - }); - let Ok(StructOrEnumPath { ty, variant }) = - StructOrEnumPath::new(&mut self.errors, literal.path.clone(), &hdl_attr.body) - else { - return parse_quote_spanned! {span=> - {} + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + if sim.is_some() { + *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value)) }; - }; - for StructOrEnumLiteralField { - attrs: _, - member, - colon_token: _, - expr, - } in literal.fields.iter() - { - let field_fn = format_ident!("field_{}", member); - build_steps.push(quote_spanned! {span=> - let #retval_var = #retval_var.#field_fn(#expr); - }); - } - let check_literal = literal.map_field_exprs(|expr| { parse_quote_spanned! {span=> - ::fayalite::expr::value_from_expr_type(#expr, #infallible_var) - } - }); - let make_expr_fn = if let Some((_variant_path, variant_ident)) = &variant { - let variant_fn = format_ident!("variant_{}", variant_ident); - build_steps.push(quote_spanned! {span=> - let #retval_var = #retval_var.#variant_fn(); - }); - quote_spanned! {span=> - ::fayalite::expr::make_enum_expr + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat) } } else { - build_steps.push(quote_spanned! {span=> - let #retval_var = #retval_var.build(); - }); - quote_spanned! {span=> - ::fayalite::expr::make_bundle_expr - } - }; - let variant_or_type = - variant.map_or_else(|| ty.clone(), |(variant_path, _variant_ident)| variant_path); - parse_quote_spanned! {span=> - { - #(#lets)* - #make_expr_fn::<#ty>(|#infallible_var| { - let #retval_var = #check_literal; - #[allow(unreachable_code)] - match #retval_var { - #variant_or_type { .. } => #retval_var, - #[allow(unreachable_patterns)] - _ => match #infallible_var {}, - } - }, |#retval_var| { - #(#build_steps)* - #retval_var - }) + *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> + ::fayalite::expr::ToExpr::to_expr(&(#repeated_value)) + }; + parse_quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&#expr_repeat) } } } pub(crate) fn process_hdl_struct( &mut self, - hdl_attr: HdlAttr, - expr_struct: ExprStruct, + hdl_attr: HdlAttr, + mut expr_struct: ExprStruct, ) -> Expr { - self.require_normal_module(&hdl_attr); - self.process_struct_enum(hdl_attr, expr_struct.into()) + let name_span = expr_struct.path.segments.last().unwrap().ident.span(); + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + let ty_path = TypePath { + qself: expr_struct.qself.take(), + path: expr_struct.path, + }; + expr_struct.path = parse_quote_spanned! 
{name_span=> + __SimValue::<#ty_path> + }; + for field in &mut expr_struct.fields { + let expr = &field.expr; + field.expr = parse_quote_spanned! {field.member.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)) + }; + } + return parse_quote_spanned! {name_span=> + { + type __SimValue = ::SimValue; + let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct); + value + } + }; + } + let builder_ident = format_ident!("__builder", span = name_span); + let empty_builder = if expr_struct.qself.is_some() + || expr_struct + .path + .segments + .iter() + .any(|seg| !seg.arguments.is_none()) + { + let ty = TypePath { + qself: expr_struct.qself, + path: expr_struct.path, + }; + let builder_ty = quote_spanned! {name_span=> + <#ty as ::fayalite::bundle::BundleType>::Builder + }; + quote_spanned! {name_span=> + <#builder_ty as ::fayalite::__std::default::Default>::default() + } + } else { + let path = ExprPath { + attrs: vec![], + qself: expr_struct.qself, + path: expr_struct.path, + }; + quote_spanned! {name_span=> + #path::__bundle_builder() + } + }; + let field_calls = Vec::from_iter(expr_struct.fields.iter().map( + |FieldValue { + attrs: _, + member, + colon_token: _, + expr, + }| { + let field_fn = format_ident!("field_{}", member); + quote_spanned! {member.span()=> + let #builder_ident = #builder_ident.#field_fn(#expr); + } + }, + )); + parse_quote_spanned! {name_span=> + { + let #builder_ident = #empty_builder; + #(#field_calls)* + ::fayalite::expr::ToExpr::to_expr(&#builder_ident) + } + } } pub(crate) fn process_hdl_tuple( &mut self, - hdl_attr: HdlAttr, - expr_tuple: ExprTuple, + hdl_attr: HdlAttr, + mut expr_tuple: ExprTuple, ) -> Expr { - self.require_normal_module(hdl_attr); - parse_quote_spanned! {expr_tuple.span()=> - ::fayalite::expr::ToExpr::to_expr(&#expr_tuple) - } - } - pub(crate) fn process_hdl_path( - &mut self, - hdl_attr: HdlAttr, - expr_path: ExprPath, - ) -> Expr { - self.require_normal_module(hdl_attr); - parse_quote_spanned! {expr_path.span()=> - ::fayalite::expr::ToExpr::to_expr(&#expr_path) + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + for element in &mut expr_tuple.elems { + *element = parse_quote_spanned! {element.span()=> + &(#element) + }; + } + parse_quote_spanned! {expr_tuple.span()=> + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple) + } + } else { + parse_quote_spanned! {expr_tuple.span()=> + ::fayalite::expr::ToExpr::to_expr(&#expr_tuple) + } } } pub(crate) fn process_hdl_call( &mut self, - hdl_attr: HdlAttr, - expr_call: ExprCall, + hdl_attr: HdlAttr, + mut expr_call: ExprCall, ) -> Expr { - self.require_normal_module(&hdl_attr); - let ExprCall { - attrs: mut literal_attrs, - func, - paren_token, - args, - } = expr_call; - let mut path_expr = *func; - let path = loop { - break match path_expr { - Expr::Group(ExprGroup { - attrs, - group_token: _, - expr, - }) => { - literal_attrs.extend(attrs); - path_expr = *expr; - continue; + let span = hdl_attr.kw.span; + let mut func = &mut *expr_call.func; + let EnumPath { + variant_path: _, + enum_path, + variant_name, + } = loop { + match func { + Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. 
}) => { + func = &mut **expr; } - Expr::Path(ExprPath { attrs, qself, path }) => { - literal_attrs.extend(attrs); - TypePath { qself, path } + Expr::Path(_) => { + let Expr::Path(ExprPath { attrs, qself, path }) = + mem::replace(func, Expr::PLACEHOLDER) + else { + unreachable!(); + }; + match parse_enum_path(TypePath { qself, path }) { + Ok(path) => break path, + Err(path) => { + self.errors.error(&path, "unsupported enum variant path"); + let TypePath { qself, path } = path; + *func = ExprPath { attrs, qself, path }.into(); + return expr_call.into(); + } + } } _ => { - self.errors.error(&path_expr, "missing tuple struct's name"); - return parse_quote_spanned! {path_expr.span()=> - {} - }; + self.errors.error( + &expr_call.func, + "#[hdl] function call -- function must be a possibly-parenthesized path", + ); + return expr_call.into(); } - }; + } }; - let fields = args - .into_pairs() - .enumerate() - .map(|(index, p)| { - let (expr, comma) = p.into_tuple(); - let mut index = Index::from(index); - index.span = hdl_attr.hdl.span; - Pair::new( - StructOrEnumLiteralField { - attrs: vec![], - member: Member::Unnamed(index), - colon_token: None, - expr, - }, - comma, - ) - }) - .collect(); - self.process_struct_enum( + self.process_hdl_method_call( hdl_attr, - StructOrEnumLiteral { - attrs: literal_attrs, - path, - brace_or_paren: BraceOrParen::Paren(paren_token), - fields, - dot2_token: None, - rest: None, + ExprMethodCall { + attrs: expr_call.attrs, + receiver: parse_quote_spanned! {span=> + <#enum_path as ::fayalite::ty::StaticType>::TYPE + }, + dot_token: Token![.](span), + method: variant_name, + turbofish: None, + paren_token: expr_call.paren_token, + args: expr_call.args, }, ) } + pub(crate) fn process_hdl_method_call( + &mut self, + hdl_attr: HdlAttr, + mut expr_method_call: ExprMethodCall, + ) -> Expr { + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + // remove any number of groups and up to one paren + let mut receiver = &mut *expr_method_call.receiver; + let mut has_group = false; + let receiver = loop { + match receiver { + Expr::Group(ExprGroup { expr, .. }) => { + has_group = true; + receiver = expr; + } + Expr::Paren(ExprParen { expr, .. }) => break &mut **expr, + receiver @ Expr::Path(_) => break receiver, + _ => { + if !has_group { + self.errors.error( + &expr_method_call.receiver, + "#[hdl] on a method call needs parenthesized receiver", + ); + } + break &mut *expr_method_call.receiver; + } + } + }; + let func = if sim.is_some() { + parse_quote_spanned! {span=> + ::fayalite::enum_::enum_type_to_sim_builder + } + } else { + parse_quote_spanned! 
{span=> + ::fayalite::enum_::assert_is_enum_type + } + }; + *expr_method_call.receiver = ExprCall { + attrs: vec![], + func, + paren_token: Paren(span), + args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]), + } + .into(); + expr_method_call.into() + } } diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs index fe1a895..069f00d 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs @@ -1,26 +1,121 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - fold::impl_fold, - module::transform_body::{ - expand_aggregate_literals::{AggregateLiteralOptions, StructOrEnumPath}, - with_debug_clone_and_fold, Visitor, - }, Errors, HdlAttr, PairsIterExt, + fold::{DoFold, impl_fold}, + kw, + module::transform_body::{ + ExprOptions, Visitor, empty_let, with_debug_clone_and_fold, wrap_ty_with_expr, + }, }; use proc_macro2::{Span, TokenStream}; -use quote::{ToTokens, TokenStreamExt}; +use quote::{ToTokens, TokenStreamExt, format_ident, quote_spanned}; +use std::collections::BTreeSet; use syn::{ - fold::{fold_arm, fold_expr_match, fold_pat, Fold}, - parse::Nothing, + Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr, + PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment, + Token, TypePath, + fold::{Fold, fold_arm, fold_expr_match, fold_local, fold_pat}, parse_quote_spanned, - punctuated::{Pair, Punctuated}, + punctuated::Punctuated, spanned::Spanned, token::{Brace, Paren}, - Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Index, Member, Pat, PatIdent, PatOr, - PatParen, PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, Token, TypePath, }; +macro_rules! visit_trait { + ( + $($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)* + ) => { + trait VisitMatchPat<'a> { + $(fn $fn(&mut self, $value: &'a $Value) { + $fn(self, $value); + })* + } + + $($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)* + }; +} + +visit_trait! 
{ + fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) { + let MatchPatBinding { ident: _ } = v; + } + fn visit_match_pat_wild(_state: _, v: &MatchPatWild) { + let MatchPatWild { underscore_token: _ } = v; + } + fn visit_match_pat_rest(_state: _, v: &MatchPatRest) { + let MatchPatRest { dot2_token: _ } = v; + } + fn visit_match_pat_paren(state: _, v: &MatchPatParen) { + let MatchPatParen { paren_token: _, pat } = v; + state.visit_match_pat(pat); + } + fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen) { + let MatchPatParen { paren_token: _, pat } = v; + state.visit_match_pat_simple(pat); + } + fn visit_match_pat_or(state: _, v: &MatchPatOr) { + let MatchPatOr { leading_vert: _, cases } = v; + for v in cases { + state.visit_match_pat(v); + } + } + fn visit_match_pat_or_simple(state: _, v: &MatchPatOr) { + let MatchPatOr { leading_vert: _, cases } = v; + for v in cases { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) { + let MatchPatStructField { field_name: _, colon_token: _, pat } = v; + state.visit_match_pat_simple(pat); + } + fn visit_match_pat_struct(state: _, v: &MatchPatStruct) { + let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v; + for v in fields { + state.visit_match_pat_struct_field(v); + } + } + fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) { + let MatchPatTuple { paren_token: _, fields } = v; + for v in fields { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) { + let MatchPatEnumVariant { + match_span:_, + sim:_, + variant_path: _, + enum_path: _, + variant_name: _, + field, + } = v; + if let Some((_, v)) = field { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_simple(state: _, v: &MatchPatSimple) { + match v { + MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v), + MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v), + MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v), + MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v), + MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v), + } + } + fn visit_match_pat(state: _, v: &MatchPat) { + match v { + MatchPat::Simple(v) => state.visit_match_pat_simple(v), + MatchPat::Or(v) => state.visit_match_pat_or(v), + MatchPat::Paren(v) => state.visit_match_pat_paren(v), + MatchPat::Struct(v) => state.visit_match_pat_struct(v), + MatchPat::Tuple(v) => state.visit_match_pat_tuple(v), + MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v), + } + } +} + with_debug_clone_and_fold! { struct MatchPatBinding<> { ident: Ident, @@ -55,6 +150,15 @@ with_debug_clone_and_fold! { } } +impl
<P> MatchPatOr<P>
{ + /// returns the first `|` between two patterns + fn first_inner_vert(&self) -> Option<Token![|]> { + let mut pairs = self.cases.pairs(); + pairs.next_back(); + pairs.next().and_then(|v| v.into_tuple().1.copied()) + } +} + impl ToTokens for MatchPatOr<P>
{ fn to_tokens(&self, tokens: &mut TokenStream) { let Self { @@ -79,9 +183,22 @@ impl ToTokens for MatchPatWild { } } +with_debug_clone_and_fold! { + struct MatchPatRest<> { + dot2_token: Token![..], + } +} + +impl ToTokens for MatchPatRest { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { dot2_token } = self; + dot2_token.to_tokens(tokens); + } +} + with_debug_clone_and_fold! { struct MatchPatStructField<> { - member: Member, + field_name: Ident, colon_token: Option, pat: MatchPatSimple, } @@ -90,12 +207,19 @@ with_debug_clone_and_fold! { impl ToTokens for MatchPatStructField { fn to_tokens(&self, tokens: &mut TokenStream) { let Self { - member, + field_name, colon_token, pat, } = self; - member.to_tokens(tokens); - colon_token.to_tokens(tokens); + field_name.to_tokens(tokens); + if let (None, MatchPatSimple::Binding(MatchPatBinding { ident })) = (colon_token, pat) { + if field_name == ident { + return; + } + } + colon_token + .unwrap_or_else(|| Token![:](field_name.span())) + .to_tokens(tokens); pat.to_tokens(tokens); } } @@ -108,8 +232,16 @@ impl MatchPatStructField { colon_token, pat, } = field_pat; + let field_name = if let Member::Named(field_name) = member { + field_name + } else { + state + .errors + .error(&member, "field name must not be a number"); + format_ident!("_{}", member) + }; Ok(Self { - member, + field_name, colon_token, pat: MatchPatSimple::parse(state, *pat)?, }) @@ -118,7 +250,8 @@ impl MatchPatStructField { with_debug_clone_and_fold! { struct MatchPatStruct<> { - resolved_path: Path, + match_span: Span, + path: Path, brace_token: Brace, fields: Punctuated, rest: Option, @@ -128,12 +261,16 @@ with_debug_clone_and_fold! { impl ToTokens for MatchPatStruct { fn to_tokens(&self, tokens: &mut TokenStream) { let Self { - resolved_path, + match_span, + path, brace_token, fields, rest, } = self; - resolved_path.to_tokens(tokens); + quote_spanned! {*match_span=> + __MatchTy::<#path> + } + .to_tokens(tokens); brace_token.surround(tokens, |tokens| { fields.to_tokens(tokens); rest.to_tokens(tokens); @@ -141,12 +278,84 @@ impl ToTokens for MatchPatStruct { } } +with_debug_clone_and_fold! { + struct MatchPatTuple<> { + paren_token: Paren, + fields: Punctuated, + } +} + +impl ToTokens for MatchPatTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + paren_token, + fields, + } = self; + paren_token.surround(tokens, |tokens| { + fields.to_tokens(tokens); + }) + } +} + +with_debug_clone_and_fold! { + struct MatchPatEnumVariant<> { + match_span: Span, + sim: Option<(kw::sim,)>, + variant_path: Path, + enum_path: Path, + variant_name: Ident, + field: Option<(Paren, MatchPatSimple)>, + } +} + +impl ToTokens for MatchPatEnumVariant { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + match_span, + sim, + variant_path: _, + enum_path, + variant_name, + field, + } = self; + quote_spanned! {*match_span=> + __MatchTy::<#enum_path>::#variant_name + } + .to_tokens(tokens); + if sim.is_some() { + if let Some((paren_token, field)) = field { + paren_token.surround(tokens, |tokens| { + field.to_tokens(tokens); + match field { + MatchPatSimple::Paren(_) + | MatchPatSimple::Or(_) + | MatchPatSimple::Binding(_) + | MatchPatSimple::Wild(_) => quote_spanned! {*match_span=> + , _ + } + .to_tokens(tokens), + MatchPatSimple::Rest(_) => {} + } + }); + } else { + quote_spanned! 
{*match_span=> + (_) + } + .to_tokens(tokens); + } + } else if let Some((paren_token, field)) = field { + paren_token.surround(tokens, |tokens| field.to_tokens(tokens)); + } + } +} + #[derive(Debug, Clone)] enum MatchPatSimple { Paren(MatchPatParen), Or(MatchPatOr), Binding(MatchPatBinding), Wild(MatchPatWild), + Rest(MatchPatRest), } impl_fold! { @@ -155,6 +364,7 @@ impl_fold! { Or(MatchPatOr), Binding(MatchPatBinding), Wild(MatchPatWild), + Rest(MatchPatRest), } } @@ -165,25 +375,76 @@ impl ToTokens for MatchPatSimple { Self::Paren(v) => v.to_tokens(tokens), Self::Binding(v) => v.to_tokens(tokens), Self::Wild(v) => v.to_tokens(tokens), + Self::Rest(v) => v.to_tokens(tokens), } } } -fn is_pat_ident_a_struct_or_enum_name(ident: &Ident) -> bool { - ident - .to_string() - .starts_with(|ch: char| ch.is_ascii_uppercase()) +pub(crate) struct EnumPath { + pub(crate) variant_path: Path, + pub(crate) enum_path: Path, + pub(crate) variant_name: Ident, +} + +pub(crate) fn parse_enum_path(variant_path: TypePath) -> Result { + let TypePath { + qself: None, + path: variant_path, + } = variant_path + else { + return Err(variant_path); + }; + if variant_path.is_ident("HdlNone") || variant_path.is_ident("HdlSome") { + let ident = variant_path.get_ident().unwrap(); + return Ok(EnumPath { + enum_path: parse_quote_spanned! {ident.span()=> + ::fayalite::enum_::HdlOption::<_> + }, + variant_name: ident.clone(), + variant_path, + }); + } + if variant_path.segments.len() < 2 { + return Err(TypePath { + qself: None, + path: variant_path, + }); + } + let mut enum_path = variant_path.clone(); + let PathSegment { + ident: variant_name, + arguments, + } = enum_path.segments.pop().unwrap().into_value(); + if !arguments.is_none() { + return Err(TypePath { + qself: None, + path: variant_path, + }); + } + enum_path.segments.pop_punct(); + Ok(EnumPath { + variant_path, + enum_path, + variant_name, + }) +} + +fn parse_enum_ident(ident: Ident) -> Result { + parse_enum_path(TypePath { + qself: None, + path: ident.into(), + }) + .map_err(|p| p.path.segments.into_iter().next().unwrap().ident) } trait ParseMatchPat: Sized { fn simple(v: MatchPatSimple) -> Self; fn or(v: MatchPatOr) -> Self; fn paren(v: MatchPatParen) -> Self; - fn struct_( - state: &mut HdlMatchParseState<'_>, - v: MatchPatStruct, - struct_error_spanned: &dyn ToTokens, - ) -> Result; + fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result; + fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result; + fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant) + -> Result; fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result { match pat { Pat::Ident(PatIdent { @@ -208,26 +469,25 @@ trait ParseMatchPat: Sized { .errors .error(at_token, "@ not allowed in #[hdl] patterns"); } - if is_pat_ident_a_struct_or_enum_name(&ident) { - let ident_span = ident.span(); - let resolved_path = state.resolve_enum_struct_path(TypePath { - qself: None, - path: ident.clone().into(), - })?; - Self::struct_( + match parse_enum_ident(ident) { + Ok(EnumPath { + variant_path, + enum_path, + variant_name, + }) => Self::enum_variant( state, - MatchPatStruct { - resolved_path, - brace_token: Brace(ident_span), - fields: Punctuated::new(), - rest: None, + MatchPatEnumVariant { + match_span: state.match_span, + sim: state.sim, + variant_path, + enum_path, + variant_name, + field: None, }, - &ident, - ) - } else { - Ok(Self::simple(MatchPatSimple::Binding(MatchPatBinding { + ), + Err(ident) => 
Ok(Self::simple(MatchPatSimple::Binding(MatchPatBinding { ident, - }))) + }))), } } Pat::Or(PatOr { @@ -254,18 +514,23 @@ trait ParseMatchPat: Sized { qself, path, }) => { - let path = TypePath { qself, path }; - let path_span = path.span(); - let resolved_path = state.resolve_enum_struct_path(path.clone())?; - Self::struct_( + let EnumPath { + variant_path, + enum_path, + variant_name, + } = parse_enum_path(TypePath { qself, path }).map_err(|path| { + state.errors.error(path, "unsupported enum variant path"); + })?; + Self::enum_variant( state, - MatchPatStruct { - resolved_path, - brace_token: Brace(path_span), - fields: Punctuated::new(), - rest: None, + MatchPatEnumVariant { + match_span: state.match_span, + sim: state.sim, + variant_path, + enum_path, + variant_name, + field: None, }, - &path, ) } Pat::Struct(PatStruct { @@ -282,12 +547,17 @@ trait ParseMatchPat: Sized { MatchPatStructField::parse(state, field_pat).ok() }) .collect(); - let path = TypePath { qself, path }; - let resolved_path = state.resolve_enum_struct_path(path.clone())?; + if qself.is_some() { + state + .errors + .error(TypePath { qself, path }, "unsupported struct path"); + return Err(()); + } Self::struct_( state, MatchPatStruct { - resolved_path, + match_span: state.match_span, + path, brace_token, fields, rest: rest.map( @@ -297,7 +567,6 @@ trait ParseMatchPat: Sized { }| dot2_token, ), }, - &path, ) } Pat::TupleStruct(PatTupleStruct { @@ -307,45 +576,46 @@ trait ParseMatchPat: Sized { paren_token, mut elems, }) => { - let rest = if let Some(&Pat::Rest(PatRest { - attrs: _, - dot2_token, - })) = elems.last() - { - elems.pop(); - Some(dot2_token) - } else { - None - }; - let fields = elems - .into_pairs() - .enumerate() - .filter_map(|(index, pair)| { - let (pat, punct) = pair.into_tuple(); - let pat = MatchPatSimple::parse(state, pat).ok()?; - let mut index = Index::from(index); - index.span = state.span; - let field = MatchPatStructField { - member: index.into(), - colon_token: Some(Token![:](state.span)), - pat, - }; - Some(Pair::new(field, punct)) + let EnumPath { + variant_path, + enum_path, + variant_name, + } = parse_enum_path(TypePath { qself, path }).map_err(|path| { + state.errors.error(path, "unsupported enum variant path"); + })?; + if elems.is_empty() { + let mut tokens = TokenStream::new(); + paren_token.surround(&mut tokens, |_| {}); + state.errors.error( + tokens, + "field-less enum variants must not be matched using parenthesis", + ); + } + if elems.len() != 1 { + state.errors.error( + variant_path, + "enum variant pattern must have exactly one field", + ); + return Err(()); + } + let field = elems.pop().unwrap().into_value(); + let field = if let Pat::Rest(rest) = field { + MatchPatSimple::Wild(MatchPatWild { + underscore_token: Token![_](rest.dot2_token.span()), }) - .collect(); - let path = TypePath { qself, path }; - let resolved_path = state.resolve_enum_struct_path(path.clone())?; - Self::struct_( + } else { + MatchPatSimple::parse(state, field)? 
+ }; + Self::enum_variant( state, - MatchPatStruct { - resolved_path, - brace_token: Brace { - span: paren_token.span, - }, - fields, - rest, + MatchPatEnumVariant { + match_span: state.match_span, + sim: state.sim, + variant_path, + enum_path, + variant_name, + field: Some((paren_token, field)), }, - &path, ) } Pat::Rest(_) => { @@ -360,7 +630,34 @@ trait ParseMatchPat: Sized { }) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild { underscore_token, }))), - Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => { + Pat::Tuple(PatTuple { + attrs: _, + paren_token, + elems, + }) => { + let fields = elems + .into_pairs() + .filter_map_pair_value(|field_pat| { + if let Pat::Rest(PatRest { + attrs: _, + dot2_token, + }) = field_pat + { + Some(MatchPatSimple::Rest(MatchPatRest { dot2_token })) + } else { + MatchPatSimple::parse(state, field_pat).ok() + } + }) + .collect(); + Self::tuple( + state, + MatchPatTuple { + paren_token, + fields, + }, + ) + } + Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => { state .errors .error(pat, "not yet implemented in #[hdl] patterns"); @@ -387,14 +684,29 @@ impl ParseMatchPat for MatchPatSimple { Self::Paren(v) } - fn struct_( + fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result { + state.errors.error( + v.path, + "matching structs is not yet implemented inside structs/enums in #[hdl] patterns", + ); + Err(()) + } + + fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result { + state.errors.push(syn::Error::new( + v.paren_token.span.open(), + "matching tuples is not yet implemented inside structs/enums in #[hdl] patterns", + )); + Err(()) + } + + fn enum_variant( state: &mut HdlMatchParseState<'_>, - _v: MatchPatStruct, - struct_error_spanned: &dyn ToTokens, + v: MatchPatEnumVariant, ) -> Result { state.errors.error( - struct_error_spanned, - "not yet implemented inside structs/enums in #[hdl] patterns", + v.variant_path, + "matching enum variants is not yet implemented inside structs/enums in #[hdl] patterns", ); Err(()) } @@ -406,6 +718,8 @@ enum MatchPat { Or(MatchPatOr), Paren(MatchPatParen), Struct(MatchPatStruct), + Tuple(MatchPatTuple), + EnumVariant(MatchPatEnumVariant), } impl_fold! { @@ -414,6 +728,8 @@ impl_fold! 
{ Or(MatchPatOr), Paren(MatchPatParen), Struct(MatchPatStruct), + Tuple(MatchPatTuple), + EnumVariant(MatchPatEnumVariant), } } @@ -430,13 +746,20 @@ impl ParseMatchPat for MatchPat { Self::Paren(v) } - fn struct_( - _state: &mut HdlMatchParseState<'_>, - v: MatchPatStruct, - _struct_error_spanned: &dyn ToTokens, - ) -> Result { + fn struct_(_state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result { Ok(Self::Struct(v)) } + + fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result { + Ok(Self::Tuple(v)) + } + + fn enum_variant( + _state: &mut HdlMatchParseState<'_>, + v: MatchPatEnumVariant, + ) -> Result { + Ok(Self::EnumVariant(v)) + } } impl ToTokens for MatchPat { @@ -446,6 +769,8 @@ impl ToTokens for MatchPat { Self::Or(v) => v.to_tokens(tokens), Self::Paren(v) => v.to_tokens(tokens), Self::Struct(v) => v.to_tokens(tokens), + Self::Tuple(v) => v.to_tokens(tokens), + Self::EnumVariant(v) => v.to_tokens(tokens), } } } @@ -507,27 +832,96 @@ struct RewriteAsCheckMatch { } impl Fold for RewriteAsCheckMatch { - fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat { - i.colon_token = Some(Token![:](i.member.span())); - i - } - fn fold_pat(&mut self, i: Pat) -> Pat { - match i { - Pat::Ident(PatIdent { - attrs, - by_ref, - mutability, - ident, - subpat: None, - }) if is_pat_ident_a_struct_or_enum_name(&ident) => { - parse_quote_spanned! {ident.span()=> - #(#attrs)* - #by_ref - #mutability - #ident {} + fn fold_pat(&mut self, pat: Pat) -> Pat { + match pat { + Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) { + Ok(EnumPath { + variant_path: _, + enum_path, + variant_name, + }) => parse_quote_spanned! {self.span=> + __MatchTy::<#enum_path>::#variant_name {} + }, + Err(ident) => { + pat_ident.ident = ident; + Pat::Ident(self.fold_pat_ident(pat_ident)) } + }, + Pat::Path(PatPath { + attrs: _, + qself, + path, + }) => match parse_enum_path(TypePath { qself, path }) { + Ok(EnumPath { + variant_path: _, + enum_path, + variant_name, + }) => parse_quote_spanned! {self.span=> + __MatchTy::<#enum_path>::#variant_name {} + }, + Err(type_path) => parse_quote_spanned! {self.span=> + __MatchTy::<#type_path> {} + }, + }, + Pat::Struct(PatStruct { + attrs: _, + qself, + path, + brace_token, + fields, + rest, + }) => { + let type_path = TypePath { qself, path }; + let path = parse_quote_spanned! {self.span=> + __MatchTy::<#type_path> + }; + let fields = fields.do_fold(self); + Pat::Struct(PatStruct { + attrs: vec![], + qself: None, + path, + brace_token, + fields, + rest, + }) } - _ => fold_pat(self, i), + Pat::TupleStruct(PatTupleStruct { + attrs, + qself, + path, + paren_token, + elems, + }) => match parse_enum_path(TypePath { qself, path }) { + Ok(EnumPath { + variant_path: _, + enum_path, + variant_name, + }) => { + let path = parse_quote_spanned! 
{self.span=> + __MatchTy::<#enum_path>::#variant_name + }; + let elems = Punctuated::from_iter( + elems.into_pairs().map_pair_value(|p| fold_pat(self, p)), + ); + Pat::TupleStruct(PatTupleStruct { + attrs, + qself: None, + path, + paren_token, + elems, + }) + } + Err(TypePath { qself, path }) => { + Pat::TupleStruct(self.fold_pat_tuple_struct(PatTupleStruct { + attrs, + qself, + path, + paren_token, + elems, + })) + } + }, + _ => fold_pat(self, pat), } } fn fold_pat_ident(&mut self, mut i: PatIdent) -> PatIdent { @@ -552,33 +946,177 @@ impl Fold for RewriteAsCheckMatch { // don't recurse into expressions i } -} - -struct HdlMatchParseState<'a> { - errors: &'a mut Errors, - span: Span, -} - -impl HdlMatchParseState<'_> { - fn resolve_enum_struct_path(&mut self, path: TypePath) -> Result { - let StructOrEnumPath { ty, variant } = - StructOrEnumPath::new(self.errors, path, &AggregateLiteralOptions::default())?; - Ok(if let Some((_variant_path, variant_name)) = variant { - parse_quote_spanned! {self.span=> - __MatchTy::<#ty>::#variant_name - } - } else { - parse_quote_spanned! {self.span=> - __MatchTy::<#ty> - } - }) + fn fold_local(&mut self, mut let_stmt: Local) -> Local { + if let Some(syn::LocalInit { + eq_token, + expr: _, + diverge, + }) = let_stmt.init.take() + { + let_stmt.init = Some(syn::LocalInit { + eq_token, + expr: parse_quote_spanned! {self.span=> + __match_value + }, + diverge: diverge.map(|(else_, _expr)| { + ( + else_, + parse_quote_spanned! {self.span=> + match __infallible {} + }, + ) + }), + }); + } + fold_local(self, let_stmt) } } -impl Visitor { +struct HdlMatchParseState<'a> { + sim: Option<(kw::sim,)>, + match_span: Span, + errors: &'a mut Errors, +} + +struct HdlLetPatVisitState<'a> { + errors: &'a mut Errors, + bindings: BTreeSet<&'a Ident>, +} + +impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> { + fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) { + self.bindings.insert(&v.ident); + } + + fn visit_match_pat_or(&mut self, v: &'a MatchPatOr) { + if let Some(first_inner_vert) = v.first_inner_vert() { + self.errors.error( + first_inner_vert, + "or-patterns are not supported in let statements", + ); + } + visit_match_pat_or(self, v); + } + + fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr) { + if let Some(first_inner_vert) = v.first_inner_vert() { + self.errors.error( + first_inner_vert, + "or-patterns are not supported in let statements", + ); + } + visit_match_pat_or_simple(self, v); + } + + fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) { + self.errors.error(v, "refutable pattern in let statement"); + } +} + +impl Visitor<'_> { + pub(crate) fn process_hdl_let_pat( + &mut self, + hdl_attr: HdlAttr, + mut let_stmt: Local, + ) -> Local { + let span = let_stmt.let_token.span(); + let ExprOptions { sim } = hdl_attr.body; + if let Pat::Type(pat) = &mut let_stmt.pat { + *pat.ty = wrap_ty_with_expr((*pat.ty).clone()); + } + let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone()); + let Local { + attrs: _, + let_token, + pat, + init, + semi_token, + } = let_stmt; + let Some(syn::LocalInit { + eq_token, + expr, + diverge, + }) = init + else { + self.errors + .error(let_token, "#[hdl] let must be assigned a value"); + return empty_let(); + }; + if let Some((else_, _)) = diverge { + // TODO: implement let-else + self.errors + .error(else_, "#[hdl] let ... else { ... 
} is not implemented"); + return empty_let(); + } + let Ok(pat) = MatchPat::parse( + &mut HdlMatchParseState { + sim, + match_span: span, + errors: &mut self.errors, + }, + pat, + ) else { + return empty_let(); + }; + let mut state = HdlLetPatVisitState { + errors: &mut self.errors, + bindings: BTreeSet::new(), + }; + state.visit_match_pat(&pat); + let HdlLetPatVisitState { + errors: _, + bindings, + } = state; + let retval = if sim.is_some() { + parse_quote_spanned! {span=> + let (#(#bindings,)*) = { + type __MatchTy = ::SimValue; + let __match_value = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)); + #let_token #pat #eq_token ::fayalite::sim::value::SimValue::into_value(__match_value) #semi_token + (#(#bindings,)*) + }; + } + } else { + parse_quote_spanned! {span=> + let (#(#bindings,)* __scope,) = { + type __MatchTy = ::MatchVariant; + let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); + ::fayalite::expr::check_match_expr( + __match_expr, + |__match_value, __infallible| { + #[allow(unused_variables)] + #check_let_stmt + match __infallible {} + }, + ); + let mut __match_iter = ::fayalite::module::match_(__match_expr); + let ::fayalite::__std::option::Option::Some(__match_variant) = + ::fayalite::__std::iter::Iterator::next(&mut __match_iter) + else { + ::fayalite::__std::unreachable!("#[hdl] let with uninhabited type"); + }; + let ::fayalite::__std::option::Option::None = + ::fayalite::__std::iter::Iterator::next(&mut __match_iter) + else { + ::fayalite::__std::unreachable!("#[hdl] let with refutable pattern"); + }; + let (__match_variant, __scope) = + ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( + __match_variant, + ); + #let_token #pat #eq_token __match_variant #semi_token + (#(#bindings,)* __scope,) + }; + } + }; + match retval { + syn::Stmt::Local(retval) => retval, + _ => unreachable!(), + } + } pub(crate) fn process_hdl_match( &mut self, - _hdl_attr: HdlAttr, + hdl_attr: HdlAttr, expr_match: ExprMatch, ) -> Expr { let span = expr_match.match_token.span(); @@ -590,34 +1128,47 @@ impl Visitor { brace_token: _, arms, } = expr_match; - self.require_normal_module(match_token); + let ExprOptions { sim } = hdl_attr.body; let mut state = HdlMatchParseState { + sim, + match_span: span, errors: &mut self.errors, - span, }; let arms = Vec::from_iter( arms.into_iter() .filter_map(|arm| MatchArm::parse(&mut state, arm).ok()), ); - parse_quote_spanned! {span=> - { - type __MatchTy = - <::Type as ::fayalite::ty::Type>::MatchVariant; - let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); - ::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| { - #[allow(unused_variables)] - #check_match - }); - for __match_variant in m.match_(__match_expr) { - let (__match_variant, __scope) = - ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( - __match_variant, - ); - #match_token __match_variant { + let expr = if sim.is_some() { + quote_spanned! {span=> + { + type __MatchTy = ::SimValue; + let __match_expr = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)); + #match_token ::fayalite::sim::value::SimValue::into_value(__match_expr) { #(#arms)* } } } - } + } else { + quote_spanned! 
{span=> + { + type __MatchTy = ::MatchVariant; + let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); + ::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| { + #[allow(unused_variables)] + #check_match + }); + for __match_variant in ::fayalite::module::match_(__match_expr) { + let (__match_variant, __scope) = + ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( + __match_variant, + ); + #match_token __match_variant { + #(#arms)* + } + } + } + } + }; + syn::parse2(expr).unwrap() } } diff --git a/crates/fayalite-proc-macros-impl/src/process_cfg.rs b/crates/fayalite-proc-macros-impl/src/process_cfg.rs new file mode 100644 index 0000000..bcf2fa1 --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/process_cfg.rs @@ -0,0 +1,2527 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{Cfg, CfgAttr, Cfgs, Errors}; +use proc_macro2::Ident; +use std::{collections::VecDeque, marker::PhantomData}; +use syn::{ + Token, + punctuated::{Pair, Punctuated}, +}; + +struct State { + cfgs: Cfgs, + errors: Errors, + _phantom: PhantomData
<P>, +} + +impl<P: Phase> State<P>
{ + #[must_use] + fn eval_cfg(&mut self, cfg: Cfg) -> bool { + struct MyDispatch<'a> { + cfg: Cfg, + _phantom: PhantomData<&'a ()>, + } + impl<'a> PhaseDispatch for MyDispatch<'a> { + type Args = &'a mut State
<P>
; + type Output = bool; + + fn dispatch_collect( + self, + args: Self::Args, + ) -> Self::Output { + args.cfgs.insert_cfg(self.cfg, ()); + true + } + + fn dispatch_process( + self, + args: Self::Args, + ) -> Self::Output { + if let Some(&retval) = args.cfgs.cfgs_map.get(&self.cfg) { + retval + } else { + args.errors.error(self.cfg, "unrecognized cfg -- cfg wasn't evaluated when running `__cfg_expansion_helper!`"); + true + } + } + } + P::dispatch( + MyDispatch { + cfg, + _phantom: PhantomData, + }, + self, + ) + } + #[must_use] + fn eval_cfgs( + &mut self, + mut attrs: Vec, + ) -> Option, P>> { + let mut queue = VecDeque::from(attrs); + attrs = Vec::with_capacity(queue.len()); // cfg_attr is rare, and cfg can't increase length + while let Some(attr) = queue.pop_front() { + if attr.path().is_ident("cfg") { + if let Some(cfg) = self.errors.ok(Cfg::parse_meta(&attr.meta)) { + if !self.eval_cfg(cfg) { + return None; + } + continue; + } + } else if attr.path().is_ident("cfg_attr") { + if let Some(cfg_attr) = self.errors.ok(CfgAttr::parse_meta(&attr.meta)) { + if self.eval_cfg(cfg_attr.to_cfg()) { + // push onto queue since cfg_attr(, cfg_attr(, )) is valid + for meta in cfg_attr.attrs { + queue.push_front(syn::Attribute { + pound_token: attr.pound_token, + style: attr.style, + bracket_token: attr.bracket_token, + meta, + }); + } + } + continue; + } + } + attrs.push(attr); + } + Some(Output::new(attrs)) + } + fn process_qself_and_path( + &mut self, + qself: Option, + path: syn::Path, + ) -> Option<(Output, P>, Output)> { + let qself = if let Some(syn::QSelf { + lt_token, + ty, + position, + as_token, + gt_token, + }) = qself + { + ty.process(self)?.map(|ty| { + Some(syn::QSelf { + lt_token, + ty, + position, + as_token, + gt_token, + }) + }) + } else { + Output::new(None) + }; + let syn::Path { + leading_colon, + segments, + } = path; + // path segments don't get removed + let path = segments.process(self)?.map(|segments| syn::Path { + leading_colon, + segments, + }); + Some((qself, path)) + } +} + +trait PhaseDispatch { + type Args; + type Output; + fn dispatch_collect(self, args: Self::Args) + -> Self::Output; + fn dispatch_process(self, args: Self::Args) + -> Self::Output; +} + +trait Phase: Sized + 'static { + type Output; + type CfgsValue; + fn output_new(v: T) -> Output; + fn output_map U>(v: Output, f: F) -> Output; + fn output_zip(t: Output, u: Output) -> Output<(T, U), Self>; + fn dispatch(d: D, args: D::Args) -> D::Output; +} + +struct CollectCfgsPhase; + +impl Phase for CollectCfgsPhase { + type Output = (); + type CfgsValue = (); + + fn output_new(_v: T) -> Output { + Output(()) + } + + fn output_map U>(_v: Output, _f: F) -> Output { + Output(()) + } + + fn output_zip(_t: Output, _u: Output) -> Output<(T, U), Self> { + Output(()) + } + + fn dispatch(d: D, args: D::Args) -> D::Output { + d.dispatch_collect(args) + } +} + +struct ProcessCfgsPhase; + +impl Phase for ProcessCfgsPhase { + type Output = T; + type CfgsValue = bool; + + fn output_new(v: T) -> Output { + Output(v) + } + + fn output_map U>(v: Output, f: F) -> Output { + Output(f(v.0)) + } + + fn output_zip(t: Output, u: Output) -> Output<(T, U), Self> { + Output((t.0, u.0)) + } + + fn dispatch(d: D, args: D::Args) -> D::Output { + d.dispatch_process(args) + } +} + +struct Output(P::Output); + +trait OutputZip: Sized { + type Output; + fn zip(self) -> Output; + fn call R>(self, f: F) -> Output { + self.zip().map(f) + } +} + +impl OutputZip
<P>
for () { + type Output = (); + + fn zip(self) -> Output { + Output::new(()) + } +} + +impl OutputZip
<P>
for (Output,) { + type Output = (T,); + + fn zip(self) -> Output { + self.0.map(|v| (v,)) + } +} + +macro_rules! impl_zip { + ($first_arg:ident: $first_T:ident, $($arg:ident: $T:ident),* $(,)?) => { + impl_zip!(@step [], [($first_arg: $first_T) $(($arg: $T))*], (),); + }; + ( + @impl($first_arg:tt,), + $tuple_pat:tt, + ) => {}; + ( + @impl(($first_arg:ident: $first_T:ident), + $(($arg:ident: $T:ident),)*), + $tuple_pat:tt, + ) => { + impl<$first_T, $($T,)* P: Phase> OutputZip
<P>
for (Output<$first_T, P>, $(Output<$T, P>),*) { + type Output = ($first_T, $($T),*); + fn zip(self) -> Output<($first_T, $($T),*), P> { + let (tuples, $($arg),*) = self; + $(let tuples = P::output_zip(tuples, $arg);)* + tuples.map(|$tuple_pat| ($first_arg, $($arg),*)) + } + } + }; + ( + @step [$($cur:tt)*], + [], + $tuple_pat:tt, + ) => {}; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + (), + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), $next_arg,); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], $next_arg,); + }; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + $tuple_pat:tt, + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), ($tuple_pat, $next_arg),); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], ($tuple_pat, $next_arg),); + }; +} + +impl_zip!(t0: T0, t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11); + +impl Copy for Output where P::Output: Copy {} + +impl Clone for Output +where + P::Output: Clone, +{ + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Output { + fn new(v: T) -> Self { + P::output_new(v) + } + fn map U>(self, f: F) -> Output { + P::output_map(self, f) + } +} + +trait Process: Sized { + #[must_use] + fn process(self, state: &mut State
<P>
) -> Option<Output<Self, P>>; +} + +impl<P: Phase> Process
<P>
for syn::Item { + fn process(self, _state: &mut State
<P>
) -> Option> { + // don't recurse into items + Some(Output::new(self)) + } +} + +impl Process
<P>
for Vec<syn::Attribute> { + fn process(self, state: &mut State
<P>
) -> Option> { + state.eval_cfgs(self) + } +} + +impl, P: Phase> Process
<P>
for Box<T> { + fn process(self, state: &mut State
<P>
) -> Option> { + Some(T::process(*self, state)?.map(Box::new)) + } +} + +trait ProcessVecElement { + const REMOVE_ELEMENTS: bool; +} + +impl ProcessVecElement for syn::Arm { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::Stmt { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::ForeignItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::ImplItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::Item { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::TraitItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl + ProcessVecElement, P: Phase> Process
<P>
for Vec<T> { + fn process(self, state: &mut State
<P>
) -> Option> { + let mut output = Output::new(Vec::new()); + for value in self { + if let Some(value) = value.process(state) { + output = (output, value).call(|(mut output, value)| { + output.push(value); + output + }); + } else if !T::REMOVE_ELEMENTS { + return None; + } + } + Some(output) + } +} + +trait ProcessOption { + /// if a configured-off value causes this value to be `None` instead of propagating the configuring-off + const REMOVE_VALUE: bool; +} + +impl ProcessOption for syn::Abi { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Block { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::WhereClause { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Expr { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Type { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for Box { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::AngleBracketedGenericArguments { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::ImplRestriction { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::BoundLifetimes { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (Token![=], syn::Expr) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![=], syn::Type) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![if], Box) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![else], Box) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![&], Option) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![as], Ident) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Ident, Token![:]) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Option, syn::Path, Token![for]) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::BareVariadic { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Variadic { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::LocalInit { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::Label { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::PatRest { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Box, Token![:]) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (Token![@], Box) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (syn::token::Brace, Vec) { + const REMOVE_VALUE: bool = false; +} + +impl + ProcessOption, P: Phase> Process
<P>
for Option<T> { + fn process(self, state: &mut State
<P>
) -> Option> { + if let Some(this) = self { + match this.process(state) { + Some(v) => Some(v.map(Some)), + None => { + if T::REMOVE_VALUE { + Some(Output::new(None)) + } else { + None + } + } + } + } else { + Some(Output::new(None)) + } + } +} + +trait ProcessPunctuatedElement { + const REMOVE_ELEMENTS: bool; +} + +impl + ProcessPunctuatedElement, P: Phase, Punct: Default> Process
<P>
+ for Punctuated<T, Punct> +{ + fn process(self, state: &mut State
<P>
) -> Option> { + let mut output = Output::new(Punctuated::::new()); + for pair in self.into_pairs() { + let (value, punct) = pair.into_tuple(); + if let Some(value) = value.process(state) { + output = (output, value).call(|(mut output, value)| { + output.extend([Pair::new(value, punct)]); + output + }); + } else if !T::REMOVE_ELEMENTS { + return None; + } + } + Some(output) + } +} + +impl ProcessPunctuatedElement for syn::PathSegment { + const REMOVE_ELEMENTS: bool = false; +} + +impl ProcessPunctuatedElement for syn::Type { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Expr { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Pat { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::CapturedParam { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::GenericArgument { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::GenericParam { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Lifetime { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::WherePredicate { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Variant { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FnArg { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::BareFnArg { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::TypeParamBound { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FieldValue { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Field { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FieldPat { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::UseTree { + const REMOVE_ELEMENTS: bool = true; +} + +impl, U: Process
<P>
, P: Phase> Process
<P>
for (T, U) { + fn process(self, state: &mut State
<P>
) -> Option> { + let (t, u) = self; + let t = t.process(state)?; + let u = u.process(state)?; + Some((t, u).zip()) + } +} + +impl, U: Process
<P>
, V: Process
<P>
, P: Phase> Process
<P>
for (T, U, V) { + fn process(self, state: &mut State
<P>
) -> Option> { + let (t, u, v) = self; + let t = t.process(state)?; + let u = u.process(state)?; + let v = v.process(state)?; + Some((t, u, v).zip()) + } +} + +macro_rules! process_no_op { + ($ty:ty) => { + impl Process
<P>
for $ty { + fn process(self, _state: &mut State
<P>
) -> Option> { + Some(Output::new(self)) + } + } + + impl ProcessOption for $ty { + const REMOVE_VALUE: bool = false; + } + }; +} + +process_no_op!(Token![Self]); +process_no_op!(Token![abstract]); +process_no_op!(Token![as]); +process_no_op!(Token![async]); +process_no_op!(Token![auto]); +process_no_op!(Token![await]); +process_no_op!(Token![become]); +process_no_op!(Token![box]); +process_no_op!(Token![break]); +process_no_op!(Token![const]); +process_no_op!(Token![continue]); +process_no_op!(Token![crate]); +process_no_op!(Token![default]); +process_no_op!(Token![do]); +process_no_op!(Token![dyn]); +process_no_op!(Token![else]); +process_no_op!(Token![enum]); +process_no_op!(Token![extern]); +process_no_op!(Token![final]); +process_no_op!(Token![fn]); +process_no_op!(Token![for]); +process_no_op!(Token![if]); +process_no_op!(Token![impl]); +process_no_op!(Token![in]); +process_no_op!(Token![let]); +process_no_op!(Token![loop]); +process_no_op!(Token![macro]); +process_no_op!(Token![match]); +process_no_op!(Token![mod]); +process_no_op!(Token![move]); +process_no_op!(Token![mut]); +process_no_op!(Token![override]); +process_no_op!(Token![priv]); +process_no_op!(Token![pub]); +process_no_op!(Token![raw]); +process_no_op!(Token![ref]); +process_no_op!(Token![return]); +process_no_op!(Token![self]); +process_no_op!(Token![static]); +process_no_op!(Token![struct]); +process_no_op!(Token![super]); +process_no_op!(Token![trait]); +process_no_op!(Token![try]); +process_no_op!(Token![type]); +process_no_op!(Token![typeof]); +process_no_op!(Token![union]); +process_no_op!(Token![unsafe]); +process_no_op!(Token![unsized]); +process_no_op!(Token![use]); +process_no_op!(Token![virtual]); +process_no_op!(Token![where]); +process_no_op!(Token![while]); +process_no_op!(Token![yield]); + +process_no_op!(Token![!]); +process_no_op!(Token![!=]); +process_no_op!(Token![#]); +process_no_op!(Token![$]); +process_no_op!(Token![%]); +process_no_op!(Token![%=]); +process_no_op!(Token![&]); +process_no_op!(Token![&&]); +process_no_op!(Token![&=]); +process_no_op!(Token![*]); +process_no_op!(Token![*=]); +process_no_op!(Token![+]); +process_no_op!(Token![+=]); +process_no_op!(Token![,]); +process_no_op!(Token![-]); +process_no_op!(Token![-=]); +process_no_op!(Token![->]); +process_no_op!(Token![.]); +process_no_op!(Token![..]); +process_no_op!(Token![...]); +process_no_op!(Token![..=]); +process_no_op!(Token![/]); +process_no_op!(Token![/=]); +process_no_op!(Token![:]); +process_no_op!(Token![::]); +process_no_op!(Token![;]); +process_no_op!(Token![<]); +process_no_op!(Token![<-]); +process_no_op!(Token![<<]); +process_no_op!(Token![<<=]); +process_no_op!(Token![<=]); +process_no_op!(Token![=]); +process_no_op!(Token![==]); +process_no_op!(Token![=>]); +process_no_op!(Token![>]); +process_no_op!(Token![>=]); +process_no_op!(Token![>>]); +process_no_op!(Token![>>=]); +process_no_op!(Token![?]); +process_no_op!(Token![@]); +process_no_op!(Token![^]); +process_no_op!(Token![^=]); +process_no_op!(Token![_]); +process_no_op!(Token![|]); +process_no_op!(Token![|=]); +process_no_op!(Token![||]); +process_no_op!(Token![~]); + +process_no_op!(syn::token::Brace); +process_no_op!(syn::token::Bracket); +process_no_op!(syn::token::Paren); +process_no_op!(syn::token::Group); + +process_no_op!(Ident); +process_no_op!(syn::Index); +process_no_op!(syn::Lifetime); +process_no_op!(syn::LitBool); +process_no_op!(syn::LitByte); +process_no_op!(syn::LitByteStr); +process_no_op!(syn::LitChar); +process_no_op!(syn::LitCStr); 
+process_no_op!(syn::LitFloat); +process_no_op!(syn::LitInt); +process_no_op!(syn::LitStr); +process_no_op!(proc_macro2::TokenStream); +process_no_op!(proc_macro2::Literal); + +macro_rules! process_struct { + ($ty:path { + $($field:ident,)* + }) => { + impl Process
<P>
for $ty { + fn process(self, state: &mut State
<P>
) -> Option> { + let Self { + $($field,)* + } = self; + $(let $field = $field.process(state)?;)* + Some(($($field,)*).call(|($($field,)*)| Self { + $($field,)* + })) + } + } + }; + ($ty:path { + $($fields_before:ident,)* + #[qself] + $qself:ident, + $path:ident, + $($fields_after:ident,)* + }) => { + impl Process
<P>
for $ty { + fn process(self, state: &mut State
<P>
) -> Option> { + let Self { + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + } = self; + $(let $fields_before = $fields_before.process(state)?;)* + let ($qself, $path) = state.process_qself_and_path($qself, $path)?; + $(let $fields_after = $fields_after.process(state)?;)* + Some(( + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + ).call(|( + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + )| Self { + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + })) + } + } + }; +} + +process_struct! { + syn::Abi { + extern_token, + name, + } +} + +process_struct! { + syn::AngleBracketedGenericArguments { + colon2_token, + lt_token, + args, + gt_token, + } +} + +process_struct! { + syn::Arm { + attrs, + pat, + guard, + fat_arrow_token, + body, + comma, + } +} + +process_struct! { + syn::AssocConst { + ident, + generics, + eq_token, + value, + } +} + +process_struct! { + syn::AssocType { + ident, + generics, + eq_token, + ty, + } +} + +process_struct! { + syn::BareFnArg { + attrs, + name, + ty, + } +} + +process_struct! { + syn::BareVariadic { + attrs, + name, + dots, + comma, + } +} + +process_struct! { + syn::Block { + brace_token, + stmts, + } +} + +process_struct! { + syn::BoundLifetimes { + for_token, + lt_token, + lifetimes, + gt_token, + } +} + +process_struct! { + syn::ConstParam { + attrs, + const_token, + ident, + colon_token, + ty, + eq_token, + default, + } +} + +process_struct! { + syn::Constraint { + ident, + generics, + colon_token, + bounds, + } +} + +process_struct! { + syn::DataEnum { + enum_token, + brace_token, + variants, + } +} + +process_struct! { + syn::DataStruct { + struct_token, + fields, + semi_token, + } +} + +process_struct! { + syn::DataUnion { + union_token, + fields, + } +} + +process_struct! { + syn::DeriveInput { + attrs, + vis, + ident, + generics, + data, + } +} + +process_struct! { + syn::ExprArray { + attrs, + bracket_token, + elems, + } +} + +process_struct! { + syn::ExprAssign { + attrs, + left, + eq_token, + right, + } +} + +process_struct! { + syn::ExprAsync { + attrs, + async_token, + capture, + block, + } +} + +process_struct! { + syn::ExprAwait { + attrs, + base, + dot_token, + await_token, + } +} + +process_struct! { + syn::ExprBinary { + attrs, + left, + op, + right, + } +} + +process_struct! { + syn::ExprBlock { + attrs, + label, + block, + } +} + +process_struct! { + syn::ExprBreak { + attrs, + break_token, + label, + expr, + } +} + +process_struct! { + syn::ExprCall { + attrs, + func, + paren_token, + args, + } +} + +process_struct! { + syn::ExprCast { + attrs, + expr, + as_token, + ty, + } +} + +process_struct! { + syn::ExprClosure { + attrs, + lifetimes, + constness, + movability, + asyncness, + capture, + or1_token, + inputs, + or2_token, + output, + body, + } +} + +process_struct! { + syn::ExprConst { + attrs, + const_token, + block, + } +} + +process_struct! { + syn::ExprContinue { + attrs, + continue_token, + label, + } +} + +process_struct! { + syn::ExprField { + attrs, + base, + dot_token, + member, + } +} + +process_struct! { + syn::ExprForLoop { + attrs, + label, + for_token, + pat, + in_token, + expr, + body, + } +} + +process_struct! { + syn::ExprGroup { + attrs, + group_token, + expr, + } +} + +process_struct! { + syn::ExprIf { + attrs, + if_token, + cond, + then_branch, + else_branch, + } +} + +process_struct! { + syn::ExprIndex { + attrs, + expr, + bracket_token, + index, + } +} + +process_struct! { + syn::ExprInfer { + attrs, + underscore_token, + } +} + +process_struct! 
{ + syn::ExprLet { + attrs, + let_token, + pat, + eq_token, + expr, + } +} + +process_struct! { + syn::ExprLit { + attrs, + lit, + } +} + +process_struct! { + syn::ExprLoop { + attrs, + label, + loop_token, + body, + } +} + +process_struct! { + syn::ExprMacro { + attrs, + mac, + } +} + +process_struct! { + syn::ExprMatch { + attrs, + match_token, + expr, + brace_token, + arms, + } +} + +process_struct! { + syn::ExprMethodCall { + attrs, + receiver, + dot_token, + method, + turbofish, + paren_token, + args, + } +} + +process_struct! { + syn::ExprParen { + attrs, + paren_token, + expr, + } +} + +process_struct! { + syn::ExprPath { + attrs, + #[qself] + qself, + path, + } +} + +process_struct! { + syn::ExprRange { + attrs, + start, + limits, + end, + } +} + +process_struct! { + syn::ExprRawAddr { + attrs, + and_token, + raw, + mutability, + expr, + } +} + +process_struct! { + syn::ExprReference { + attrs, + and_token, + mutability, + expr, + } +} + +process_struct! { + syn::ExprRepeat { + attrs, + bracket_token, + expr, + semi_token, + len, + } +} + +process_struct! { + syn::ExprReturn { + attrs, + return_token, + expr, + } +} + +process_struct! { + syn::ExprStruct { + attrs, + #[qself] + qself, + path, + brace_token, + fields, + dot2_token, + rest, + } +} + +process_struct! { + syn::ExprTry { + attrs, + expr, + question_token, + } +} + +process_struct! { + syn::ExprTryBlock { + attrs, + try_token, + block, + } +} + +process_struct! { + syn::ExprTuple { + attrs, + paren_token, + elems, + } +} + +process_struct! { + syn::ExprUnary { + attrs, + op, + expr, + } +} + +process_struct! { + syn::ExprUnsafe { + attrs, + unsafe_token, + block, + } +} + +process_struct! { + syn::ExprWhile { + attrs, + label, + while_token, + cond, + body, + } +} + +process_struct! { + syn::ExprYield { + attrs, + yield_token, + expr, + } +} + +process_struct! { + syn::Field { + attrs, + vis, + mutability, + ident, + colon_token, + ty, + } +} + +process_struct! { + syn::FieldPat { + attrs, + member, + colon_token, + pat, + } +} + +process_struct! { + syn::FieldValue { + attrs, + member, + colon_token, + expr, + } +} + +process_struct! { + syn::FieldsNamed { + brace_token, + named, + } +} + +process_struct! { + syn::FieldsUnnamed { + paren_token, + unnamed, + } +} + +process_struct! { + syn::ForeignItemFn { + attrs, + vis, + sig, + semi_token, + } +} + +process_struct! { + syn::ForeignItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::ForeignItemStatic { + attrs, + vis, + static_token, + mutability, + ident, + colon_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ForeignItemType { + attrs, + vis, + type_token, + ident, + generics, + semi_token, + } +} + +process_struct! { + syn::Generics { + lt_token, + params, + gt_token, + where_clause, + } +} + +process_struct! { + syn::ImplItemConst { + attrs, + vis, + defaultness, + const_token, + ident, + generics, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! { + syn::ImplItemFn { + attrs, + vis, + defaultness, + sig, + block, + } +} + +process_struct! { + syn::ImplItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::ImplItemType { + attrs, + vis, + defaultness, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ItemConst { + attrs, + vis, + const_token, + ident, + generics, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! 
{ + syn::ItemEnum { + attrs, + vis, + enum_token, + ident, + generics, + brace_token, + variants, + } +} + +process_struct! { + syn::ItemExternCrate { + attrs, + vis, + extern_token, + crate_token, + ident, + rename, + semi_token, + } +} + +process_struct! { + syn::ItemFn { + attrs, + vis, + sig, + block, + } +} + +process_struct! { + syn::ItemForeignMod { + attrs, + unsafety, + abi, + brace_token, + items, + } +} + +process_struct! { + syn::ItemImpl { + attrs, + defaultness, + unsafety, + impl_token, + generics, + trait_, + self_ty, + brace_token, + items, + } +} + +process_struct! { + syn::ItemMacro { + attrs, + ident, + mac, + semi_token, + } +} + +process_struct! { + syn::ItemMod { + attrs, + vis, + unsafety, + mod_token, + ident, + content, + semi, + } +} + +process_struct! { + syn::ItemStatic { + attrs, + vis, + static_token, + mutability, + ident, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! { + syn::ItemStruct { + attrs, + vis, + struct_token, + ident, + generics, + fields, + semi_token, + } +} + +process_struct! { + syn::ItemTrait { + attrs, + vis, + unsafety, + auto_token, + restriction, + trait_token, + ident, + generics, + colon_token, + supertraits, + brace_token, + items, + } +} + +process_struct! { + syn::ItemTraitAlias { + attrs, + vis, + trait_token, + ident, + generics, + eq_token, + bounds, + semi_token, + } +} + +process_struct! { + syn::ItemType { + attrs, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ItemUnion { + attrs, + vis, + union_token, + ident, + generics, + fields, + } +} + +process_struct! { + syn::ItemUse { + attrs, + vis, + use_token, + leading_colon, + tree, + semi_token, + } +} + +process_struct! { + syn::Label { + name, + colon_token, + } +} + +process_struct! { + syn::LifetimeParam { + attrs, + lifetime, + colon_token, + bounds, + } +} + +process_struct! { + syn::Local { + attrs, + let_token, + pat, + init, + semi_token, + } +} + +process_struct! { + syn::LocalInit { + eq_token, + expr, + diverge, + } +} + +process_struct! { + syn::Macro { + path, + bang_token, + delimiter, + tokens, + } +} + +process_struct! { + syn::MetaList { + path, + delimiter, + tokens, + } +} + +process_struct! { + syn::MetaNameValue { + path, + eq_token, + value, + } +} + +process_struct! { + syn::ParenthesizedGenericArguments { + paren_token, + inputs, + output, + } +} + +process_struct! { + syn::PatIdent { + attrs, + by_ref, + mutability, + ident, + subpat, + } +} + +process_struct! { + syn::PatOr { + attrs, + leading_vert, + cases, + } +} + +process_struct! { + syn::PatParen { + attrs, + paren_token, + pat, + } +} + +process_struct! { + syn::PatReference { + attrs, + and_token, + mutability, + pat, + } +} + +process_struct! { + syn::PatRest { + attrs, + dot2_token, + } +} + +process_struct! { + syn::PatSlice { + attrs, + bracket_token, + elems, + } +} + +process_struct! { + syn::PatStruct { + attrs, + #[qself] + qself, + path, + brace_token, + fields, + rest, + } +} + +process_struct! { + syn::PatTuple { + attrs, + paren_token, + elems, + } +} + +process_struct! { + syn::PatTupleStruct { + attrs, + #[qself] + qself, + path, + paren_token, + elems, + } +} + +process_struct! { + syn::PatType { + attrs, + pat, + colon_token, + ty, + } +} + +process_struct! { + syn::PatWild { + attrs, + underscore_token, + } +} + +process_struct! { + syn::Path { + leading_colon, + segments, + } +} + +process_struct! { + syn::PathSegment { + ident, + arguments, + } +} + +process_struct! 
{ + syn::PreciseCapture { + use_token, + lt_token, + params, + gt_token, + } +} + +process_struct! { + syn::PredicateLifetime { + lifetime, + colon_token, + bounds, + } +} + +process_struct! { + syn::PredicateType { + lifetimes, + bounded_ty, + colon_token, + bounds, + } +} + +process_struct! { + syn::Receiver { + attrs, + reference, + mutability, + self_token, + colon_token, + ty, + } +} + +process_struct! { + syn::Signature { + constness, + asyncness, + unsafety, + abi, + fn_token, + ident, + generics, + paren_token, + inputs, + variadic, + output, + } +} + +process_struct! { + syn::StmtMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::TraitBound { + paren_token, + modifier, + lifetimes, + path, + } +} + +process_struct! { + syn::TraitItemConst { + attrs, + const_token, + ident, + generics, + colon_token, + ty, + default, + semi_token, + } +} + +process_struct! { + syn::TraitItemFn { + attrs, + sig, + default, + semi_token, + } +} + +process_struct! { + syn::TraitItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::TraitItemType { + attrs, + type_token, + ident, + generics, + colon_token, + bounds, + default, + semi_token, + } +} + +process_struct! { + syn::TypeArray { + bracket_token, + elem, + semi_token, + len, + } +} + +process_struct! { + syn::TypeBareFn { + lifetimes, + unsafety, + abi, + fn_token, + paren_token, + inputs, + variadic, + output, + } +} + +process_struct! { + syn::TypeGroup { + group_token, + elem, + } +} + +process_struct! { + syn::TypeImplTrait { + impl_token, + bounds, + } +} + +process_struct! { + syn::TypeInfer { + underscore_token, + } +} + +process_struct! { + syn::TypeMacro { + mac, + } +} + +process_struct! { + syn::TypeNever { + bang_token, + } +} + +process_struct! { + syn::TypeParam { + attrs, + ident, + colon_token, + bounds, + eq_token, + default, + } +} + +process_struct! { + syn::TypeParen { + paren_token, + elem, + } +} + +process_struct! { + syn::TypePath { + #[qself] + qself, + path, + } +} + +process_struct! { + syn::TypePtr { + star_token, + const_token, + mutability, + elem, + } +} + +process_struct! { + syn::TypeReference { + and_token, + lifetime, + mutability, + elem, + } +} + +process_struct! { + syn::TypeSlice { + bracket_token, + elem, + } +} + +process_struct! { + syn::TypeTraitObject { + dyn_token, + bounds, + } +} + +process_struct! { + syn::TypeTuple { + paren_token, + elems, + } +} + +process_struct! { + syn::UseGlob { + star_token, + } +} + +process_struct! { + syn::UseGroup { + brace_token, + items, + } +} + +process_struct! { + syn::UseName { + ident, + } +} + +process_struct! { + syn::UsePath { + ident, + colon2_token, + tree, + } +} + +process_struct! { + syn::UseRename { + ident, + as_token, + rename, + } +} + +process_struct! { + syn::Variadic { + attrs, + pat, + dots, + comma, + } +} + +process_struct! { + syn::Variant { + attrs, + ident, + fields, + discriminant, + } +} + +process_struct! { + syn::VisRestricted { + pub_token, + paren_token, + in_token, + path, + } +} + +process_struct! { + syn::WhereClause { + where_token, + predicates, + } +} + +macro_rules! process_enum { + ($path:path { + $($variant:ident$(($($field:ident),* $(,)?))?,)* + }) => { + impl Process
<P>
for $path { + fn process(self, state: &mut State
<P>
) -> Option> { + match self { + $(Self::$variant$(($($field),*))? => Some(($($($field.process(state)?,)*)?).call(|($($($field,)*)?)| Self::$variant$(($($field),*))?)),)* + } + } + } + }; + ($path:path { + $($variant:ident$(($($field:ident),* $(,)?))?,)* + #[no_op] + _, + }) => { + impl Process
<P>
for $path { + fn process(self, state: &mut State
<P>
) -> Option> { + #![allow(unused_variables)] + match self { + $(Self::$variant$(($($field),*))? => Some(($($($field.process(state)?,)*)?).call(|($($($field,)*)?)| Self::$variant$(($($field),*))?)),)* + _ => Some(Output::new(self)), + } + } + } + }; +} + +process_enum! { + syn::AttrStyle { + Outer, + Inner(f0), + } +} + +process_enum! { + syn::BinOp { + Add(f0), + Sub(f0), + Mul(f0), + Div(f0), + Rem(f0), + And(f0), + Or(f0), + BitXor(f0), + BitAnd(f0), + BitOr(f0), + Shl(f0), + Shr(f0), + Eq(f0), + Lt(f0), + Le(f0), + Ne(f0), + Ge(f0), + Gt(f0), + AddAssign(f0), + SubAssign(f0), + MulAssign(f0), + DivAssign(f0), + RemAssign(f0), + BitXorAssign(f0), + BitAndAssign(f0), + BitOrAssign(f0), + ShlAssign(f0), + ShrAssign(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::CapturedParam { + Lifetime(f0), + Ident(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::Data { + Struct(f0), + Enum(f0), + Union(f0), + } +} + +process_enum! { + syn::Expr { + Array(f0), + Assign(f0), + Async(f0), + Await(f0), + Binary(f0), + Block(f0), + Break(f0), + Call(f0), + Cast(f0), + Closure(f0), + Const(f0), + Continue(f0), + Field(f0), + ForLoop(f0), + Group(f0), + If(f0), + Index(f0), + Infer(f0), + Let(f0), + Lit(f0), + Loop(f0), + Macro(f0), + Match(f0), + MethodCall(f0), + Paren(f0), + Path(f0), + Range(f0), + RawAddr(f0), + Reference(f0), + Repeat(f0), + Return(f0), + Struct(f0), + Try(f0), + TryBlock(f0), + Tuple(f0), + Unary(f0), + Unsafe(f0), + Verbatim(f0), + While(f0), + Yield(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::FieldMutability { + None, + #[no_op] + _, + } +} + +process_enum! { + syn::Fields { + Named(f0), + Unnamed(f0), + Unit, + } +} + +process_enum! { + syn::FnArg { + Receiver(f0), + Typed(f0), + } +} + +process_enum! { + syn::ForeignItem { + Fn(f0), + Static(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::GenericArgument { + Lifetime(f0), + Type(f0), + Const(f0), + AssocType(f0), + AssocConst(f0), + Constraint(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::GenericParam { + Lifetime(f0), + Type(f0), + Const(f0), + } +} + +process_enum! { + syn::ImplItem { + Const(f0), + Fn(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::ImplRestriction { + #[no_op] + _, + } +} + +process_enum! { + syn::Lit { + Str(f0), + ByteStr(f0), + CStr(f0), + Byte(f0), + Char(f0), + Int(f0), + Float(f0), + Bool(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::MacroDelimiter { + Paren(f0), + Brace(f0), + Bracket(f0), + } +} + +process_enum! { + syn::Member { + Named(f0), + Unnamed(f0), + } +} + +process_enum! { + syn::Meta { + Path(f0), + List(f0), + NameValue(f0), + } +} + +process_enum! { + syn::Pat { + Const(f0), + Ident(f0), + Lit(f0), + Macro(f0), + Or(f0), + Paren(f0), + Path(f0), + Range(f0), + Reference(f0), + Rest(f0), + Slice(f0), + Struct(f0), + Tuple(f0), + TupleStruct(f0), + Type(f0), + Verbatim(f0), + Wild(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::PathArguments { + None, + AngleBracketed(f0), + Parenthesized(f0), + } +} + +process_enum! { + syn::PointerMutability { + Const(f0), + Mut(f0), + } +} + +process_enum! { + syn::RangeLimits { + HalfOpen(f0), + Closed(f0), + } +} + +process_enum! { + syn::ReturnType { + Default, + Type(f0, f1), + } +} + +process_enum! { + syn::StaticMutability { + Mut(f0), + None, + #[no_op] + _, + } +} + +process_enum! { + syn::Stmt { + Local(f0), + Item(f0), + Expr(f0, f1), + Macro(f0), + } +} + +process_enum! 
{ + syn::TraitBoundModifier { + None, + Maybe(f0), + } +} + +process_enum! { + syn::TraitItem { + Const(f0), + Fn(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::Type { + Array(f0), + BareFn(f0), + Group(f0), + ImplTrait(f0), + Infer(f0), + Macro(f0), + Never(f0), + Paren(f0), + Path(f0), + Ptr(f0), + Reference(f0), + Slice(f0), + TraitObject(f0), + Tuple(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::TypeParamBound { + Trait(f0), + Lifetime(f0), + PreciseCapture(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::UnOp { + Deref(f0), + Not(f0), + Neg(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::UseTree { + Path(f0), + Name(f0), + Rename(f0), + Glob(f0), + Group(f0), + } +} + +process_enum! { + syn::Visibility { + Public(f0), + Restricted(f0), + Inherited, + } +} + +process_enum! { + syn::WherePredicate { + Lifetime(f0), + Type(f0), + #[no_op] + _, + } +} + +struct TopItem(syn::Item); + +impl Process
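The `process_enum!` invocations above all feed one declarative macro that implements the crate's `Process` trait for a `syn` enum by naming each variant and the fields to recurse into, with a `#[no_op] _` arm for variants that need no work. As a rough, self-contained illustration of that dispatch pattern (the `Count` trait and `count_enum!` macro here are hypothetical stand-ins, not fayalite's actual code; assumes a recent `syn` 2.x):

```rust
use syn::{CapturedParam, Ident, Lifetime};

// Hypothetical stand-in for the `Process` trait: counts identifiers in a node.
trait Count {
    fn count_idents(&self) -> usize;
}

impl Count for Ident {
    fn count_idents(&self) -> usize {
        1
    }
}

impl Count for Lifetime {
    fn count_idents(&self) -> usize {
        1
    }
}

// Same shape as `process_enum!`: one arm per listed variant with `f0`-style
// field names; unlisted variants fall through to a wildcard no-op arm, which
// plays the role of the `#[no_op] _` arm in the invocations above.
macro_rules! count_enum {
    ($path:path { $($variant:ident$(($($field:ident),*))?,)* }) => {
        impl Count for $path {
            fn count_idents(&self) -> usize {
                match self {
                    $(Self::$variant$(($($field),*))? => 0 $($(+ $field.count_idents())*)?,)*
                    #[allow(unreachable_patterns)]
                    _ => 0,
                }
            }
        }
    };
}

count_enum! {
    CapturedParam {
        Lifetime(f0),
        Ident(f0),
    }
}

fn main() {
    let lt: Lifetime = syn::parse_quote!('a);
    let param = CapturedParam::Lifetime(lt);
    assert_eq!(param.count_idents(), 1);
}
```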
for TopItem { + fn process(self, state: &mut State
) -> Option> { + match self.0 { + syn::Item::Const(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Enum(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::ExternCrate(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Fn(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::ForeignMod(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Impl(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Macro(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Mod(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Static(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Struct(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Trait(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::TraitAlias(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Type(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Union(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Use(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + _ => Some(Output::new(self)), + } + } +} + +pub(crate) fn process_cfgs(item: syn::Item, cfgs: Cfgs) -> syn::Result> { + let mut state = State:: { + cfgs, + errors: Errors::new(), + _phantom: PhantomData, + }; + let retval = TopItem(item).process(&mut state).map(|v| v.0.0); + state.errors.finish()?; + Ok(retval) +} + +pub(crate) fn collect_cfgs(item: syn::Item) -> syn::Result> { + let mut state = State:: { + cfgs: Cfgs::default(), + errors: Errors::new(), + _phantom: PhantomData, + }; + let (None | Some(Output(()))) = TopItem(item).process(&mut state); + state.errors.finish()?; + Ok(state.cfgs) +} diff --git a/crates/fayalite-proc-macros-impl/src/value_derive_common.rs b/crates/fayalite-proc-macros-impl/src/value_derive_common.rs deleted file mode 100644 index 9f495ff..0000000 --- a/crates/fayalite-proc-macros-impl/src/value_derive_common.rs +++ /dev/null @@ -1,761 +0,0 @@ -// SPDX-License-Identifier: LGPL-3.0-or-later -// See Notices.txt for copyright information -use crate::{fold::impl_fold, kw, Errors, HdlAttr}; -use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote, quote_spanned, ToTokens}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use syn::{ - fold::{fold_generics, Fold}, - parse::{Parse, ParseStream}, - parse_quote, parse_quote_spanned, - punctuated::Punctuated, - spanned::Spanned, - token::{Brace, Paren, Where}, - Block, ConstParam, Expr, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics, - Ident, Index, ItemImpl, Lifetime, LifetimeParam, Member, Path, Token, Type, TypeParam, - TypePath, Visibility, WhereClause, WherePredicate, -}; - -#[derive(Clone, Debug)] -pub(crate) struct Bounds(pub(crate) Punctuated); - -impl_fold! 
{ - struct Bounds<>(Punctuated); -} - -impl Parse for Bounds { - fn parse(input: ParseStream) -> syn::Result { - Ok(Bounds(Punctuated::parse_terminated(input)?)) - } -} - -impl From> for Bounds { - fn from(value: Option) -> Self { - Self(value.map_or_else(Punctuated::new, |v| v.predicates)) - } -} - -impl ToTokens for Bounds { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.0.to_tokens(tokens) - } -} - -#[derive(Debug, Clone)] -pub(crate) struct ParsedField { - pub(crate) options: HdlAttr, - pub(crate) vis: Visibility, - pub(crate) name: Member, - pub(crate) ty: Type, -} - -impl ParsedField { - pub(crate) fn var_name(&self) -> Ident { - format_ident!("__v_{}", self.name) - } -} - -pub(crate) fn get_field_name( - index: usize, - name: Option, - ty_span: impl FnOnce() -> Span, -) -> Member { - match name { - Some(name) => Member::Named(name), - None => Member::Unnamed(Index { - index: index as _, - span: ty_span(), - }), - } -} - -pub(crate) fn get_field_names(fields: &Fields) -> impl Iterator + '_ { - fields - .iter() - .enumerate() - .map(|(index, field)| get_field_name(index, field.ident.clone(), || field.ty.span())) -} - -impl ParsedField { - pub(crate) fn parse_fields( - errors: &mut Errors, - fields: &mut Fields, - in_enum: bool, - ) -> (FieldsKind, Vec>) { - let mut unit_fields = Punctuated::new(); - let (fields_kind, fields) = match fields { - Fields::Named(fields) => (FieldsKind::Named(fields.brace_token), &mut fields.named), - Fields::Unnamed(fields) => { - (FieldsKind::Unnamed(fields.paren_token), &mut fields.unnamed) - } - Fields::Unit => (FieldsKind::Unit, &mut unit_fields), - }; - let fields = fields - .iter_mut() - .enumerate() - .map(|(index, field)| { - let options = errors - .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut field.attrs)) - .unwrap_or_default(); - let name = get_field_name(index, field.ident.clone(), || field.ty.span()); - if in_enum && !matches!(field.vis, Visibility::Inherited) { - errors.error(&field.vis, "field visibility not allowed in enums"); - } - ParsedField { - options, - vis: field.vis.clone(), - name, - ty: field.ty.clone(), - } - }) - .collect(); - (fields_kind, fields) - } -} - -#[derive(Copy, Clone, Debug)] -pub(crate) enum FieldsKind { - Unit, - Named(Brace), - Unnamed(Paren), -} - -impl FieldsKind { - pub(crate) fn into_fields_named( - brace_token: Brace, - fields: impl IntoIterator, - ) -> Fields { - Fields::Named(FieldsNamed { - brace_token, - named: Punctuated::from_iter(fields), - }) - } - pub(crate) fn into_fields_unnamed( - paren_token: Paren, - fields: impl IntoIterator, - ) -> Fields { - Fields::Unnamed(FieldsUnnamed { - paren_token, - unnamed: Punctuated::from_iter(fields), - }) - } - pub(crate) fn into_fields(self, fields: impl IntoIterator) -> Fields { - match self { - FieldsKind::Unit => { - let mut fields = fields.into_iter().peekable(); - let Some(first_field) = fields.peek() else { - return Fields::Unit; - }; - if first_field.ident.is_some() { - Self::into_fields_named(Default::default(), fields) - } else { - Self::into_fields_unnamed(Default::default(), fields) - } - } - FieldsKind::Named(brace_token) => Self::into_fields_named(brace_token, fields), - FieldsKind::Unnamed(paren_token) => Self::into_fields_unnamed(paren_token, fields), - } - } -} - -pub(crate) fn get_target(target: &Option<(kw::target, Paren, Path)>, item_ident: &Ident) -> Path { - match target { - Some((_, _, target)) => target.clone(), - None => item_ident.clone().into(), - } -} - -pub(crate) struct ValueDeriveGenerics { - pub(crate) generics: 
Generics, - pub(crate) static_type_generics: Generics, -} - -impl ValueDeriveGenerics { - pub(crate) fn get(mut generics: Generics, where_: &Option<(Where, Paren, Bounds)>) -> Self { - let mut static_type_generics = generics.clone(); - if let Some((_, _, bounds)) = where_ { - generics - .make_where_clause() - .predicates - .extend(bounds.0.iter().cloned()); - static_type_generics - .where_clause - .clone_from(&generics.where_clause); - } else { - let type_params = Vec::from_iter(generics.type_params().map(|v| v.ident.clone())); - let predicates = &mut generics.make_where_clause().predicates; - let static_type_predicates = &mut static_type_generics.make_where_clause().predicates; - for type_param in type_params { - predicates.push(parse_quote! {#type_param: ::fayalite::ty::Value>}); - static_type_predicates - .push(parse_quote! {#type_param: ::fayalite::ty::StaticValue}); - } - } - Self { - generics, - static_type_generics, - } - } -} - -pub(crate) fn derive_clone_hash_eq_partialeq_for_struct( - the_struct_ident: &Ident, - generics: &Generics, - field_names: &[Name], -) -> TokenStream { - let (impl_generics, type_generics, where_clause) = generics.split_for_impl(); - quote! { - #[automatically_derived] - impl #impl_generics ::fayalite::__std::clone::Clone for #the_struct_ident #type_generics - #where_clause - { - fn clone(&self) -> Self { - Self { - #(#field_names: ::fayalite::__std::clone::Clone::clone(&self.#field_names),)* - } - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::__std::hash::Hash for #the_struct_ident #type_generics - #where_clause - { - #[allow(unused_variables)] - fn hash<__H: ::fayalite::__std::hash::Hasher>(&self, hasher: &mut __H) { - #(::fayalite::__std::hash::Hash::hash(&self.#field_names, hasher);)* - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::__std::cmp::Eq for #the_struct_ident #type_generics - #where_clause - { - } - - #[automatically_derived] - impl #impl_generics ::fayalite::__std::cmp::PartialEq for #the_struct_ident #type_generics - #where_clause - { - #[allow(unused_variables)] - #[allow(clippy::nonminimal_bool)] - fn eq(&self, other: &Self) -> ::fayalite::__std::primitive::bool { - true #(&& ::fayalite::__std::cmp::PartialEq::eq( - &self.#field_names, - &other.#field_names, - ))* - } - } - } -} - -pub(crate) fn append_field(fields: &mut Fields, mut field: Field) -> Member { - let ident = field.ident.clone().expect("ident is supplied"); - match fields { - Fields::Named(FieldsNamed { named, .. }) => { - named.push(field); - Member::Named(ident) - } - Fields::Unnamed(FieldsUnnamed { unnamed, .. 
}) => { - field.ident = None; - field.colon_token = None; - let index = unnamed.len(); - unnamed.push(field); - Member::Unnamed(index.into()) - } - Fields::Unit => { - *fields = Fields::Named(FieldsNamed { - brace_token: Default::default(), - named: Punctuated::from_iter([field]), - }); - Member::Named(ident) - } - } -} - -#[derive(Clone, Debug)] -pub(crate) struct BuilderField { - pub(crate) names: HashSet, - pub(crate) mapped_value: Expr, - pub(crate) mapped_type: Type, - pub(crate) where_clause: Option, - pub(crate) builder_field_name: Ident, - pub(crate) type_param: Ident, -} - -#[derive(Debug)] -pub(crate) struct Builder { - struct_name: Ident, - vis: Visibility, - fields: BTreeMap, -} - -#[derive(Debug)] -pub(crate) struct BuilderWithFields { - struct_name: Ident, - vis: Visibility, - phantom_type_param: Ident, - phantom_type_field: Ident, - fields: Vec<(String, BuilderField)>, -} - -impl Builder { - pub(crate) fn new(struct_name: Ident, vis: Visibility) -> Self { - Self { - struct_name, - vis, - fields: BTreeMap::new(), - } - } - pub(crate) fn insert_field( - &mut self, - name: Member, - map_value: impl FnOnce(&Ident) -> Expr, - map_type: impl FnOnce(&Ident) -> Type, - where_clause: impl FnOnce(&Ident) -> Option, - ) { - self.fields - .entry(name.to_token_stream().to_string()) - .or_insert_with_key(|name| { - let builder_field_name = - format_ident!("field_{}", name, span = self.struct_name.span()); - let type_param = format_ident!("__T_{}", name, span = self.struct_name.span()); - BuilderField { - names: HashSet::new(), - mapped_value: map_value(&builder_field_name), - mapped_type: map_type(&type_param), - where_clause: where_clause(&type_param), - builder_field_name, - type_param, - } - }) - .names - .insert(name); - } - pub(crate) fn finish_filling_in_fields(self) -> BuilderWithFields { - let Self { - struct_name, - vis, - fields, - } = self; - let fields = Vec::from_iter(fields); - BuilderWithFields { - phantom_type_param: Ident::new("__Phantom", struct_name.span()), - phantom_type_field: Ident::new("__phantom", struct_name.span()), - struct_name, - vis, - fields, - } - } -} - -impl BuilderWithFields { - pub(crate) fn get_field(&self, name: &Member) -> Option<(usize, &BuilderField)> { - let index = self - .fields - .binary_search_by_key(&&*name.to_token_stream().to_string(), |v| &*v.0) - .ok()?; - Some((index, &self.fields[index].1)) - } - pub(crate) fn ty( - &self, - specified_fields: impl IntoIterator, - phantom_type: Option<&Type>, - other_fields_are_any_type: bool, - ) -> TypePath { - let Self { - struct_name, - vis: _, - phantom_type_param, - phantom_type_field: _, - fields, - } = self; - let span = struct_name.span(); - let mut arguments = - Vec::from_iter(fields.iter().map(|(_, BuilderField { type_param, .. })| { - if other_fields_are_any_type { - parse_quote_spanned! {span=> - #type_param - } - } else { - parse_quote_spanned! {span=> - () - } - } - })); - for (name, ty) in specified_fields { - let Some((index, _)) = self.get_field(&name) else { - panic!("field not found: {}", name.to_token_stream()); - }; - arguments[index] = ty; - } - let phantom_type_param = phantom_type.is_none().then_some(phantom_type_param); - parse_quote_spanned! 
{span=> - #struct_name::<#phantom_type_param #phantom_type #(, #arguments)*> - } - } - pub(crate) fn append_generics( - &self, - specified_fields: impl IntoIterator, - has_phantom_type_param: bool, - other_fields_are_any_type: bool, - generics: &mut Generics, - ) { - let Self { - struct_name: _, - vis: _, - phantom_type_param, - phantom_type_field: _, - fields, - } = self; - if has_phantom_type_param { - generics.params.push(GenericParam::from(TypeParam::from( - phantom_type_param.clone(), - ))); - } - if !other_fields_are_any_type { - return; - } - let mut type_params = Vec::from_iter( - fields - .iter() - .map(|(_, BuilderField { type_param, .. })| Some(type_param)), - ); - for name in specified_fields { - let Some((index, _)) = self.get_field(&name) else { - panic!("field not found: {}", name.to_token_stream()); - }; - type_params[index] = None; - } - generics.params.extend( - type_params - .into_iter() - .filter_map(|v| Some(GenericParam::from(TypeParam::from(v?.clone())))), - ); - } - pub(crate) fn make_build_method( - &self, - build_fn_name: &Ident, - specified_fields: impl IntoIterator, - generics: &Generics, - phantom_type: &Type, - return_ty: &Type, - mut body: Block, - ) -> ItemImpl { - let Self { - struct_name, - vis, - phantom_type_param: _, - phantom_type_field, - fields, - } = self; - let span = struct_name.span(); - let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name)); - let (impl_generics, _type_generics, where_clause) = generics.split_for_impl(); - let empty_arg = parse_quote_spanned! {span=> - () - }; - let mut ty_arguments = vec![empty_arg; fields.len()]; - let empty_field_pat = quote_spanned! {span=> - : _ - }; - let mut field_pats = vec![Some(empty_field_pat); fields.len()]; - for (name, ty) in specified_fields { - let Some((index, _)) = self.get_field(&name) else { - panic!("field not found: {}", name.to_token_stream()); - }; - ty_arguments[index] = ty; - field_pats[index] = None; - } - body.stmts.insert( - 0, - parse_quote_spanned! {span=> - let Self { - #(#field_names #field_pats,)* - #phantom_type_field: _, - } = self; - }, - ); - parse_quote_spanned! {span=> - #[automatically_derived] - impl #impl_generics #struct_name<#phantom_type #(, #ty_arguments)*> - #where_clause - { - #[allow(non_snake_case, dead_code)] - #vis fn #build_fn_name(self) -> #return_ty - #body - } - } - } -} - -impl ToTokens for BuilderWithFields { - fn to_tokens(&self, tokens: &mut TokenStream) { - let Self { - struct_name, - vis, - phantom_type_param, - phantom_type_field, - fields, - } = self; - let span = struct_name.span(); - let mut any_generics = Generics::default(); - self.append_generics([], true, true, &mut any_generics); - let empty_ty = self.ty([], None, false); - let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name)); - let field_type_params = Vec::from_iter(fields.iter().map(|v| &v.1.type_param)); - quote_spanned! 
{span=> - #[allow(non_camel_case_types)] - #[non_exhaustive] - #vis struct #struct_name #any_generics { - #(#field_names: #field_type_params,)* - #phantom_type_field: ::fayalite::__std::marker::PhantomData<#phantom_type_param>, - } - - #[automatically_derived] - impl<#phantom_type_param> #empty_ty { - fn new() -> Self { - Self { - #(#field_names: (),)* - #phantom_type_field: ::fayalite::__std::marker::PhantomData, - } - } - } - } - .to_tokens(tokens); - for (field_index, (_, field)) in self.fields.iter().enumerate() { - let initial_fields = &fields[..field_index]; - let final_fields = &fields[field_index..][1..]; - let initial_type_params = - Vec::from_iter(initial_fields.iter().map(|v| &v.1.type_param)); - let final_type_params = Vec::from_iter(final_fields.iter().map(|v| &v.1.type_param)); - let initial_field_names = - Vec::from_iter(initial_fields.iter().map(|v| &v.1.builder_field_name)); - let final_field_names = - Vec::from_iter(final_fields.iter().map(|v| &v.1.builder_field_name)); - let BuilderField { - names: _, - mapped_value, - mapped_type, - where_clause, - builder_field_name, - type_param, - } = field; - quote_spanned! {span=> - #[automatically_derived] - #[allow(non_camel_case_types, dead_code)] - impl<#phantom_type_param #(, #initial_type_params)* #(, #final_type_params)*> - #struct_name< - #phantom_type_param, - #(#initial_type_params,)* - (), #(#final_type_params,)* - > - { - #vis fn #builder_field_name<#type_param>( - self, - #builder_field_name: #type_param, - ) -> #struct_name< - #phantom_type_param, - #(#initial_type_params,)* - #mapped_type, - #(#final_type_params,)* - > - #where_clause - { - let Self { - #(#initial_field_names,)* - #builder_field_name: (), - #(#final_field_names,)* - #phantom_type_field: _, - } = self; - let #builder_field_name = #mapped_value; - #struct_name { - #(#field_names,)* - #phantom_type_field: ::fayalite::__std::marker::PhantomData, - } - } - } - } - .to_tokens(tokens); - } - } -} - -pub(crate) struct MapIdents { - pub(crate) map: HashMap, -} - -impl Fold for &MapIdents { - fn fold_ident(&mut self, i: Ident) -> Ident { - self.map.get(&i).cloned().unwrap_or(i) - } -} - -pub(crate) struct DupGenerics { - pub(crate) combined: Generics, - pub(crate) maps: M, -} - -pub(crate) fn merge_punctuated( - target: &mut Punctuated, - source: Punctuated, - make_punct: impl FnOnce() -> P, -) { - if source.is_empty() { - return; - } - if target.is_empty() { - *target = source; - return; - } - if !target.trailing_punct() { - target.push_punct(make_punct()); - } - target.extend(source.into_pairs()); -} - -pub(crate) fn merge_generics(target: &mut Generics, source: Generics) { - let Generics { - lt_token, - params, - gt_token, - where_clause, - } = source; - let span = lt_token.map(|v| v.span).unwrap_or_else(|| params.span()); - target.lt_token = target.lt_token.or(lt_token); - merge_punctuated(&mut target.params, params, || Token![,](span)); - target.gt_token = target.gt_token.or(gt_token); - if let Some(where_clause) = where_clause { - if let Some(target_where_clause) = &mut target.where_clause { - let WhereClause { - where_token, - predicates, - } = where_clause; - let span = where_token.span; - target_where_clause.where_token = where_token; - merge_punctuated(&mut target_where_clause.predicates, predicates, || { - Token![,](span) - }); - } else { - target.where_clause = Some(where_clause); - } - } -} - -impl DupGenerics> { - pub(crate) fn new_dyn(generics: &Generics, count: usize) -> Self { - let mut maps = Vec::from_iter((0..count).map(|_| MapIdents { - 
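The deleted `Builder`/`BuilderWithFields` machinery above generates a typestate builder: every field gets its own type parameter, each setter moves that parameter from `()` to the supplied value's type, and the build method is only implemented for the fully-filled combination. A hand-written sketch of the same shape, with hypothetical names and without the `__Phantom` parameter and `ToExpr` mapping the generated builder also carries:

```rust
// Two-field example; the generated builder has one slot per field.
struct MyBuilder<A, B> {
    field_a: A,
    field_b: B,
}

impl MyBuilder<(), ()> {
    fn new() -> Self {
        MyBuilder {
            field_a: (),
            field_b: (),
        }
    }
}

// Each setter is only available while its own slot is still `()`, and filling
// one slot leaves the others untouched, so fields can be set in any order.
impl<B> MyBuilder<(), B> {
    fn field_a<A>(self, field_a: A) -> MyBuilder<A, B> {
        MyBuilder {
            field_a,
            field_b: self.field_b,
        }
    }
}

impl<A> MyBuilder<A, ()> {
    fn field_b<B>(self, field_b: B) -> MyBuilder<A, B> {
        MyBuilder {
            field_a: self.field_a,
            field_b,
        }
    }
}

// `build` exists only for the fully-filled builder (like `make_build_method`,
// which impls the method for one specific combination of type arguments), so
// a forgotten field is a compile error rather than a runtime panic.
impl MyBuilder<u32, String> {
    fn build(self) -> (u32, String) {
        (self.field_a, self.field_b)
    }
}

fn main() {
    let (a, b) = MyBuilder::new()
        .field_b("hi".to_string())
        .field_a(7u32)
        .build();
    assert_eq!(a, 7);
    assert_eq!(b, "hi");
}
```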
map: HashMap::new(), - })); - for param in &generics.params { - let (GenericParam::Lifetime(LifetimeParam { - lifetime: Lifetime { ident, .. }, - .. - }) - | GenericParam::Type(TypeParam { ident, .. }) - | GenericParam::Const(ConstParam { ident, .. })) = param; - for (i, map_idents) in maps.iter_mut().enumerate() { - map_idents - .map - .insert(ident.clone(), format_ident!("__{}_{}", ident, i)); - } - } - let mut combined = Generics::default(); - for map_idents in maps.iter() { - merge_generics( - &mut combined, - fold_generics(&mut { map_idents }, generics.clone()), - ); - } - Self { combined, maps } - } -} - -impl DupGenerics<[MapIdents; COUNT]> { - pub(crate) fn new(generics: &Generics) -> Self { - let DupGenerics { combined, maps } = DupGenerics::new_dyn(generics, COUNT); - Self { - combined, - maps: maps.try_into().ok().unwrap(), - } - } -} - -pub(crate) fn add_where_predicate( - target: &mut Generics, - span: Span, - where_predicate: WherePredicate, -) { - let WhereClause { - where_token: _, - predicates, - } = target.where_clause.get_or_insert_with(|| WhereClause { - where_token: Token![where](span), - predicates: Punctuated::new(), - }); - if !predicates.empty_or_trailing() { - predicates.push_punct(Token![,](span)); - } - predicates.push_value(where_predicate); -} - -pub(crate) fn make_connect_impl( - connect_inexact: Option<(crate::kw::connect_inexact,)>, - generics: &Generics, - ty_ident: &Ident, - field_types: impl IntoIterator, -) -> TokenStream { - let span = ty_ident.span(); - let impl_generics; - let combined_generics; - let where_clause; - let lhs_generics; - let lhs_type_generics; - let rhs_generics; - let rhs_type_generics; - if connect_inexact.is_some() { - let DupGenerics { - mut combined, - maps: [lhs_map, rhs_map], - } = DupGenerics::new(generics); - for field_type in field_types { - let lhs_type = (&lhs_map).fold_type(field_type.clone()); - let rhs_type = (&rhs_map).fold_type(field_type); - add_where_predicate( - &mut combined, - span, - parse_quote_spanned! {span=> - #lhs_type: ::fayalite::ty::Connect<#rhs_type> - }, - ); - } - combined_generics = combined; - (impl_generics, _, where_clause) = combined_generics.split_for_impl(); - lhs_generics = (&lhs_map).fold_generics(generics.clone()); - (_, lhs_type_generics, _) = lhs_generics.split_for_impl(); - rhs_generics = (&rhs_map).fold_generics(generics.clone()); - (_, rhs_type_generics, _) = rhs_generics.split_for_impl(); - } else { - let mut generics = generics.clone(); - for field_type in field_types { - add_where_predicate( - &mut generics, - span, - parse_quote_spanned! {span=> - #field_type: ::fayalite::ty::Connect<#field_type> - }, - ); - } - combined_generics = generics; - (impl_generics, lhs_type_generics, where_clause) = combined_generics.split_for_impl(); - rhs_type_generics = lhs_type_generics.clone(); - } - quote_spanned! 
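`DupGenerics` (used by `make_connect_impl` for `connect_inexact`) relies on renaming every generic parameter, so one `impl` can mention two independently parameterized copies of the same type, e.g. relating `Foo<__T_0>` to `Foo<__T_1>` in a single `Connect` impl. A simplified standalone version of that renaming step (hypothetical helper names; assumes `syn` 2.x with the `fold` feature plus `quote`):

```rust
use std::collections::HashMap;

use quote::format_ident;
use syn::fold::{fold_generics, Fold};

// Renames every generic-parameter ident it was given; everything else is
// folded unchanged.
struct RenameIdents {
    map: HashMap<syn::Ident, syn::Ident>,
}

impl Fold for RenameIdents {
    fn fold_ident(&mut self, i: syn::Ident) -> syn::Ident {
        self.map.get(&i).cloned().unwrap_or(i)
    }
}

// Produce a copy of `generics` whose parameters are renamed to
// `__{ident}_{suffix}`, so two copies can coexist in one impl.
fn dup_generics(generics: &syn::Generics, suffix: usize) -> syn::Generics {
    let mut map = HashMap::new();
    for param in &generics.params {
        let ident = match param {
            syn::GenericParam::Lifetime(p) => &p.lifetime.ident,
            syn::GenericParam::Type(p) => &p.ident,
            syn::GenericParam::Const(p) => &p.ident,
        };
        map.insert(ident.clone(), format_ident!("__{}_{}", ident, suffix));
    }
    fold_generics(&mut RenameIdents { map }, generics.clone())
}

fn main() {
    let generics: syn::Generics = syn::parse_quote! { <'a, T, const N: usize> };
    let lhs = dup_generics(&generics, 0);
    let rhs = dup_generics(&generics, 1);
    // e.g. `< '__a_0 , __T_0 , const __N_0 : usize >` and the `_1` copies.
    println!("{}", quote::quote! { #lhs });
    println!("{}", quote::quote! { #rhs });
}
```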
{span=> - #[automatically_derived] - #[allow(non_camel_case_types)] - impl #impl_generics ::fayalite::ty::Connect<#ty_ident #rhs_type_generics> - for #ty_ident #lhs_type_generics - #where_clause - { - } - } -} diff --git a/crates/fayalite-proc-macros-impl/src/value_derive_enum.rs b/crates/fayalite-proc-macros-impl/src/value_derive_enum.rs deleted file mode 100644 index 4c6d66b..0000000 --- a/crates/fayalite-proc-macros-impl/src/value_derive_enum.rs +++ /dev/null @@ -1,975 +0,0 @@ -// SPDX-License-Identifier: LGPL-3.0-or-later -// See Notices.txt for copyright information -use crate::{ - value_derive_common::{ - append_field, derive_clone_hash_eq_partialeq_for_struct, get_field_names, get_target, - make_connect_impl, Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics, - }, - value_derive_struct::{self, ParsedStruct, ParsedStructNames, StructOptions}, - Errors, HdlAttr, -}; -use proc_macro2::TokenStream; -use quote::{format_ident, quote, quote_spanned, ToTokens}; -use syn::{ - parse_quote, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::Brace, - Field, FieldMutability, Fields, FieldsNamed, Generics, Ident, Index, ItemEnum, ItemStruct, - Member, Path, Token, Type, Variant, Visibility, -}; - -crate::options! { - #[options = EnumOptions] - enum EnumOption { - OutlineGenerated(outline_generated), - ConnectInexact(connect_inexact), - Bounds(where_, Bounds), - Target(target, Path), - } -} - -crate::options! { - #[options = VariantOptions] - enum VariantOption {} -} - -crate::options! { - #[options = FieldOptions] - enum FieldOption {} -} - -enum VariantValue { - None, - Direct { - value_type: Type, - }, - Struct { - value_struct: ItemStruct, - parsed_struct: ParsedStruct, - }, -} - -impl VariantValue { - fn is_none(&self) -> bool { - matches!(self, Self::None) - } - fn value_ty(&self) -> Option { - match self { - VariantValue::None => None, - VariantValue::Direct { value_type } => Some(value_type.clone()), - VariantValue::Struct { value_struct, .. } => { - let (_, type_generics, _) = value_struct.generics.split_for_impl(); - let ident = &value_struct.ident; - Some(parse_quote! 
{ #ident #type_generics }) - } - } - } -} - -struct ParsedVariant { - options: HdlAttr, - ident: Ident, - fields_kind: FieldsKind, - fields: Vec>, - value: VariantValue, -} - -impl ParsedVariant { - fn parse( - errors: &mut Errors, - variant: Variant, - enum_options: &EnumOptions, - enum_vis: &Visibility, - enum_ident: &Ident, - enum_generics: &Generics, - ) -> Self { - let target = get_target(&enum_options.target, enum_ident); - let Variant { - mut attrs, - ident, - fields, - discriminant, - } = variant; - if let Some((eq, _)) = discriminant { - errors.error(eq, "#[derive(Value)]: discriminants not allowed"); - } - let variant_options = errors - .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs)) - .unwrap_or_default(); - let (fields_kind, parsed_fields) = - ParsedField::parse_fields(errors, &mut fields.clone(), true); - let value = match (&fields_kind, &*parsed_fields) { - (FieldsKind::Unit, _) => VariantValue::None, - ( - FieldsKind::Unnamed(_), - [ParsedField { - options, - vis: _, - name: Member::Unnamed(Index { index: 0, span: _ }), - ty, - }], - ) => { - let FieldOptions {} = options.body; - VariantValue::Direct { - value_type: ty.clone(), - } - } - _ => { - let variant_value_struct_ident = - format_ident!("__{}__{}", enum_ident, ident, span = ident.span()); - let variant_type_struct_ident = - format_ident!("__{}__{}__Type", enum_ident, ident, span = ident.span()); - let mut value_struct_fields = fields.clone(); - let (_, type_generics, _) = enum_generics.split_for_impl(); - append_field( - &mut value_struct_fields, - Field { - attrs: vec![HdlAttr::from(value_derive_struct::FieldOptions { - flip: None, - skip: Some(Default::default()), - }) - .to_attr()], - vis: enum_vis.clone(), - mutability: FieldMutability::None, - ident: Some(Ident::new("__phantom", ident.span())), - colon_token: None, - ty: parse_quote_spanned! {ident.span()=> - ::fayalite::__std::marker::PhantomData<#target #type_generics> - }, - }, - ); - let (value_struct_fields_kind, value_struct_parsed_fields) = - ParsedField::parse_fields(errors, &mut value_struct_fields, false); - let value_struct = ItemStruct { - attrs: vec![parse_quote! 
{ #[allow(non_camel_case_types)] }], - vis: enum_vis.clone(), - struct_token: Token![struct](ident.span()), - ident: variant_value_struct_ident.clone(), - generics: enum_generics.clone(), - fields: value_struct_fields, - semi_token: None, - }; - VariantValue::Struct { - value_struct, - parsed_struct: ParsedStruct { - options: StructOptions { - outline_generated: None, - static_: Some(Default::default()), - where_: Some(( - Default::default(), - Default::default(), - ValueDeriveGenerics::get( - enum_generics.clone(), - &enum_options.where_, - ) - .static_type_generics - .where_clause - .into(), - )), - target: None, - connect_inexact: enum_options.connect_inexact, - } - .into(), - vis: enum_vis.clone(), - struct_token: Default::default(), - generics: enum_generics.clone(), - fields_kind: value_struct_fields_kind, - fields: value_struct_parsed_fields, - semi_token: None, // it will fill in the semicolon if needed - skip_check_fields: true, - names: ParsedStructNames { - ident: variant_value_struct_ident.clone(), - type_struct_debug_ident: Some(format!("{enum_ident}::{ident}::Type")), - type_struct_ident: variant_type_struct_ident, - match_variant_ident: None, - builder_struct_ident: None, - mask_match_variant_ident: None, - mask_type_ident: None, - mask_type_debug_ident: Some(format!( - "AsMask<{enum_ident}::{ident}>::Type" - )), - mask_value_ident: None, - mask_value_debug_ident: Some(format!("AsMask<{enum_ident}::{ident}>")), - mask_builder_struct_ident: None, - }, - }, - } - } - }; - ParsedVariant { - options: variant_options, - ident, - fields_kind, - fields: parsed_fields, - value, - } - } -} - -struct ParsedEnum { - options: HdlAttr, - vis: Visibility, - enum_token: Token![enum], - ident: Ident, - generics: Generics, - brace_token: Brace, - variants: Vec, -} - -impl ParsedEnum { - fn parse(item: ItemEnum) -> syn::Result { - let ItemEnum { - mut attrs, - vis, - enum_token, - ident, - generics, - brace_token, - variants, - } = item; - let mut errors = Errors::new(); - let enum_options = errors - .unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs)) - .unwrap_or_default(); - let variants = variants - .into_iter() - .map(|variant| { - ParsedVariant::parse( - &mut errors, - variant, - &enum_options.body, - &vis, - &ident, - &generics, - ) - }) - .collect(); - errors.finish()?; - Ok(ParsedEnum { - options: enum_options, - vis, - enum_token, - ident, - generics, - brace_token, - variants, - }) - } -} - -impl ToTokens for ParsedEnum { - fn to_tokens(&self, tokens: &mut TokenStream) { - let Self { - options, - vis, - enum_token, - ident: enum_ident, - generics: enum_generics, - brace_token, - variants, - } = self; - let EnumOptions { - outline_generated: _, - connect_inexact, - where_, - target, - } = &options.body; - let target = get_target(target, enum_ident); - let ValueDeriveGenerics { - generics: _, - static_type_generics, - } = ValueDeriveGenerics::get(enum_generics.clone(), where_); - let (static_type_impl_generics, static_type_type_generics, static_type_where_clause) = - static_type_generics.split_for_impl(); - let type_struct_ident = format_ident!("__{}__Type", enum_ident); - let mut field_checks = vec![]; - let mut make_type_struct_variant_type = |variant: &ParsedVariant| { - let VariantOptions {} = variant.options.body; - let (value_struct, parsed_struct) = match &variant.value { - VariantValue::None => { - return None; - } - VariantValue::Direct { value_type } => { - field_checks.push(quote_spanned! 
{value_type.span()=> - __check_field::<#value_type>(); - }); - return Some(parse_quote! { <#value_type as ::fayalite::expr::ToExpr>::Type }); - } - VariantValue::Struct { - value_struct, - parsed_struct, - } => (value_struct, parsed_struct), - }; - value_struct.to_tokens(tokens); - parsed_struct.to_tokens(tokens); - let mut field_names = Vec::from_iter(get_field_names(&value_struct.fields)); - derive_clone_hash_eq_partialeq_for_struct( - &value_struct.ident, - &static_type_generics, - &field_names, - ) - .to_tokens(tokens); - field_names = Vec::from_iter( - field_names - .into_iter() - .zip(parsed_struct.fields.iter()) - .filter_map(|(member, field)| { - field.options.body.skip.is_none().then_some(member) - }), - ); - let field_name_strs = - Vec::from_iter(field_names.iter().map(|v| v.to_token_stream().to_string())); - let debug_ident = format!("{enum_ident}::{}", variant.ident); - let debug_body = match variant.fields_kind { - FieldsKind::Unit => quote! { - f.debug_struct(#debug_ident).finish() - }, - FieldsKind::Named(_) => quote! { - f.debug_struct(#debug_ident) - #(.field(#field_name_strs, &self.#field_names))* - .finish() - }, - FieldsKind::Unnamed(_) => quote! { - f.debug_tuple(#debug_ident)#(.field(&self.#field_names))*.finish() - }, - }; - let value_struct_ident = &value_struct.ident; - quote! { - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::__std::fmt::Debug - for #value_struct_ident #static_type_type_generics - #static_type_where_clause - { - fn fmt( - &self, - f: &mut ::fayalite::__std::fmt::Formatter<'_>, - ) -> ::fayalite::__std::fmt::Result { - #debug_body - } - } - } - .to_tokens(tokens); - Some(parse_quote! { - < - #value_struct_ident #static_type_type_generics - as ::fayalite::expr::ToExpr - >::Type - }) - }; - let type_struct_variants = Punctuated::from_iter(variants.iter().filter_map(|variant| { - let VariantOptions {} = variant.options.body; - Some(Field { - attrs: vec![], - vis: vis.clone(), - mutability: FieldMutability::None, - ident: Some(variant.ident.clone()), - colon_token: None, // it will fill in the colon if needed - ty: make_type_struct_variant_type(variant)?, - }) - })); - let type_struct = ItemStruct { - attrs: vec![ - parse_quote! {#[allow(non_camel_case_types)]}, - parse_quote! {#[allow(non_snake_case)]}, - ], - vis: vis.clone(), - struct_token: Token![struct](enum_token.span), - ident: type_struct_ident, - generics: static_type_generics.clone(), - fields: Fields::Named(FieldsNamed { - brace_token: *brace_token, - named: type_struct_variants, - }), - semi_token: None, - }; - let type_struct_ident = &type_struct.ident; - let type_struct_debug_ident = format!("{enum_ident}::Type"); - type_struct.to_tokens(tokens); - let non_empty_variant_names = Vec::from_iter( - variants - .iter() - .filter(|v| !v.value.is_none()) - .map(|v| v.ident.clone()), - ); - let non_empty_variant_name_strs = - Vec::from_iter(non_empty_variant_names.iter().map(|v| v.to_string())); - let debug_type_body = quote! 
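The `__check_field` calls pushed into `field_checks` above (and the `__check_field`/`__check_fields` pair emitted later) exist purely to turn an unsatisfied field-type bound into an error that points at the offending field instead of at an opaque generated impl. A minimal sketch of that trick outside the derive, using a stand-in `Value` trait and hypothetical names:

```rust
// Stand-in bound; the real code requires fayalite's Value/Type traits.
trait Value {}
impl Value for u8 {}
impl Value for bool {}

// One helper whose only purpose is to carry the bound being checked.
fn __check_field<T: Value>(_v: T) {}

#[allow(dead_code)]
struct MyValue {
    a: u8,
    b: bool,
}

// One call per field: if `b` stopped implementing `Value`, the compile error
// would point at `__check_field(v.b)` rather than at generated trait impls.
#[allow(dead_code)]
fn __check_fields(v: MyValue) {
    __check_field(v.a);
    __check_field(v.b);
}

fn main() {}
```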
{ - f.debug_struct(#type_struct_debug_ident) - #(.field(#non_empty_variant_name_strs, &self.#non_empty_variant_names))* - .finish() - }; - derive_clone_hash_eq_partialeq_for_struct( - type_struct_ident, - &static_type_generics, - &non_empty_variant_names, - ) - .to_tokens(tokens); - let variant_names = Vec::from_iter(variants.iter().map(|v| &v.ident)); - let variant_name_strs = Vec::from_iter(variant_names.iter().map(|v| v.to_string())); - let (variant_field_pats, variant_to_canonical_values): (Vec<_>, Vec<_>) = variants - .iter() - .map(|v| { - let field_names: Vec<_> = v.fields.iter().map(|field| &field.name).collect(); - let var_names: Vec<_> = v.fields.iter().map(|field| field.var_name()).collect(); - let field_pats = quote! { - #(#field_names: #var_names,)* - }; - let to_canonical_value = match &v.value { - VariantValue::None => quote! { ::fayalite::__std::option::Option::None }, - VariantValue::Direct { .. } => { - debug_assert_eq!(var_names.len(), 1); - quote! { - ::fayalite::__std::option::Option::Some( - ::fayalite::ty::DynValueTrait::to_canonical_dyn(#(#var_names)*), - ) - } - } - VariantValue::Struct { - value_struct, - parsed_struct, - } => { - let value_struct_ident = &value_struct.ident; - let phantom_field_name = &parsed_struct - .fields - .last() - .expect("missing phantom field") - .name; - let type_generics = static_type_type_generics.as_turbofish(); - quote! { - ::fayalite::__std::option::Option::Some( - ::fayalite::ty::DynValueTrait::to_canonical_dyn( - &#value_struct_ident #type_generics { - #(#field_names: - ::fayalite::__std::clone::Clone::clone(#var_names),)* - #phantom_field_name: ::fayalite::__std::marker::PhantomData, - }, - ), - ) - } - } - }; - (field_pats, to_canonical_value) - }) - .unzip(); - let mut match_enum_variants = Punctuated::new(); - let mut match_enum_debug_arms = vec![]; - let mut match_enum_arms = vec![]; - let mut variant_vars = vec![]; - let mut from_canonical_type_variant_lets = vec![]; - let mut non_empty_variant_vars = vec![]; - let mut enum_type_variants = vec![]; - let mut enum_type_variants_hint = vec![]; - let match_enum_ident = format_ident!("__{}__MatchEnum", enum_ident); - let mut builder = Builder::new(format_ident!("__{}__Builder", enum_ident), vis.clone()); - for variant in variants.iter() { - for field in variant.fields.iter() { - builder.insert_field( - field.name.clone(), - |v| { - parse_quote_spanned! {v.span()=> - ::fayalite::expr::ToExpr::to_expr(&#v) - } - }, - |t| { - parse_quote_spanned! {t.span()=> - ::fayalite::expr::Expr<< - <#t as ::fayalite::expr::ToExpr>::Type - as ::fayalite::ty::Type - >::Value> - } - }, - |t| { - parse_quote_spanned! {t.span()=> - where - #t: ::fayalite::expr::ToExpr, - } - }, - ); - } - } - let builder = builder.finish_filling_in_fields(); - builder.to_tokens(tokens); - for (variant_index, variant) in variants.iter().enumerate() { - let variant_var = format_ident!("__v_{}", variant.ident); - let variant_name = &variant.ident; - let variant_name_str = variant.ident.to_string(); - match_enum_variants.push(Variant { - attrs: vec![], - ident: variant.ident.clone(), - fields: variant.fields_kind.into_fields(variant.fields.iter().map( - |ParsedField { - options, - vis, - name, - ty, - }| { - let FieldOptions {} = options.body; - Field { - attrs: vec![], - vis: vis.clone(), - mutability: FieldMutability::None, - ident: if let Member::Named(name) = name { - Some(name.clone()) - } else { - None - }, - colon_token: None, - ty: parse_quote! 
{ ::fayalite::expr::Expr<#ty> }, - } - }, - )), - discriminant: None, - }); - let match_enum_field_names = Vec::from_iter(variant.fields.iter().map( - |ParsedField { - options, - vis: _, - name, - ty: _, - }| { - let FieldOptions {} = options.body; - name - }, - )); - let match_enum_field_name_strs = Vec::from_iter(variant.fields.iter().map( - |ParsedField { - options, - vis: _, - name, - ty: _, - }| { - let FieldOptions {} = options.body; - name.to_token_stream().to_string() - }, - )); - let match_enum_debug_vars = Vec::from_iter(variant.fields.iter().map( - |ParsedField { - options, - vis: _, - name, - ty: _, - }| { - let FieldOptions {} = options.body; - format_ident!("__v_{}", name) - }, - )); - match_enum_debug_arms.push(match variant.fields_kind { - FieldsKind::Unit | FieldsKind::Named(_) => quote! { - Self::#variant_name { - #(#match_enum_field_names: ref #match_enum_debug_vars,)* - } => f.debug_struct(#variant_name_str) - #(.field(#match_enum_field_name_strs, #match_enum_debug_vars))* - .finish(), - }, - FieldsKind::Unnamed(_) => quote! { - Self::#variant_name( - #(ref #match_enum_debug_vars,)* - ) => f.debug_tuple(#variant_name_str) - #(.field(#match_enum_debug_vars))* - .finish(), - }, - }); - if let Some(value_ty) = variant.value.value_ty() { - from_canonical_type_variant_lets.push(quote! { - let #variant_var = - #variant_var.from_canonical_type_helper_has_value(#variant_name_str); - }); - non_empty_variant_vars.push(variant_var.clone()); - enum_type_variants.push(quote! { - ::fayalite::enum_::VariantType { - name: ::fayalite::intern::Intern::intern(#variant_name_str), - ty: ::fayalite::__std::option::Option::Some( - ::fayalite::ty::DynType::canonical_dyn(&self.#variant_name), - ), - } - }); - enum_type_variants_hint.push(quote! { - ::fayalite::enum_::VariantType { - name: ::fayalite::intern::Intern::intern(#variant_name_str), - ty: ::fayalite::__std::option::Option::Some( - ::fayalite::bundle::TypeHint::< - <#value_ty as ::fayalite::expr::ToExpr>::Type, - >::intern_dyn(), - ), - } - }); - } else { - from_canonical_type_variant_lets.push(quote! { - #variant_var.from_canonical_type_helper_no_value(#variant_name_str); - }); - enum_type_variants.push(quote! { - ::fayalite::enum_::VariantType { - name: ::fayalite::intern::Intern::intern(#variant_name_str), - ty: ::fayalite::__std::option::Option::None, - } - }); - enum_type_variants_hint.push(quote! { - ::fayalite::enum_::VariantType { - name: ::fayalite::intern::Intern::intern(#variant_name_str), - ty: ::fayalite::__std::option::Option::None, - } - }); - } - variant_vars.push(variant_var); - match_enum_arms.push(match &variant.value { - VariantValue::None => quote! { - #variant_index => #match_enum_ident::#variant_name, - }, - VariantValue::Direct { value_type } => quote! { - #variant_index => #match_enum_ident::#variant_name { - #(#match_enum_field_names)*: ::fayalite::expr::ToExpr::to_expr( - &__variant_access.downcast_unchecked::< - <#value_type as ::fayalite::expr::ToExpr>::Type>(), - ), - }, - }, - VariantValue::Struct { - value_struct: ItemStruct { ident, .. }, - .. - } => quote! 
{ - #variant_index => { - let __variant_access = ::fayalite::expr::ToExpr::to_expr( - &__variant_access.downcast_unchecked::<< - #ident #static_type_type_generics - as ::fayalite::expr::ToExpr - >::Type>(), - ); - #match_enum_ident::#variant_name { - #(#match_enum_field_names: - (*__variant_access).#match_enum_field_names,)* - } - }, - }, - }); - let builder_field_and_types = Vec::from_iter(variant.fields.iter().map( - |ParsedField { - options, - vis: _, - name, - ty, - }| { - let FieldOptions {} = options.body; - (name, ty) - }, - )); - let builder_field_vars = Vec::from_iter( - builder_field_and_types - .iter() - .map(|(name, _)| &builder.get_field(name).unwrap().1.builder_field_name), - ); - let build_body = match &variant.value { - VariantValue::None => parse_quote! { - { - ::fayalite::expr::ToExpr::to_expr( - &::fayalite::expr::ops::EnumLiteral::< - #type_struct_ident #static_type_type_generics - >::new_unchecked( - ::fayalite::__std::option::Option::None, - #variant_index, - ::fayalite::ty::StaticType::static_type(), - ), - ) - } - }, - VariantValue::Direct { value_type: _ } => parse_quote! { - { - ::fayalite::expr::ToExpr::to_expr( - &::fayalite::expr::ops::EnumLiteral::< - #type_struct_ident #static_type_type_generics - >::new_unchecked( - ::fayalite::__std::option::Option::Some( - #(#builder_field_vars)*.to_canonical_dyn(), - ), - #variant_index, - ::fayalite::ty::StaticType::static_type(), - ), - ) - } - }, - VariantValue::Struct { - parsed_struct: - ParsedStruct { - names: - ParsedStructNames { - type_struct_ident: field_type_struct_ident, - .. - }, - .. - }, - .. - } => parse_quote! { - { - let __builder = < - #field_type_struct_ident #static_type_type_generics - as ::fayalite::bundle::BundleType - >::builder(); - #(let __builder = __builder.#builder_field_vars(#builder_field_vars);)* - ::fayalite::expr::ToExpr::to_expr( - &::fayalite::expr::ops::EnumLiteral::< - #type_struct_ident #static_type_type_generics - >::new_unchecked( - ::fayalite::__std::option::Option::Some( - __builder.build().to_canonical_dyn(), - ), - #variant_index, - ::fayalite::ty::StaticType::static_type(), - ), - ) - } - }, - }; - builder - .make_build_method( - &format_ident!("variant_{}", variant_name), - variant.fields.iter().map( - |ParsedField { - options, - vis: _, - name, - ty, - }| { - let FieldOptions {} = options.body; - (name.clone(), parse_quote! { ::fayalite::expr::Expr<#ty> }) - }, - ), - &static_type_generics, - &parse_quote! {#type_struct_ident #static_type_type_generics}, - &parse_quote! { ::fayalite::expr::Expr<#target #static_type_type_generics> }, - build_body, - ) - .to_tokens(tokens); - } - let match_enum = ItemEnum { - attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}], - vis: vis.clone(), - enum_token: *enum_token, - ident: match_enum_ident, - generics: static_type_generics.clone(), - brace_token: *brace_token, - variants: match_enum_variants, - }; - let match_enum_ident = &match_enum.ident; - match_enum.to_tokens(tokens); - make_connect_impl( - *connect_inexact, - &static_type_generics, - type_struct_ident, - variants.iter().flat_map(|variant| { - variant.fields.iter().map(|field| { - let ty = &field.ty; - parse_quote_spanned! {field.name.span()=> - <#ty as ::fayalite::expr::ToExpr>::Type - } - }) - }), - ) - .to_tokens(tokens); - let variant_count = variants.len(); - let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false); - quote! 
{ - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::__std::fmt::Debug - for #match_enum_ident #static_type_type_generics - #static_type_where_clause - { - fn fmt( - &self, - f: &mut ::fayalite::__std::fmt::Formatter<'_>, - ) -> ::fayalite::__std::fmt::Result { - match *self { - #(#match_enum_debug_arms)* - } - } - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::ty::StaticType - for #type_struct_ident #static_type_type_generics - #static_type_where_clause - { - fn static_type() -> Self { - Self { - #(#non_empty_variant_names: ::fayalite::ty::StaticType::static_type(),)* - } - } - } - - fn __check_field() - where - ::Type: ::fayalite::ty::Type, - {} - fn __check_fields #static_type_impl_generics(_: #target #static_type_type_generics) - #static_type_where_clause - { - #(#field_checks)* - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::__std::fmt::Debug - for #type_struct_ident #static_type_type_generics - #static_type_where_clause - { - fn fmt( - &self, - f: &mut ::fayalite::__std::fmt::Formatter<'_>, - ) -> ::fayalite::__std::fmt::Result { - #debug_type_body - } - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::ty::Connect<::fayalite::type_deduction::UndeducedType> - for #type_struct_ident #static_type_type_generics - #static_type_where_clause - {} - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::ty::Type - for #type_struct_ident #static_type_type_generics - #static_type_where_clause - { - type CanonicalType = ::fayalite::enum_::DynEnumType; - type Value = #target #static_type_type_generics; - type CanonicalValue = ::fayalite::enum_::DynEnum; - type MaskType = ::fayalite::int::UIntType<1>; - type MaskValue = ::fayalite::int::UInt<1>; - type MatchVariant = #match_enum_ident #static_type_type_generics; - type MatchActiveScope = ::fayalite::module::Scope; - type MatchVariantAndInactiveScope = - ::fayalite::enum_::EnumMatchVariantAndInactiveScope; - type MatchVariantsIter = ::fayalite::enum_::EnumMatchVariantsIter; - fn match_variants( - this: ::fayalite::expr::Expr<::Value>, - module_builder: &mut ::fayalite::module::ModuleBuilder< - IO, - ::fayalite::module::NormalModule, - >, - source_location: ::fayalite::source_location::SourceLocation, - ) -> ::MatchVariantsIter - where - ::Type: - ::fayalite::bundle::BundleType, - { - module_builder.enum_match_variants_helper(this, source_location) - } - fn mask_type(&self) -> ::MaskType { - ::fayalite::int::UIntType::new() - } - fn canonical(&self) -> ::CanonicalType { - let variants = ::fayalite::enum_::EnumType::variants(self); - ::fayalite::enum_::DynEnumType::new(variants) - } - fn source_location(&self) -> ::fayalite::source_location::SourceLocation { - ::fayalite::source_location::SourceLocation::caller() - } - fn type_enum(&self) -> ::fayalite::ty::TypeEnum { - ::fayalite::ty::TypeEnum::EnumType(::fayalite::ty::Type::canonical(self)) - } - #[allow(non_snake_case)] - fn from_canonical_type(t: ::CanonicalType) -> Self { - let [#(#variant_vars),*] = *::fayalite::enum_::EnumType::variants(&t) else { - ::fayalite::__std::panic!("wrong number of variants"); - }; - #(#from_canonical_type_variant_lets)* - Self { - #(#non_empty_variant_names: #non_empty_variant_vars,)* - } - } - } - - #[automatically_derived] - #[allow(clippy::init_numbered_fields)] - impl #static_type_impl_generics ::fayalite::enum_::EnumType - for #type_struct_ident #static_type_type_generics - #static_type_where_clause - { - type Builder = 
#empty_builder_ty; - fn match_activate_scope( - v: ::MatchVariantAndInactiveScope, - ) -> ( - ::MatchVariant, - ::MatchActiveScope, - ) { - let (__variant_access, __scope) = v.activate(); - ( - match ::fayalite::expr::ops::VariantAccess::variant_index( - &*__variant_access, - ) { - #(#match_enum_arms)* - #variant_count.. => ::fayalite::__std::panic!("invalid variant index"), - }, - __scope, - ) - } - fn builder() -> ::Builder { - #empty_builder_ty::new() - } - fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::VariantType< - ::fayalite::intern::Interned, - >]> { - ::fayalite::intern::Intern::intern(&[#(#enum_type_variants,)*][..]) - } - fn variants_hint() -> ::fayalite::enum_::VariantsHint { - ::fayalite::enum_::VariantsHint::new([#(#enum_type_variants_hint,)*], false) - } - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::expr::ToExpr - for #target #static_type_type_generics - #static_type_where_clause - { - type Type = #type_struct_ident #static_type_type_generics; - fn ty(&self) -> ::Type { - ::fayalite::ty::StaticType::static_type() - } - fn to_expr(&self) -> ::fayalite::expr::Expr { - ::fayalite::expr::Expr::from_value(self) - } - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::ty::Value - for #target #static_type_type_generics - #static_type_where_clause - { - fn to_canonical(&self) -> < - ::Type - as ::fayalite::ty::Type - >::CanonicalValue - { - let __ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self)); - match self { - #(Self::#variant_names { #variant_field_pats } => { - ::fayalite::enum_::DynEnum::new_by_name( - __ty, - ::fayalite::intern::Intern::intern(#variant_name_strs), - #variant_to_canonical_values, - ) - })* - } - } - } - - #[automatically_derived] - impl #static_type_impl_generics ::fayalite::enum_::EnumValue - for #target #static_type_type_generics - #static_type_where_clause - { - } - } - .to_tokens(tokens); - } -} - -pub(crate) fn value_derive_enum(item: ItemEnum) -> syn::Result { - let item = ParsedEnum::parse(item)?; - let outline_generated = item.options.body.outline_generated; - let mut contents = quote! { - const _: () = { - #item - }; - }; - if outline_generated.is_some() { - contents = crate::outline_generated(contents, "value-enum-"); - } - Ok(contents) -} diff --git a/crates/fayalite-proc-macros-impl/src/value_derive_struct.rs b/crates/fayalite-proc-macros-impl/src/value_derive_struct.rs deleted file mode 100644 index 6dd82d2..0000000 --- a/crates/fayalite-proc-macros-impl/src/value_derive_struct.rs +++ /dev/null @@ -1,771 +0,0 @@ -// SPDX-License-Identifier: LGPL-3.0-or-later -// See Notices.txt for copyright information -use crate::{ - value_derive_common::{ - append_field, derive_clone_hash_eq_partialeq_for_struct, get_target, make_connect_impl, - Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics, - }, - Errors, HdlAttr, -}; -use proc_macro2::TokenStream; -use quote::{format_ident, quote, quote_spanned, ToTokens}; -use syn::{ - parse_quote, parse_quote_spanned, spanned::Spanned, FieldMutability, Generics, Ident, - ItemStruct, Member, Path, Token, Visibility, -}; - -crate::options! { - #[options = StructOptions] - pub(crate) enum StructOption { - OutlineGenerated(outline_generated), - Static(static_), - ConnectInexact(connect_inexact), - Bounds(where_, Bounds), - Target(target, Path), - } -} - -crate::options! 
{ - #[options = FieldOptions] - pub(crate) enum FieldOption { - Flip(flip), - Skip(skip), - } -} - -pub(crate) struct ParsedStructNames { - pub(crate) ident: Ident, - pub(crate) type_struct_debug_ident: S, - pub(crate) type_struct_ident: Ident, - pub(crate) match_variant_ident: I, - pub(crate) builder_struct_ident: I, - pub(crate) mask_match_variant_ident: I, - pub(crate) mask_type_ident: I, - pub(crate) mask_type_debug_ident: S, - pub(crate) mask_value_ident: I, - pub(crate) mask_value_debug_ident: S, - pub(crate) mask_builder_struct_ident: I, -} - -pub(crate) struct ParsedStruct { - pub(crate) options: HdlAttr, - pub(crate) vis: Visibility, - pub(crate) struct_token: Token![struct], - pub(crate) generics: Generics, - pub(crate) fields_kind: FieldsKind, - pub(crate) fields: Vec>, - pub(crate) semi_token: Option, - pub(crate) skip_check_fields: bool, - pub(crate) names: ParsedStructNames, Option>, -} - -impl ParsedStruct { - pub(crate) fn parse(item: &mut ItemStruct) -> syn::Result { - let ItemStruct { - attrs, - vis, - struct_token, - ident, - generics, - fields, - semi_token, - } = item; - let mut errors = Errors::new(); - let struct_options = errors - .unwrap_or_default(HdlAttr::parse_and_take_attr(attrs)) - .unwrap_or_default(); - let (fields_kind, fields) = ParsedField::parse_fields(&mut errors, fields, false); - errors.finish()?; - Ok(ParsedStruct { - options: struct_options, - vis: vis.clone(), - struct_token: *struct_token, - generics: generics.clone(), - fields_kind, - fields, - semi_token: *semi_token, - skip_check_fields: false, - names: ParsedStructNames { - ident: ident.clone(), - type_struct_debug_ident: None, - type_struct_ident: format_ident!("__{}__Type", ident), - match_variant_ident: None, - builder_struct_ident: None, - mask_match_variant_ident: None, - mask_type_ident: None, - mask_type_debug_ident: None, - mask_value_ident: None, - mask_value_debug_ident: None, - mask_builder_struct_ident: None, - }, - }) - } - pub(crate) fn write_body( - &self, - target: Path, - names: ParsedStructNames<&Ident, &String>, - is_for_mask: bool, - tokens: &mut TokenStream, - ) { - let Self { - options, - vis, - struct_token, - generics, - fields_kind, - fields, - semi_token, - skip_check_fields, - names: _, - } = self; - let skip_check_fields = *skip_check_fields || is_for_mask; - let ParsedStructNames { - ident: struct_ident, - type_struct_debug_ident, - type_struct_ident, - match_variant_ident, - builder_struct_ident, - mask_match_variant_ident: _, - mask_type_ident, - mask_type_debug_ident: _, - mask_value_ident, - mask_value_debug_ident, - mask_builder_struct_ident: _, - } = names; - let StructOptions { - outline_generated: _, - where_, - target: _, - static_, - connect_inexact, - } = &options.body; - let ValueDeriveGenerics { - generics, - static_type_generics, - } = ValueDeriveGenerics::get(generics.clone(), where_); - let (impl_generics, type_generics, where_clause) = generics.split_for_impl(); - let unskipped_fields = fields - .iter() - .filter(|field| field.options.body.skip.is_none()); - let _field_names = Vec::from_iter(fields.iter().map(|field| field.name.clone())); - let unskipped_field_names = - Vec::from_iter(unskipped_fields.clone().map(|field| field.name.clone())); - let unskipped_field_name_strs = Vec::from_iter( - unskipped_field_names - .iter() - .map(|field_name| field_name.to_token_stream().to_string()), - ); - let unskipped_field_vars = Vec::from_iter( - unskipped_field_names - .iter() - .map(|field_name| format_ident!("__v_{}", field_name)), - ); - let 
unskipped_field_flips = Vec::from_iter( - unskipped_fields - .clone() - .map(|field| field.options.body.flip.is_some()), - ); - let mut any_fields_skipped = false; - let type_fields = Vec::from_iter(fields.iter().filter_map(|field| { - let ParsedField { - options, - vis, - name, - ty, - } = field; - let FieldOptions { flip: _, skip } = &options.body; - if skip.is_some() { - any_fields_skipped = true; - return None; - } - let ty = if is_for_mask { - parse_quote! { ::fayalite::ty::AsMask<#ty> } - } else { - ty.to_token_stream() - }; - Some(syn::Field { - attrs: vec![], - vis: vis.clone(), - mutability: FieldMutability::None, - ident: match name.clone() { - Member::Named(name) => Some(name), - Member::Unnamed(_) => None, - }, - colon_token: None, - ty: parse_quote! { <#ty as ::fayalite::expr::ToExpr>::Type }, - }) - })); - let field_types = Vec::from_iter(type_fields.iter().map(|field| field.ty.clone())); - let match_variant_fields = Vec::from_iter(fields.iter().zip(&type_fields).map( - |(parsed_field, type_field)| { - let field_ty = &parsed_field.ty; - syn::Field { - ty: parse_quote! { ::fayalite::expr::Expr<#field_ty> }, - ..type_field.clone() - } - }, - )); - - let mask_value_fields = Vec::from_iter(fields.iter().zip(&type_fields).map( - |(parsed_field, type_field)| { - let field_ty = &parsed_field.ty; - syn::Field { - ty: parse_quote! { ::fayalite::ty::AsMask<#field_ty> }, - ..type_field.clone() - } - }, - )); - - let mut type_struct_fields = fields_kind.into_fields(type_fields); - let mut match_variant_fields = fields_kind.into_fields(match_variant_fields); - let mut mask_value_fields = fields_kind.into_fields(mask_value_fields); - let phantom_data_field_name = any_fields_skipped.then(|| { - let phantom_data_field_name = Ident::new("__phantom_data", type_struct_ident.span()); - let member = append_field( - &mut type_struct_fields, - syn::Field { - attrs: vec![], - vis: vis.clone(), - mutability: FieldMutability::None, - ident: Some(phantom_data_field_name.clone()), - colon_token: None, - ty: parse_quote_spanned! {type_struct_ident.span()=> - ::fayalite::__std::marker::PhantomData<#struct_ident #type_generics> - }, - }, - ); - append_field( - &mut match_variant_fields, - syn::Field { - attrs: vec![], - vis: Visibility::Inherited, - mutability: FieldMutability::None, - ident: Some(phantom_data_field_name.clone()), - colon_token: None, - ty: parse_quote_spanned! {type_struct_ident.span()=> - ::fayalite::__std::marker::PhantomData<#struct_ident #type_generics> - }, - }, - ); - append_field( - &mut mask_value_fields, - syn::Field { - attrs: vec![], - vis: Visibility::Inherited, - mutability: FieldMutability::None, - ident: Some(phantom_data_field_name), - colon_token: None, - ty: parse_quote_spanned! {type_struct_ident.span()=> - ::fayalite::__std::marker::PhantomData<#struct_ident #type_generics> - }, - }, - ); - member - }); - let phantom_data_field_name_slice = phantom_data_field_name.as_slice(); - let type_struct = ItemStruct { - attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}], - vis: vis.clone(), - struct_token: *struct_token, - ident: type_struct_ident.clone(), - generics: generics.clone(), - fields: type_struct_fields, - semi_token: *semi_token, - }; - type_struct.to_tokens(tokens); - let match_variant_struct = ItemStruct { - attrs: vec![parse_quote! 
{#[allow(non_camel_case_types)]}], - vis: vis.clone(), - struct_token: *struct_token, - ident: match_variant_ident.clone(), - generics: generics.clone(), - fields: match_variant_fields, - semi_token: *semi_token, - }; - match_variant_struct.to_tokens(tokens); - let mask_type_body = if is_for_mask { - quote! { - ::fayalite::__std::clone::Clone::clone(self) - } - } else { - let mask_value_struct = ItemStruct { - attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}], - vis: vis.clone(), - struct_token: *struct_token, - ident: mask_value_ident.clone(), - generics: generics.clone(), - fields: mask_value_fields, - semi_token: *semi_token, - }; - mask_value_struct.to_tokens(tokens); - let debug_mask_value_body = match fields_kind { - FieldsKind::Unit => quote! { - f.debug_struct(#mask_value_debug_ident).finish() - }, - FieldsKind::Named(_) => quote! { - f.debug_struct(#mask_value_debug_ident) - #(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))* - .finish() - }, - FieldsKind::Unnamed(_) => quote! { - f.debug_tuple(#mask_value_debug_ident) - #(.field(&self.#unskipped_field_names))* - .finish() - }, - }; - quote! { - #[automatically_derived] - impl #impl_generics ::fayalite::__std::fmt::Debug - for #mask_value_ident #type_generics - #where_clause - { - fn fmt( - &self, - f: &mut ::fayalite::__std::fmt::Formatter<'_>, - ) -> ::fayalite::__std::fmt::Result { - #debug_mask_value_body - } - } - } - .to_tokens(tokens); - quote! { - #mask_type_ident { - #(#unskipped_field_names: - ::fayalite::ty::Type::mask_type(&self.#unskipped_field_names),)* - #(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)* - } - } - }; - let debug_type_body = match fields_kind { - FieldsKind::Unit => quote! { - f.debug_struct(#type_struct_debug_ident).finish() - }, - FieldsKind::Named(_) => quote! { - f.debug_struct(#type_struct_debug_ident) - #(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))* - .finish() - }, - FieldsKind::Unnamed(_) => quote! { - f.debug_tuple(#type_struct_debug_ident) - #(.field(&self.#unskipped_field_names))* - .finish() - }, - }; - for the_struct_ident in [&type_struct_ident, match_variant_ident] - .into_iter() - .chain(is_for_mask.then_some(mask_value_ident)) - { - derive_clone_hash_eq_partialeq_for_struct( - the_struct_ident, - &generics, - &Vec::from_iter( - unskipped_field_names - .iter() - .cloned() - .chain(phantom_data_field_name.clone()), - ), - ) - .to_tokens(tokens); - } - let check_v = format_ident!("__v"); - let field_checks = Vec::from_iter(fields.iter().map(|ParsedField { ty, name, .. }| { - quote_spanned! {ty.span()=> - __check_field(#check_v.#name); - } - })); - if static_.is_some() { - let (impl_generics, type_generics, where_clause) = - static_type_generics.split_for_impl(); - quote! { - #[automatically_derived] - impl #impl_generics ::fayalite::ty::StaticType for #type_struct_ident #type_generics - #where_clause - { - fn static_type() -> Self { - Self { - #(#unskipped_field_names: ::fayalite::ty::StaticType::static_type(),)* - #(#phantom_data_field_name_slice: - ::fayalite::__std::marker::PhantomData,)* - } - } - } - } - .to_tokens(tokens); - } - if !skip_check_fields { - quote! 
{ - fn __check_field(_v: T) - where - ::Type: ::fayalite::ty::Type, - {} - fn __check_fields #impl_generics(#check_v: #target #type_generics) - #where_clause - { - #(#field_checks)* - } - } - .to_tokens(tokens); - } - let mut builder = Builder::new(builder_struct_ident.clone(), vis.clone()); - for field in unskipped_fields.clone() { - builder.insert_field( - field.name.clone(), - |v| { - parse_quote_spanned! {v.span()=> - ::fayalite::expr::ToExpr::to_expr(&#v) - } - }, - |t| { - parse_quote_spanned! {t.span()=> - ::fayalite::expr::Expr<< - <#t as ::fayalite::expr::ToExpr>::Type - as ::fayalite::ty::Type - >::Value> - } - }, - |t| { - parse_quote_spanned! {t.span()=> - where - #t: ::fayalite::expr::ToExpr, - } - }, - ); - } - let builder = builder.finish_filling_in_fields(); - builder.to_tokens(tokens); - let build_type_fields = - Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| { - let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name; - quote_spanned! {struct_ident.span()=> - #name: ::fayalite::expr::ToExpr::ty(&#builder_field_name) - } - })); - let build_expr_fields = - Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| { - let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name; - quote_spanned! {struct_ident.span()=> - #builder_field_name.to_canonical_dyn() - } - })); - let build_specified_fields = unskipped_fields.clone().map( - |ParsedField { - options: _, - vis: _, - name, - ty, - }| { - let ty = if is_for_mask { - parse_quote_spanned! {name.span()=> - ::fayalite::expr::Expr<::fayalite::ty::AsMask<#ty>> - } - } else { - parse_quote_spanned! {name.span()=> - ::fayalite::expr::Expr<#ty> - } - }; - (name.clone(), ty) - }, - ); - let build_body = parse_quote_spanned! {struct_ident.span()=> - { - ::fayalite::expr::ToExpr::to_expr( - &::fayalite::expr::ops::BundleLiteral::new_unchecked( - ::fayalite::intern::Intern::intern(&[#( - #build_expr_fields, - )*][..]), - #type_struct_ident { - #(#build_type_fields,)* - #(#phantom_data_field_name_slice: - ::fayalite::__std::marker::PhantomData,)* - }, - ), - ) - } - }; - builder - .make_build_method( - &Ident::new("build", struct_ident.span()), - build_specified_fields, - &generics, - &parse_quote_spanned! {struct_ident.span()=> - #type_struct_ident #type_generics - }, - &parse_quote_spanned! {struct_ident.span()=> - ::fayalite::expr::Expr<#target #type_generics> - }, - build_body, - ) - .to_tokens(tokens); - make_connect_impl( - *connect_inexact, - &generics, - &type_struct_ident, - unskipped_fields.clone().map(|field| { - let ty = &field.ty; - parse_quote_spanned! {field.name.span()=> - <#ty as ::fayalite::expr::ToExpr>::Type - } - }), - ) - .to_tokens(tokens); - let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false); - quote! 
{ - #[automatically_derived] - impl #impl_generics ::fayalite::__std::fmt::Debug for #type_struct_ident #type_generics - #where_clause - { - fn fmt( - &self, - f: &mut ::fayalite::__std::fmt::Formatter<'_>, - ) -> ::fayalite::__std::fmt::Result { - #debug_type_body - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::ty::Connect<::fayalite::type_deduction::UndeducedType> - for #type_struct_ident #type_generics - #where_clause - {} - - #[automatically_derived] - impl #impl_generics ::fayalite::ty::Type for #type_struct_ident #type_generics - #where_clause - { - type CanonicalType = ::fayalite::bundle::DynBundleType; - type Value = #target #type_generics; - type CanonicalValue = ::fayalite::bundle::DynBundle; - type MaskType = #mask_type_ident #type_generics; - type MaskValue = #mask_value_ident #type_generics; - type MatchVariant = #match_variant_ident #type_generics; - type MatchActiveScope = (); - type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope< - #match_variant_ident #type_generics, - >; - type MatchVariantsIter = ::fayalite::__std::iter::Once< - ::MatchVariantAndInactiveScope, - >; - #[allow(unused_variables)] - fn match_variants( - this: ::fayalite::expr::Expr<::Value>, - module_builder: &mut ::fayalite::module::ModuleBuilder< - IO, - ::fayalite::module::NormalModule, - >, - source_location: ::fayalite::source_location::SourceLocation, - ) -> ::MatchVariantsIter - where - ::Type: - ::fayalite::bundle::BundleType, - { - ::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope( - #match_variant_ident { - #(#unskipped_field_names: this.field(#unskipped_field_name_strs),)* - #(#phantom_data_field_name_slice: - ::fayalite::__std::marker::PhantomData,)* - }, - )) - } - fn mask_type(&self) -> ::MaskType { - #mask_type_body - } - fn canonical(&self) -> ::CanonicalType { - let fields = ::fayalite::bundle::BundleType::fields(self); - ::fayalite::bundle::DynBundleType::new(fields) - } - fn source_location(&self) -> ::fayalite::source_location::SourceLocation { - ::fayalite::source_location::SourceLocation::caller() - } - fn type_enum(&self) -> ::fayalite::ty::TypeEnum { - ::fayalite::ty::TypeEnum::BundleType(::fayalite::ty::Type::canonical(self)) - } - fn from_canonical_type(t: ::CanonicalType) -> Self { - let [#(#unskipped_field_vars),*] = *::fayalite::bundle::BundleType::fields(&t) - else { - ::fayalite::__std::panic!("wrong number of fields"); - }; - Self { - #(#unskipped_field_names: #unskipped_field_vars.from_canonical_type_helper( - #unskipped_field_name_strs, - #unskipped_field_flips, - ),)* - #(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)* - } - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::ty::TypeWithDeref for #type_struct_ident #type_generics - #where_clause - { - #[allow(unused_variables)] - fn expr_deref(this: &::fayalite::expr::Expr<::Value>) - -> &::MatchVariant - { - ::fayalite::intern::Interned::<_>::into_inner( - ::fayalite::intern::Intern::intern_sized(#match_variant_ident { - #(#unskipped_field_names: this.field(#unskipped_field_name_strs),)* - #(#phantom_data_field_name_slice: - ::fayalite::__std::marker::PhantomData,)* - }), - ) - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::bundle::BundleType for #type_struct_ident #type_generics - #where_clause - { - type Builder = #empty_builder_ty; - fn builder() -> ::Builder { - #empty_builder_ty::new() - } - fn fields(&self) -> ::fayalite::intern::Interned< - 
[::fayalite::bundle::FieldType<::fayalite::intern::Interned< - dyn ::fayalite::ty::DynCanonicalType, - >>], - > - { - ::fayalite::intern::Intern::intern(&[#( - ::fayalite::bundle::FieldType { - name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs), - flipped: #unskipped_field_flips, - ty: ::fayalite::ty::DynType::canonical_dyn( - &self.#unskipped_field_names, - ), - }, - )*][..]) - } - fn fields_hint() -> ::fayalite::bundle::FieldsHint { - ::fayalite::bundle::FieldsHint::new([#( - ::fayalite::bundle::FieldType { - name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs), - flipped: #unskipped_field_flips, - ty: ::fayalite::bundle::TypeHint::<#field_types>::intern_dyn(), - }, - )*], false) - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::expr::ToExpr for #target #type_generics - #where_clause - { - type Type = #type_struct_ident #type_generics; - fn ty(&self) -> ::Type { - #type_struct_ident { - #(#unskipped_field_names: ::fayalite::expr::ToExpr::ty( - &self.#unskipped_field_names, - ),)* - #(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)* - } - } - fn to_expr(&self) -> ::fayalite::expr::Expr { - ::fayalite::expr::Expr::from_value(self) - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::ty::Value for #target #type_generics - #where_clause - { - fn to_canonical(&self) -> < - ::Type - as ::fayalite::ty::Type - >::CanonicalValue - { - let ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self)); - ::fayalite::bundle::DynBundle::new(ty, ::fayalite::__std::sync::Arc::new([ - #(::fayalite::ty::DynValueTrait::to_canonical_dyn( - &self.#unskipped_field_names, - ),)* - ])) - } - } - - #[automatically_derived] - impl #impl_generics ::fayalite::bundle::BundleValue for #target #type_generics - #where_clause - { - } - } - .to_tokens(tokens); - } -} - -impl ToTokens for ParsedStruct { - fn to_tokens(&self, tokens: &mut TokenStream) { - let ParsedStructNames { - ident: struct_ident, - type_struct_debug_ident, - type_struct_ident, - match_variant_ident, - builder_struct_ident, - mask_match_variant_ident, - mask_type_ident, - mask_type_debug_ident, - mask_value_ident, - mask_value_debug_ident, - mask_builder_struct_ident, - } = &self.names; - macro_rules! unwrap_or_set { - ($(let $var:ident =? $fallback_value:expr;)*) => { - $(let $var = $var.clone().unwrap_or_else(|| $fallback_value);)* - }; - } - unwrap_or_set! { - let type_struct_debug_ident =? format!("{struct_ident}::Type"); - let match_variant_ident =? format_ident!("__{}__MatchVariant", struct_ident); - let builder_struct_ident =? format_ident!("__{}__Builder", struct_ident); - let mask_match_variant_ident =? format_ident!("__AsMask__{}__MatchVariant", struct_ident); - let mask_type_ident =? format_ident!("__AsMask__{}__Type", struct_ident); - let mask_type_debug_ident =? format!("AsMask<{struct_ident}>::Type"); - let mask_value_ident =? format_ident!("__AsMask__{}", struct_ident); - let mask_value_debug_ident =? format!("AsMask<{struct_ident}>"); - let mask_builder_struct_ident =? 
format_ident!("__AsMask__{}__Builder", struct_ident); - } - let target = get_target(&self.options.body.target, struct_ident); - let names = ParsedStructNames { - ident: struct_ident.clone(), - type_struct_debug_ident: &type_struct_debug_ident, - type_struct_ident: type_struct_ident.clone(), - match_variant_ident: &match_variant_ident, - builder_struct_ident: &builder_struct_ident, - mask_match_variant_ident: &mask_match_variant_ident, - mask_type_ident: &mask_type_ident, - mask_type_debug_ident: &mask_type_debug_ident, - mask_value_ident: &mask_value_ident, - mask_value_debug_ident: &mask_value_debug_ident, - mask_builder_struct_ident: &mask_builder_struct_ident, - }; - self.write_body(target, names, false, tokens); - let mask_names = ParsedStructNames { - ident: mask_value_ident.clone(), - type_struct_debug_ident: &mask_type_debug_ident, - type_struct_ident: mask_type_ident.clone(), - match_variant_ident: &mask_match_variant_ident, - builder_struct_ident: &mask_builder_struct_ident, - mask_match_variant_ident: &mask_match_variant_ident, - mask_type_ident: &mask_type_ident, - mask_type_debug_ident: &mask_type_debug_ident, - mask_value_ident: &mask_value_ident, - mask_value_debug_ident: &mask_value_debug_ident, - mask_builder_struct_ident: &mask_builder_struct_ident, - }; - self.write_body(mask_value_ident.clone().into(), mask_names, true, tokens); - } -} - -pub(crate) fn value_derive_struct(mut item: ItemStruct) -> syn::Result { - let item = ParsedStruct::parse(&mut item)?; - let outline_generated = item.options.body.outline_generated; - let mut contents = quote! { - const _: () = { - #item - }; - }; - if outline_generated.is_some() { - contents = crate::outline_generated(contents, "value-struct-"); - } - Ok(contents) -} diff --git a/crates/fayalite-proc-macros/Cargo.toml b/crates/fayalite-proc-macros/Cargo.toml index 08c630a..6941d12 100644 --- a/crates/fayalite-proc-macros/Cargo.toml +++ b/crates/fayalite-proc-macros/Cargo.toml @@ -16,4 +16,4 @@ version.workspace = true proc-macro = true [dependencies] -fayalite-proc-macros-impl = { workspace = true } +fayalite-proc-macros-impl.workspace = true diff --git a/crates/fayalite-proc-macros/src/lib.rs b/crates/fayalite-proc-macros/src/lib.rs index 7d0c65d..73dad09 100644 --- a/crates/fayalite-proc-macros/src/lib.rs +++ b/crates/fayalite-proc-macros/src/lib.rs @@ -2,7 +2,7 @@ // See Notices.txt for copyright information //! proc macros for `fayalite` //! -//! see `fayalite::hdl_module` and `fayalite::ty::Value` for docs +//! 
see `fayalite::hdl_module` and `fayalite::hdl` for docs // intentionally not documented here, see `fayalite::hdl_module` for docs #[proc_macro_attribute] @@ -10,16 +10,19 @@ pub fn hdl_module( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { - match fayalite_proc_macros_impl::module(attr.into(), item.into()) { + match fayalite_proc_macros_impl::hdl_module(attr.into(), item.into()) { Ok(retval) => retval.into(), Err(err) => err.into_compile_error().into(), } } -// intentionally not documented here, see `fayalite::ty::Value` for docs -#[proc_macro_derive(Value, attributes(hdl))] -pub fn value_derive(item: proc_macro::TokenStream) -> proc_macro::TokenStream { - match fayalite_proc_macros_impl::value_derive(item.into()) { +// intentionally not documented here, see `fayalite::hdl` for docs +#[proc_macro_attribute] +pub fn hdl( + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + match fayalite_proc_macros_impl::hdl_attr(attr.into(), item.into()) { Ok(retval) => retval.into(), Err(err) => err.into_compile_error().into(), } diff --git a/crates/fayalite-visit-gen/Cargo.toml b/crates/fayalite-visit-gen/Cargo.toml index 6da95d2..5a98947 100644 --- a/crates/fayalite-visit-gen/Cargo.toml +++ b/crates/fayalite-visit-gen/Cargo.toml @@ -13,11 +13,11 @@ rust-version.workspace = true version.workspace = true [dependencies] -indexmap = { workspace = true } -prettyplease = { workspace = true } -proc-macro2 = { workspace = true } -quote = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -syn = { workspace = true } -thiserror = { workspace = true } +indexmap.workspace = true +prettyplease.workspace = true +proc-macro2.workspace = true +quote.workspace = true +serde.workspace = true +serde_json.workspace = true +syn.workspace = true +thiserror.workspace = true diff --git a/crates/fayalite-visit-gen/src/lib.rs b/crates/fayalite-visit-gen/src/lib.rs index 008a4c6..81e4577 100644 --- a/crates/fayalite-visit-gen/src/lib.rs +++ b/crates/fayalite-visit-gen/src/lib.rs @@ -1,7 +1,7 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote, ToTokens}; +use quote::{ToTokens, format_ident, quote}; use std::{collections::BTreeMap, fs}; use syn::{fold::Fold, parse_quote}; diff --git a/crates/fayalite/Cargo.toml b/crates/fayalite/Cargo.toml index 555f7f5..fdf1c87 100644 --- a/crates/fayalite/Cargo.toml +++ b/crates/fayalite/Cargo.toml @@ -14,25 +14,36 @@ rust-version.workspace = true version.workspace = true [dependencies] -bitvec = { workspace = true } -hashbrown = { workspace = true } -num-bigint = { workspace = true } -num-traits = { workspace = true } -fayalite-proc-macros = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -clap = { version = "4.5.9", features = ["derive", "env"] } -eyre = "0.6.12" -which = "6.0.1" +base64.workspace = true +bitvec.workspace = true +blake3.workspace = true +clap.workspace = true +clap_complete.workspace = true +ctor.workspace = true +eyre.workspace = true +fayalite-proc-macros.workspace = true +hashbrown.workspace = true +jobslot.workspace = true +num-bigint.workspace = true +num-traits.workspace = true +ordered-float.workspace = true +petgraph.workspace = true +serde_json.workspace = true +serde.workspace = true +tempfile.workspace = true +vec_map.workspace = true +which.workspace = true [dev-dependencies] -trybuild = { 
workspace = true } +trybuild.workspace = true +serde = { workspace = true, features = ["rc"] } [build-dependencies] -fayalite-visit-gen = { workspace = true } +fayalite-visit-gen.workspace = true [features] unstable-doc = [] +unstable-test-hasher = [] [package.metadata.docs.rs] features = ["unstable-doc"] diff --git a/crates/fayalite/build.rs b/crates/fayalite/build.rs index 429e527..c6680d5 100644 --- a/crates/fayalite/build.rs +++ b/crates/fayalite/build.rs @@ -4,6 +4,10 @@ use fayalite_visit_gen::parse_and_generate; use std::{env, fs, path::Path}; fn main() { + println!("cargo::rustc-check-cfg=cfg(todo)"); + println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)"); + println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)"); + println!("cargo::rustc-cfg=cfg_true_for_tests"); let path = "visit_types.json"; println!("cargo::rerun-if-changed={path}"); println!("cargo::rerun-if-changed=build.rs"); diff --git a/crates/fayalite/examples/blinky.rs b/crates/fayalite/examples/blinky.rs index fcc0c5d..75799fd 100644 --- a/crates/fayalite/examples/blinky.rs +++ b/crates/fayalite/examples/blinky.rs @@ -1,51 +1,64 @@ -use clap::Parser; -use fayalite::{ - cli, - clock::{Clock, ClockDomain}, - hdl_module, - int::{DynUInt, DynUIntType, IntCmp, IntTypeTrait, UInt}, - reset::{SyncReset, ToReset}, -}; +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use fayalite::prelude::*; #[hdl_module] -fn blinky(clock_frequency: u64) { - #[hdl] - let clk: Clock = m.input(); - #[hdl] - let rst: SyncReset = m.input(); +fn blinky(platform_io_builder: PlatformIOBuilder<'_>) { + let clk_input = + platform_io_builder.peripherals_with_type::()[0].use_peripheral(); + let rst = platform_io_builder.peripherals_with_type::()[0].use_peripheral(); let cd = #[hdl] ClockDomain { - clk, - rst: rst.to_reset(), + clk: clk_input.clk, + rst, }; - let max_value = clock_frequency / 2 - 1; - let int_ty = DynUIntType::range_inclusive(0..=max_value); + let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1; + let int_ty = UInt::range_inclusive(0..=max_value); #[hdl] - let counter: DynUInt = m.reg_builder().clock_domain(cd).reset(int_ty.literal(0)); + let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty)); #[hdl] - let output_reg: UInt<1> = m.reg_builder().clock_domain(cd).reset_default(); + let output_reg: Bool = reg_builder().clock_domain(cd).reset(false); #[hdl] - if counter.cmp_eq(max_value) { - m.connect_any(counter, 0u8); - m.connect(output_reg, !output_reg); + let rgb_output_reg = reg_builder().clock_domain(cd).reset( + #[hdl] + peripherals::RgbLed { + r: false, + g: false, + b: false, + }, + ); + #[hdl] + if counter_reg.cmp_eq(max_value) { + connect_any(counter_reg, 0u8); + connect(output_reg, !output_reg); + connect(rgb_output_reg.r, !rgb_output_reg.r); + #[hdl] + if rgb_output_reg.r { + connect(rgb_output_reg.g, !rgb_output_reg.g); + #[hdl] + if rgb_output_reg.g { + connect(rgb_output_reg.b, !rgb_output_reg.b); + } + } } else { - m.connect_any(counter, counter + 1_hdl_u1); + connect_any(counter_reg, counter_reg + 1_hdl_u1); + } + for led in platform_io_builder.peripherals_with_type::() { + if let Ok(led) = led.try_use_peripheral() { + connect(led.on, output_reg); + } + } + for rgb_led in platform_io_builder.peripherals_with_type::() { + if let Ok(rgb_led) = rgb_led.try_use_peripheral() { + connect(rgb_led, rgb_output_reg); + } } #[hdl] - let led: UInt<1> = m.output(); - m.connect(led, output_reg); + let io = 
m.add_platform_io(platform_io_builder); } -#[derive(Parser)] -struct Cli { - /// clock frequency in hertz - #[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))] - clock_frequency: u64, - #[command(subcommand)] - cli: cli::Cli, -} - -fn main() -> cli::Result { - let cli = Cli::parse(); - cli.cli.run(blinky(cli.clock_frequency)) +fn main() { + ::main("blinky", |_, platform, _| { + Ok(JobParams::new(platform.wrap_main_module(blinky))) + }); } diff --git a/crates/fayalite/examples/tx_only_uart.rs b/crates/fayalite/examples/tx_only_uart.rs new file mode 100644 index 0000000..5c20b39 --- /dev/null +++ b/crates/fayalite/examples/tx_only_uart.rs @@ -0,0 +1,188 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use clap::builder::TypedValueParser; +use fayalite::{ + build::{ToArgs, WriteArgs}, + platform::PeripheralRef, + prelude::*, +}; +use ordered_float::NotNan; + +fn pick_clock<'a>( + platform_io_builder: &PlatformIOBuilder<'a>, +) -> PeripheralRef<'a, peripherals::ClockInput> { + let mut clks = platform_io_builder.peripherals_with_type::(); + clks.sort_by_key(|clk| { + // sort clocks by preference, smaller return values means higher preference + let mut frequency = clk.ty().frequency(); + let priority; + if frequency < 10e6 { + frequency = -frequency; // prefer bigger frequencies + priority = 1; + } else if frequency > 50e6 { + // prefer smaller frequencies + priority = 2; // least preferred + } else { + priority = 0; // most preferred + frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz + } + (priority, NotNan::new(frequency).expect("should be valid")) + }); + clks[0] +} + +#[hdl_module] +fn tx_only_uart( + platform_io_builder: PlatformIOBuilder<'_>, + divisor: f64, + message: impl AsRef<[u8]>, +) { + let message = message.as_ref(); + let clk_input = pick_clock(&platform_io_builder).use_peripheral(); + let rst = platform_io_builder.peripherals_with_type::()[0].use_peripheral(); + let cd = #[hdl] + ClockDomain { + clk: clk_input.clk, + rst, + }; + let numerator = 1u128 << 16; + let denominator = (divisor * numerator as f64).round() as u128; + + #[hdl] + let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128); + + #[hdl] + let sum: UInt<128> = wire(); + connect_any(sum, remainder_reg + numerator); + + #[hdl] + let tick_reg = reg_builder().clock_domain(cd).reset(false); + connect(tick_reg, false); + + #[hdl] + let next_remainder: UInt<128> = wire(); + connect(remainder_reg, next_remainder); + + #[hdl] + if sum.cmp_ge(denominator) { + connect_any(next_remainder, sum - denominator); + connect(tick_reg, true); + } else { + connect(next_remainder, sum); + } + + #[hdl] + let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4); + #[hdl] + let next_uart_state: UInt<4> = wire(); + + connect_any(next_uart_state, uart_state_reg + 1u8); + + #[hdl] + let message_mem: Array> = wire(Array[UInt::new_static()][message.len()]); + for (message, message_mem) in message.iter().zip(message_mem) { + connect(message_mem, *message); + } + #[hdl] + let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32); + #[hdl] + let next_addr: UInt<32> = wire(); + connect(next_addr, addr_reg); + + #[hdl] + let tx = reg_builder().clock_domain(cd).reset(true); + + #[hdl] + let tx_bits: Array = wire(); + + connect(tx_bits[0], false); // start bit + connect(tx_bits[9], true); // stop bit + + for i in 0..8 { + connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits + } + + connect(tx, 
tx_bits[uart_state_reg]); + + #[hdl] + if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) { + connect(next_uart_state, 0_hdl_u4); + let next_addr_val = addr_reg + 1u8; + #[hdl] + if next_addr_val.cmp_lt(message.len()) { + connect_any(next_addr, next_addr_val); + } else { + connect(next_addr, 0u32); + } + } + + #[hdl] + if tick_reg { + connect(uart_state_reg, next_uart_state); + connect(addr_reg, next_addr); + } + + for uart in platform_io_builder.peripherals_with_type::() { + connect(uart.use_peripheral().tx, tx); + } + + #[hdl] + let io = m.add_platform_io(platform_io_builder); +} + +fn parse_baud_rate( + v: impl AsRef, +) -> Result, Box> { + let retval: NotNan = v + .as_ref() + .parse() + .map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?; + if *retval > 0.0 && retval.is_finite() { + Ok(retval) + } else { + Err("baud rate must be finite and positive".into()) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct ExtraArgs { + #[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")] + pub baud_rate: NotNan, + #[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())] + pub message: String, +} + +impl ToArgs for ExtraArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { baud_rate, message } = self; + args.write_display_arg(format_args!("--baud-rate={baud_rate}")); + args.write_long_option_eq("message", message); + } +} + +fn main() { + type Cli = BuildCli; + Cli::main( + "tx_only_uart", + |_, platform, ExtraArgs { baud_rate, message }| { + Ok(JobParams::new(platform.try_wrap_main_module(|io| { + let clk = pick_clock(&io).ty(); + let divisor = clk.frequency() / *baud_rate; + let baud_rate_error = |msg| { + ::command() + .error(clap::error::ErrorKind::ValueValidation, msg) + }; + const HUGE_DIVISOR: f64 = u64::MAX as f64; + match divisor { + divisor if !divisor.is_finite() => { + return Err(baud_rate_error("bad baud rate")); + } + HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")), + 4.0.. => {} + _ => return Err(baud_rate_error("baud rate is too large")), + } + Ok(tx_only_uart(io, divisor, message)) + })?)) + }, + ); +} diff --git a/crates/fayalite/src/_docs.rs b/crates/fayalite/src/_docs.rs index f7020f9..5b1888b 100644 --- a/crates/fayalite/src/_docs.rs +++ b/crates/fayalite/src/_docs.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information #![doc = include_str!("../README.md")] //! @@ -10,7 +12,7 @@ //! function to add inputs/outputs and other components to that module. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt}; +//! # use fayalite::prelude::*; //! # //! #[hdl_module] //! pub fn example_module() { @@ -18,7 +20,7 @@ //! let an_input: UInt<10> = m.input(); // create an input that is a 10-bit unsigned integer //! #[hdl] //! let some_output: UInt<10> = m.output(); -//! m.connect(some_output, an_input); // assigns the value of `an_input` to `some_output` +//! connect(some_output, an_input); // assigns the value of `an_input` to `some_output` //! } //! ``` diff --git a/crates/fayalite/src/_docs/modules.rs b/crates/fayalite/src/_docs/modules.rs index c392f2e..99b98e8 100644 --- a/crates/fayalite/src/_docs/modules.rs +++ b/crates/fayalite/src/_docs/modules.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Fayalite Modules //! 
//! The [`#[hdl_module]`][`crate::hdl_module`] attribute is applied to a Rust diff --git a/crates/fayalite/src/_docs/modules/extern_module.rs b/crates/fayalite/src/_docs/modules/extern_module.rs index 353cbe4..c1367d9 100644 --- a/crates/fayalite/src/_docs/modules/extern_module.rs +++ b/crates/fayalite/src/_docs/modules/extern_module.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! These are for when you want to use modules written in //! some other language, such as Verilog. //! @@ -11,8 +13,6 @@ //! * [`parameter_raw_verilog()`][`ModuleBuilder::parameter_raw_verilog`] //! * [`parameter()`][`ModuleBuilder::parameter`] //! -//! These use the [`ExternModule`][`crate::module::ExternModule`] tag type. -//! //! [inputs/outputs]: crate::_docs::modules::module_bodies::hdl_let_statements::inputs_outputs #[allow(unused)] diff --git a/crates/fayalite/src/_docs/modules/module_bodies.rs b/crates/fayalite/src/_docs/modules/module_bodies.rs index bd85c61..c12ae21 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Module Function Bodies //! //! The `#[hdl_module]` attribute lets you have statements/expressions with `#[hdl]` annotations diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_array_expressions.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_array_expressions.rs index aabb791..c0b15ad 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_array_expressions.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_array_expressions.rs @@ -1,24 +1,26 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # `#[hdl]` Array Expressions //! -//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][Array] expression: +//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][type@Array] expression: //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, array::Array}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] //! let v: UInt<8> = m.input(); //! #[hdl] -//! let w: Array<[UInt<8>; 4]> = m.wire(); -//! m.connect( +//! let w: Array, 4> = wire(); +//! connect( //! w, //! #[hdl] -//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast()] // you can make an array like this +//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast_to_static()] // you can make an array like this //! ); -//! m.connect( +//! connect( //! w, //! #[hdl] -//! [(v + 1_hdl_u8).cast(); 4] // or you can make an array repeat like this +//! [(v + 1_hdl_u8).cast_to_static(); 4] // or you can make an array repeat like this //! ); //! # } //! ``` diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_if_statements.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_if_statements.rs index ffc446c..7d09943 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_if_statements.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_if_statements.rs @@ -1,10 +1,11 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # `#[hdl] if` Statements //! //! `#[hdl] if` statements behave similarly to Rust `if` statements, except they end up as muxes //! and stuff in the final hardware instead of being run when the fayalite module is being created. //! -//! 
The condition of an `#[hdl] if` statement must have type [`UInt<1>`] or [`DynUInt`] with -//! `width() == 1` or be an [expression][Expr] of one of those types. +//! The condition of an `#[hdl] if` statement must have type [`Expr<Bool>`][Bool]. //! //! `#[hdl] if` statements' bodies must evaluate to type `()` for now. //! @@ -14,7 +15,4 @@ //! [match]: super::hdl_match_statements #[allow(unused)] -use crate::{ - expr::Expr, - int::{DynUInt, UInt}, -}; +use crate::int::Bool; diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs index c4e3e70..229871b 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs @@ -1,5 +1,8 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! ## `#[hdl] let` statements +pub mod destructuring; pub mod inputs_outputs; pub mod instances; pub mod memories; diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/destructuring.rs new file mode 100644 index 0000000..1fc4705 --- /dev/null +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/destructuring.rs @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +//! ### Destructuring Let +//! +//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns. +//! +//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now, +//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`. +//! +//! ``` +//! # use fayalite::prelude::*; +//! #[hdl] +//! struct MyStruct { +//! a: UInt<8>, +//! b: Bool, +//! } +//! +//! #[hdl_module] +//! fn my_module() { +//! #[hdl] +//! let my_input: MyStruct = m.input(); +//! #[hdl] +//! let my_output: UInt<8> = m.output(); +//! #[hdl] +//! let MyStruct { a, b } = my_input; +//! #[hdl] +//! if b { +//! connect(my_output, a); +//! } else { +//! connect(my_output, 0_hdl_u8); +//! } +//! } +//! ``` diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/inputs_outputs.rs index 965d176..14169d9 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/inputs_outputs.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/inputs_outputs.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! ### Inputs/Outputs //! //! Inputs/Outputs create a Rust variable with type [`Expr<T>`] where `T` is the type of the input/output. //! @@ -6,14 +8,14 @@ //! so you should read it. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, expr::Expr, array::Array}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] //! let my_input: UInt<10> = m.input(); //! let _: Expr<UInt<10>> = my_input; // my_input has type Expr<UInt<10>> //! #[hdl] -//! let my_output: Array<[UInt<10>; 3]> = m.output(); +//! let my_output: Array<UInt<10>, 3> = m.output(); //! # } //! ``` //! 
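A minimal sketch of how the one-level `#[hdl] let` destructuring added in destructuring.rs above is expected to behave, assuming the post-migration `fayalite::prelude` API used throughout this patch; the module name and the claim that the pattern binds the same expressions as direct field access are illustrative assumptions, not taken from the patch:

```rust
// Sketch only: assumes the post-migration `fayalite::prelude` API shown in this diff.
use fayalite::prelude::*;

#[hdl]
struct MyStruct {
    a: UInt<8>,
    b: Bool,
}

#[hdl_module]
fn destructuring_sketch() {
    #[hdl]
    let my_input: MyStruct = m.input();
    #[hdl]
    let my_output: UInt<8> = m.output();
    // `#[hdl] let MyStruct { a, b } = my_input;` should bind roughly the same
    // expressions as reading the fields of the bundle expression directly:
    let a = my_input.a; // Expr<UInt<8>>
    let b = my_input.b; // Expr<Bool>
    #[hdl]
    if b {
        connect(my_output, a);
    } else {
        connect(my_output, 0_hdl_u8);
    }
}
```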
diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/instances.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/instances.rs index ab82a14..75def03 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/instances.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/instances.rs @@ -1,18 +1,20 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! ### Module Instances //! //! module instances are kinda like the hardware equivalent of calling a function, //! you can create them like so: //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, array::Array}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] -//! let my_instance = m.instance(some_module()); +//! let my_instance = instance(some_module()); //! // now you can use `my_instance`'s inputs/outputs like so: //! #[hdl] //! let v: UInt<3> = m.input(); -//! m.connect(my_instance.a, v); +//! connect(my_instance.a, v); //! #[hdl_module] //! fn some_module() { //! #[hdl] diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/memories.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/memories.rs index 0f82e4f..ddd60b9 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/memories.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/memories.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Memories //! //! Memories are optimized for storing large amounts of data. @@ -7,12 +9,12 @@ //! //! There are several different ways to create a memory: //! -//! ## using [`ModuleBuilder::memory()`] +//! ## using [`memory()`] //! //! This way you have to set the [`depth`][`MemBuilder::depth`] separately. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, clock::ClockDomain}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! // first, we need some IO @@ -25,45 +27,45 @@ //! //! // now create the memory //! #[hdl] -//! let mut my_memory = m.memory(); +//! let mut my_memory = memory(); //! my_memory.depth(256); // the memory has 256 elements //! //! let read_port = my_memory.new_read_port(); //! //! // connect up the read port -//! m.connect_any(read_port.addr, read_addr); -//! m.connect(read_port.en, 1_hdl_u1); -//! m.connect(read_port.clk, cd.clk); -//! m.connect(read_data, read_port.data); +//! connect_any(read_port.addr, read_addr); +//! connect(read_port.en, true); +//! connect(read_port.clk, cd.clk); +//! connect(read_data, read_port.data); //! //! // we need more IO for the write port //! #[hdl] //! let write_addr: UInt<8> = m.input(); //! #[hdl] -//! let do_write: UInt<1> = m.input(); +//! let do_write: Bool = m.input(); //! #[hdl] //! let write_data: UInt<8> = m.input(); //! //! let write_port = my_memory.new_write_port(); //! -//! m.connect_any(write_port.addr, write_addr); -//! m.connect(write_port.en, do_write); -//! m.connect(write_port.clk, cd.clk); -//! m.connect(write_port.data, write_port.data); -//! m.connect(write_port.mask, 1_hdl_u1); +//! connect_any(write_port.addr, write_addr); +//! connect(write_port.en, do_write); +//! connect(write_port.clk, cd.clk); +//! connect(write_port.data, write_port.data); +//! connect(write_port.mask, true); //! # } //! ``` //! -//! ## using [`ModuleBuilder::memory_array()`] +//! ## using [`memory_array()`] //! //! 
this allows you to specify the memory's underlying array type directly. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, memory::MemBuilder}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] -//! let mut my_memory: MemBuilder<[UInt<8>; 256]> = m.memory_array(); +//! let mut my_memory: MemBuilder, ConstUsize<256>> = memory_array(); //! //! let read_port = my_memory.new_read_port(); //! // ... @@ -72,25 +74,22 @@ //! # } //! ``` //! -//! ## using [`ModuleBuilder::memory_with_init()`] +//! ## using [`memory_with_init()`] //! //! This allows you to deduce the memory's array type from the data used to initialize the memory. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! # #[hdl] //! # let read_addr: UInt<2> = m.input(); //! #[hdl] -//! let mut my_memory = m.memory_with_init( -//! #[hdl] -//! [0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8], -//! ); +//! let mut my_memory = memory_with_init([0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8]); //! //! let read_port = my_memory.new_read_port(); //! // note that `read_addr` is `UInt<2>` since the memory only has 4 elements -//! m.connect_any(read_port.addr, read_addr); +//! connect_any(read_port.addr, read_addr); //! // ... //! let write_port = my_memory.new_write_port(); //! // ... @@ -98,4 +97,4 @@ //! ``` #[allow(unused)] -use crate::{memory::MemBuilder, module::ModuleBuilder}; +use crate::prelude::*; diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/registers.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/registers.rs index 68dfaed..2876389 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/registers.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/registers.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! ### Registers //! //! Registers are memory devices that will change their state only on a clock @@ -7,20 +9,23 @@ //! //! Registers follow [connection semantics], which are unlike assignments in software, so you should read it. //! +//! By convention, register names end in `_reg` -- this helps you tell which values are written +//! immediately or on the next clock edge when connecting to them. +//! //! ``` -//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! # #[hdl] -//! # let v: UInt<1> = m.input(); +//! # let v: Bool = m.input(); //! #[hdl] //! let cd: ClockDomain = m.input(); //! #[hdl] -//! let my_register: UInt<8> = m.reg_builder().clock_domain(cd).reset(8_hdl_u8); +//! let my_reg: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8); //! #[hdl] //! if v { -//! // my_register is only changed when both `v` is set and `cd`'s clock edge occurs. -//! m.connect(my_register, 0x45_hdl_u8); +//! // my_reg is only changed when both `v` is set and `cd`'s clock edge occurs. +//! connect(my_reg, 0x45_hdl_u8); //! } //! # } //! 
``` diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/wires.rs index 532ea21..7d92b41 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/wires.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/wires.rs @@ -1,27 +1,29 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! ### Wires //! //! Wires are kinda like variables, but unlike registers, //! they have no memory (they're combinatorial). -//! You must [connect][`ModuleBuilder::connect`] to all wires, so they have a defined value. +//! You must [connect] to all wires, so they have a defined value. //! //! Wires create a Rust variable with type [`Expr<T>`] where `T` is the type of the wire. //! //! Wires follow [connection semantics], which are unlike assignments in software, so you should read it. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! # #[hdl] -//! # let v: UInt<1> = m.input(); +//! # let v: Bool = m.input(); //! #[hdl] -//! let my_wire: UInt<8> = m.wire(); +//! let my_wire: UInt<8> = wire(); //! #[hdl] //! if v { -//! m.connect(my_wire, 0x45_hdl_u8); +//! connect(my_wire, 0x45_hdl_u8); //! } else { //! // wires must be connected to under all conditions -//! m.connect(my_wire, 0x23_hdl_u8); +//! connect(my_wire, 0x23_hdl_u8); //! } //! # } //! ``` @@ -29,4 +31,4 @@ //! [connection semantics]: crate::_docs::semantics::connection_semantics #[allow(unused)] -use crate::{expr::Expr, module::ModuleBuilder}; +use crate::prelude::*; diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_literals.rs index d6b1fe4..91710e7 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_literals.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_literals.rs @@ -1,9 +1,10 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # `_hdl`-suffixed literals //! //! You can have integer literals with an arbitrary number of bits like so: //! -//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`] -//! ... which are basically just [`UInt`] or [`SInt`] converted to an expression. +//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`]. //! //! ``` //! # #[fayalite::hdl_module] diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs index 4f4116e..6df70f1 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # `#[hdl] match` Statements //! //! `#[hdl] match` statements behave similarly to Rust `match` statements, except they end up as muxes //! and stuff in the final hardware instead of being run when the fayalite module is being created. //! //! `#[hdl] match` statements' bodies must evaluate to type `()` for now. //! -//! `#[hdl] match` statements can only match one level of struct/enum pattern for now, -//! e.g. you can match with the pattern `Some(v)`, but not `Some(Some(_))`. +//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now, +//! e.g. 
you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`. diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_struct_variant_expressions.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_struct_variant_expressions.rs index 314e283..68cd685 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_struct_variant_expressions.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_struct_variant_expressions.rs @@ -1,28 +1,27 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # `#[hdl]` Struct/Variant Expressions //! //! Note: Structs are also known as [Bundles] when used in Fayalite, the Bundle name comes from [FIRRTL]. //! -//! [Bundles]: crate::bundle::BundleValue +//! [Bundles]: crate::bundle::BundleType //! [FIRRTL]: https://github.com/chipsalliance/firrtl-spec //! -//! `#[hdl]` can be used on Struct/Variant Expressions to construct a value of that -//! struct/variant's type. They can also be used on tuples. +//! `#[hdl]` can be used on Struct Expressions to construct a value of that +//! struct's type. They can also be used on tuples. //! //! ``` -//! # use fayalite::{hdl_module, int::UInt, array::Array, ty::Value}; -//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)] -//! #[hdl(static)] +//! # use fayalite::prelude::*; +//! #[hdl] //! pub struct MyStruct { //! pub a: UInt<8>, //! pub b: UInt<16>, //! } //! -//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)] +//! #[hdl] //! pub enum MyEnum { //! A, -//! B { -//! v: UInt<32>, -//! }, +//! B(UInt<32>), //! } //! //! # #[hdl_module] @@ -30,8 +29,8 @@ //! #[hdl] //! let v: UInt<8> = m.input(); //! #[hdl] -//! let my_struct: MyStruct = m.wire(); -//! m.connect( +//! let my_struct: MyStruct = wire(); +//! connect( //! my_struct, //! #[hdl] //! MyStruct { @@ -40,15 +39,14 @@ //! }, //! ); //! #[hdl] -//! let my_enum: MyEnum = m.wire(); -//! m.connect( +//! let my_enum: MyEnum = wire(); +//! connect( //! my_enum, -//! #[hdl] -//! MyEnum::B { v: 12345678_hdl_u32 }, +//! MyEnum.B(12345678_hdl_u32), //! ); //! #[hdl] -//! let some_tuple: (UInt<4>, UInt<12>) = m.wire(); -//! m.connect( +//! let some_tuple: (UInt<4>, UInt<12>) = wire(); +//! connect( //! some_tuple, //! #[hdl] //! (12_hdl_u4, 3421_hdl_u12), @@ -57,4 +55,4 @@ //! ``` #[allow(unused)] -use crate::array::Array; +use crate::prelude::*; diff --git a/crates/fayalite/src/_docs/modules/normal_module.rs b/crates/fayalite/src/_docs/modules/normal_module.rs index 7fb6c09..1267551 100644 --- a/crates/fayalite/src/_docs/modules/normal_module.rs +++ b/crates/fayalite/src/_docs/modules/normal_module.rs @@ -1,6 +1,6 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Normal Modules //! -//! These use the [`NormalModule`][`crate::module::NormalModule`] tag type. -//! //! See also: [Extern Modules][`super::extern_module`] //! See also: [Module Bodies][`super::module_bodies`] diff --git a/crates/fayalite/src/_docs/semantics.rs b/crates/fayalite/src/_docs/semantics.rs index a499e8e..2282f25 100644 --- a/crates/fayalite/src/_docs/semantics.rs +++ b/crates/fayalite/src/_docs/semantics.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Fayalite Semantics //! //! Fayalite's semantics are based on [FIRRTL]. Due to their significance, some of the semantics are also documented here. 
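A minimal sketch of an `#[hdl] match` over the new-style `#[hdl]` enum from hdl_struct_variant_expressions.rs, illustrating the one-level pattern rule described in hdl_match_statements.rs above; the `MyEnum::A` / `MyEnum::B(v)` pattern syntax and the `0_hdl_u32` literal are assumptions, not taken from the patch:

```rust
// Sketch only: assumes the post-migration `#[hdl]` enum API shown in this diff.
use fayalite::prelude::*;

#[hdl]
pub enum MyEnum {
    A,
    B(UInt<32>),
}

#[hdl_module]
fn match_sketch() {
    #[hdl]
    let my_enum: MyEnum = m.input();
    #[hdl]
    let out: UInt<32> = m.output();
    // one level of enum pattern per the rule above; nested patterns are not allowed yet
    #[hdl]
    match my_enum {
        MyEnum::A => {
            connect(out, 0_hdl_u32);
        }
        MyEnum::B(v) => {
            connect(out, v);
        }
    }
}
```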
diff --git a/crates/fayalite/src/_docs/semantics/connection_semantics.rs b/crates/fayalite/src/_docs/semantics/connection_semantics.rs index 7c77ff3..ba2a679 100644 --- a/crates/fayalite/src/_docs/semantics/connection_semantics.rs +++ b/crates/fayalite/src/_docs/semantics/connection_semantics.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information //! # Connection Semantics //! //! Fayalite's connection semantics are unlike assignments in software, so be careful! @@ -20,62 +22,60 @@ //! Connection Semantics Example: //! //! ``` -//! # use fayalite::{hdl_module, int::UInt}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] -//! let a: UInt<8> = m.wire(); +//! let a: UInt<8> = wire(); //! #[hdl] //! let b: UInt<8> = m.output(); //! //! // doesn't actually affect anything, since `a` is completely overwritten later -//! m.connect(a, 5_hdl_u8); +//! connect(a, 5_hdl_u8); //! //! // here `a` has value `7` since the last connection assigns //! // `7` to `a`, so `b` has value `7` too. -//! m.connect(b, a); +//! connect(b, a); //! //! // this is the last `connect` to `a`, so this `connect` determines `a`'s value -//! m.connect(a, 7_hdl_u8); +//! connect(a, 7_hdl_u8); //! # } //! ``` //! //! # Conditional Connection Semantics //! //! ``` -//! # use fayalite::{hdl_module, int::UInt}; +//! # use fayalite::prelude::*; //! # #[hdl_module] //! # fn module() { //! #[hdl] -//! let cond: UInt<1> = m.input(); +//! let cond: Bool = m.input(); //! #[hdl] -//! let a: UInt<8> = m.wire(); +//! let a: UInt<8> = wire(); //! #[hdl] //! let b: UInt<8> = m.output(); //! //! // this is the last `connect` to `a` when `cond` is `0` -//! m.connect(a, 5_hdl_u8); +//! connect(a, 5_hdl_u8); //! //! // here `a` has value `7` if `cond` is `1` since the last connection assigns //! // `7` to `a`, so `b` has value `7` too, otherwise `a` (and therefore `b`) //! // have value `5` since then the connection assigning `7` is in a //! // conditional block where the condition doesn't hold. -//! m.connect(b, a); +//! connect(b, a); //! //! #[hdl] //! if cond { //! // this is the last `connect` to `a` when `cond` is `1` -//! m.connect(a, 7_hdl_u8); +//! connect(a, 7_hdl_u8); //! } //! # } //! ``` //! //! [conditional block]: self#conditional-connection-semantics -//! [`connect()`]: ModuleBuilder::connect -//! [`connect_any()`]: ModuleBuilder::connect_any //! [wire]: crate::_docs::modules::module_bodies::hdl_let_statements::wires //! [if]: crate::_docs::modules::module_bodies::hdl_if_statements //! 
[FIRRTL]: https://github.com/chipsalliance/firrtl-spec #[allow(unused)] -use crate::module::ModuleBuilder; +use crate::prelude::*; diff --git a/crates/fayalite/src/annotations.rs b/crates/fayalite/src/annotations.rs index 5e2ba95..4ca84dd 100644 --- a/crates/fayalite/src/annotations.rs +++ b/crates/fayalite/src/annotations.rs @@ -1,17 +1,18 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - expr::Target, + expr::target::Target, intern::{Intern, Interned}, }; use serde::{Deserialize, Serialize}; use std::{ fmt, hash::{Hash, Hasher}, + iter::FusedIterator, ops::Deref, }; -#[derive(Clone)] +#[derive(Clone, Debug)] struct CustomFirrtlAnnotationFieldsImpl { value: serde_json::Map, serialized: Interned, @@ -118,11 +119,109 @@ pub struct CustomFirrtlAnnotation { pub additional_fields: CustomFirrtlAnnotationFields, } -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -#[non_exhaustive] -pub enum Annotation { - DontTouch, - CustomFirrtl(CustomFirrtlAnnotation), +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] +pub struct DontTouchAnnotation; + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] +pub struct SVAttributeAnnotation { + pub text: Interned, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] +pub struct BlackBoxInlineAnnotation { + pub path: Interned, + pub text: Interned, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] +pub struct BlackBoxPathAnnotation { + pub path: Interned, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] +pub struct DocStringAnnotation { + pub text: Interned, +} + +macro_rules! make_annotation_enum { + ( + #[$non_exhaustive:ident] + $(#[$meta:meta])* + $vis:vis enum $AnnotationEnum:ident { + $($Variant:ident($T:ty),)* + } + ) => { + crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive); + + #[$non_exhaustive] + $(#[$meta])* + #[derive(Clone, PartialEq, Eq, Hash)] + $vis enum $AnnotationEnum { + $($Variant($T),)* + } + + impl std::fmt::Debug for $AnnotationEnum { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + $(Self::$Variant(v) => v.fmt(f),)* + } + } + } + + $(impl From<$T> for crate::annotations::Annotation { + fn from(v: $T) -> Self { + $AnnotationEnum::$Variant(v).into() + } + } + + impl crate::annotations::IntoAnnotations for $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [self.into()] + } + } + + impl crate::annotations::IntoAnnotations for &'_ $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [crate::annotations::Annotation::from(self.clone())] + } + } + + impl crate::annotations::IntoAnnotations for &'_ mut $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [crate::annotations::Annotation::from(self.clone())] + } + } + + impl crate::annotations::IntoAnnotations for Box<$T> { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [crate::annotations::Annotation::from(*self)] + } + })* + }; + (@require_non_exhaustive non_exhaustive) => {}; +} + +pub(crate) use make_annotation_enum; + +make_annotation_enum! 
{ + #[non_exhaustive] + pub enum Annotation { + DontTouch(DontTouchAnnotation), + SVAttribute(SVAttributeAnnotation), + BlackBoxInline(BlackBoxInlineAnnotation), + BlackBoxPath(BlackBoxPathAnnotation), + DocString(DocStringAnnotation), + CustomFirrtl(CustomFirrtlAnnotation), + Xilinx(crate::vendor::xilinx::XilinxAnnotation), + } } #[derive(Clone, PartialEq, Eq, Hash, Debug)] @@ -187,10 +286,68 @@ impl IntoAnnotations for &'_ mut Annotation { } } -impl> IntoAnnotations for T { - type IntoAnnotations = Self; +pub struct IterIntoAnnotations> { + outer: T, + inner: Option<<::IntoAnnotations as IntoIterator>::IntoIter>, +} - fn into_annotations(self) -> Self::IntoAnnotations { - self +impl> Iterator for IterIntoAnnotations { + type Item = Annotation; + + fn next(&mut self) -> Option { + loop { + if let Some(inner) = &mut self.inner { + let Some(retval) = inner.next() else { + self.inner = None; + continue; + }; + return Some(retval); + } else { + self.inner = Some(self.outer.next()?.into_annotations().into_iter()); + } + } + } + + fn size_hint(&self) -> (usize, Option) { + if let (0, Some(0)) = self.outer.size_hint() { + self.inner + .as_ref() + .map(|v| v.size_hint()) + .unwrap_or((0, Some(0))) + } else { + ( + self.inner.as_ref().map(|v| v.size_hint().0).unwrap_or(0), + None, + ) + } + } + + fn fold(self, init: B, f: F) -> B + where + Self: Sized, + F: FnMut(B, Self::Item) -> B, + { + self.inner + .into_iter() + .chain(self.outer.map(|v| v.into_annotations().into_iter())) + .flatten() + .fold(init, f) + } +} + +impl< + T: FusedIterator>>, +> FusedIterator for IterIntoAnnotations +{ +} + +impl> IntoAnnotations for T { + type IntoAnnotations = IterIntoAnnotations; + + fn into_annotations(self) -> Self::IntoAnnotations { + IterIntoAnnotations { + outer: self.into_iter(), + inner: None, + } } } diff --git a/crates/fayalite/src/array.rs b/crates/fayalite/src/array.rs index 642914b..569f2e2 100644 --- a/crates/fayalite/src/array.rs +++ b/crates/fayalite/src/array.rs @@ -1,681 +1,467 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information + use crate::{ - bundle::{BundleType, BundleValue}, expr::{ - ops::{ArrayIndex, ArrayLiteral, ExprIndex}, - Expr, ToExpr, - }, - intern::{Intern, Interned, InternedCompare, Memoize}, - module::{ - transform::visit::{Fold, Folder, Visit, Visitor}, - ModuleBuilder, NormalModule, + CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr, + ops::{ArrayLiteral, ExprFromIterator, ExprIntoIterator, ExprPartialEq}, }, + int::{Bool, DYN_SIZE, DynSize, KnownSize, Size, SizeType}, + intern::{Intern, Interned, LazyInterned}, + module::transform::visit::{Fold, Folder, Visit, Visitor}, + sim::value::{SimValue, SimValuePartialEq}, source_location::SourceLocation, ty::{ - CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, DynCanonicalType, - DynCanonicalValue, DynType, DynValueTrait, MatchVariantWithoutScope, StaticType, - StaticValue, Type, TypeEnum, Value, ValueEnum, + CanonicalType, MatchVariantWithoutScope, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, TypeWithDeref, + serde_impls::SerdeCanonicalType, }, - type_deduction::{HitUndeducedType, UndeducedType}, - util::{ConstBool, GenericConstBool, MakeMutSlice}, -}; -use bitvec::{slice::BitSlice, vec::BitVec}; -use std::{ - any::Any, - borrow::{Borrow, BorrowMut}, - fmt, - hash::Hash, - marker::PhantomData, - ops::IndexMut, - sync::Arc, + util::ConstUsize, }; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error}; 
+use std::{iter::FusedIterator, ops::Index}; -mod sealed { - pub trait Sealed {} +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ArrayType { + element: LazyInterned, + len: Len::SizeType, + type_properties: TypeProperties, } -pub trait ValueArrayOrSlice: - sealed::Sealed - + BorrowMut<[::Element]> - + AsRef<[::Element]> - + AsMut<[::Element]> - + Hash - + fmt::Debug - + Eq - + Send - + Sync - + 'static - + IndexMut::Element> - + ToOwned - + InternedCompare -{ - type Element: Value::ElementType>; - type ElementType: Type::Element>; - type LenType: 'static + Copy + Ord + fmt::Debug + Hash + Send + Sync; - type Match: 'static - + Clone - + Eq - + fmt::Debug - + Hash - + Send - + Sync - + BorrowMut<[Expr]>; - type MaskVA: ValueArrayOrSlice< - Element = ::MaskValue, - ElementType = ::MaskType, - LenType = Self::LenType, - MaskVA = Self::MaskVA, - > + ?Sized; - type IsStaticLen: GenericConstBool; - const FIXED_LEN_TYPE: Option; - fn make_match(array: Expr>) -> Self::Match; - fn len_from_len_type(v: Self::LenType) -> usize; - #[allow(clippy::result_unit_err)] - fn try_len_type_from_len(v: usize) -> Result; - fn len_type(&self) -> Self::LenType; - fn len(&self) -> usize; - fn is_empty(&self) -> bool; - fn iter(&self) -> std::slice::Iter { - Borrow::<[_]>::borrow(self).iter() - } - fn clone_to_arc(&self) -> Arc; - fn arc_make_mut(v: &mut Arc) -> &mut Self; - fn arc_to_arc_slice(self: Arc) -> Arc<[Self::Element]>; -} - -impl sealed::Sealed for [T] {} - -impl ValueArrayOrSlice for [V] -where - V::Type: Type, -{ - type Element = V; - type ElementType = V::Type; - type LenType = usize; - type Match = Box<[Expr]>; - type MaskVA = [::MaskValue]; - type IsStaticLen = ConstBool; - const FIXED_LEN_TYPE: Option = None; - - fn make_match(array: Expr>) -> Self::Match { - (0..array.canonical_type().len()) - .map(|index| ArrayIndex::::new_unchecked(array.canonical(), index).to_expr()) - .collect() - } - - fn len_from_len_type(v: Self::LenType) -> usize { - v - } - - fn try_len_type_from_len(v: usize) -> Result { - Ok(v) - } - - fn len_type(&self) -> Self::LenType { - self.len() - } - - fn len(&self) -> usize { - <[_]>::len(self) - } - - fn is_empty(&self) -> bool { - <[_]>::is_empty(self) - } - - fn clone_to_arc(&self) -> Arc { - Arc::from(self) - } - - fn arc_make_mut(v: &mut Arc) -> &mut Self { - MakeMutSlice::make_mut_slice(v) - } - - fn arc_to_arc_slice(self: Arc) -> Arc<[Self::Element]> { - self +impl std::fmt::Debug for ArrayType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Array<{:?}, {}>", self.element, self.len()) } } -impl sealed::Sealed for [T; N] {} +pub type Array = ArrayType>; -impl ValueArrayOrSlice for [V; N] -where - V::Type: Type, -{ - type Element = V; - type ElementType = V::Type; - type LenType = (); - type Match = [Expr; N]; - type MaskVA = [::MaskValue; N]; - type IsStaticLen = ConstBool; - const FIXED_LEN_TYPE: Option = Some(()); +#[allow(non_upper_case_globals)] +pub const Array: ArrayWithoutGenerics = ArrayWithoutGenerics; +#[allow(non_upper_case_globals)] +pub const ArrayType: ArrayWithoutGenerics = ArrayWithoutGenerics; - fn make_match(array: Expr>) -> Self::Match { - std::array::from_fn(|index| { - ArrayIndex::::new_unchecked(array.canonical(), index).to_expr() - }) - } - - fn len_from_len_type(_v: Self::LenType) -> usize { - N - } - - fn try_len_type_from_len(v: usize) -> Result { - if v == N { - Ok(()) - } else { - Err(()) +impl ArrayType { + const fn make_type_properties(element: TypeProperties, len: usize) -> TypeProperties { + 
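+        // Scale the element's properties by the array length; the
+        // multiplications are checked so an array whose total bit width or
+        // sim-only value count would overflow `usize` panics instead of
+        // silently wrapping.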
let TypeProperties { + is_passive, + is_storable, + is_castable_from_bits, + bit_width, + sim_only_values_len, + } = element; + let Some(bit_width) = bit_width.checked_mul(len) else { + panic!("array too big"); + }; + let Some(sim_only_values_len) = sim_only_values_len.checked_mul(len) else { + panic!("array too big"); + }; + TypeProperties { + is_passive, + is_storable, + is_castable_from_bits, + bit_width, + sim_only_values_len, } } - - fn len_type(&self) -> Self::LenType {} - - fn len(&self) -> usize { - N - } - - fn is_empty(&self) -> bool { - N == 0 - } - - fn clone_to_arc(&self) -> Arc { - Arc::new(self.clone()) - } - - fn arc_make_mut(v: &mut Arc) -> &mut Self { - Arc::make_mut(v) - } - - fn arc_to_arc_slice(self: Arc) -> Arc<[Self::Element]> { - self - } -} - -#[derive(Debug, PartialEq, Eq, Hash)] -pub struct ArrayType { - element: VA::ElementType, - len: VA::LenType, - bit_width: Result, -} - -pub trait ArrayTypeTrait: - Type< - CanonicalType = ArrayType<[DynCanonicalValue]>, - Value = Array<::ValueArrayOrSlice>, - CanonicalValue = Array<[DynCanonicalValue]>, - MaskType = ArrayType< - <::ValueArrayOrSlice as ValueArrayOrSlice>::MaskVA, - >, - > + From::ValueArrayOrSlice>> - + Into::ValueArrayOrSlice>> - + BorrowMut::ValueArrayOrSlice>> - + sealed::Sealed -{ - type ValueArrayOrSlice: ValueArrayOrSlice - + ?Sized; - type Element: Value; - type ElementType: Type; -} - -impl sealed::Sealed for ArrayType {} - -impl Connect for ArrayType {} - -impl ArrayTypeTrait for ArrayType { - type ValueArrayOrSlice = VA; - type Element = VA::Element; - type ElementType = VA::ElementType; -} - -impl Clone for ArrayType { - fn clone(&self) -> Self { + pub fn new(element: T, len: Len::SizeType) -> Self { + let type_properties = + Self::make_type_properties(element.canonical().type_properties(), Len::as_usize(len)); Self { - element: self.element.clone(), - len: self.len, - bit_width: self.bit_width, + element: LazyInterned::Interned(element.intern_sized()), + len, + type_properties, } } -} - -impl Copy for ArrayType where VA::ElementType: Copy {} - -impl ArrayType { - pub fn element(&self) -> &VA::ElementType { - &self.element + pub fn element(&self) -> T { + *self.element } - pub fn len(&self) -> usize { - VA::len_from_len_type(self.len) + pub fn len(self) -> usize { + Len::as_usize(self.len) } - pub fn is_empty(&self) -> bool { + pub fn is_empty(self) -> bool { self.len() == 0 } - pub fn bit_width(&self) -> Result { - self.bit_width + pub fn type_properties(self) -> TypeProperties { + self.type_properties } - pub fn into_slice_type(self) -> ArrayType<[VA::Element]> { - ArrayType { - len: self.len(), - element: self.element, - bit_width: self.bit_width, - } + pub fn as_dyn_array(self) -> Array { + Array::new_dyn(self.element().canonical(), self.len()) } - #[track_caller] - pub fn new_with_len(element: VA::ElementType, len: usize) -> Self { - Self::new_with_len_type( - element, - VA::try_len_type_from_len(len).expect("length should match"), - ) - } - #[track_caller] - fn get_bit_width( - element: &VA::ElementType, - len: VA::LenType, - ) -> Result { - let Some(bit_width) = VA::len_from_len_type(len).checked_mul(element.bit_width()?) 
else { - panic!("array is too big: bit-width overflowed"); - }; - Ok(bit_width) - } - #[track_caller] - pub fn new_with_len_type(element: VA::ElementType, len: VA::LenType) -> Self { - let bit_width = Self::get_bit_width(&element, len); - ArrayType { - element, - len, - bit_width, - } + pub fn can_connect(self, rhs: ArrayType) -> bool { + self.len() == rhs.len() + && self + .element() + .canonical() + .can_connect(rhs.element().canonical()) } } -impl Fold for ArrayType -where - VA::ElementType: Fold, -{ +impl> ArrayType { + pub fn new_static(element: T) -> Self { + Self::new(element, Len::SizeType::default()) + } +} + +impl Default for ArrayType { + fn default() -> Self { + Self::TYPE + } +} + +impl StaticType for ArrayType { + const TYPE: Self = Self { + element: LazyInterned::new_lazy(&|| T::TYPE.intern_sized()), + len: Len::SIZE, + type_properties: Self::TYPE_PROPERTIES, + }; + const MASK_TYPE: Self::MaskType = ArrayType:: { + element: LazyInterned::new_lazy(&|| T::MASK_TYPE.intern_sized()), + len: Len::SIZE, + type_properties: Self::MASK_TYPE_PROPERTIES, + }; + const TYPE_PROPERTIES: TypeProperties = + Self::make_type_properties(T::TYPE_PROPERTIES, Len::VALUE); + const MASK_TYPE_PROPERTIES: TypeProperties = + Self::make_type_properties(T::MASK_TYPE_PROPERTIES, Len::VALUE); +} + +impl Array { + pub fn new_dyn(element: T, len: usize) -> Self { + Self::new(element, len) + } +} + +impl, Len: Size, State: Folder + ?Sized> Fold for ArrayType { fn fold(self, state: &mut State) -> Result { state.fold_array_type(self) } - fn default_fold(self, state: &mut State) -> Result { - Ok(Self::new_with_len_type(self.element.fold(state)?, self.len)) + + fn default_fold(self, state: &mut State) -> Result::Error> { + Ok(ArrayType::new(self.element().fold(state)?, self.len)) } } -impl Visit for ArrayType -where - VA::ElementType: Visit, +impl, Len: Size, State: Visitor + ?Sized> Visit + for ArrayType { fn visit(&self, state: &mut State) -> Result<(), State::Error> { state.visit_array_type(self) } + fn default_visit(&self, state: &mut State) -> Result<(), State::Error> { - self.element.visit(state) + self.element().visit(state) } } -impl>, const N: usize> ArrayType<[V; N]> { - pub fn new_array(element: V::Type) -> Self { - ArrayType::new_with_len_type(element, ()) - } -} - -impl StaticType for ArrayType<[V; N]> { - fn static_type() -> Self { - Self::new_array(StaticType::static_type()) - } -} - -impl>> ArrayType<[V]> { - pub fn new_slice(element: V::Type, len: usize) -> Self { - ArrayType::new_with_len_type(element, len) - } -} - -impl Type for ArrayType { - type CanonicalType = ArrayType<[DynCanonicalValue]>; - type Value = Array; - type CanonicalValue = Array<[DynCanonicalValue]>; - type MaskType = ArrayType; - type MaskValue = Array; - type MatchVariant = VA::Match; +impl Type for ArrayType { + type BaseType = Array; + type MaskType = ArrayType; + type SimValue = Len::ArraySimValue; + type MatchVariant = Len::ArrayMatch; type MatchActiveScope = (); - type MatchVariantAndInactiveScope = MatchVariantWithoutScope; + type MatchVariantAndInactiveScope = MatchVariantWithoutScope>; type MatchVariantsIter = std::iter::Once; - fn match_variants( - this: Expr, - module_builder: &mut ModuleBuilder, + fn match_variants( + this: Expr, source_location: SourceLocation, - ) -> Self::MatchVariantsIter - where - IO::Type: BundleType, - { - let _ = module_builder; + ) -> Self::MatchVariantsIter { let _ = source_location; - std::iter::once(MatchVariantWithoutScope(VA::make_match(this))) + let retval = Vec::from_iter(this); + 
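+        // `retval` holds one `Expr` per element, so it always has exactly
+        // `self.len()` entries and the conversion into `Len::ArrayMatch`
+        // below cannot fail.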
std::iter::once(MatchVariantWithoutScope( + Len::ArrayMatch::::try_from(retval) + .ok() + .expect("unreachable"), + )) } fn mask_type(&self) -> Self::MaskType { - #[derive(Clone, Hash, Eq, PartialEq)] - struct ArrayMaskTypeMemoize(PhantomData); - impl Copy for ArrayMaskTypeMemoize {} - impl Memoize for ArrayMaskTypeMemoize { - type Input = ArrayType; - type InputOwned = ArrayType; - type Output = as Type>::MaskType; - - fn inner(self, input: &Self::Input) -> Self::Output { - ArrayType::new_with_len_type(input.element.mask_type(), input.len) - } - } - ArrayMaskTypeMemoize::(PhantomData).get(self) + ArrayType::new(self.element().mask_type(), self.len) } - fn canonical(&self) -> Self::CanonicalType { - ArrayType { - element: self.element.canonical_dyn(), - len: self.len(), - bit_width: self.bit_width, - } + fn canonical(&self) -> CanonicalType { + CanonicalType::Array(Array::new_dyn(self.element().canonical(), self.len())) } - fn source_location(&self) -> SourceLocation { + #[track_caller] + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::Array(array) = canonical_type else { + panic!("expected array"); + }; + Self::new( + T::from_canonical(array.element()), + Len::from_usize(array.len()), + ) + } + + fn source_location() -> SourceLocation { SourceLocation::builtin() } - fn type_enum(&self) -> TypeEnum { - TypeEnum::ArrayType(self.canonical()) + fn sim_value_from_opaque(&self, mut opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let mut value = Vec::with_capacity(self.len()); + for _ in 0..self.len() { + let (element_opaque, rest) = opaque.split_at(element_size); + value.push(SimValue::from_opaque(element_ty, element_opaque.to_owned())); + opaque = rest; + } + value.try_into().ok().expect("used correct length") } - fn from_canonical_type(t: Self::CanonicalType) -> Self { - Self { - element: VA::ElementType::from_dyn_canonical_type(t.element), - len: VA::try_len_type_from_len(t.len).expect("length should match"), - bit_width: t.bit_width, + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + mut opaque: OpaqueSimValueSlice<'_>, + ) { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let value = AsMut::<[SimValue]>::as_mut(value); + assert_eq!(self.len(), value.len()); + for element_value in value { + assert_eq!(SimValue::ty(element_value), element_ty); + let (element_opaque, rest) = opaque.split_at(element_size); + SimValue::opaque_mut(element_value).clone_from_slice(element_opaque); + opaque = rest; } } - fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> { - Some(::downcast_ref::>( - this, - )?) 
- } -} - -impl Connect> - for ArrayType -{ -} - -impl CanonicalType for ArrayType<[DynCanonicalValue]> { - const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::ArrayType; -} - -#[derive(Debug, PartialEq, Eq, Hash)] -pub struct Array { - element_ty: VA::ElementType, - value: Arc, -} - -impl Clone for Array { - fn clone(&self) -> Self { - Self { - element_ty: self.element_ty.clone(), - value: self.value.clone(), + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + mut writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let value = AsRef::<[SimValue]>::as_ref(value); + assert_eq!(self.len(), value.len()); + for element_value in value { + assert_eq!(SimValue::ty(element_value), element_ty); + writer.fill_prefix_with(element_size, |writer| { + writer.fill_cloned_from_slice(SimValue::opaque(element_value).as_slice()) + }); } + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) } } -impl ToExpr for Array { - type Type = ArrayType; - - fn ty(&self) -> Self::Type { - ArrayType::new_with_len_type(self.element_ty.clone(), self.value.len_type()) - } - - fn to_expr(&self) -> Expr<::Value> { - Expr::from_value(self) - } -} - -impl Value for Array { - fn to_canonical(&self) -> ::CanonicalValue { - Array { - element_ty: self.element_ty.canonical_dyn(), - value: AsRef::<[_]>::as_ref(&*self.value) - .iter() - .map(|v| v.to_canonical_dyn()) - .collect(), +impl Serialize for ArrayType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + SerdeCanonicalType::::Array { + element: self.element(), + len: self.len(), } + .serialize(serializer) } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - #[derive(Hash, Eq, PartialEq)] - struct ArrayToBitsMemoize(PhantomData); - impl Clone for ArrayToBitsMemoize { - fn clone(&self) -> Self { - *self +} + +impl<'de, T: Type + Deserialize<'de>, Len: Size> Deserialize<'de> for ArrayType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = |len| -> String { + if let Some(len) = len { + format!("an Array<_, {len}>") + } else { + "an Array<_>".to_string() } - } - impl Copy for ArrayToBitsMemoize {} - impl Memoize for ArrayToBitsMemoize { - type Input = Array; - type InputOwned = Array; - type Output = Result, HitUndeducedType>; - - fn inner(self, input: &Self::Input) -> Self::Output { - let mut bits = BitVec::with_capacity(input.ty().bit_width().unwrap_or(0)); - for element in AsRef::<[_]>::as_ref(&*input.value).iter() { - bits.extend_from_bitslice(&element.to_bits()?); + }; + match SerdeCanonicalType::::deserialize(deserializer)? 
{ + SerdeCanonicalType::Array { element, len } => { + if let Some(len) = Len::try_from_usize(len) { + Ok(Self::new(element, len)) + } else { + Err(Error::invalid_value( + serde::de::Unexpected::Other(&name(Some(len))), + &&*name(Len::KNOWN_VALUE), + )) } - Ok(Intern::intern_owned(bits)) } - } - ArrayToBitsMemoize::(PhantomData).get(this) - } -} - -impl CanonicalValue for Array<[DynCanonicalValue]> { - fn value_enum_impl(this: &Self) -> ValueEnum { - ValueEnum::Array(this.clone()) - } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - Value::to_bits_impl(this) - } -} - -impl Array { - pub fn element_ty(&self) -> &VA::ElementType { - &self.element_ty - } - pub fn len(&self) -> usize { - VA::len_from_len_type(self.value.len_type()) - } - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - pub fn value(&self) -> &Arc { - &self.value - } - pub fn set_element(&mut self, index: usize, element: VA::Element) { - assert_eq!(self.element_ty, element.ty()); - VA::arc_make_mut(&mut self.value)[index] = element; - } - pub fn new(element_ty: VA::ElementType, value: Arc) -> Self { - for element in value.iter() { - assert_eq!(element_ty, element.ty()); - } - Self { element_ty, value } - } - pub fn into_slice(self) -> Array<[VA::Element]> { - Array { - element_ty: self.element_ty, - value: self.value.arc_to_arc_slice(), + ty => Err(Error::invalid_value( + serde::de::Unexpected::Other(ty.as_serde_unexpected_str()), + &&*name(Len::KNOWN_VALUE), + )), } } } -impl>> From for Array +impl TypeWithDeref for ArrayType { + fn expr_deref(this: &Expr) -> &Self::MatchVariant { + let retval = Vec::from_iter(*this); + Interned::into_inner(Intern::intern_sized( + Len::ArrayMatch::::try_from(retval) + .ok() + .expect("unreachable"), + )) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] +pub struct ArrayWithoutGenerics; + +impl Index for ArrayWithoutGenerics { + type Output = ArrayWithoutLen; + + fn index(&self, element: T) -> &Self::Output { + Interned::into_inner(Intern::intern_sized(ArrayWithoutLen { element })) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ArrayWithoutLen { + element: T, +} + +impl Index for ArrayWithoutLen { + type Output = ArrayType; + + fn index(&self, len: L) -> &Self::Output { + Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len))) + } +} + +impl ExprPartialEq> for ArrayType where - VA::Element: StaticValue, + Lhs: ExprPartialEq, { - fn from(value: T) -> Self { - Self::new(StaticType::static_type(), value.into()) + fn cmp_eq(lhs: Expr, rhs: Expr>) -> Expr { + let lhs_ty = Expr::ty(lhs); + let rhs_ty = Expr::ty(rhs); + assert_eq!(lhs_ty.len(), rhs_ty.len()); + lhs.into_iter() + .zip(rhs) + .map(|(l, r)| l.cmp_eq(r)) + .collect::>>() + .cast_to_bits() + .all_one_bits() + } + + fn cmp_ne(lhs: Expr, rhs: Expr>) -> Expr { + let lhs_ty = Expr::ty(lhs); + let rhs_ty = Expr::ty(rhs); + assert_eq!(lhs_ty.len(), rhs_ty.len()); + lhs.into_iter() + .zip(rhs) + .map(|(l, r)| l.cmp_ne(r)) + .collect::>>() + .cast_to_bits() + .any_one_bits() } } -impl, T: StaticType> ToExpr for [E] { - type Type = ArrayType<[T::Value]>; - - fn ty(&self) -> Self::Type { - ArrayType::new_with_len_type(StaticType::static_type(), self.len()) - } - - fn to_expr(&self) -> Expr<::Value> { - let elements = Intern::intern_owned(Vec::from_iter( - self.iter().map(|v| v.to_expr().to_canonical_dyn()), - )); - ArrayLiteral::new_unchecked(elements, self.ty()).to_expr() +impl SimValuePartialEq> for ArrayType +where + Lhs: SimValuePartialEq, +{ + fn 
sim_value_eq(this: &SimValue, other: &SimValue>) -> bool { + AsRef::<[_]>::as_ref(&**this) + .iter() + .zip(AsRef::<[_]>::as_ref(&**other)) + .all(|(l, r)| SimValuePartialEq::sim_value_eq(l, r)) } } -impl, T: StaticType> ToExpr for Vec { - type Type = ArrayType<[T::Value]>; +impl ExprIntoIterator for ArrayType { + type Item = T; + type ExprIntoIter = ExprArrayIter; - fn ty(&self) -> Self::Type { - <[E]>::ty(self) - } - - fn to_expr(&self) -> Expr<::Value> { - <[E]>::to_expr(self) - } -} - -impl, T: StaticType, const N: usize> ToExpr for [E; N] { - type Type = ArrayType<[T::Value; N]>; - - fn ty(&self) -> Self::Type { - ArrayType::new_with_len_type(StaticType::static_type(), ()) - } - - fn to_expr(&self) -> Expr<::Value> { - let elements = Intern::intern_owned(Vec::from_iter( - self.iter().map(|v| v.to_expr().to_canonical_dyn()), - )); - ArrayLiteral::new_unchecked(elements, self.ty()).to_expr() - } -} - -#[derive(Clone, Debug)] -pub struct ArrayIntoIter { - array: Arc, - indexes: std::ops::Range, -} - -impl Iterator for ArrayIntoIter { - type Item = VA::Element; - - fn next(&mut self) -> Option { - Some(self.array[self.indexes.next()?].clone()) - } - - fn size_hint(&self) -> (usize, Option) { - self.indexes.size_hint() - } -} - -impl std::iter::FusedIterator for ArrayIntoIter {} - -impl ExactSizeIterator for ArrayIntoIter {} - -impl DoubleEndedIterator for ArrayIntoIter { - fn next_back(&mut self) -> Option { - Some(self.array[self.indexes.next_back()?].clone()) - } -} - -impl Array { - pub fn iter(&self) -> std::slice::Iter<'_, VA::Element> { - self.value.iter() - } -} - -impl<'a, VA: ValueArrayOrSlice> IntoIterator for &'a Array { - type Item = &'a VA::Element; - type IntoIter = std::slice::Iter<'a, VA::Element>; - - fn into_iter(self) -> Self::IntoIter { - self.value.iter() - } -} - -impl IntoIterator for Array { - type Item = VA::Element; - type IntoIter = ArrayIntoIter; - - fn into_iter(self) -> Self::IntoIter { - ArrayIntoIter { - indexes: 0..self.len(), - array: self.value, + fn expr_into_iter(e: Expr) -> Self::ExprIntoIter { + ExprArrayIter { + base: e, + indexes: 0..Expr::ty(e).len(), } } } #[derive(Clone, Debug)] -pub struct ArrayExprIter { - array: Expr>, +pub struct ExprArrayIter { + base: Expr>, indexes: std::ops::Range, } -impl Iterator for ArrayExprIter { - type Item = Expr; +impl ExprArrayIter { + pub fn base(&self) -> Expr> { + self.base + } + pub fn indexes(&self) -> std::ops::Range { + self.indexes.clone() + } +} + +impl Iterator for ExprArrayIter { + type Item = Expr; fn next(&mut self) -> Option { - Some(ExprIndex::expr_index(self.array, self.indexes.next()?)) + self.indexes.next().map(|i| self.base[i]) } fn size_hint(&self) -> (usize, Option) { self.indexes.size_hint() } + + fn count(self) -> usize { + self.indexes.count() + } + + fn last(mut self) -> Option { + self.next_back() + } + + fn nth(&mut self, n: usize) -> Option { + self.indexes.nth(n).map(|i| self.base[i]) + } + + fn fold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes.fold(init, |b, i| f(b, self.base[i])) + } } -impl std::iter::FusedIterator for ArrayExprIter {} - -impl ExactSizeIterator for ArrayExprIter {} - -impl DoubleEndedIterator for ArrayExprIter { +impl DoubleEndedIterator for ExprArrayIter { fn next_back(&mut self) -> Option { - Some(ExprIndex::expr_index(self.array, self.indexes.next_back()?)) + self.indexes.next_back().map(|i| self.base[i]) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.indexes.nth_back(n).map(|i| self.base[i]) + } + + fn 
rfold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes.rfold(init, |b, i| f(b, self.base[i])) } } -impl IntoIterator for Expr> { - type Item = Expr; - type IntoIter = ArrayExprIter; - - fn into_iter(self) -> Self::IntoIter { - self.iter() +impl ExactSizeIterator for ExprArrayIter { + fn len(&self) -> usize { + self.indexes.len() } } -impl IntoIterator for &'_ Expr> { - type Item = Expr; - type IntoIter = ArrayExprIter; +impl FusedIterator for ExprArrayIter {} - fn into_iter(self) -> Self::IntoIter { - self.iter() +impl ExprFromIterator> for Array { + fn expr_from_iter>>(iter: T) -> Expr { + ArrayLiteral::new( + A::TYPE, + iter.into_iter().map(|v| Expr::canonical(v)).collect(), + ) + .to_expr() } } -impl Expr> { - pub fn len(self) -> usize { - self.canonical_type().len() - } - pub fn is_empty(self) -> bool { - self.canonical_type().is_empty() - } - pub fn iter(self) -> ArrayExprIter { - ArrayExprIter { - indexes: 0..self.len(), - array: self, - } +impl<'a, A: StaticType> ExprFromIterator<&'a Expr> for Array { + fn expr_from_iter>>(iter: T) -> Expr { + iter.into_iter().copied().collect() } } diff --git a/crates/fayalite/src/build.rs b/crates/fayalite/src/build.rs new file mode 100644 index 0000000..a9e9635 --- /dev/null +++ b/crates/fayalite/src/build.rs @@ -0,0 +1,2803 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::graph::JobGraph, + bundle::{Bundle, BundleType}, + intern::{Intern, InternSlice, Interned}, + module::Module, + platform::{DynPlatform, Platform}, + util::{job_server::AcquiredJob, os_str_strip_prefix}, + vendor, +}; +use clap::ArgAction; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error as _}, + ser::Error as _, +}; +use std::{ + any::{Any, TypeId}, + borrow::Cow, + cmp::Ordering, + ffi::{OsStr, OsString}, + fmt, + hash::{Hash, Hasher}, + io::Write, + marker::PhantomData, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use tempfile::TempDir; + +pub mod external; +pub mod firrtl; +pub mod formal; +pub mod graph; +pub mod registry; +pub mod verilog; + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [DynJobKind::new(BaseJobKind)] + .into_iter() + .chain(firrtl::built_in_job_kinds()) + .chain(formal::built_in_job_kinds()) + .chain(vendor::built_in_job_kinds()) + .chain(verilog::built_in_job_kinds()) +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug)] +#[non_exhaustive] +pub enum JobItem { + Path { + path: Interned, + }, + DynamicPaths { + paths: Vec>, + source_job_name: Interned, + }, +} + +impl JobItem { + pub fn name(&self) -> JobItemName { + match self { + &JobItem::Path { path } => JobItemName::Path { path }, + &JobItem::DynamicPaths { + paths: _, + source_job_name, + } => JobItemName::DynamicPaths { source_job_name }, + } + } +} + +#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum JobItemName { + Path { path: Interned }, + DynamicPaths { source_job_name: Interned }, +} + +impl JobItemName { + fn as_ref(&self) -> JobItemNameRef<'_> { + match self { + JobItemName::Path { path } => JobItemNameRef::Path { path }, + JobItemName::DynamicPaths { source_job_name } => { + JobItemNameRef::DynamicPaths { source_job_name } + } + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum JobItemNameRef<'a> { + Path { path: &'a Path }, + DynamicPaths { source_job_name: &'a str }, +} + +/// ordered by string contents, not by `Interned` +impl 
PartialOrd for JobItemName { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// ordered by string contents, not by `Interned` +impl Ord for JobItemName { + fn cmp(&self, other: &Self) -> Ordering { + if self == other { + Ordering::Equal + } else { + self.as_ref().cmp(&other.as_ref()) + } + } +} + +pub trait WriteArgs: + for<'a> Extend<&'a str> + + for<'a> Extend<&'a OsStr> + + for<'a> Extend<&'a Path> + + for<'a> Extend> + + for<'a> Extend> + + for<'a> Extend> + + Extend + + Extend + + Extend + + Extend> + + Extend> + + Extend> +{ + fn write_display_args(&mut self, args: impl IntoIterator) { + self.extend(args.into_iter().map(|v| v.to_string())); + } + fn write_owned_args(&mut self, args: impl IntoIterator>) { + self.extend(args.into_iter().map(Into::::into)) + } + fn write_args<'a>(&mut self, args: impl IntoIterator>); + fn write_interned_args(&mut self, args: impl IntoIterator>>) { + self.extend(args.into_iter().map(Into::>::into)) + } + fn write_display_arg(&mut self, arg: impl fmt::Display) { + self.write_display_args([arg]); + } + fn write_owned_arg(&mut self, arg: impl Into) { + self.extend([arg.into()]); + } + fn write_arg(&mut self, arg: impl AsRef) { + self.extend([arg.as_ref()]); + } + /// writes `--{name}={value}` + fn write_long_option_eq(&mut self, name: impl AsRef, value: impl AsRef) { + let name = name.as_ref(); + let value = value.as_ref(); + let mut option = + OsString::with_capacity(name.len().saturating_add(value.len()).saturating_add(3)); + option.push("--"); + option.push(name); + option.push("="); + option.push(value); + self.write_owned_arg(option); + } + fn write_interned_arg(&mut self, arg: impl Into>) { + self.extend([arg.into()]); + } + /// finds the first option that is `--{option_name}={value}` and returns `value` + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&OsStr>; +} + +pub trait ArgsWriterArg: + AsRef + + From> + + for<'a> From> + + for<'a> From<&'a OsStr> + + From +{ +} + +impl ArgsWriterArg for Interned {} + +impl ArgsWriterArg for OsString {} + +pub struct ArgsWriter(pub Vec); + +impl Default for ArgsWriter { + fn default() -> Self { + Self(Default::default()) + } +} + +impl ArgsWriter { + fn get_long_option_eq_helper(&self, option_name: &str) -> Option<&OsStr> { + self.0.iter().find_map(|arg| { + os_str_strip_prefix(arg.as_ref(), "--") + .and_then(|arg| os_str_strip_prefix(arg, option_name)) + .and_then(|arg| os_str_strip_prefix(arg, "=")) + }) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a str> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(AsRef::::as_ref)) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a OsStr> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a Path> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(AsRef::::as_ref)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(OsString::from)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(OsString::from)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(Interned::::from)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + 
self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(Interned::::from)) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(|v| { + match v { + Cow::Borrowed(v) => Cow::::Borrowed(v.as_ref()), + Cow::Owned(v) => Cow::Owned(v.into()), + } + .into() + })) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(|v| { + match v { + Cow::Borrowed(v) => Cow::::Borrowed(v.as_ref()), + Cow::Owned(v) => Cow::Owned(v.into()), + } + .into() + })) + } +} + +impl WriteArgs for ArgsWriter { + fn write_args<'a>(&mut self, args: impl IntoIterator>) { + self.0.extend(args.into_iter().map(|v| v.as_ref().into())) + } + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&OsStr> { + self.get_long_option_eq_helper(option_name.as_ref()) + } +} + +pub trait ToArgs: clap::Args + 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)); + fn to_interned_args(&self) -> Interned<[Interned]> { + Intern::intern_owned(self.to_interned_args_vec()) + } + fn to_interned_args_vec(&self) -> Vec> { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } + fn to_os_string_args(&self) -> Vec { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndArgs { + pub kind: K, + pub args: K::Args, +} + +impl JobKindAndArgs { + pub fn args_to_jobs( + self, + dependencies: ::KindsAndArgs, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + K::args_to_jobs( + JobArgsAndDependencies { + args: self, + dependencies, + }, + params, + global_params, + ) + } +} + +impl> Copy for JobKindAndArgs {} + +impl From> for DynJobArgs { + fn from(value: JobKindAndArgs) -> Self { + let JobKindAndArgs { kind, args } = value; + DynJobArgs::new(kind, args) + } +} + +impl TryFrom for JobKindAndArgs { + type Error = DynJobArgs; + fn try_from(value: DynJobArgs) -> Result { + value.downcast() + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndKind { + pub kind: K, + pub job: K::Job, +} + +impl> Clone for JobAndKind { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + } + } +} + +impl> Copy for JobAndKind {} + +impl From> for DynJob { + fn from(value: JobAndKind) -> Self { + let JobAndKind { kind, job } = value; + DynJob::new(kind, job) + } +} + +impl> TryFrom for JobAndKind { + type Error = DynJob; + fn try_from(value: DynJob) -> Result { + value.downcast() + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndDependencies { + pub kind: K, + pub dependencies: K::Dependencies, +} + +impl Default for JobKindAndDependencies { + fn default() -> Self { + Self::new(K::default()) + } +} + +impl JobKindAndDependencies { + pub fn new(kind: K) -> Self { + Self { + kind, + dependencies: kind.dependencies(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndDependencies { + pub job: JobAndKind, + pub dependencies: ::JobsAndKinds, +} + +impl JobAndDependencies { + pub fn get_job(&self) -> &J + where + Self: GetJob, + { + 
GetJob::get_job(self) + } + pub fn base_job(&self) -> &BaseJob { + self.job.kind.base_job(&self.job.job, &self.dependencies) + } +} + +impl Clone for JobAndDependencies +where + K::Job: Clone, + ::JobsAndKinds: Clone, +{ + fn clone(&self) -> Self { + Self { + job: self.job.clone(), + dependencies: self.dependencies.clone(), + } + } +} + +impl Copy for JobAndDependencies +where + K::Job: Copy, + ::JobsAndKinds: Copy, +{ +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobArgsAndDependencies { + pub args: JobKindAndArgs, + pub dependencies: ::KindsAndArgs, +} + +impl Copy for JobArgsAndDependencies +where + K::Args: Copy, + ::KindsAndArgs: Copy, +{ +} + +impl JobArgsAndDependencies { + pub fn args_to_jobs( + self, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + K::args_to_jobs(self, params, global_params) + } + pub fn base_job_args(&self) -> &BaseJobArgs { + self.args + .kind + .base_job_args(&self.args.args, &self.dependencies) + } +} + +impl>, D: JobKind> JobArgsAndDependencies { + pub fn args_to_jobs_simple( + self, + params: &JobParams, + global_params: &GlobalParams, + f: F, + ) -> eyre::Result> + where + F: FnOnce(K, K::Args, &mut JobAndDependencies) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind, args }, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params, global_params)?; + let job = f(kind, args, &mut dependencies)?; + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } +} + +impl>, D: JobKind> + JobArgsAndDependencies> +{ + pub fn args_to_jobs_external_simple( + self, + params: &JobParams, + global_params: &GlobalParams, + f: F, + ) -> eyre::Result<( + C::AdditionalJobData, + ::JobsAndKinds, + )> + where + F: FnOnce( + external::ExternalCommandArgs, + &mut JobAndDependencies, + ) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind: _, args }, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params, global_params)?; + let additional_job_data = f(args, &mut dependencies)?; + Ok((additional_job_data, dependencies)) + } +} + +pub trait JobDependencies: 'static + Send + Sync + Hash + Eq + fmt::Debug + Copy { + type KindsAndArgs: 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone; + type JobsAndKinds: 'static + Send + Sync + Hash + Eq + fmt::Debug; + fn kinds_dyn_extend>(self, dyn_kinds: &mut E); + fn kinds_dyn(self) -> Vec { + let mut retval = Vec::new(); + self.kinds_dyn_extend(&mut retval); + retval + } + fn into_dyn_jobs_extend>(jobs: Self::JobsAndKinds, dyn_jobs: &mut E); + fn into_dyn_jobs(jobs: Self::JobsAndKinds) -> Vec { + let mut retval = Vec::new(); + Self::into_dyn_jobs_extend(jobs, &mut retval); + retval + } + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs; + #[track_caller] + fn from_dyn_args>(args: I) -> Self::KindsAndArgs { + let mut iter = args.into_iter(); + let retval = Self::from_dyn_args_prefix(&mut iter); + if iter.next().is_some() { + panic!("wrong number of dependencies"); + } + retval + } +} + +pub trait JobDependenciesHasBase: JobDependencies { + fn base_job_args(args: &Self::KindsAndArgs) -> &BaseJobArgs; + fn base_job(jobs: &Self::JobsAndKinds) -> &BaseJob; + #[track_caller] + fn base_job_args_dyn(dependencies_args: &[DynJobArgs]) -> &BaseJobArgs; + #[track_caller] + fn base_job_dyn(dependencies: &[DynJob]) -> &BaseJob; +} + +impl JobDependencies for JobKindAndDependencies { + type KindsAndArgs = JobArgsAndDependencies; + type JobsAndKinds = 
JobAndDependencies; + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + let Self { kind, dependencies } = self; + dependencies.kinds_dyn_extend(dyn_kinds); + dyn_kinds.extend([DynJobKind::new(kind)]); + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + let JobAndDependencies { job, dependencies } = jobs; + K::Dependencies::into_dyn_jobs_extend(dependencies, dyn_jobs); + dyn_jobs.extend([job.into()]); + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + let dependencies = K::Dependencies::from_dyn_args_prefix(args); + let Some(args) = args.next() else { + panic!("wrong number of dependencies"); + }; + match args.downcast() { + Ok(args) => JobArgsAndDependencies { args, dependencies }, + Err(args) => { + panic!( + "wrong type of dependency, expected {} got:\n{args:?}", + std::any::type_name::() + ) + } + } + } +} + +impl JobDependenciesHasBase for JobKindAndDependencies { + fn base_job_args(args: &Self::KindsAndArgs) -> &BaseJobArgs { + args.base_job_args() + } + + fn base_job(jobs: &Self::JobsAndKinds) -> &BaseJob { + jobs.base_job() + } + + #[track_caller] + fn base_job_args_dyn(dependencies_args: &[DynJobArgs]) -> &BaseJobArgs { + let [dependencies_args @ .., args] = dependencies_args else { + panic!("wrong number of dependencies"); + }; + let Some((kind, args)) = args.downcast_ref::() else { + panic!( + "wrong type of dependency, expected {} got:\n{args:?}", + std::any::type_name::() + ) + }; + kind.base_job_args_dyn(args, dependencies_args) + } + + #[track_caller] + fn base_job_dyn(dependencies: &[DynJob]) -> &BaseJob { + let [dependencies @ .., job] = dependencies else { + panic!("wrong number of dependencies"); + }; + let Some((kind, job)) = job.downcast_ref::() else { + panic!( + "wrong type of dependency, expected {} got:\n{job:?}", + std::any::type_name::() + ) + }; + kind.base_job_dyn(job, dependencies) + } +} + +macro_rules! impl_job_dependencies { + (@impl $(($v:ident: $T:ident),)*) => { + impl<$($T: JobDependencies),*> JobDependencies for ($($T,)*) { + type KindsAndArgs = ($($T::KindsAndArgs,)*); + type JobsAndKinds = ($($T::JobsAndKinds,)*); + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + #![allow(unused_variables)] + let ($($v,)*) = self; + $($T::kinds_dyn_extend($v, dyn_kinds);)* + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + #![allow(unused_variables)] + let ($($v,)*) = jobs; + $($T::into_dyn_jobs_extend($v, dyn_jobs);)* + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + #![allow(unused_variables)] + $(let $v = $T::from_dyn_args_prefix(args);)* + ($($v,)*) + } + } + }; + ($($first:tt, $($rest:tt,)*)?) => { + impl_job_dependencies!(@impl $($first, $($rest,)*)?); + $(impl_job_dependencies!($($rest,)*);)? + }; +} + +impl_job_dependencies! 
{ + (v0: T0), + (v1: T1), + (v2: T2), + (v3: T3), + (v4: T4), + (v5: T5), + (v6: T6), + (v7: T7), + (v8: T8), + (v9: T9), + (v10: T10), + (v11: T11), +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobParams { + main_module: Module, +} + +impl AsRef for JobParams { + fn as_ref(&self) -> &Self { + self + } +} + +impl JobParams { + pub fn new_canonical(main_module: Module) -> Self { + Self { main_module } + } + pub fn new(main_module: impl AsRef>) -> Self { + Self::new_canonical(main_module.as_ref().canonical()) + } + pub fn main_module(&self) -> &Module { + &self.main_module + } +} + +#[derive(Clone, Debug)] +pub struct GlobalParams { + top_level_cmd: Option, + application_name: Interned, +} + +impl AsRef for GlobalParams { + fn as_ref(&self) -> &Self { + self + } +} + +impl GlobalParams { + pub fn new(top_level_cmd: Option, application_name: impl AsRef) -> Self { + Self { + top_level_cmd, + application_name: application_name.as_ref().intern(), + } + } + pub fn top_level_cmd(&self) -> Option<&clap::Command> { + self.top_level_cmd.as_ref() + } + pub fn into_top_level_cmd(self) -> Option { + self.top_level_cmd + } + pub fn extract_clap_error(&self, e: eyre::Report) -> eyre::Result { + let e = e.downcast::()?; + Ok(match &self.top_level_cmd { + Some(cmd) => e.with_cmd(cmd), + None => e, + }) + } + pub fn exit_if_clap_error(&self, e: eyre::Report) -> eyre::Report { + match self.extract_clap_error(e) { + Ok(e) => e.exit(), + Err(e) => e, + } + } + pub fn clap_error( + &self, + kind: clap::error::ErrorKind, + message: impl fmt::Display, + ) -> clap::Error { + match self.top_level_cmd.clone() { + Some(top_level_cmd) => top_level_cmd.clone().error(kind, message), + None => clap::Error::raw(kind, message), + } + } + pub fn application_name(&self) -> Interned { + self.application_name + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct CommandParams { + pub command_line: Interned<[Interned]>, + pub current_dir: Option>, +} + +impl CommandParams { + fn to_unix_shell_line( + self, + output: &mut String, + mut escape_arg: impl FnMut(&OsStr, &mut String) -> Result<(), E>, + ) -> Result<(), E> { + let Self { + command_line, + current_dir, + } = self; + let mut end = None; + let mut separator = if let Some(current_dir) = current_dir { + output.push_str("(cd "); + end = Some(")"); + if !current_dir + .as_os_str() + .as_encoded_bytes() + .first() + .is_some_and(|ch| ch.is_ascii_alphanumeric() || matches!(ch, b'/' | b'\\' | b'.')) + { + output.push_str("-- "); + } + escape_arg(current_dir.as_ref(), output)?; + "; exec -- " + } else { + "" + }; + for arg in command_line { + output.push_str(separator); + separator = " "; + escape_arg(&arg, output)?; + } + if let Some(end) = end { + output.push_str(end); + } + Ok(()) + } +} + +pub trait JobKindHelper: 'static + Send + Sync + Hash + Eq + fmt::Debug + Copy { + fn base_job_args<'a>( + self, + args: &'a ::Args, + dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs + where + Self: JobKind; + fn base_job<'a>( + self, + job: &'a ::Job, + dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob + where + Self: JobKind; + #[track_caller] + fn base_job_args_dyn<'a>( + self, + args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs + where + Self: JobKind; + #[track_caller] + fn base_job_dyn<'a>( + self, + job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob + where + Self: JobKind; +} + +impl> JobKindHelper for K { + fn 
base_job_args<'a>( + self, + _args: &'a ::Args, + dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs { + K::Dependencies::base_job_args(dependencies) + } + fn base_job<'a>( + self, + _job: &'a ::Job, + dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob { + K::Dependencies::base_job(dependencies) + } + #[track_caller] + fn base_job_args_dyn<'a>( + self, + _args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs { + K::Dependencies::base_job_args_dyn(dependencies_args) + } + #[track_caller] + fn base_job_dyn<'a>( + self, + _job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob { + K::Dependencies::base_job_dyn(dependencies) + } +} + +pub trait JobKind: JobKindHelper { + type Args: ToArgs; + type Job: 'static + Send + Sync + Hash + Eq + fmt::Debug + Serialize + DeserializeOwned; + type Dependencies: JobDependencies; + fn dependencies(self) -> Self::Dependencies; + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result>; + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn name(self) -> Interned; + fn external_command_params(self, job: &Self::Job) -> Option; + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; + fn subcommand_hidden(self) -> bool { + false + } + fn external_program(self) -> Option> { + None + } +} + +trait DynJobKindTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn dependencies_kinds_dyn(&self) -> Vec; + fn args_group_id_dyn(&self) -> Option; + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command; + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command; + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result; + fn name_dyn(&self) -> Interned; + fn subcommand_hidden_dyn(&self) -> bool; + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result; + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result; +} + +impl DynJobKindTrait for K { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn dependencies_kinds_dyn(&self) -> Vec { + self.dependencies().kinds_dyn() + } + + fn args_group_id_dyn(&self) -> Option { + ::group_id() + } + + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args(cmd) + } + + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args_for_update(cmd) + } + + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + Ok(DynJobArgs::new( + *self, + ::from_arg_matches_mut(matches)?, + )) + } + + fn name_dyn(&self) -> Interned { + self.name() + } + + fn subcommand_hidden_dyn(&self) -> bool { + self.subcommand_hidden() + } + + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result { + 
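+        // Parse the JSON using the concrete `K::Job` deserializer, then
+        // re-wrap the result together with this kind into a type-erased
+        // `DynJob`.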
Ok(DynJob::from_arc(self, serde_json::from_str(json)?)) + } + + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result { + Ok(DynJob::from_arc(self, Deserialize::deserialize(json)?)) + } +} + +#[derive(Clone)] +pub struct DynJobKind(Arc); + +impl DynJobKind { + pub fn from_arc(job_kind: Arc) -> Self { + Self(job_kind) + } + pub fn new(job_kind: K) -> Self { + Self(Arc::new(job_kind)) + } + pub fn type_id(&self) -> TypeId { + DynJobKindTrait::as_any(&*self.0).type_id() + } + pub fn downcast(&self) -> Option { + DynJobKindTrait::as_any(&*self.0).downcast_ref().copied() + } + pub fn downcast_arc(self) -> Result, Self> { + if self.downcast::().is_some() { + Ok(Arc::downcast::(self.0.as_arc_any()) + .ok() + .expect("already checked type")) + } else { + Err(self) + } + } + pub fn dependencies_kinds(&self) -> Vec { + DynJobKindTrait::dependencies_kinds_dyn(&*self.0) + } + pub fn args_group_id(&self) -> Option { + DynJobKindTrait::args_group_id_dyn(&*self.0) + } + pub fn augment_args(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_dyn(&*self.0, cmd) + } + pub fn augment_args_for_update(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_for_update_dyn(&*self.0, cmd) + } + pub fn from_arg_matches( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + DynJobKindTrait::from_arg_matches_dyn(&*self.0, matches) + } + pub fn name(&self) -> Interned { + DynJobKindTrait::name_dyn(&*self.0) + } + pub fn subcommand_hidden(&self) -> bool { + DynJobKindTrait::subcommand_hidden_dyn(&*self.0) + } + pub fn deserialize_job_from_json_str(self, json: &str) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_str(self.0, json) + } + pub fn deserialize_job_from_json_value( + self, + json: &serde_json::Value, + ) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_value(self.0, json) + } + fn make_subcommand_without_args(&self) -> clap::Command { + clap::Command::new(Interned::into_inner(self.name())).hide(self.subcommand_hidden()) + } + pub fn make_subcommand(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args(subcommand); + } + self.augment_args(subcommand) + } + pub fn make_subcommand_for_update(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args_for_update(subcommand); + } + self.augment_args_for_update(subcommand) + } +} + +impl Hash for DynJobKind { + fn hash(&self, state: &mut H) { + self.type_id().hash(state); + DynJobKindTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobKind { + fn eq(&self, other: &Self) -> bool { + DynJobKindTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobKind {} + +impl fmt::Debug for DynJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for DynJobKind { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.name().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJobKind { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = Cow::::deserialize(deserializer)?; + match Self::registry().get_by_name(&name) { + Some(retval) => Ok(retval.clone()), + None => Err(D::Error::custom(format_args!( + "unknown job kind: name not found 
in registry: {name:?}" + ))), + } + } +} + +#[derive(Copy, Clone, Debug, Default)] +pub struct DynJobKindValueParser; + +#[derive(Clone, PartialEq, Eq, Hash)] +struct DynJobKindValueEnum { + name: Interned, + job_kind: DynJobKind, +} + +impl clap::ValueEnum for DynJobKindValueEnum { + fn value_variants<'a>() -> &'a [Self] { + Interned::into_inner( + registry::JobKindRegistrySnapshot::get() + .iter_with_names() + .map(|(name, job_kind)| Self { + name, + job_kind: job_kind.clone(), + }) + .collect(), + ) + } + + fn to_possible_value(&self) -> Option { + Some(clap::builder::PossibleValue::new(Interned::into_inner( + self.name, + ))) + } +} + +impl clap::builder::TypedValueParser for DynJobKindValueParser { + type Value = DynJobKind; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> clap::error::Result { + clap::builder::EnumValueParser::::new() + .parse_ref(cmd, arg, value) + .map(|v| v.job_kind) + } + + fn possible_values( + &self, + ) -> Option + '_>> { + static ENUM_VALUE_PARSER: OnceLock> = + OnceLock::new(); + ENUM_VALUE_PARSER + .get_or_init(clap::builder::EnumValueParser::::new) + .possible_values() + } +} + +impl clap::builder::ValueParserFactory for DynJobKind { + type Parser = DynJobKindValueParser; + + fn value_parser() -> Self::Parser { + DynJobKindValueParser::default() + } +} + +trait DynExtendInternedStr { + fn extend_from_slice(&mut self, items: &[Interned]); +} + +impl Extend> for dyn DynExtendInternedStr + '_ { + fn extend>>(&mut self, iter: T) { + let mut buf = [Interned::default(); 64]; + let mut buf_len = 0; + iter.into_iter().for_each(|item| { + buf[buf_len] = item; + buf_len += 1; + if buf_len == buf.len() { + ::extend_from_slice(self, &buf); + buf_len = 0; + } + }); + if buf_len > 0 { + ::extend_from_slice( + self, + &buf[..buf_len], + ); + } + } +} + +impl>> DynExtendInternedStr for T { + fn extend_from_slice(&mut self, items: &[Interned]) { + self.extend(items.iter().copied()); + } +} + +#[derive(PartialEq, Eq, Hash, Clone)] +struct DynJobArgsInner(JobKindAndArgs); + +impl fmt::Debug for DynJobArgsInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self(JobKindAndArgs { kind, args }) = self; + f.debug_struct("DynJobArgs") + .field("kind", kind) + .field("args", args) + .finish() + } +} + +trait DynJobArgsTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn kind_type_id(&self) -> TypeId; + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn kind(&self) -> DynJobKind; + fn to_args_extend_vec(&self, args: Vec>) -> Vec>; + fn clone_into_arc(&self) -> Arc; + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()>; + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)>; + #[track_caller] + fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs; +} + +impl DynJobArgsTrait for DynJobArgsInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind(&self) -> 
DynJobKind { + DynJobKind::new(self.0.kind) + } + + fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + let mut writer = ArgsWriter(args); + self.0.args.to_args(&mut writer); + writer.0 + } + + fn clone_into_arc(&self) -> Arc { + Arc::new(self.clone()) + } + + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + clap::FromArgMatches::update_from_arg_matches_mut(&mut self.0.args, matches) + } + + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)> { + let JobAndDependencies { job, dependencies } = JobArgsAndDependencies { + args: Arc::unwrap_or_clone(self).0, + dependencies: K::Dependencies::from_dyn_args(dependencies_args), + } + .args_to_jobs(params, global_params)?; + Ok((job.into(), K::Dependencies::into_dyn_jobs(dependencies))) + } + + #[track_caller] + fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs { + self.0 + .kind + .base_job_args_dyn(&self.0.args, dependencies_args) + } +} + +#[derive(Clone)] +pub struct DynJobArgs(Arc); + +impl DynJobArgs { + pub fn new(kind: K, args: K::Args) -> Self { + Self(Arc::new(DynJobArgsInner(JobKindAndArgs { kind, args }))) + } + pub fn kind_type_id(&self) -> TypeId { + DynJobArgsTrait::kind_type_id(&*self.0) + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Args)> { + let DynJobArgsInner::(JobKindAndArgs { kind, args }) = + DynJobArgsTrait::as_any(&*self.0).downcast_ref()?; + Some((kind, args)) + } + pub fn downcast(self) -> Result, Self> { + if self.downcast_ref::().is_some() { + let this = Arc::downcast::>(self.0.as_arc_any()) + .ok() + .expect("already checked type"); + Ok(Arc::unwrap_or_clone(this).0) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobArgsTrait::kind(&*self.0) + } + pub fn to_args_vec(&self) -> Vec> { + self.to_args_extend_vec(Vec::new()) + } + pub fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + DynJobArgsTrait::to_args_extend_vec(&*self.0, args) + } + fn make_mut(&mut self) -> &mut dyn DynJobArgsTrait { + // can't just return the reference if the first get_mut returns Some since + // as of rustc 1.90.0 this causes a false-positive lifetime error. 
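+        // (Sketch of the rejected form, kept as a comment for illustration;
+        //  this is the long-standing borrow-checker limitation around
+        //  conditionally returning a borrow, where the early-returned `&mut`
+        //  is treated as live for the rest of the function:)
+        //
+        //     if let Some(args) = Arc::get_mut(&mut self.0) {
+        //         return args;
+        //     }
+        //     self.0 = DynJobArgsTrait::clone_into_arc(&*self.0); // flagged: `self.0` still borrowed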
+ if Arc::get_mut(&mut self.0).is_none() { + self.0 = DynJobArgsTrait::clone_into_arc(&*self.0); + } + Arc::get_mut(&mut self.0).expect("clone_into_arc returns a new arc with a ref-count of 1") + } + pub fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + DynJobArgsTrait::update_from_arg_matches(self.make_mut(), matches) + } + pub fn args_to_jobs( + self, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)> { + DynJobArgsTrait::args_to_jobs(self.0, dependencies_args, params, global_params) + } + #[track_caller] + pub fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs { + DynJobArgsTrait::base_job_args_dyn(&*self.0, dependencies_args) + } +} + +impl Hash for DynJobArgs { + fn hash(&self, state: &mut H) { + self.kind_type_id().hash(state); + DynJobArgsTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobArgs { + fn eq(&self, other: &Self) -> bool { + DynJobArgsTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobArgs {} + +impl fmt::Debug for DynJobArgs { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +#[derive(PartialEq, Eq, Hash)] +struct DynJobInner { + kind: Arc, + job: K::Job, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + external_command_params: Option, +} + +impl> Clone for DynJobInner { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + inputs: self.inputs, + outputs: self.outputs, + external_command_params: self.external_command_params, + } + } +} + +impl fmt::Debug for DynJobInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + kind, + job, + inputs, + outputs, + external_command_params, + } = self; + f.debug_struct("DynJob") + .field("kind", kind) + .field("job", job) + .field("inputs", inputs) + .field("outputs", outputs) + .field("external_command_params", external_command_params) + .finish() + } +} + +trait DynJobTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn kind_type_id(&self) -> TypeId; + fn kind(&self) -> DynJobKind; + fn inputs(&self) -> Interned<[JobItemName]>; + fn outputs(&self) -> Interned<[JobItemName]>; + fn external_command_params(&self) -> Option; + fn serialize_to_json_ascii(&self) -> serde_json::Result; + fn serialize_to_json_value(&self) -> serde_json::Result; + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; + #[track_caller] + fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob; +} + +impl DynJobTrait for DynJobInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn kind(&self) -> DynJobKind { + DynJobKind(self.kind.clone()) + } + + fn inputs(&self) -> Interned<[JobItemName]> { + self.inputs + } + + fn outputs(&self) -> Interned<[JobItemName]> { + self.outputs + } + + fn external_command_params(&self) -> Option { + 
self.external_command_params + } + + fn serialize_to_json_ascii(&self) -> serde_json::Result { + crate::util::serialize_to_json_ascii(&self.job) + } + + fn serialize_to_json_value(&self) -> serde_json::Result { + serde_json::to_value(&self.job) + } + + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + self.kind + .run(&self.job, inputs, params, global_params, acquired_job) + } + + #[track_caller] + fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob { + self.kind.base_job_dyn(&self.job, dependencies) + } +} + +#[derive(Clone, Debug)] +pub struct DynJob(Arc); + +impl DynJob { + pub fn from_arc(job_kind: Arc, job: K::Job) -> Self { + let inputs = job_kind.inputs(&job); + let outputs = job_kind.outputs(&job); + let external_command_params = job_kind.external_command_params(&job); + Self(Arc::new(DynJobInner { + kind: job_kind, + job, + inputs, + outputs, + external_command_params, + })) + } + pub fn new(job_kind: K, job: K::Job) -> Self { + Self::from_arc(Arc::new(job_kind), job) + } + pub fn kind_type_id(&self) -> TypeId { + self.0.kind_type_id() + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Job)> { + let DynJobInner { kind, job, .. } = self.0.as_any().downcast_ref()?; + Some((kind, job)) + } + pub fn downcast>(self) -> Result, Self> { + if self.kind_type_id() == TypeId::of::() { + let DynJobInner { kind, job, .. } = Arc::unwrap_or_clone( + self.0 + .as_arc_any() + .downcast::>() + .expect("already checked type"), + ); + Ok(JobAndKind { kind: *kind, job }) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobTrait::kind(&*self.0) + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::inputs(&*self.0) + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::outputs(&*self.0) + } + pub fn serialize_to_json_ascii(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_ascii(&*self.0) + } + pub fn serialize_to_json_value(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_value(&*self.0) + } + pub fn external_command_params(&self) -> Option { + DynJobTrait::external_command_params(&*self.0) + } + #[track_caller] + pub fn internal_command_params_with_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + let mut command_line = internal_program_prefix.to_vec(); + let command_line = match RunSingleJob::try_add_subcommand(platform, self, &mut command_line) + { + Ok(()) => { + command_line.extend_from_slice(extra_args); + Intern::intern_owned(command_line) + } + Err(e) => panic!("Serializing job {:?} failed: {e}", self.kind().name()), + }; + CommandParams { + command_line, + current_dir: None, + } + } + #[track_caller] + pub fn internal_command_params( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + self.internal_command_params_with_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + #[track_caller] + pub fn command_params_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + match self.external_command_params() { + Some(v) => v, + None => self.internal_command_params_with_program_prefix( + internal_program_prefix, + platform, + extra_args, + ), + } + } + #[track_caller] + pub fn command_params( + &self, + 
platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + self.command_params_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + DynJobTrait::run(&*self.0, inputs, params, global_params, acquired_job) + } + #[track_caller] + pub fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob { + DynJobTrait::base_job_dyn(&*self.0, dependencies) + } +} + +impl Eq for DynJob {} + +impl PartialEq for DynJob { + fn eq(&self, other: &Self) -> bool { + DynJobTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Hash for DynJob { + fn hash(&self, state: &mut H) { + DynJobTrait::hash_dyn(&*self.0, state); + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename = "DynJob")] +struct DynJobSerde { + kind: DynJobKind, + job: serde_json::Value, +} + +impl Serialize for DynJob { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + DynJobSerde { + kind: self.kind(), + job: self.serialize_to_json_value().map_err(S::Error::custom)?, + } + .serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJob { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let DynJobSerde { kind, job } = Deserialize::deserialize(deserializer)?; + kind.deserialize_job_from_json_value(&job) + .map_err(D::Error::custom) + } +} + +pub trait RunBuild: Sized { + fn main_without_platform(application_name: impl AsRef, make_params: F) + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + let application_name = application_name.as_ref(); + match Self::try_main_without_platform(application_name, make_params) { + Ok(()) => {} + Err(e) => { + let e = GlobalParams::new(Some(Self::command()), application_name) + .exit_if_clap_error(e); + eprintln!("{e:#}"); + std::process::exit(1); + } + } + } + fn try_main_without_platform( + application_name: impl AsRef, + make_params: F, + ) -> eyre::Result<()> + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + let args = Self::parse(); + let global_params = GlobalParams::new(Some(Self::command()), application_name); + args.clone() + .run_without_platform(|extra| make_params(args, extra), &global_params) + .map_err(|e| global_params.exit_if_clap_error(e)) + } + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result; + fn get_platform(&self) -> Option<&DynPlatform>; + fn main(application_name: impl AsRef, make_params: F) + where + Self: clap::Parser + Clone, + F: FnOnce(Self, DynPlatform, Extra) -> eyre::Result, + { + let application_name = application_name.as_ref(); + match Self::try_main(application_name, make_params) { + Ok(()) => {} + Err(e) => { + let e = GlobalParams::new(Some(Self::command()), application_name) + .exit_if_clap_error(e); + eprintln!("{e:#}"); + std::process::exit(1); + } + } + } + fn try_main(application_name: impl AsRef, make_params: F) -> eyre::Result<()> + where + Self: clap::Parser + Clone, + F: FnOnce(Self, DynPlatform, Extra) -> eyre::Result, + { + let args = Self::parse(); + let global_params = GlobalParams::new(Some(Self::command()), application_name); + let Some(platform) = args.get_platform().cloned() else { + return args.handle_missing_platform(&global_params); + }; + args.clone() + .run( + |platform, extra| 
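+ // `args` was cloned above, so the original can be moved into this
+ // closure for `make_params`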
make_params(args, platform, extra), + platform, + &global_params, + ) + .map_err(|e| global_params.exit_if_clap_error(e)) + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + global_params + .clap_error( + clap::error::ErrorKind::MissingRequiredArgument, + "--platform is required", + ) + .exit(); + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, Extra) -> eyre::Result, + { + self.run_without_platform(|extra| make_params(platform, extra), global_params) + } +} + +impl RunBuild for JobArgsAndDependencies { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let params = make_params(NoArgs)?; + self.args_to_jobs(¶ms, global_params)? + .run_without_platform(|_| Ok(params), global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.base_job_args().platform.as_ref() + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, NoArgs) -> eyre::Result, + { + let params = make_params(platform.clone(), NoArgs)?; + self.args_to_jobs(¶ms, global_params)? + .run(|_, _| Ok(params), platform, global_params) + } +} + +impl RunBuild for JobAndDependencies { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let params = make_params(NoArgs)?; + let Self { job, dependencies } = self; + let mut jobs = vec![DynJob::from(job)]; + K::Dependencies::into_dyn_jobs_extend(dependencies, &mut jobs); + let mut job_graph = JobGraph::new(); + job_graph.add_jobs(jobs); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.base_job().platform() + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, NoArgs) -> eyre::Result, + { + let params = make_params(platform, NoArgs)?; + let Self { job, dependencies } = self; + let mut jobs = vec![DynJob::from(job)]; + K::Dependencies::into_dyn_jobs_extend(dependencies, &mut jobs); + let mut job_graph = JobGraph::new(); + job_graph.add_jobs(jobs); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct RunSingleJob { + pub platform: Option, + pub job: DynJob, + pub extra: Extra, +} + +impl RunSingleJob { + pub const SUBCOMMAND_NAME: &'static str = "run-single-job"; + fn try_add_subcommand( + platform: Option<&DynPlatform>, + job: &DynJob, + subcommand_line: &mut Vec>, + ) -> serde_json::Result<()> { + let mut json = job.serialize_to_json_ascii()?; + json.insert_str(0, "--json="); + subcommand_line.push(Self::SUBCOMMAND_NAME.intern().into()); + if let Some(platform) = platform { + subcommand_line.push( + format!("--platform={}", platform.name()) + .intern_deref() + .into(), + ); + } + subcommand_line.push( + format!("--name={}", job.kind().name()) + .intern_deref() + .into(), + ); + subcommand_line.push(json.intern_deref().into()); + Ok(()) + } +} + +impl TryFrom> for RunSingleJob { + type Error = clap::Error; + + fn try_from(value: RunSingleJobClap) -> Result { + let 
RunSingleJobClap::RunSingleJob { + platform, + name: job_kind, + json, + extra, + } = value; + let name = job_kind.name(); + job_kind + .deserialize_job_from_json_str(&json) + .map_err(|e| { + clap::Error::raw( + clap::error::ErrorKind::ValueValidation, + format_args!("failed to parse job {name} from JSON: {e}"), + ) + }) + .map(|job| Self { + platform, + job, + extra, + }) + } +} + +#[derive(clap::Subcommand)] +enum RunSingleJobClap { + #[command(name = RunSingleJob::SUBCOMMAND_NAME, hide = true)] + RunSingleJob { + #[arg(long)] + platform: Option, + #[arg(long)] + name: DynJobKind, + #[arg(long)] + json: String, + #[command(flatten)] + extra: Extra, + }, +} + +impl clap::Subcommand for RunSingleJob { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn has_subcommand(name: &str) -> bool { + RunSingleJobClap::::has_subcommand(name) + } +} + +impl clap::FromArgMatches for RunSingleJob { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches(matches)?.try_into() + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches_mut(matches)?.try_into() + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + *self = Self::from_arg_matches_mut(matches)?; + Ok(()) + } +} + +impl RunBuild for RunSingleJob { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let params = make_params(self.extra)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([self.job]); + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.platform.as_ref() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Subcommand)] +pub enum Completions { + #[non_exhaustive] + Completions { + #[arg(default_value = Self::shell_str_from_env(), required = Self::shell_from_env().is_none())] + shell: clap_complete::aot::Shell, + }, +} + +impl Completions { + pub fn new(shell: clap_complete::aot::Shell) -> Self { + Self::Completions { shell } + } + pub fn from_env() -> Option { + Some(Self::Completions { + shell: Self::shell_from_env()?, + }) + } + fn shell_from_env() -> Option { + static SHELL: OnceLock> = OnceLock::new(); + *SHELL.get_or_init(clap_complete::aot::Shell::from_env) + } + fn shell_str_from_env() -> clap::builder::Resettable { + static SHELL_STR: OnceLock> = OnceLock::new(); + SHELL_STR + .get_or_init(|| Self::shell_from_env().map(|v| v.to_string())) + .as_deref() + .map(Into::into) + .into() + } +} + +impl RunBuild for Completions { + fn run_without_platform( + self, + _make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let Self::Completions { shell } = self; + let Some(cmd) = global_params.top_level_cmd() else { + eyre::bail!("completions command requires GlobalParams::top_level_cmd() to be Some"); + }; + let bin_name = cmd.get_bin_name().map(str::intern).unwrap_or_else(|| { + program_name_for_internal_jobs() + .to_interned_str() + .expect("program name is invalid UTF-8") + 
}); + clap_complete::aot::generate( + shell, + &mut cmd.clone(), + &*bin_name, + &mut std::io::BufWriter::new(std::io::stdout().lock()), + ); + Ok(()) + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + self.run_without_platform(|_| unreachable!(), global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + None + } +} + +#[derive( + clap::Args, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Debug, + Default, + Serialize, + Deserialize, +)] +pub struct NoArgs; + +impl ToArgs for NoArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Parser)] +pub enum BuildCli { + #[clap(flatten)] + Job(AnyJobSubcommand), + #[clap(flatten)] + RunSingleJob(RunSingleJob), + #[clap(flatten)] + Completions(Completions), + #[cfg(unix)] + #[clap(flatten)] + CreateUnixShellScript(CreateUnixShellScript), +} + +impl RunBuild for BuildCli { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + match self { + BuildCli::Job(v) => v.run_without_platform(make_params, global_params), + BuildCli::RunSingleJob(v) => v.run_without_platform(make_params, global_params), + BuildCli::Completions(v) => { + v.run_without_platform(|NoArgs {}| unreachable!(), global_params) + } + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => { + v.run_without_platform(make_params, global_params) + } + } + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + match self { + BuildCli::Job(v) => v.handle_missing_platform(global_params), + BuildCli::RunSingleJob(v) => v.handle_missing_platform(global_params), + BuildCli::Completions(v) => v.handle_missing_platform(global_params), + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.handle_missing_platform(global_params), + } + } + fn get_platform(&self) -> Option<&DynPlatform> { + match self { + BuildCli::Job(v) => v.get_platform(), + BuildCli::RunSingleJob(v) => v.get_platform(), + BuildCli::Completions(v) => v.get_platform(), + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.get_platform(), + } + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, Extra) -> eyre::Result, + { + match self { + BuildCli::Job(v) => v.run(make_params, platform, global_params), + BuildCli::RunSingleJob(v) => v.run(make_params, platform, global_params), + BuildCli::Completions(v) => { + v.run(|_, NoArgs {}| unreachable!(), platform, global_params) + } + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.run(make_params, platform, global_params), + } + } +} + +#[cfg(unix)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Subcommand)] +enum CreateUnixShellScriptInner { + CreateUnixShellScript { + #[arg(name = "i-know-this-is-incomplete", long, required = true, action = ArgAction::SetTrue)] + _incomplete: (), + #[command(subcommand)] + inner: AnyJobSubcommand, + }, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct CreateUnixShellScript(CreateUnixShellScriptInner); + +impl RunBuild for CreateUnixShellScript { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let platform = self.get_platform().cloned(); + let CreateUnixShellScriptInner::CreateUnixShellScript { + _incomplete: (), + inner: 
+ AnyJobSubcommand { + args, + dependencies_args, + extra, + }, + } = self.0; + let extra_args = extra.to_interned_args_vec(); + let params = make_params(extra)?; + let bin_name = global_params + .top_level_cmd() + .and_then(clap::Command::get_bin_name) + .map(|v| OsStr::new(v).intern()); + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms, global_params)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); + std::io::stdout().write_all( + job_graph + .to_unix_shell_script_with_internal_program_prefix( + &[bin_name.unwrap_or_else(|| program_name_for_internal_jobs())], + platform.as_ref(), + &extra_args, + ) + .as_bytes(), + )?; + Ok(()) + } + fn get_platform(&self) -> Option<&DynPlatform> { + let CreateUnixShellScriptInner::CreateUnixShellScript { inner, .. } = &self.0; + inner.get_platform() + } +} + +impl clap::FromArgMatches for CreateUnixShellScript { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches(matches).map(Self) + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches_mut(matches).map(Self) + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + self.0.update_from_arg_matches(matches) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> Result<(), clap::Error> { + self.0.update_from_arg_matches_mut(matches) + } +} + +#[cfg(unix)] +impl clap::Subcommand for CreateUnixShellScript { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands_for_update(cmd) + } + + fn has_subcommand(name: &str) -> bool { + CreateUnixShellScriptInner::::has_subcommand(name) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct AnyJobSubcommand { + pub args: DynJobArgs, + pub dependencies_args: Vec, + pub extra: Extra, +} + +impl AnyJobSubcommand { + pub fn from_subcommand_arg_matches( + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + let dependencies = job_kind.dependencies_kinds(); + let dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + Ok(Self { + args: job_kind.clone().from_arg_matches(matches)?, + dependencies_args, + extra: Extra::from_arg_matches_mut(matches)?, + }) + } + pub fn update_from_subcommand_arg_matches( + &mut self, + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + let Self { + args, + dependencies_args, + extra, + } = self; + if *job_kind == args.kind() { + for dependency in dependencies_args { + dependency.update_from_arg_matches(matches)?; + } + args.update_from_arg_matches(matches)?; + } else { + let dependencies = job_kind.dependencies_kinds(); + let new_dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + *args = job_kind.clone().from_arg_matches(matches)?; + *dependencies_args = new_dependencies_args; + } + extra.update_from_arg_matches_mut(matches) + } +} + +impl clap::Subcommand for AnyJobSubcommand { + fn augment_subcommands(mut cmd: clap::Command) -> clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = 
cmd.subcommand(Extra::augment_args(job_kind.make_subcommand())); + } + cmd + } + + fn augment_subcommands_for_update(mut cmd: clap::Command) -> clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = cmd.subcommand(Extra::augment_args_for_update( + job_kind.make_subcommand_for_update(), + )); + } + cmd + } + + fn has_subcommand(name: &str) -> bool { + registry::JobKindRegistrySnapshot::get() + .get_by_name(name) + .is_some() + } +} + +impl clap::FromArgMatches for AnyJobSubcommand { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + Self::from_arg_matches_mut(&mut matches.clone()) + } + + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + Self::from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + Self::update_from_arg_matches_mut(self, &mut matches.clone()) + } + + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + self.update_from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } +} + +impl RunBuild for AnyJobSubcommand { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let Self { + args, + dependencies_args, + extra, + } = self; + let params = make_params(extra)?; + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms, global_params)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.args + .base_job_args_dyn(&self.dependencies_args) + .platform + .as_ref() + } +} + +pub fn program_name_for_internal_jobs() -> Interned { + static PROGRAM_NAME: OnceLock> = OnceLock::new(); + *PROGRAM_NAME.get_or_init(|| { + std::env::args_os() + .next() + .expect("can't get program name") + .intern_deref() + }) +} + +#[derive(clap::Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "BaseJob")] +#[non_exhaustive] +pub struct BaseJobArgs { + /// the directory to put the generated main output file and associated files in + #[arg(short, long, value_hint = clap::ValueHint::DirPath)] + pub output: Option, + #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")] + pub keep_temp_dir: bool, + /// the stem of 
the generated main output file, e.g. to get foo.v, pass --file-stem=foo + #[arg(long)] + pub file_stem: Option, + /// run commands even if their results are already cached + #[arg(long, env = Self::RUN_EVEN_IF_CACHED_ENV_NAME)] + pub run_even_if_cached: bool, + /// platform + #[arg(long)] + pub platform: Option, +} + +impl BaseJobArgs { + pub const RUN_EVEN_IF_CACHED_ENV_NAME: &'static str = "FAYALITE_RUN_EVEN_IF_CACHED"; + pub fn from_output_dir_and_env(output: PathBuf, platform: Option) -> Self { + Self { + output: Some(output), + keep_temp_dir: false, + file_stem: None, + run_even_if_cached: std::env::var_os(Self::RUN_EVEN_IF_CACHED_ENV_NAME).is_some(), + platform, + } + } +} + +impl ToArgs for BaseJobArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + output, + keep_temp_dir, + file_stem, + run_even_if_cached, + platform, + } = self; + if let Some(output) = output { + args.write_long_option_eq("output", output); + } + if *keep_temp_dir { + args.write_arg("--keep-temp-dir"); + } + if let Some(file_stem) = file_stem { + args.write_long_option_eq("file-stem", file_stem); + } + if *run_even_if_cached { + args.write_arg("--run-even-if-cached"); + } + if let Some(platform) = platform { + args.write_long_option_eq("platform", platform.name()); + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BaseJob { + output_dir: Interned, + #[serde(skip)] + temp_dir: Option>, + file_stem: Interned, + run_even_if_cached: bool, + platform: Option, +} + +impl Hash for BaseJob { + fn hash(&self, state: &mut H) { + let Self { + output_dir, + temp_dir: _, + file_stem, + run_even_if_cached, + platform, + } = self; + output_dir.hash(state); + file_stem.hash(state); + run_even_if_cached.hash(state); + platform.hash(state); + } +} + +impl Eq for BaseJob {} + +impl PartialEq for BaseJob { + fn eq(&self, other: &Self) -> bool { + let Self { + output_dir, + temp_dir: _, + file_stem, + run_even_if_cached, + ref platform, + } = *self; + output_dir == other.output_dir + && file_stem == other.file_stem + && run_even_if_cached == other.run_even_if_cached + && *platform == other.platform + } +} + +impl BaseJob { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn temp_dir(&self) -> Option<&Arc> { + self.temp_dir.as_ref() + } + pub fn file_stem(&self) -> Interned { + self.file_stem + } + pub fn file_with_ext(&self, ext: impl AsRef) -> Interned { + let mut retval = self.output_dir().join(self.file_stem()); + retval.set_extension(ext); + retval.intern_deref() + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } + pub fn platform(&self) -> Option<&DynPlatform> { + self.platform.as_ref() + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] +pub struct BaseJobKind; + +impl JobKindHelper for BaseJobKind { + fn base_job<'a>( + self, + job: &'a ::Job, + _dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob { + job + } + fn base_job_args<'a>( + self, + args: &'a ::Args, + _dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs { + args + } + #[track_caller] + fn base_job_args_dyn<'a>( + self, + args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs { + let [] = dependencies_args else { + panic!("wrong number of dependencies"); + }; + args + } + #[track_caller] + fn base_job_dyn<'a>( + self, + job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob { + let [] = dependencies else { + panic!("wrong number of 
dependencies"); + }; + job + } +} + +impl JobKind for BaseJobKind { + type Args = BaseJobArgs; + type Job = BaseJob; + type Dependencies = (); + + fn dependencies(self) -> Self::Dependencies { + () + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + _global_params: &GlobalParams, + ) -> eyre::Result> { + let BaseJobArgs { + output, + keep_temp_dir, + file_stem, + run_even_if_cached, + platform, + } = args.args.args; + let (output_dir, temp_dir) = if let Some(output) = output { + (Intern::intern_owned(output), None) + } else { + // we create the temp dir here rather than in run so other + // jobs can have their paths based on the chosen temp dir + let temp_dir = TempDir::new()?; + let output_dir = temp_dir.path().intern(); + let temp_dir = if keep_temp_dir { + // use TempDir::into_path() to no longer automatically delete the temp dir + let temp_dir_path = temp_dir.into_path(); + println!("created temporary directory: {}", temp_dir_path.display()); + None + } else { + Some(Arc::new(temp_dir)) + }; + (output_dir, temp_dir) + }; + let file_stem = file_stem + .map(Intern::intern_deref) + .unwrap_or(params.main_module().name().into()); + Ok(JobAndDependencies { + job: JobAndKind { + kind: BaseJobKind, + job: BaseJob { + output_dir, + temp_dir, + file_stem, + run_even_if_cached, + platform, + }, + }, + dependencies: (), + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + Interned::default() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.output_dir, + }] + .intern_slice() + } + + fn name(self) -> Interned { + "base-job".intern() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(CommandParams { + command_line: [ + "mkdir".intern().into(), + "-p".intern().into(), + "--".intern().into(), + job.output_dir.into(), + ] + .intern_slice(), + current_dir: None, + }) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + let [] = inputs else { + panic!("invalid inputs for BaseJob"); + }; + std::fs::create_dir_all(&*job.output_dir)?; + Ok(vec![JobItem::Path { + path: job.output_dir, + }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +pub trait GetJob { + fn get_job(this: &Self) -> &J; +} + +impl> GetJob for &'_ T { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +impl> GetJob for &'_ mut T { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +impl> GetJob for Box { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +pub struct GetJobPositionDependencies(PhantomData); + +impl Default for GetJobPositionDependencies { + fn default() -> Self { + Self(Default::default()) + } +} + +impl fmt::Debug for GetJobPositionDependencies { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "GetJobPositionDependencies<{}>", + std::any::type_name::() + ) + } +} + +impl Hash for GetJobPositionDependencies { + fn hash(&self, _state: &mut H) {} +} + +impl Ord for GetJobPositionDependencies { + fn cmp(&self, _other: &Self) -> Ordering { + Ordering::Equal + } +} + +impl PartialOrd for GetJobPositionDependencies { + fn partial_cmp(&self, _other: &Self) -> Option { + Some(Ordering::Equal) + } +} + +impl Eq for GetJobPositionDependencies {} + +impl PartialEq for GetJobPositionDependencies { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Clone for GetJobPositionDependencies { + fn 
clone(&self) -> Self { + Self(PhantomData) + } +} + +impl Copy for GetJobPositionDependencies {} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct GetJobPositionJob; + +impl>>> + GetJob> for JobAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobAndDependencies { + fn get_job(this: &Self) -> &K::Job { + &this.job.job + } +} + +impl>>> + GetJob> for JobArgsAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobArgsAndDependencies { + fn get_job(this: &Self) -> &K::Args { + &this.args.args + } +} + +impl>> + GetJob> for JobKindAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobKindAndDependencies { + fn get_job(this: &Self) -> &K { + &this.kind + } +} diff --git a/crates/fayalite/src/build/external.rs b/crates/fayalite/src/build/external.rs new file mode 100644 index 0000000..1a90414 --- /dev/null +++ b/crates/fayalite/src/build/external.rs @@ -0,0 +1,1177 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + ArgsWriter, CommandParams, GlobalParams, JobAndDependencies, JobAndKind, + JobArgsAndDependencies, JobDependencies, JobDependenciesHasBase, JobItem, JobItemName, + JobKind, JobKindAndArgs, JobParams, ToArgs, WriteArgs, + }, + intern::{Intern, Interned}, + util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8}, +}; +use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD}; +use clap::builder::OsStringValueParser; +use eyre::{Context, ensure, eyre}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error}, +}; +use std::{ + borrow::Cow, + collections::BTreeMap, + ffi::{OsStr, OsString}, + fmt, + hash::{Hash, Hasher}, + io::Write, + marker::PhantomData, + path::{Path, PathBuf}, + process::ExitStatus, + sync::OnceLock, +}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum ExternalJobCacheVersion { + /// not used, used to be for `FormalCacheVersion` + V1, + V2, +} + +impl ExternalJobCacheVersion { + pub const CURRENT: Self = Self::V2; +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[non_exhaustive] +pub enum MaybeUtf8 { + Utf8(String), + Binary(Vec), +} + +impl MaybeUtf8 { + pub fn as_bytes(&self) -> &[u8] { + match self { + MaybeUtf8::Utf8(v) => v.as_bytes(), + MaybeUtf8::Binary(v) => v, + } + } + pub fn as_os_str(&self) -> &OsStr { + #![allow(unreachable_code)] + #[cfg(unix)] + { + return std::os::unix::ffi::OsStrExt::from_bytes(self.as_bytes()); + } + #[cfg(target_os = "wasi")] + { + return std::os::wasi::ffi::OsStrExt::from_bytes(self.as_bytes()); + } + // implementing WTF-8 is too much of a pain so don't have a special case for windows + if let Ok(s) = str::from_utf8(self.as_bytes()) { + return OsStr::new(s); + } + panic!("invalid UTF-8 conversion to OsStr is not implemented on this platform"); + } + pub fn as_path(&self) -> &Path { + Path::new(self.as_os_str()) + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename = "MaybeUtf8")] +enum MaybeUtf8Serde<'a> { + Utf8(Cow<'a, str>), + Binary(String), +} + +impl<'de> Deserialize<'de> for MaybeUtf8 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(match MaybeUtf8Serde::deserialize(deserializer)? 
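+ // `Binary` payloads are stored as unpadded base64url strings in JSON;
+ // decode them back into raw bytes here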
{ + MaybeUtf8Serde::Utf8(v) => Self::Utf8(v.into_owned()), + MaybeUtf8Serde::Binary(v) => BASE64_URL_SAFE_NO_PAD + .decode(&*v) + .map_err(D::Error::custom)? + .into(), + }) + } +} + +impl Serialize for MaybeUtf8 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + MaybeUtf8::Utf8(v) => MaybeUtf8Serde::Utf8(Cow::Borrowed(v)), + MaybeUtf8::Binary(v) => MaybeUtf8Serde::Binary(BASE64_URL_SAFE_NO_PAD.encode(v)), + } + .serialize(serializer) + } +} + +impl From> for MaybeUtf8 { + fn from(value: Vec) -> Self { + match String::from_utf8(value) { + Ok(value) => Self::Utf8(value), + Err(e) => Self::Binary(e.into_bytes()), + } + } +} + +impl From for MaybeUtf8 { + fn from(value: String) -> Self { + Self::Utf8(value) + } +} + +impl From for MaybeUtf8 { + fn from(value: PathBuf) -> Self { + Self::from(value.into_os_string().into_encoded_bytes()) + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Serialize, Deserialize)] +#[serde(rename = "File")] +pub struct ExternalJobCacheV2File<'a> { + pub name: MaybeUtf8, + pub contents: Cow<'a, MaybeUtf8>, +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] +pub struct ExternalJobCacheV2Files(pub BTreeMap); + +impl Serialize for ExternalJobCacheV2Files { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.collect_seq( + self.0 + .iter() + .map(|(name, contents)| ExternalJobCacheV2File { + name: name.clone().into(), + contents: Cow::Borrowed(contents), + }), + ) + } +} + +impl<'de> Deserialize<'de> for ExternalJobCacheV2Files { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(Self( + Vec::deserialize(deserializer)? + .into_iter() + .map(|ExternalJobCacheV2File { name, contents }| { + (name.as_path().to_path_buf(), contents.into_owned()) + }) + .collect(), + )) + } +} + +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[serde(rename = "ExternalJobCache")] +pub struct ExternalJobCacheV2 { + pub version: ExternalJobCacheVersion, + pub inputs_hash: blake3::Hash, + pub stdout_stderr: String, + pub result: Result, +} + +impl ExternalJobCacheV2 { + fn read_from_file(cache_json_path: Interned) -> eyre::Result { + let cache_str = std::fs::read_to_string(&*cache_json_path) + .wrap_err_with(|| format!("can't read {cache_json_path:?}"))?; + serde_json::from_str(&cache_str) + .wrap_err_with(|| format!("can't decode {cache_json_path:?}")) + } + fn write_to_file(&self, cache_json_path: Interned) -> eyre::Result<()> { + let cache_str = serde_json::to_string_pretty(&self).expect("serialization can't fail"); + std::fs::write(&*cache_json_path, cache_str) + .wrap_err_with(|| format!("can't write {cache_json_path:?}")) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ExternalJobCaching { + cache_json_path: Interned, + run_even_if_cached: bool, +} + +#[derive(Default)] +struct JobCacheHasher(blake3::Hasher); + +impl JobCacheHasher { + fn hash_size(&mut self, size: usize) { + self.0.update(&u64::to_le_bytes( + size.try_into().expect("size should fit in u64"), + )); + } + fn hash_sized_bytes(&mut self, bytes: &[u8]) { + self.hash_size(bytes.len()); + self.0.update(bytes); + } + fn hash_sized_os_str(&mut self, s: &OsStr) { + self.hash_sized_bytes(s.as_encoded_bytes()); + } + fn hash_iter>( + &mut self, + iter: I, + mut f: F, + ) { + let iter = iter.into_iter(); + self.hash_size(iter.len()); + iter.for_each(|item| f(self, item)); + } + fn try_hash_iter< + F: FnMut(&mut Self, I::Item) -> Result<(), E>, + E, + I: 
IntoIterator, + >( + &mut self, + iter: I, + mut f: F, + ) -> Result<(), E> { + let mut iter = iter.into_iter(); + self.hash_size(iter.len()); + iter.try_for_each(|item| f(self, item)) + } +} + +fn write_file_atomically_no_clobber C, C: AsRef<[u8]>>( + path: impl AsRef, + containing_dir: impl AsRef, + contents: F, +) -> std::io::Result<()> { + let path = path.as_ref(); + let containing_dir = containing_dir.as_ref(); + if !matches!(std::fs::exists(&path), Ok(true)) { + // use File::create_new rather than tempfile's code to get normal file permissions rather than mode 600 on Unix. + let mut file = tempfile::Builder::new() + .make_in(containing_dir, |path| std::fs::File::create_new(path))?; + file.write_all(contents().as_ref())?; // write all in one operation to avoid a bunch of tiny writes + file.into_temp_path().persist_noclobber(path)?; + } + Ok(()) +} + +impl ExternalJobCaching { + pub fn get_cache_dir_from_output_dir(output_dir: impl AsRef) -> PathBuf { + output_dir.as_ref().join(".fayalite-job-cache") + } + pub fn make_cache_dir( + cache_dir: impl AsRef, + application_name: &str, + ) -> std::io::Result<()> { + let cache_dir = cache_dir.as_ref(); + std::fs::create_dir_all(cache_dir)?; + write_file_atomically_no_clobber(cache_dir.join("CACHEDIR.TAG"), cache_dir, || { + format!( + "Signature: 8a477f597d28d172789f06886806bc55\n\ + # This file is a cache directory tag created by {application_name}.\n\ + # For information about cache directory tags see https://bford.info/cachedir/\n" + ) + })?; + write_file_atomically_no_clobber(cache_dir.join(".gitignore"), cache_dir, || { + format!( + "# This is a cache directory created by {application_name}.\n\ + # ignore all files\n\ + *\n" + ) + }) + } + pub fn new( + output_dir: impl AsRef, + application_name: &str, + json_file_stem: impl AsRef, + run_even_if_cached: bool, + ) -> std::io::Result { + let cache_dir = Self::get_cache_dir_from_output_dir(output_dir); + Self::make_cache_dir(&cache_dir, application_name)?; + let mut cache_json_path = cache_dir; + cache_json_path.push(json_file_stem.as_ref()); + cache_json_path.set_extension("json"); + Ok(Self { + cache_json_path: Path::intern_owned(cache_json_path), + run_even_if_cached, + }) + } + fn write_stdout_stderr(stdout_stderr: &str) { + if stdout_stderr == "" { + return; + } + // use print! 
so output goes to Rust test output capture + if stdout_stderr.ends_with('\n') { + print!("{stdout_stderr}"); + } else { + println!("{stdout_stderr}"); + } + } + /// returns `Err(_)` if reading the cache failed, otherwise returns `Ok(_)` with the results from the cache + fn run_from_cache( + self, + inputs_hash: blake3::Hash, + output_file_paths: impl IntoIterator>, + ) -> Result, ()> { + if self.run_even_if_cached { + return Err(()); + } + let Ok(ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash: cached_inputs_hash, + stdout_stderr, + result, + }) = ExternalJobCacheV2::read_from_file(self.cache_json_path) + else { + return Err(()); + }; + if inputs_hash != cached_inputs_hash { + return Err(()); + } + match result { + Ok(outputs) => { + for output_file_path in output_file_paths { + let Some(output_data) = outputs.0.get(&*output_file_path) else { + if let Ok(true) = std::fs::exists(&*output_file_path) { + // assume the existing file is the correct one + continue; + } + return Err(()); + }; + let Ok(()) = std::fs::write(&*output_file_path, output_data.as_bytes()) else { + return Err(()); + }; + } + Self::write_stdout_stderr(&stdout_stderr); + Ok(Ok(())) + } + Err(error) => { + Self::write_stdout_stderr(&stdout_stderr); + Ok(Err(error)) + } + } + } + fn make_command( + command_line: Interned<[Interned]>, + ) -> eyre::Result { + ensure!(!command_line.is_empty(), "command line must not be empty"); + let mut cmd = std::process::Command::new(&*command_line[0]); + cmd.args(command_line[1..].iter().map(|arg| &**arg)) + .stdin(std::process::Stdio::null()); + Ok(cmd) + } + pub fn run( + self, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + exit_status_to_error: impl FnOnce(ExitStatus) -> eyre::Report, + ) -> eyre::Result<()> + where + F: FnOnce(std::process::Command) -> eyre::Result>, + { + let mut hasher = JobCacheHasher::default(); + hasher.hash_iter(command_line.iter(), |hasher, arg| { + hasher.hash_sized_os_str(arg) + }); + let mut input_file_paths = + Vec::<&Path>::from_iter(input_file_paths.into_iter().map(Interned::into_inner)); + input_file_paths.sort_unstable(); + input_file_paths.dedup(); + hasher.try_hash_iter( + &input_file_paths, + |hasher, input_file_path| -> eyre::Result<()> { + hasher.hash_sized_os_str(input_file_path.as_ref()); + hasher.hash_sized_bytes( + &std::fs::read(input_file_path).wrap_err_with(|| { + format!("can't read job input file: {input_file_path:?}") + })?, + ); + Ok(()) + }, + )?; + let inputs_hash = hasher.0.finalize(); + match self.run_from_cache(inputs_hash, output_file_paths.clone()) { + Ok(result) => return result.map_err(|e| eyre!(e)), + Err(()) => {} + } + let (pipe_reader, stdout, stderr) = std::io::pipe() + .and_then(|(r, w)| Ok((r, w.try_clone()?, w))) + .wrap_err_with(|| format!("when trying to create a pipe to run: {command_line:?}"))?; + let mut cmd = Self::make_command(command_line)?; + cmd.stdout(stdout).stderr(stderr); + let mut stdout_stderr = String::new(); + let result = std::thread::scope(|scope| { + std::thread::Builder::new() + .name(format!("stdout:{}", command_line[0].display())) + .spawn_scoped(scope, || { + let _ = streaming_read_utf8(std::io::BufReader::new(pipe_reader), |s| { + stdout_stderr.push_str(s); + // use print! 
so output goes to Rust test output capture + print!("{s}"); + std::io::Result::Ok(()) + }); + if !stdout_stderr.is_empty() && !stdout_stderr.ends_with('\n') { + println!(); + } + }) + .expect("spawn shouldn't fail"); + run_fn(cmd) + })?; + if let Err(exit_status) = result { + // check if the user may have terminated it or something, don't cache the failure + let user_maybe_terminated; + #[cfg(unix)] + { + user_maybe_terminated = std::os::unix::process::ExitStatusExt::signal(&exit_status) + .is_some() + || exit_status.code().is_none_or(|code| code > 1); + } + #[cfg(not(unix))] + { + user_maybe_terminated = !exit_status.success(); + } + if user_maybe_terminated { + let _ = std::fs::remove_file(self.cache_json_path); + return Err(exit_status_to_error(exit_status)); + } + } + let result = result.map_err(exit_status_to_error); + ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash, + stdout_stderr, + result: match &result { + Ok(()) => Ok(ExternalJobCacheV2Files(Result::from_iter( + output_file_paths.into_iter().map( + |output_file_path: Interned| -> eyre::Result<_> { + let output_file_path = &*output_file_path; + Ok(( + PathBuf::from(output_file_path), + MaybeUtf8::from(std::fs::read(output_file_path).wrap_err_with( + || format!("can't read job output file: {output_file_path:?}"), + )?), + )) + }, + ), + )?)), + Err(e) => Err(format!("{e:#}")), + }, + } + .write_to_file(self.cache_json_path)?; + result + } + pub fn run_maybe_cached( + this: Option, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + exit_status_to_error: impl FnOnce(ExitStatus) -> eyre::Report, + ) -> eyre::Result<()> + where + F: FnOnce(std::process::Command) -> eyre::Result>, + { + match this { + Some(this) => this.run( + command_line, + input_file_paths, + output_file_paths, + run_fn, + exit_status_to_error, + ), + None => run_fn(Self::make_command(command_line)?)?.map_err(exit_status_to_error), + } + } +} + +#[derive(Clone, Eq, Hash)] +pub struct ExternalCommandJobKind(PhantomData); + +impl fmt::Debug for ExternalCommandJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ExternalCommandJobKind<{}>", std::any::type_name::()) + } +} + +impl PartialEq for ExternalCommandJobKind { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Ord for ExternalCommandJobKind { + fn cmp(&self, _other: &Self) -> std::cmp::Ordering { + std::cmp::Ordering::Equal + } +} + +impl PartialOrd for ExternalCommandJobKind { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Default for ExternalCommandJobKind { + fn default() -> Self { + Self(PhantomData) + } +} + +impl Copy for ExternalCommandJobKind {} + +impl ExternalCommandJobKind { + pub const fn new() -> Self { + Self(PhantomData) + } +} + +#[derive(Copy, Clone)] +struct ExternalProgramPathValueParser(ExternalProgram); + +fn parse_which_result( + which_result: which::Result, + program_name: impl Into, + program_path_arg_name: impl FnOnce() -> String, +) -> Result, ResolveProgramPathError> { + let which_result = match which_result { + Ok(v) => v, + Err(inner) => { + return Err(ResolveProgramPathError { + inner, + program_name: program_name.into(), + program_path_arg_name: program_path_arg_name(), + }); + } + }; + Ok(which_result.intern_deref()) +} + +impl clap::builder::TypedValueParser for ExternalProgramPathValueParser { + type Value = Interned; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: 
Option<&clap::Arg>, + value: &OsStr, + ) -> clap::error::Result { + let program_path_arg_name = self.0.program_path_arg_name; + OsStringValueParser::new() + .try_map(move |program_name| { + parse_which_result(which::which(&program_name), program_name, || { + program_path_arg_name.into() + }) + }) + .parse_ref(cmd, arg, value) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +#[group(id = T::args_group_id())] +#[non_exhaustive] +pub struct ExternalCommandArgs { + #[command(flatten)] + pub program_path: ExternalProgramPath, + #[arg( + name = Interned::into_inner(T::run_even_if_cached_arg_name()), + long = T::run_even_if_cached_arg_name(), + )] + pub run_even_if_cached: bool, + #[command(flatten)] + pub additional_args: T::AdditionalArgs, +} + +#[derive(Clone, Debug)] +pub struct ResolveProgramPathError { + inner: which::Error, + program_name: OsString, + program_path_arg_name: String, +} + +impl fmt::Display for ResolveProgramPathError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + inner, + program_name, + program_path_arg_name, + } = self; + write!( + f, + "{program_path_arg_name}: failed to resolve {program_name:?} to a valid program: {inner}", + ) + } +} + +impl std::error::Error for ResolveProgramPathError {} + +pub fn resolve_program_path( + program_name: Option<&OsStr>, + default_program_name: impl AsRef, + program_path_env_var_name: Option<&OsStr>, +) -> Result, ResolveProgramPathError> { + let default_program_name = default_program_name.as_ref(); + let owned_program_name; + let program_name = if let Some(program_name) = program_name { + program_name + } else if let Some(v) = program_path_env_var_name.and_then(std::env::var_os) { + owned_program_name = v; + &owned_program_name + } else { + default_program_name + }; + parse_which_result(which::which(program_name), program_name, || { + default_program_name.display().to_string() + }) +} + +impl ExternalCommandArgs { + pub fn with_resolved_program_path( + program_path: Interned, + additional_args: T::AdditionalArgs, + ) -> Self { + Self::new( + ExternalProgramPath::with_resolved_program_path(program_path), + additional_args, + ) + } + pub fn new( + program_path: ExternalProgramPath, + additional_args: T::AdditionalArgs, + ) -> Self { + Self { + program_path, + run_even_if_cached: false, + additional_args, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + additional_args: T::AdditionalArgs, + ) -> Result { + Ok(Self::new( + ExternalProgramPath::resolve_program_path(program_name)?, + additional_args, + )) + } +} + +impl ToArgs for ExternalCommandArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + program_path, + run_even_if_cached, + ref additional_args, + } = *self; + program_path.to_args(args); + if run_even_if_cached { + args.write_display_arg(format_args!("--{}", T::run_even_if_cached_arg_name())); + } + additional_args.to_args(args); + } +} + +#[derive(Copy, Clone)] +struct ExternalCommandJobParams { + command_params: CommandParams, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + output_paths: Interned<[Interned]>, +} + +impl ExternalCommandJobParams { + fn new(job: &ExternalCommandJob) -> Self { + let output_paths = T::output_paths(job); + let mut command_line = ArgsWriter(vec![job.program_path.as_interned_os_str()]); + T::command_line_args(job, &mut command_line); + Self { + command_params: CommandParams { + command_line: Intern::intern_owned(command_line.0), + current_dir: T::current_dir(job), + }, + inputs: 
T::inputs(job), + outputs: output_paths + .iter() + .map(|&path| JobItemName::Path { path }) + .collect(), + output_paths, + } + } +} + +#[derive(Deserialize, Serialize)] +pub struct ExternalCommandJob { + additional_job_data: T::AdditionalJobData, + program_path: Interned, + output_dir: Interned, + run_even_if_cached: bool, + #[serde(skip)] + params_cache: OnceLock, +} + +impl Eq for ExternalCommandJob {} + +impl> Clone for ExternalCommandJob { + fn clone(&self) -> Self { + let Self { + ref additional_job_data, + program_path, + output_dir, + run_even_if_cached, + ref params_cache, + } = *self; + Self { + additional_job_data: additional_job_data.clone(), + program_path, + output_dir, + run_even_if_cached, + params_cache: params_cache.clone(), + } + } +} + +impl fmt::Debug for ExternalCommandJob { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + write!(f, "ExternalCommandJob<{}>", std::any::type_name::())?; + f.debug_struct("") + .field("additional_job_data", additional_job_data) + .field("program_path", program_path) + .field("output_dir", output_dir) + .field("run_even_if_cached", run_even_if_cached) + .finish() + } +} + +impl PartialEq for ExternalCommandJob { + fn eq(&self, other: &Self) -> bool { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + *additional_job_data == other.additional_job_data + && *program_path == other.program_path + && *output_dir == other.output_dir + && *run_even_if_cached == other.run_even_if_cached + } +} + +impl Hash for ExternalCommandJob { + fn hash(&self, state: &mut H) { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + additional_job_data.hash(state); + program_path.hash(state); + output_dir.hash(state); + run_even_if_cached.hash(state); + } +} + +impl ExternalCommandJob { + pub fn additional_job_data(&self) -> &T::AdditionalJobData { + &self.additional_job_data + } + pub fn program_path(&self) -> Interned { + self.program_path + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } + fn params(&self) -> &ExternalCommandJobParams { + self.params_cache + .get_or_init(|| ExternalCommandJobParams::new(self)) + } + pub fn command_params(&self) -> CommandParams { + self.params().command_params + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + self.params().inputs + } + pub fn output_paths(&self) -> Interned<[Interned]> { + self.params().output_paths + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + self.params().outputs + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ExternalProgramPath { + program_path: Interned, + _phantom: PhantomData, +} + +impl ExternalProgramPath { + pub fn with_resolved_program_path(program_path: Interned) -> Self { + Self { + program_path, + _phantom: PhantomData, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + ) -> Result { + let ExternalProgram { + default_program_name, + program_path_arg_name: _, + program_path_arg_value_name: _, + program_path_env_var_name, + } = ExternalProgram::new::(); + Ok(Self { + program_path: resolve_program_path( + program_name, + default_program_name, + program_path_env_var_name.as_ref().map(OsStr::new), + )?, + _phantom: PhantomData, + }) + } + pub fn program_path(&self) -> Interned { + 
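+ // the absolute program path, normally resolved via `which` from the CLI
+ // argument, the program's environment variable, or the default program name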
self.program_path + } +} + +impl fmt::Debug for ExternalProgramPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + program_path, + _phantom: _, + } = self; + write!(f, "ExternalProgramPath<{}>", std::any::type_name::())?; + f.debug_tuple("").field(program_path).finish() + } +} + +impl clap::FromArgMatches for ExternalProgramPath { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + let id = Interned::into_inner(ExternalProgram::new::().program_path_arg_name); + // don't remove argument so later instances of Self can use it too + let program_path = *matches.get_one(id).expect("arg should always be present"); + Ok(Self { + program_path, + _phantom: PhantomData, + }) + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } +} + +impl clap::Args for ExternalProgramPath { + fn augment_args(cmd: clap::Command) -> clap::Command { + let external_program @ ExternalProgram { + default_program_name, + program_path_arg_name, + program_path_arg_value_name, + program_path_env_var_name, + } = ExternalProgram::new::(); + let arg = cmd + .get_arguments() + .find(|arg| *arg.get_id().as_str() == *program_path_arg_name); + if let Some(arg) = arg { + // don't insert duplicate arguments. + // check that the previous argument actually matches this argument: + assert!(!arg.is_required_set()); + assert!(matches!(arg.get_action(), clap::ArgAction::Set)); + assert_eq!(arg.get_long(), Some(&*program_path_arg_name)); + assert_eq!( + arg.get_value_names(), + Some(&[clap::builder::Str::from(program_path_arg_value_name)][..]) + ); + assert_eq!( + arg.get_env(), + program_path_env_var_name.as_ref().map(OsStr::new) + ); + assert_eq!( + arg.get_default_values(), + &[OsStr::new(&default_program_name)] + ); + assert_eq!(arg.get_value_hint(), clap::ValueHint::CommandName); + cmd + } else { + cmd.arg( + clap::Arg::new(Interned::into_inner(program_path_arg_name)) + .required(false) + .value_parser(ExternalProgramPathValueParser(external_program)) + .action(clap::ArgAction::Set) + .long(program_path_arg_name) + .value_name(program_path_arg_value_name) + .env(program_path_env_var_name.map(Interned::into_inner)) + .default_value(default_program_name) + .value_hint(clap::ValueHint::CommandName), + ) + } + } + + fn augment_args_for_update(cmd: clap::Command) -> clap::Command { + Self::augment_args(cmd) + } +} + +impl ToArgs for ExternalProgramPath { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let ExternalProgram { + program_path_arg_name, + .. 
+ } = ExternalProgram::new::(); + let Self { + program_path, + _phantom: _, + } = self; + if args.get_long_option_eq(program_path_arg_name) != Some(program_path.as_os_str()) { + args.write_long_option_eq(program_path_arg_name, program_path); + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub struct ExternalProgram { + default_program_name: Interned, + program_path_arg_name: Interned, + program_path_arg_value_name: Interned, + program_path_env_var_name: Option>, +} + +impl ExternalProgram { + pub fn new() -> Self { + Self { + default_program_name: T::default_program_name(), + program_path_arg_name: T::program_path_arg_name(), + program_path_arg_value_name: T::program_path_arg_value_name(), + program_path_env_var_name: T::program_path_env_var_name(), + } + } + pub fn default_program_name(&self) -> Interned { + self.default_program_name + } + pub fn program_path_arg_name(&self) -> Interned { + self.program_path_arg_name + } + pub fn program_path_arg_value_name(&self) -> Interned { + self.program_path_arg_value_name + } + pub fn program_path_env_var_name(&self) -> Option> { + self.program_path_env_var_name + } +} + +impl From for ExternalProgram { + fn from(_value: T) -> Self { + Self::new::() + } +} + +impl From for Interned { + fn from(_value: T) -> Self { + ExternalProgram::new::().intern_sized() + } +} + +pub trait ExternalProgramTrait: + 'static + Send + Sync + Hash + Ord + fmt::Debug + Default + Copy +{ + fn program_path_arg_name() -> Interned { + Self::default_program_name() + } + fn program_path_arg_value_name() -> Interned { + Intern::intern_owned(Self::program_path_arg_name().to_uppercase()) + } + fn default_program_name() -> Interned; + fn program_path_env_var_name() -> Option> { + Some(Intern::intern_owned( + Self::program_path_arg_name() + .to_uppercase() + .replace('-', "_"), + )) + } +} + +pub trait ExternalCommand: 'static + Send + Sync + Hash + Eq + fmt::Debug + Sized + Clone { + type AdditionalArgs: ToArgs; + type AdditionalJobData: 'static + + Send + + Sync + + Hash + + Eq + + fmt::Debug + + Serialize + + DeserializeOwned; + type BaseJobPosition; + type Dependencies: JobDependenciesHasBase; + type ExternalProgram: ExternalProgramTrait; + fn dependencies() -> Self::Dependencies; + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )>; + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]>; + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]>; + fn command_line_args(job: &ExternalCommandJob, args: &mut W); + fn current_dir(job: &ExternalCommandJob) -> Option>; + fn job_kind_name() -> Interned; + fn args_group_id() -> clap::Id { + Interned::into_inner(Self::job_kind_name()).into() + } + fn run_even_if_cached_arg_name() -> Interned { + Intern::intern_owned(format!("{}-run-even-if-cached", Self::job_kind_name())) + } + fn subcommand_hidden() -> bool { + false + } +} + +impl JobKind for ExternalCommandJobKind { + type Args = ExternalCommandArgs; + type Job = ExternalCommandJob; + type Dependencies = T::Dependencies; + + fn dependencies(self) -> Self::Dependencies { + T::dependencies() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + let JobKindAndArgs { + kind, + args: + ExternalCommandArgs { + program_path: + ExternalProgramPath { + program_path, + _phantom: _, + }, + run_even_if_cached, + additional_args: _, + 
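
As a worked sketch of the defaults just defined (mirroring the Symbiyosys marker type that appears later in this patch): an implementor only has to supply the program name, and the long option, value name, and environment variable fall out of the trait's default methods. The Interned<str> return type is an assumption here, since generic parameters are not visible in this diff.

    // hypothetical marker type; everything except the name is derived
    #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
    struct Sby;

    impl ExternalProgramTrait for Sby {
        fn default_program_name() -> Interned<str> {
            "sby".intern()
        }
    }

    // derived by the trait's default methods:
    //   program_path_arg_name()       -> "sby"        (clap long option `--sby <SBY>`)
    //   program_path_arg_value_name() -> "SBY"        (uppercased arg name)
    //   program_path_env_var_name()   -> Some("SBY")  (uppercased, with `-` mapped to `_`)
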
}, + } = args.args; + let (additional_job_data, dependencies) = T::args_to_jobs(args, params, global_params)?; + let base_job = T::Dependencies::base_job(&dependencies); + let job = ExternalCommandJob { + additional_job_data, + program_path, + output_dir: base_job.output_dir(), + run_even_if_cached: base_job.run_even_if_cached() | run_even_if_cached, + params_cache: OnceLock::new(), + }; + job.params(); // fill cache + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.inputs() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.outputs() + } + + fn name(self) -> Interned { + T::job_kind_name() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(job.command_params()) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!( + inputs.iter().map(JobItem::name).eq(job.inputs()), + "{}\ninputs:\n{inputs:?}\njob.inputs():\n{:?}", + std::any::type_name::(), + job.inputs(), + ); + let CommandParams { + command_line, + current_dir, + } = job.command_params(); + ExternalJobCaching::new( + &job.output_dir, + &global_params.application_name(), + &T::job_kind_name(), + job.run_even_if_cached, + )? + .run( + command_line, + inputs + .iter() + .flat_map(|item| match item { + JobItem::Path { path } => std::slice::from_ref(path), + JobItem::DynamicPaths { + paths, + source_job_name: _, + } => paths, + }) + .copied(), + job.output_paths(), + |mut cmd| { + if let Some(current_dir) = current_dir { + cmd.current_dir(current_dir); + } + let status = acquired_job.run_command(cmd, |cmd| cmd.status())?; + if !status.success() { + Ok(Err(status)) + } else { + Ok(Ok(())) + } + }, + |status| eyre!("running {command_line:?} failed: {status}"), + )?; + Ok(job + .output_paths() + .iter() + .map(|&path| JobItem::Path { path }) + .collect()) + } + + fn subcommand_hidden(self) -> bool { + T::subcommand_hidden() + } + + fn external_program(self) -> Option> { + Some(ExternalProgram::new::().intern_sized()) + } +} diff --git a/crates/fayalite/src/build/firrtl.rs b/crates/fayalite/src/build/firrtl.rs new file mode 100644 index 0000000..b5574a9 --- /dev/null +++ b/crates/fayalite/src/build/firrtl.rs @@ -0,0 +1,128 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies, + JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, + ToArgs, WriteArgs, + }, + firrtl::{ExportOptions, FileBackend}, + intern::{Intern, InternSlice, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] +pub struct FirrtlJobKind; + +#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "Firrtl")] +#[non_exhaustive] +pub struct FirrtlArgs { + #[command(flatten)] + pub export_options: ExportOptions, +} + +impl ToArgs for FirrtlArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { export_options } = self; + export_options.to_args(args); + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct Firrtl { + base: BaseJob, + export_options: ExportOptions, +} + +impl Firrtl { 
+ fn make_firrtl_file_backend(&self) -> FileBackend { + FileBackend { + dir_path: PathBuf::from(&*self.base.output_dir()), + top_fir_file_stem: Some(self.base.file_stem().into()), + circuit_name: None, + } + } + pub fn firrtl_file(&self) -> Interned { + self.base.file_with_ext("fir") + } +} + +impl JobKind for FirrtlJobKind { + type Args = FirrtlArgs; + type Job = Firrtl; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + JobKindAndDependencies::new(BaseJobKind) + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.args_to_jobs_simple( + params, + global_params, + |_kind, FirrtlArgs { export_options }, dependencies| { + Ok(Firrtl { + base: dependencies.get_job::().clone(), + export_options, + }) + }, + ) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.base.output_dir(), + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.firrtl_file(), + }] + .intern_slice() + } + + fn name(self) -> Interned { + "firrtl".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + let [JobItem::Path { path: input_path }] = *inputs else { + panic!("wrong inputs, expected a single `Path`"); + }; + assert_eq!(input_path, job.base.output_dir()); + crate::firrtl::export( + job.make_firrtl_file_backend(), + params.main_module(), + job.export_options, + )?; + Ok(vec![JobItem::Path { + path: job.firrtl_file(), + }]) + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [DynJobKind::new(FirrtlJobKind)] +} diff --git a/crates/fayalite/src/build/formal.rs b/crates/fayalite/src/build/formal.rs new file mode 100644 index 0000000..69c0f2c --- /dev/null +++ b/crates/fayalite/src/build/formal.rs @@ -0,0 +1,388 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams, + JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind, + JobKindAndDependencies, JobParams, ToArgs, WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind}, + }, + intern::{Intern, InternSlice, Interned}, + module::NameId, + testing::FormalMode, + util::job_server::AcquiredJob, +}; +use clap::Args; +use eyre::Context; +use serde::{Deserialize, Serialize}; +use std::{ + ffi::{OsStr, OsString}, + fmt::{self, Write}, + path::Path, +}; + +#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct FormalArgs { + #[arg(long = "sby-extra-arg", value_name = "ARG")] + pub sby_extra_args: Vec, + #[arg(long, default_value_t)] + pub formal_mode: FormalMode, + #[arg(long, default_value_t = Self::DEFAULT_DEPTH)] + pub formal_depth: u64, + #[arg(long, default_value = Self::DEFAULT_SOLVER)] + pub formal_solver: String, + #[arg(long = "smtbmc-extra-arg", value_name = "ARG")] + pub smtbmc_extra_args: Vec, +} + +impl FormalArgs { + pub const DEFAULT_DEPTH: u64 = 20; + pub const DEFAULT_SOLVER: &'static str = "z3"; +} + +impl ToArgs for FormalArgs { + fn 
to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + } = self; + for arg in sby_extra_args { + args.write_long_option_eq("sby-extra-arg", arg); + } + args.write_display_args([ + format_args!("--formal-mode={formal_mode}"), + format_args!("--formal-depth={formal_depth}"), + format_args!("--formal-solver={formal_solver}"), + ]); + for arg in smtbmc_extra_args { + args.write_long_option_eq("smtbmc-extra-arg", arg); + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +pub struct WriteSbyFileJobKind; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] +pub struct WriteSbyFileJob { + sby_extra_args: Interned<[Interned]>, + formal_mode: FormalMode, + formal_depth: u64, + formal_solver: Interned, + smtbmc_extra_args: Interned<[Interned]>, + sby_file: Interned, + output_dir: Interned, + main_verilog_file: Interned, +} + +impl WriteSbyFileJob { + pub fn sby_extra_args(&self) -> Interned<[Interned]> { + self.sby_extra_args + } + pub fn formal_mode(&self) -> FormalMode { + self.formal_mode + } + pub fn formal_depth(&self) -> u64 { + self.formal_depth + } + pub fn formal_solver(&self) -> Interned { + self.formal_solver + } + pub fn smtbmc_extra_args(&self) -> Interned<[Interned]> { + self.smtbmc_extra_args + } + pub fn sby_file(&self) -> Interned { + self.sby_file + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + fn write_sby( + &self, + output: &mut OsString, + additional_files: &[Interned], + main_module_name_id: NameId, + ) -> eyre::Result<()> { + let Self { + sby_extra_args: _, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file: _, + output_dir: _, + main_verilog_file, + } = self; + write!( + output, + "[options]\n\ + mode {formal_mode}\n\ + depth {formal_depth}\n\ + wait on\n\ + \n\ + [engines]\n\ + smtbmc {formal_solver} -- --" + ) + .expect("writing to OsString can't fail"); + for i in smtbmc_extra_args { + output.push(" "); + output.push(i); + } + output.push( + "\n\ + \n\ + [script]\n", + ); + for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? 
{ + output.push("read_verilog -sv -formal \""); + output.push(verilog_file); + output.push("\"\n"); + } + let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id); + // workaround for wires disappearing -- set `keep` on all wires + writeln!( + output, + "hierarchy -top {circuit_name}\n\ + proc\n\ + setattr -set keep 1 w:\\*\n\ + prep", + ) + .expect("writing to OsString can't fail"); + Ok(()) + } +} + +impl JobKind for WriteSbyFileJobKind { + type Args = FormalArgs; + type Job = WriteSbyFileJob; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + mut args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.dependencies + .dependencies + .args + .args + .additional_args + .verilog_dialect + .get_or_insert(VerilogDialect::Yosys); + args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| { + let FormalArgs { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + } = args; + let base_job = dependencies.get_job::(); + Ok(WriteSbyFileJob { + sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(), + formal_mode, + formal_depth, + formal_solver: formal_solver.intern_deref(), + smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(), + sby_file: base_job.file_with_ext("sby"), + output_dir: base_job.output_dir(), + main_verilog_file: dependencies.get_job::().main_verilog_file(), + }) + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { path: job.sby_file }].intern_slice() + } + + fn name(self) -> Interned { + "write-sby-file".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let [additional_files] = inputs else { + unreachable!(); + }; + let additional_files = VerilogJob::unwrap_additional_files(additional_files); + let mut contents = OsString::new(); + job.write_sby( + &mut contents, + additional_files, + params.main_module().name_id(), + )?; + let path = job.sby_file; + std::fs::write(path, contents.as_encoded_bytes()) + .wrap_err_with(|| format!("writing {path:?} failed"))?; + Ok(vec![JobItem::Path { path }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +pub struct Formal { + #[serde(flatten)] + write_sby_file: WriteSbyFileJob, + sby_file_name: Interned, +} + +impl fmt::Debug for Formal { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + write_sby_file: + WriteSbyFileJob { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file, + output_dir: _, + main_verilog_file, + }, + sby_file_name, + } = self; + f.debug_struct("Formal") + .field("sby_extra_args", sby_extra_args) + .field("formal_mode", formal_mode) + .field("formal_depth", formal_depth) + .field("formal_solver", formal_solver) + .field("smtbmc_extra_args", smtbmc_extra_args) + .field("sby_file", sby_file) + .field("sby_file_name", sby_file_name) + 
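
For orientation, write_sby above emits roughly the following file; the formal job defined further down then runs SymbiYosys on it from the output directory. This is a hedged sketch: the module name "top" and file name "main.v" are placeholders, "prove" is just one valid --formal-mode value, and 20 / z3 are the documented defaults.

    [options]
    mode prove
    depth 20
    wait on

    [engines]
    smtbmc z3 -- --

    [script]
    read_verilog -sv -formal "main.v"
    hierarchy -top top
    proc
    setattr -set keep 1 w:\*
    prep
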
.field("main_verilog_file", main_verilog_file) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Symbiyosys; + +impl ExternalProgramTrait for Symbiyosys { + fn default_program_name() -> Interned { + "sby".intern() + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)] +pub struct FormalAdditionalArgs {} + +impl ToArgs for FormalAdditionalArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +impl ExternalCommand for Formal { + type AdditionalArgs = FormalAdditionalArgs; + type AdditionalJobData = Formal; + type BaseJobPosition = GetJobPositionDependencies< + GetJobPositionDependencies< + GetJobPositionDependencies<::BaseJobPosition>, + >, + >; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Symbiyosys; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, global_params, |args, dependencies| { + let FormalAdditionalArgs {} = args.additional_args; + let write_sby_file = dependencies.get_job::().clone(); + Ok(Formal { + sby_file_name: write_sby_file + .sby_file() + .interned_file_name() + .expect("known to have file name"), + write_sby_file, + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.additional_job_data().write_sby_file.sby_file(), + }, + JobItemName::Path { + path: job.additional_job_data().write_sby_file.main_verilog_file(), + }, + JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }, + ] + .intern_slice() + } + + fn output_paths(_job: &ExternalCommandJob) -> Interned<[Interned]> { + Interned::default() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + // args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode + args.write_arg("-f"); + args.write_interned_arg(job.additional_job_data().sby_file_name); + args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args()); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "formal".intern() + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(WriteSbyFileJobKind), + DynJobKind::new(ExternalCommandJobKind::::new()), + ] +} diff --git a/crates/fayalite/src/build/graph.rs b/crates/fayalite/src/build/graph.rs new file mode 100644 index 0000000..d81b282 --- /dev/null +++ b/crates/fayalite/src/build/graph.rs @@ -0,0 +1,847 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs, + }, + intern::Interned, + platform::DynPlatform, + util::{HashMap, HashSet, job_server::AcquiredJob}, +}; +use eyre::{ContextCompat, eyre}; +use petgraph::{ + algo::{DfsSpace, kosaraju_scc, toposort}, + graph::DiGraph, + visit::{GraphBase, Visitable}, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq}; +use std::{ + cell::OnceCell, + collections::{BTreeMap, BTreeSet, VecDeque}, + convert::Infallible, + ffi::OsStr, + fmt::{self, Write}, + panic, + rc::Rc, + str::Utf8Error, + sync::mpsc, + thread::{self, 
ScopedJoinHandle}, +}; + +macro_rules! write_str { + ($s:expr, $($rest:tt)*) => { + write!($s, $($rest)*).expect("String::write_fmt can't fail") + }; +} + +#[derive(Clone, Debug)] +enum JobGraphNode { + Job(DynJob), + Item { + #[allow(dead_code, reason = "name used for debugging")] + name: JobItemName, + source_job: Option, + }, +} + +type JobGraphInner = DiGraph; + +#[derive(Clone, Default)] +pub struct JobGraph { + jobs: HashMap::NodeId>, + items: HashMap::NodeId>, + graph: JobGraphInner, + topological_order: Vec<::NodeId>, + space: DfsSpace<::NodeId, ::Map>, +} + +impl fmt::Debug for JobGraph { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + jobs: _, + items: _, + graph, + topological_order, + space: _, + } = self; + f.debug_struct("JobGraph") + .field("graph", graph) + .field("topological_order", topological_order) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Debug)] +pub enum JobGraphError { + CycleError { + job: DynJob, + output: JobItemName, + }, + MultipleJobsCreateSameOutput { + output_item: JobItemName, + existing_job: DynJob, + new_job: DynJob, + }, +} + +impl std::error::Error for JobGraphError {} + +impl fmt::Display for JobGraphError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CycleError { job, output } => write!( + f, + "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\ + {output:?}\n\ + job:\n{job:?}", + ), + JobGraphError::MultipleJobsCreateSameOutput { + output_item, + existing_job, + new_job, + } => write!( + f, + "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\ + conflicting output:\n\ + {output_item:?}\n\ + existing job:\n\ + {existing_job:?}\n\ + new job:\n\ + {new_job:?}", + ), + } + } +} + +#[derive(Copy, Clone, Debug)] +enum EscapeForUnixShellState { + DollarSingleQuote, + SingleQuote, + Unquoted, +} + +#[derive(Clone)] +pub struct EscapeForUnixShell<'a> { + state: EscapeForUnixShellState, + prefix: [u8; 3], + bytes: &'a [u8], +} + +impl<'a> fmt::Debug for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for c in self.clone() { + f.write_char(c)?; + } + Ok(()) + } +} + +impl<'a> EscapeForUnixShell<'a> { + pub fn new(s: &'a (impl ?Sized + AsRef)) -> Self { + Self::from_bytes(s.as_ref().as_encoded_bytes()) + } + fn make_prefix(bytes: &[u8]) -> [u8; 3] { + let mut prefix = [0; 3]; + prefix[..bytes.len()].copy_from_slice(bytes); + prefix + } + pub fn from_bytes(bytes: &'a [u8]) -> Self { + let mut needs_single_quote = bytes.is_empty(); + for &b in bytes { + match b { + b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true, + 0..0x20 | 0x7F.. 
=> { + return Self { + state: EscapeForUnixShellState::DollarSingleQuote, + prefix: Self::make_prefix(b"$'"), + bytes, + }; + } + _ => {} + } + } + if needs_single_quote { + Self { + state: EscapeForUnixShellState::SingleQuote, + prefix: Self::make_prefix(b"'"), + bytes, + } + } else { + Self { + state: EscapeForUnixShellState::Unquoted, + prefix: Self::make_prefix(b""), + bytes, + } + } + } +} + +impl Iterator for EscapeForUnixShell<'_> { + type Item = char; + + fn next(&mut self) -> Option { + match &mut self.prefix { + [0, 0, 0] => {} + [0, 0, v] | // find first + [0, v, _] | // non-zero byte + [v, _, _] => { + let retval = *v as char; + *v = 0; + return Some(retval); + } + } + let Some(&next_byte) = self.bytes.split_off_first() else { + return match self.state { + EscapeForUnixShellState::DollarSingleQuote + | EscapeForUnixShellState::SingleQuote => { + self.state = EscapeForUnixShellState::Unquoted; + Some('\'') + } + EscapeForUnixShellState::Unquoted => None, + }; + }; + match self.state { + EscapeForUnixShellState::DollarSingleQuote => match next_byte { + b'\'' | b'\\' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + b'\t' => { + self.prefix = Self::make_prefix(b"t"); + Some('\\') + } + b'\n' => { + self.prefix = Self::make_prefix(b"n"); + Some('\\') + } + b'\r' => { + self.prefix = Self::make_prefix(b"r"); + Some('\\') + } + 0x20..=0x7E => Some(next_byte as char), + _ => { + self.prefix = [ + b'x', + char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range") + as u8, + char::from_digit(next_byte as u32 & 0xF, 0x10) + .expect("known to be in range") as u8, + ]; + Some('\\') + } + }, + EscapeForUnixShellState::SingleQuote => { + if next_byte == b'\'' { + self.prefix = Self::make_prefix(b"\\''"); + Some('\'') + } else { + Some(next_byte as char) + } + } + EscapeForUnixShellState::Unquoted => match next_byte { + b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b',' + | b';' | b'<' | b'>' | b'?' 
| b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|' + | b'}' | b'~' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + _ => Some(next_byte as char), + }, + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub enum UnixMakefileEscapeKind { + NonRecipe, + RecipeWithoutShellEscaping, + RecipeWithShellEscaping, +} + +#[derive(Copy, Clone)] +pub struct EscapeForUnixMakefile<'a> { + s: &'a OsStr, + kind: UnixMakefileEscapeKind, +} + +impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.do_write( + f, + fmt::Write::write_str, + fmt::Write::write_char, + |_, _| Ok(()), + |_| unreachable!("already checked that the input causes no UTF-8 errors"), + ) + } +} + +impl<'a> EscapeForUnixMakefile<'a> { + fn do_write( + &self, + state: &mut S, + write_str: impl Fn(&mut S, &str) -> Result<(), E>, + write_char: impl Fn(&mut S, char) -> Result<(), E>, + add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>, + utf8_error: impl Fn(Utf8Error) -> E, + ) -> Result<(), E> { + let escape_recipe_char = |c| match c { + '$' => write_str(state, "$$"), + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }; + match self.kind { + UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes()) + .map_err(&utf8_error)? + .chars() + .try_for_each(|c| match c { + '=' => { + add_variable(state, "EQUALS = =")?; + write_str(state, "$(EQUALS)") + } + ';' => panic!("can't escape a semicolon (;) for Unix Makefile"), + '$' => write_str(state, "$$"), + '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => { + write_char(state, '\\')?; + write_char(state, c) + } + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }), + UnixMakefileEscapeKind::RecipeWithoutShellEscaping => { + str::from_utf8(self.s.as_encoded_bytes()) + .map_err(&utf8_error)? 
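
Backing up to EscapeForUnixShell for a moment: a few concrete renderings that follow from the three quoting states above (an illustrative sketch, not part of the patch; it assumes the char iterator is simply collected into a String).

    fn demo_shell_escaping() {
        let esc = |s: &str| EscapeForUnixShell::new(s).collect::<String>();
        assert_eq!(esc("hello"), "hello");      // nothing special: left unquoted
        assert_eq!(esc("a b"), "'a b'");        // space, quotes, or `!` force single quotes
        assert_eq!(esc("it's"), r"'it'\''s'");  // an embedded ' closes, escapes, and reopens the quotes
        assert_eq!(esc("a\tb"), r"$'a\tb'");    // control bytes switch to $'...' quoting
        assert_eq!(esc(""), "''");              // an empty argument still renders as ''
    }
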
+ .chars() + .try_for_each(escape_recipe_char) + } + UnixMakefileEscapeKind::RecipeWithShellEscaping => { + EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char) + } + } + } + pub fn new( + s: &'a (impl ?Sized + AsRef), + kind: UnixMakefileEscapeKind, + needed_variables: &mut BTreeSet<&'static str>, + ) -> Result { + let s = s.as_ref(); + let retval = Self { s, kind }; + retval.do_write( + needed_variables, + |_, _| Ok(()), + |_, _| Ok(()), + |needed_variables, variable| { + needed_variables.insert(variable); + Ok(()) + }, + |e| e, + )?; + Ok(retval) + } +} + +impl JobGraph { + pub fn new() -> Self { + Self::default() + } + fn try_add_item_node( + &mut self, + name: JobItemName, + new_source_job: Option, + new_nodes: &mut HashSet<::NodeId>, + ) -> Result<::NodeId, JobGraphError> { + use hashbrown::hash_map::Entry; + match self.items.entry(name) { + Entry::Occupied(item_entry) => { + let node_id = *item_entry.get(); + let JobGraphNode::Item { + name: _, + source_job, + } = &mut self.graph[node_id] + else { + unreachable!("known to be an item"); + }; + if let Some(new_source_job) = new_source_job { + if let Some(source_job) = source_job { + return Err(JobGraphError::MultipleJobsCreateSameOutput { + output_item: item_entry.key().clone(), + existing_job: source_job.clone(), + new_job: new_source_job, + }); + } else { + *source_job = Some(new_source_job); + } + } + Ok(node_id) + } + Entry::Vacant(item_entry) => { + let node_id = self.graph.add_node(JobGraphNode::Item { + name, + source_job: new_source_job, + }); + new_nodes.insert(node_id); + item_entry.insert(node_id); + Ok(node_id) + } + } + } + pub fn try_add_jobs>( + &mut self, + jobs: I, + ) -> Result<(), JobGraphError> { + use hashbrown::hash_map::Entry; + let jobs = jobs.into_iter(); + struct RemoveNewNodesOnError<'a> { + this: &'a mut JobGraph, + new_nodes: HashSet<::NodeId>, + } + impl Drop for RemoveNewNodesOnError<'_> { + fn drop(&mut self) { + for node in self.new_nodes.drain() { + self.this.graph.remove_node(node); + } + } + } + let mut remove_new_nodes_on_error = RemoveNewNodesOnError { + this: self, + new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()), + }; + let new_nodes = &mut remove_new_nodes_on_error.new_nodes; + let this = &mut *remove_new_nodes_on_error.this; + for job in jobs { + let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else { + continue; + }; + let job_node_id = this + .graph + .add_node(JobGraphNode::Job(job_entry.key().clone())); + new_nodes.insert(job_node_id); + job_entry.insert(job_node_id); + for name in job.outputs() { + let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?; + this.graph.add_edge(job_node_id, item_node_id, ()); + } + for name in job.inputs() { + let item_node_id = this.try_add_item_node(name, None, new_nodes)?; + this.graph.add_edge(item_node_id, job_node_id, ()); + } + } + match toposort(&this.graph, Some(&mut this.space)) { + Ok(v) => { + this.topological_order = v; + // no need to remove any of the new nodes on drop since we didn't encounter any errors + remove_new_nodes_on_error.new_nodes.clear(); + Ok(()) + } + Err(_) => { + // there's at least one cycle, find one! 
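
The search below relies on the graph being bipartite (jobs only ever connect to items), so any strongly connected component with more than one node must contain a cycle. A minimal standalone sketch of the same petgraph pattern, separate from this patch:

    use petgraph::{
        algo::{kosaraju_scc, toposort},
        graph::DiGraph,
    };

    fn demo_report_cycle() {
        let mut g = DiGraph::<&str, ()>::new();
        let job = g.add_node("job");
        let item = g.add_node("item");
        // the job produces the item but also (incorrectly) depends on it
        g.add_edge(job, item, ());
        g.add_edge(item, job, ());
        assert!(toposort(&g, None).is_err());
        // a cycle has to sit inside some SCC with more than one node
        let cycle = kosaraju_scc(&g).into_iter().find(|scc| scc.len() > 1);
        assert_eq!(cycle.map(|scc| scc.len()), Some(2));
    }
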
+ let cycle = kosaraju_scc(&this.graph) + .into_iter() + .find_map(|scc| { + if scc.len() <= 1 { + // can't be a cycle since our graph is bipartite -- + // jobs only connect to items, never jobs to jobs or items to items + None + } else { + Some(scc) + } + }) + .expect("we know there's a cycle"); + let cycle_set = HashSet::from_iter(cycle.iter().copied()); + let job = cycle + .into_iter() + .find_map(|node_id| { + if let JobGraphNode::Job(job) = &this.graph[node_id] { + Some(job.clone()) + } else { + None + } + }) + .expect("a job must be part of the cycle"); + let output = job + .outputs() + .into_iter() + .find(|output| cycle_set.contains(&this.items[output])) + .expect("an output must be part of the cycle"); + Err(JobGraphError::CycleError { job, output }) + } + } + } + #[track_caller] + pub fn add_jobs>(&mut self, jobs: I) { + match self.try_add_jobs(jobs) { + Ok(()) => {} + Err(e) => panic!("error: {e}"), + } + } + pub fn to_unix_makefile( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> Result { + self.to_unix_makefile_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn to_unix_makefile_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> Result { + let mut retval = String::new(); + let mut needed_variables = BTreeSet::new(); + let mut phony_targets = BTreeSet::new(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let outputs = job.outputs(); + if outputs.is_empty() { + retval.push_str(":"); + } else { + for output in job.outputs() { + match output { + JobItemName::Path { path } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + &str::from_utf8(path.as_os_str().as_encoded_bytes())?, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + &source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + if outputs.len() == 1 { + retval.push_str(":"); + } else { + retval.push_str("&:"); + } + } + for input in job.inputs() { + match input { + JobItemName::Path { path } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &str::from_utf8(path.as_os_str().as_encoded_bytes())?, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + retval.push_str("\n\t"); + job.command_params_with_internal_program_prefix( + internal_program_prefix, + platform, + extra_args, + ) + .to_unix_shell_line(&mut retval, |arg, output| { + write_str!( + output, + "{}", + EscapeForUnixMakefile::new( + arg, + UnixMakefileEscapeKind::RecipeWithShellEscaping, + &mut needed_variables + )? + ); + Ok(()) + })?; + retval.push_str("\n\n"); + } + if !phony_targets.is_empty() { + retval.push_str("\n.PHONY:"); + for phony_target in phony_targets { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + phony_target, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? 
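
One detail worth calling out in the rule emission above: a job with more than one output is written with the GNU Make 4.3+ grouped-target separator `&:`, so the recipe runs once for the whole group, while a single output uses a plain `:`. A hypothetical rule shape (paths and recipe are placeholders; real recipes come from the job's escaped command line):

    out/top.v out/extra.v &: out/top.fir
    	<escaped command line for the job>
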
+ ); + } + retval.push_str("\n"); + } + if !needed_variables.is_empty() { + retval.insert_str( + 0, + &String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))), + ); + } + Ok(retval) + } + pub fn to_unix_shell_script( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> String { + self.to_unix_shell_script_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn to_unix_shell_script_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> String { + let mut retval = String::from( + "#!/bin/sh\n\ + set -ex\n", + ); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let Ok(()) = job + .command_params_with_internal_program_prefix( + internal_program_prefix, + platform, + extra_args, + ) + .to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> { + write_str!(output, "{}", EscapeForUnixShell::new(&arg)); + Ok(()) + }); + retval.push_str("\n"); + } + retval + } + pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> { + // use scope to auto-join threads on errors + thread::scope(|scope| { + struct WaitingJobState { + job_node_id: ::NodeId, + job: DynJob, + inputs: BTreeMap>, + } + let mut ready_jobs = VecDeque::new(); + let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let waiting_job = WaitingJobState { + job_node_id: node_id, + job: job.clone(), + inputs: job + .inputs() + .iter() + .map(|&name| (name, OnceCell::new())) + .collect(), + }; + if waiting_job.inputs.is_empty() { + ready_jobs.push_back(waiting_job); + } else { + let waiting_job = Rc::new(waiting_job); + for &input_item in waiting_job.inputs.keys() { + item_name_to_waiting_jobs_map + .entry(input_item) + .or_default() + .push(waiting_job.clone()); + } + } + } + struct RunningJob<'scope> { + job: DynJob, + thread: ScopedJoinHandle<'scope, eyre::Result>>, + } + let mut running_jobs = HashMap::default(); + let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel(); + loop { + while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() { + let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job) + else { + unreachable!(); + }; + let output_items = thread.join().map_err(panic::resume_unwind)??; + assert!( + output_items.iter().map(JobItem::name).eq(job.outputs()), + "job's run() method returned the wrong output items:\n\ + output items:\n\ + {output_items:?}\n\ + expected outputs:\n\ + {:?}\n\ + job:\n\ + {job:?}", + job.outputs(), + ); + for output_item in output_items { + for waiting_job in item_name_to_waiting_jobs_map + .remove(&output_item.name()) + .unwrap_or_default() + { + let Ok(()) = + waiting_job.inputs[&output_item.name()].set(output_item.clone()) + else { + unreachable!(); + }; + if let Some(waiting_job) = Rc::into_inner(waiting_job) { + ready_jobs.push_back(waiting_job); + } + } + } + } + if let Some(WaitingJobState { + job_node_id, + job, + inputs, + }) = ready_jobs.pop_front() + { + struct RunningJobInThread<'a> { + job_node_id: ::NodeId, + job: DynJob, + inputs: Vec, + params: &'a JobParams, + global_params: &'a GlobalParams, + acquired_job: AcquiredJob, + finished_jobs_sender: mpsc::Sender<::NodeId>, + } + impl RunningJobInThread<'_> { + 
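
The Drop impl just below is what keeps the scheduler from hanging: the worker thread notifies the finished-jobs channel even if the job's run panics, because the send happens in the guard's destructor. The same pattern in isolation (std only; names are illustrative):

    use std::sync::mpsc;

    struct NotifyOnDrop {
        sender: mpsc::Sender<usize>,
        id: usize,
    }

    impl Drop for NotifyOnDrop {
        fn drop(&mut self) {
            // ignore the error: the receiver may already be gone during shutdown
            let _ = self.sender.send(self.id);
        }
    }

    fn worker(sender: mpsc::Sender<usize>, id: usize) {
        let _guard = NotifyOnDrop { sender, id };
        // ... do the actual work; the notification fires on return and on panic
    }
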
fn run(mut self) -> eyre::Result> { + self.job.run( + &self.inputs, + self.params, + self.global_params, + &mut self.acquired_job, + ) + } + } + impl Drop for RunningJobInThread<'_> { + fn drop(&mut self) { + let _ = self.finished_jobs_sender.send(self.job_node_id); + } + } + let name = job.kind().name(); + let running_job_in_thread = RunningJobInThread { + job_node_id, + job: job.clone(), + inputs: Result::from_iter(job.inputs().iter().map(|input_name| { + inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| { + eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}") + }) + }))?, + params, + global_params, + acquired_job: AcquiredJob::acquire()?, + finished_jobs_sender: finished_jobs_sender.clone(), + }; + running_jobs.insert( + job_node_id, + RunningJob { + job, + thread: thread::Builder::new() + .name(format!("job:{name}")) + .spawn_scoped(scope, move || running_job_in_thread.run()) + .expect("failed to spawn thread for job"), + }, + ); + } + if running_jobs.is_empty() { + assert!(item_name_to_waiting_jobs_map.is_empty()); + assert!(ready_jobs.is_empty()); + return Ok(()); + } + } + }) + } +} + +impl Extend for JobGraph { + #[track_caller] + fn extend>(&mut self, iter: T) { + self.add_jobs(iter); + } +} + +impl FromIterator for JobGraph { + #[track_caller] + fn from_iter>(iter: T) -> Self { + let mut retval = Self::new(); + retval.add_jobs(iter); + retval + } +} + +impl Serialize for JobGraph { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?; + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + serializer.serialize_element(job)?; + } + serializer.end() + } +} + +impl<'de> Deserialize<'de> for JobGraph { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let jobs = Vec::::deserialize(deserializer)?; + let mut retval = JobGraph::new(); + retval.try_add_jobs(jobs).map_err(D::Error::custom)?; + Ok(retval) + } +} diff --git a/crates/fayalite/src/build/registry.rs b/crates/fayalite/src/build/registry.rs new file mode 100644 index 0000000..bbd9f2c --- /dev/null +++ b/crates/fayalite/src/build/registry.rs @@ -0,0 +1,313 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{DynJobKind, JobKind, built_in_job_kinds}, + intern::Interned, + util::InternedStrCompareAsStr, +}; +use std::{ + collections::BTreeMap, + fmt, + sync::{Arc, OnceLock, RwLock, RwLockWriteGuard}, +}; + +impl DynJobKind { + pub fn registry() -> JobKindRegistrySnapshot { + JobKindRegistrySnapshot(JobKindRegistry::get()) + } + #[track_caller] + pub fn register(self) { + JobKindRegistry::register(JobKindRegistry::lock(), self); + } +} + +#[derive(Clone, Debug)] +struct JobKindRegistry { + job_kinds: BTreeMap, +} + +enum JobKindRegisterError { + SameName { + name: InternedStrCompareAsStr, + old_job_kind: DynJobKind, + new_job_kind: DynJobKind, + }, +} + +impl fmt::Display for JobKindRegisterError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::SameName { + name, + old_job_kind, + new_job_kind, + } => write!( + f, + "two different `JobKind` can't share the same name:\n\ + {name:?}\n\ + old job kind:\n\ + {old_job_kind:?}\n\ + new job kind:\n\ + {new_job_kind:?}", + ), + } + } +} + +trait JobKindRegistryRegisterLock { + type Locked; + fn lock(self) -> Self::Locked; + fn 
make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry; +} + +impl JobKindRegistryRegisterLock for &'static RwLock> { + type Locked = RwLockWriteGuard<'static, Arc>; + fn lock(self) -> Self::Locked { + self.write().expect("shouldn't be poisoned") + } + fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry { + Arc::make_mut(locked) + } +} + +impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry { + type Locked = Self; + + fn lock(self) -> Self::Locked { + self + } + + fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry { + locked + } +} + +impl JobKindRegistry { + fn lock() -> &'static RwLock> { + static REGISTRY: OnceLock>> = OnceLock::new(); + REGISTRY.get_or_init(Default::default) + } + fn try_register( + lock: L, + job_kind: DynJobKind, + ) -> Result<(), JobKindRegisterError> { + use std::collections::btree_map::Entry; + let name = InternedStrCompareAsStr(job_kind.name()); + // run user code only outside of lock + let mut locked = lock.lock(); + let this = L::make_mut(&mut locked); + let result = match this.job_kinds.entry(name) { + Entry::Occupied(entry) => Err(JobKindRegisterError::SameName { + name, + old_job_kind: entry.get().clone(), + new_job_kind: job_kind, + }), + Entry::Vacant(entry) => { + entry.insert(job_kind); + Ok(()) + } + }; + drop(locked); + // outside of lock now, so we can test if it's the same DynJobKind + match result { + Err(JobKindRegisterError::SameName { + name: _, + old_job_kind, + new_job_kind, + }) if old_job_kind == new_job_kind => Ok(()), + result => result, + } + } + #[track_caller] + fn register(lock: L, job_kind: DynJobKind) { + match Self::try_register(lock, job_kind) { + Err(e) => panic!("{e}"), + Ok(()) => {} + } + } + fn get() -> Arc { + Self::lock().read().expect("shouldn't be poisoned").clone() + } +} + +impl Default for JobKindRegistry { + fn default() -> Self { + let mut retval = Self { + job_kinds: BTreeMap::new(), + }; + for job_kind in built_in_job_kinds() { + Self::register(&mut retval, job_kind); + } + retval + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistrySnapshot(Arc); + +impl JobKindRegistrySnapshot { + pub fn get() -> Self { + JobKindRegistrySnapshot(JobKindRegistry::get()) + } + pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> { + self.0.job_kinds.get(name) + } + pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> { + JobKindRegistryIterWithNames(self.0.job_kinds.iter()) + } + pub fn iter(&self) -> JobKindRegistryIter<'_> { + JobKindRegistryIter(self.0.job_kinds.values()) + } +} + +impl<'a> IntoIterator for &'a JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIter<'a>( + std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIter<'a> { + type Item = &'a DynJobKind; + + fn next(&mut self) -> Option { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last() + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } + + fn fold(self, init: B, f: F) -> B + where + F: 
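
The registration scheme above is a copy-on-write global: readers take a cheap Arc snapshot, and a writer clones the map only when an older snapshot is still alive, while the user-visible equality check on DynJobKind happens outside the lock. A reduced sketch of the same shape using only std types (the map contents are placeholders):

    use std::{
        collections::BTreeMap,
        sync::{Arc, OnceLock, RwLock},
    };

    fn registry() -> &'static RwLock<Arc<BTreeMap<String, u32>>> {
        static REGISTRY: OnceLock<RwLock<Arc<BTreeMap<String, u32>>>> = OnceLock::new();
        REGISTRY.get_or_init(Default::default)
    }

    fn register(name: &str, value: u32) {
        let mut locked = registry().write().expect("shouldn't be poisoned");
        // clones the inner map only if someone still holds an older snapshot
        Arc::make_mut(&mut locked).insert(name.to_owned(), value);
    }

    fn snapshot() -> Arc<BTreeMap<String, u32>> {
        // a snapshot is just an Arc clone; the read lock is held only for the clone
        registry().read().expect("shouldn't be poisoned").clone()
    }
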
FnMut(B, Self::Item) -> B, + { + self.0.fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0.nth_back(n) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.rfold(init, f) + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIterWithNames<'a>( + std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIterWithNames<'a> { + type Item = (Interned, &'a DynJobKind); + + fn next(&mut self) -> Option { + self.0.next().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind)) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> { + fn next_back(&mut self) -> Option { + self.0 + .next_back() + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0 + .nth_back(n) + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .rfold(init, f) + } +} + +#[track_caller] +pub fn register_job_kind(kind: K) { + DynJobKind::new(kind).register(); +} diff --git a/crates/fayalite/src/build/verilog.rs b/crates/fayalite/src/build/verilog.rs new file mode 100644 index 0000000..7ce77ec --- /dev/null +++ b/crates/fayalite/src/build/verilog.rs @@ -0,0 +1,418 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob, + GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, + JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + firrtl::{Firrtl, FirrtlJobKind}, + }, + intern::{Intern, InternSlice, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use eyre::{Context, bail}; +use serde::{Deserialize, Serialize}; +use std::{ + ffi::{OsStr, OsString}, + fmt, mem, + path::Path, +}; + +/// based on [LLVM Circt's recommended lowering options][lowering-options] +/// +/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target +#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[non_exhaustive] +pub enum VerilogDialect { + Questa, + Spyglass, + Verilator, + Vivado, + Yosys, +} + +impl fmt::Display for VerilogDialect { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } 
+} + +impl VerilogDialect { + pub fn as_str(self) -> &'static str { + match self { + VerilogDialect::Questa => "questa", + VerilogDialect::Spyglass => "spyglass", + VerilogDialect::Verilator => "verilator", + VerilogDialect::Vivado => "vivado", + VerilogDialect::Yosys => "yosys", + } + } + pub fn firtool_extra_args(self) -> &'static [&'static str] { + match self { + VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], + VerilogDialect::Spyglass => { + &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] + } + VerilogDialect::Verilator => &[ + "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", + ], + VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], + VerilogDialect::Yosys => { + &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] + } + } + } +} + +#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct UnadjustedVerilogArgs { + #[arg(long = "firtool-extra-arg", value_name = "ARG")] + pub firtool_extra_args: Vec, + /// adapt the generated Verilog for a particular toolchain + #[arg(long)] + pub verilog_dialect: Option, + #[arg(long)] + pub verilog_debug: bool, +} + +impl ToArgs for UnadjustedVerilogArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + ref firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *self; + for arg in firtool_extra_args { + args.write_long_option_eq("firtool-extra-arg", arg); + } + if let Some(verilog_dialect) = verilog_dialect { + args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str()); + } + if verilog_debug { + args.write_arg("--verilog-debug"); + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Firtool; + +impl ExternalProgramTrait for Firtool { + fn default_program_name() -> Interned { + "firtool".intern() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)] +pub struct UnadjustedVerilog { + firrtl_file: Interned, + firrtl_file_name: Interned, + unadjusted_verilog_file: Interned, + unadjusted_verilog_file_name: Interned, + firtool_extra_args: Interned<[Interned]>, + verilog_dialect: Option, + verilog_debug: bool, +} + +impl UnadjustedVerilog { + pub fn firrtl_file(&self) -> Interned { + self.firrtl_file + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn firtool_extra_args(&self) -> Interned<[Interned]> { + self.firtool_extra_args + } + pub fn verilog_dialect(&self) -> Option { + self.verilog_dialect + } + pub fn verilog_debug(&self) -> bool { + self.verilog_debug + } +} + +impl ExternalCommand for UnadjustedVerilog { + type AdditionalArgs = UnadjustedVerilogArgs; + type AdditionalJobData = UnadjustedVerilog; + type BaseJobPosition = GetJobPositionDependencies; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Firtool; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, global_params, |args, dependencies| { + let UnadjustedVerilogArgs { + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = args.additional_args; + let unadjusted_verilog_file = dependencies + .dependencies + .job + .job + .file_with_ext("unadjusted.v"); + let firrtl_job = dependencies.get_job::(); + 
Ok(UnadjustedVerilog { + firrtl_file: firrtl_job.firrtl_file(), + firrtl_file_name: firrtl_job + .firrtl_file() + .interned_file_name() + .expect("known to have file name"), + unadjusted_verilog_file, + unadjusted_verilog_file_name: unadjusted_verilog_file + .interned_file_name() + .expect("known to have file name"), + firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(), + verilog_dialect, + verilog_debug, + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.additional_job_data().firrtl_file, + }] + .intern_slice() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [job.additional_job_data().unadjusted_verilog_file].intern_slice() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + let UnadjustedVerilog { + firrtl_file: _, + firrtl_file_name, + unadjusted_verilog_file: _, + unadjusted_verilog_file_name, + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *job.additional_job_data(); + args.write_interned_arg(firrtl_file_name); + args.write_arg("-o"); + args.write_interned_arg(unadjusted_verilog_file_name); + if verilog_debug { + args.write_args(["-g", "--preserve-values=all"]); + } + if let Some(dialect) = verilog_dialect { + args.write_args(dialect.firtool_extra_args().iter().copied()); + } + args.write_interned_args(firtool_extra_args); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "unadjusted-verilog".intern() + } + + fn subcommand_hidden() -> bool { + true + } + + fn run_even_if_cached_arg_name() -> Interned { + "firtool-run-even-if-cached".intern() + } +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct VerilogJobKind; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)] +#[non_exhaustive] +pub struct VerilogJobArgs {} + +impl ToArgs for VerilogJobArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct VerilogJob { + output_dir: Interned, + unadjusted_verilog_file: Interned, + main_verilog_file: Interned, +} + +impl VerilogJob { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + #[track_caller] + pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned] { + match additional_files { + JobItem::DynamicPaths { + paths, + source_job_name, + } if *source_job_name == VerilogJobKind.name() => paths, + v => panic!("expected VerilogJob's additional files JobItem: {v:?}"), + } + } + pub fn all_verilog_files( + main_verilog_file: Interned, + additional_files: &[Interned], + ) -> eyre::Result]>> { + let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1)); + for verilog_file in [main_verilog_file].iter().chain(additional_files) { + if !["v", "sv"] + .iter() + .any(|extension| verilog_file.extension() == Some(extension.as_ref())) + { + continue; + } + let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| { + format!("converting {verilog_file:?} to an absolute path failed") + })?; + if verilog_file + .as_os_str() + .as_encoded_bytes() + .iter() + .any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"') + { + bail!("verilog file path contains 
characters that aren't permitted"); + } + retval.push(verilog_file.intern_deref()); + } + Ok(retval.intern_slice()) + } +} + +impl JobKind for VerilogJobKind { + type Args = VerilogJobArgs; + type Job = VerilogJob; + type Dependencies = JobKindAndDependencies>; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| { + let VerilogJobArgs {} = args; + let base_job = dependencies.get_job::(); + Ok(VerilogJob { + output_dir: base_job.output_dir(), + unadjusted_verilog_file: dependencies + .job + .job + .additional_job_data() + .unadjusted_verilog_file(), + main_verilog_file: base_job.file_with_ext("v"), + }) + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.unadjusted_verilog_file, + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.main_verilog_file, + }, + JobItemName::DynamicPaths { + source_job_name: self.name(), + }, + ] + .intern_slice() + } + + fn name(self) -> Interned { + "verilog".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let input = std::fs::read_to_string(job.unadjusted_verilog_file())?; + let file_separator_prefix = "\n// ----- 8< ----- FILE \""; + let file_separator_suffix = "\" ----- 8< -----\n\n"; + let mut input = &*input; + let main_verilog_file = job.main_verilog_file(); + let mut file_name = Some(main_verilog_file); + let mut additional_outputs = Vec::new(); + loop { + let (chunk, next_file_name) = if let Some((chunk, rest)) = + input.split_once(file_separator_prefix) + { + let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else { + bail!( + "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}" + ); + }; + input = rest; + let next_file_name = job.output_dir.join(next_file_name).intern_deref(); + additional_outputs.push(next_file_name); + (chunk, Some(next_file_name)) + } else { + (mem::take(&mut input), None) + }; + let Some(file_name) = mem::replace(&mut file_name, next_file_name) else { + break; + }; + std::fs::write(&file_name, chunk)?; + } + Ok(vec![ + JobItem::Path { + path: main_verilog_file, + }, + JobItem::DynamicPaths { + paths: additional_outputs, + source_job_name: self.name(), + }, + ]) + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(ExternalCommandJobKind::::new()), + DynJobKind::new(VerilogJobKind), + ] +} diff --git a/crates/fayalite/src/bundle.rs b/crates/fayalite/src/bundle.rs index ed753c5..a0de189 100644 --- a/crates/fayalite/src/bundle.rs +++ b/crates/fayalite/src/bundle.rs @@ -1,87 +1,34 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information + use crate::{ - expr::{ops::BundleLiteral, Expr, ToExpr}, - intern::{ - Intern, Interned, InternedCompare, Memoize, PtrEqWithTypeId, SupportsPtrEqWithTypeId, + expr::{ + CastToBits, Expr, ReduceBits, ToExpr, + ops::{ArrayLiteral, BundleLiteral, ExprPartialEq}, }, - module::{ModuleBuilder, NormalModule}, + int::{Bool, 
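
To recap the VerilogJob::run splitting above before the diff moves on to bundle.rs: firtool's single output stream is cut at markers of the shape shown below. Everything before the first marker becomes the main .v file, and each named chunk is written to <output_dir>/<name> and reported as an additional dynamic output (the file name is a placeholder):

    ...contents of the main Verilog file...
    // ----- 8< ----- FILE "extra_module.sv" ----- 8< -----

    ...contents written to <output_dir>/extra_module.sv...
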
DynSize}, + intern::{Intern, InternSlice, Interned}, + sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType}, source_location::SourceLocation, ty::{ - CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, DynCanonicalType, - DynCanonicalValue, DynType, MatchVariantWithoutScope, StaticType, Type, TypeEnum, - TypeWithDeref, Value, ValueEnum, + CanonicalType, MatchVariantWithoutScope, OpaqueSimValue, OpaqueSimValueSize, + OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type, + TypeProperties, TypeWithDeref, impl_match_variant_as_self, }, - type_deduction::{HitUndeducedType, UndeducedType}, -}; -use bitvec::{slice::BitSlice, vec::BitVec}; -use hashbrown::HashMap; -use std::{ - fmt, - hash::{Hash, Hasher}, - marker::PhantomData, - sync::Arc, + util::HashMap, }; +use serde::{Deserialize, Serialize}; +use std::{fmt, marker::PhantomData}; -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub struct FieldType { +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] +pub struct BundleField { pub name: Interned, pub flipped: bool, - pub ty: T, + pub ty: CanonicalType, } -pub struct FmtDebugInStruct<'a, T> { - field: &'a FieldType, - field_offset: Option, -} - -impl fmt::Debug for FmtDebugInStruct<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - field: - &FieldType { - name, - flipped, - ref ty, - }, - field_offset, - } = *self; - if flipped { - write!(f, "#[hdl(flip)] ")?; - } - if f.alternate() { - if let Some(field_offset) = field_offset { - writeln!(f, "/* offset = {field_offset} */")?; - } - } - write!(f, "{name}: ")?; - ty.fmt(f) - } -} - -impl fmt::Display for FmtDebugInStruct<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(self, f) - } -} - -impl FieldType { - pub fn map_ty U>(self, f: F) -> FieldType { - let Self { name, flipped, ty } = self; - FieldType { - name, - flipped, - ty: f(ty), - } - } - pub fn as_ref_ty(&self) -> FieldType<&T> { - FieldType { - name: self.name, - flipped: self.flipped, - ty: &self.ty, - } - } - pub fn fmt_debug_in_struct(&self, field_offset: Option) -> FmtDebugInStruct<'_, T> { +impl BundleField { + pub fn fmt_debug_in_struct(self, field_offset: usize) -> FmtDebugInStruct { FmtDebugInStruct { field: self, field_offset, @@ -89,735 +36,817 @@ impl FieldType { } } -impl FieldType { - pub fn canonical(&self) -> FieldType { - FieldType { - name: self.name, - flipped: self.flipped, - ty: self.ty.canonical(), +#[derive(Copy, Clone)] +pub struct FmtDebugInStruct { + field: BundleField, + field_offset: usize, +} + +impl fmt::Debug for FmtDebugInStruct { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + field: BundleField { name, flipped, ty }, + field_offset, + } = *self; + if flipped { + write!(f, "#[hdl(flip)] ")?; } - } - pub fn to_dyn(&self) -> FieldType> { - FieldType { - name: self.name, - flipped: self.flipped, - ty: self.ty.to_dyn(), - } - } - pub fn canonical_dyn(&self) -> FieldType> { - FieldType { - name: self.name, - flipped: self.flipped, - ty: self.ty.canonical_dyn(), + if f.alternate() { + writeln!(f, "/* offset = {field_offset} */")?; } + write!(f, "{name}: ")?; + ty.fmt(f) } } -impl FieldType> { - pub fn from_canonical_type_helper( - self, - expected_name: &str, - expected_flipped: bool, - ) -> T { - assert_eq!(&*self.name, expected_name, "field name doesn't match"); - assert_eq!( - self.flipped, expected_flipped, - "field {expected_name} orientation (flipped or not) doesn't 
match" - ); - let ty = &*self.ty; - if let Ok(ty) = ::downcast(ty) { - return T::from_canonical_type(ty); - } - let type_name = std::any::type_name::(); - panic!("field {expected_name} type doesn't match, expected: {type_name:?}, got: {ty:?}"); +impl fmt::Display for FmtDebugInStruct { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self, f) } } #[derive(Clone, Eq)] -struct DynBundleTypeImpl { - fields: Interned<[FieldType>]>, +struct BundleImpl { + fields: Interned<[BundleField]>, name_indexes: HashMap, usize>, - field_offsets: Interned<[usize]>, - is_passive: Result, - is_storable: Result, - is_castable_from_bits: Result, - bit_width: Result, + field_offsets: Interned<[OpaqueSimValueSize]>, + type_properties: TypeProperties, } -impl fmt::Debug for DynBundleTypeImpl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "DynBundleType ")?; - f.debug_set() - .entries(self.fields.iter().enumerate().map(|(index, field)| { - field.fmt_debug_in_struct(self.field_offsets.get(index).copied()) - })) - .finish() +impl std::hash::Hash for BundleImpl { + fn hash(&self, state: &mut H) { + self.fields.hash(state); } } -impl PartialEq for DynBundleTypeImpl { +impl PartialEq for BundleImpl { fn eq(&self, other: &Self) -> bool { self.fields == other.fields } } -impl Hash for DynBundleTypeImpl { - fn hash(&self, state: &mut H) { - self.fields.hash(state); +impl std::fmt::Debug for BundleImpl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("Bundle ")?; + f.debug_set() + .entries(self.fields.iter().enumerate().map(|(index, field)| { + field.fmt_debug_in_struct(self.field_offsets[index].bit_width) + })) + .finish() } } -#[derive(Copy, Clone, Hash, PartialEq, Eq)] -pub struct DynBundleType(Interned); +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Bundle(Interned); -impl fmt::Debug for DynBundleType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl std::fmt::Debug for Bundle { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.0.fmt(f) } } -impl DynBundleType { - pub fn new(fields: Interned<[FieldType>]>) -> Self { - fn calc_prop( - fields: Interned<[FieldType>]>, - property: impl Fn(&Interned) -> Result, - ) -> Result { - fields - .iter() - .map(|field| { - if field.flipped { - Ok(false) - } else { - property(&field.ty) - } - }) - .fold(Ok(true), HitUndeducedType::reduce_and) +#[derive(Clone)] +pub struct BundleTypePropertiesBuilder(TypeProperties); + +impl BundleTypePropertiesBuilder { + #[must_use] + pub const fn new() -> Self { + Self(TypeProperties { + is_passive: true, + is_storable: true, + is_castable_from_bits: true, + bit_width: 0, + sim_only_values_len: 0, + }) + } + pub const fn clone(&self) -> Self { + Self(self.0) + } + #[must_use] + pub const fn field(self, flipped: bool, field_props: TypeProperties) -> Self { + let Some(OpaqueSimValueSize { + bit_width, + sim_only_values_len, + }) = self.0.size().checked_add(field_props.size()) + else { + panic!("bundle is too big: size overflowed"); + }; + if flipped { + Self(TypeProperties { + is_passive: false, + is_storable: false, + is_castable_from_bits: false, + bit_width, + sim_only_values_len, + }) + } else { + Self(TypeProperties { + is_passive: self.0.is_passive & field_props.is_passive, + is_storable: self.0.is_storable & field_props.is_storable, + is_castable_from_bits: self.0.is_castable_from_bits + & field_props.is_castable_from_bits, + bit_width, + sim_only_values_len, + }) } - let is_passive = 
calc_prop(fields, DynType::is_passive); - let is_storable = calc_prop(fields, DynType::is_storable); - let is_castable_from_bits = calc_prop(fields, DynType::is_castable_from_bits); - let mut name_indexes = HashMap::with_capacity(fields.len()); + } + pub const fn finish(self) -> TypeProperties { + self.0 + } +} + +impl Default for BundleTypePropertiesBuilder { + fn default() -> Self { + Self::new() + } +} + +impl Bundle { + #[track_caller] + pub fn new(fields: Interned<[BundleField]>) -> Self { + let mut name_indexes = HashMap::with_capacity_and_hasher(fields.len(), Default::default()); let mut field_offsets = Vec::with_capacity(fields.len()); - let mut bit_width = Ok(0usize); - for (index, &FieldType { name, ty, .. }) in fields.iter().enumerate() { + let mut type_props_builder = BundleTypePropertiesBuilder::new(); + for (index, &BundleField { name, flipped, ty }) in fields.iter().enumerate() { if let Some(old_index) = name_indexes.insert(name, index) { panic!("duplicate field name {name:?}: at both index {old_index} and {index}"); } - if let Ok(bit_width_value) = bit_width { - field_offsets.push(bit_width_value); - bit_width = ty.bit_width().map(|field_bit_width| { - bit_width_value - .checked_add(field_bit_width) - .unwrap_or_else(|| panic!("bundle is too big: bit-width overflowed")) - }); - } + field_offsets.push(type_props_builder.0.size()); + type_props_builder = type_props_builder.field(flipped, ty.type_properties()); } - Self( - DynBundleTypeImpl { - fields, - name_indexes, - field_offsets: Intern::intern_owned(field_offsets), - is_passive, - is_storable, - is_castable_from_bits, - bit_width, - } - .intern_sized(), - ) - } - pub fn is_passive(self) -> Result { - self.0.is_passive - } - pub fn is_storable(self) -> Result { - self.0.is_storable - } - pub fn is_castable_from_bits(self) -> Result { - self.0.is_castable_from_bits - } - pub fn bit_width(self) -> Result { - self.0.bit_width + Self(Intern::intern_sized(BundleImpl { + fields, + name_indexes, + field_offsets: Intern::intern_owned(field_offsets), + type_properties: type_props_builder.finish(), + })) } pub fn name_indexes(&self) -> &HashMap, usize> { &self.0.name_indexes } - pub fn field_by_name( - &self, - name: Interned, - ) -> Option>> { + pub fn field_by_name(&self, name: Interned) -> Option { Some(self.0.fields[*self.0.name_indexes.get(&name)?]) } - pub fn field_offsets(self) -> Interned<[usize]> { + pub fn field_offsets(self) -> Interned<[OpaqueSimValueSize]> { self.0.field_offsets } -} - -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct DynBundle { - ty: DynBundleType, - fields: Arc<[DynCanonicalValue]>, -} - -impl DynBundle { - pub fn new(ty: DynBundleType, fields: Arc<[DynCanonicalValue]>) -> Self { - assert_eq!( - ty.fields().len(), - fields.len(), - "field values don't match type" - ); - for (field_ty, field) in ty.fields().iter().zip(fields.iter()) { - assert_eq!(field_ty.ty, field.ty(), "field value doesn't match type"); + pub fn type_properties(self) -> TypeProperties { + self.0.type_properties + } + pub fn can_connect(self, rhs: Self) -> bool { + if self.0.fields.len() != rhs.0.fields.len() { + return false; + } + for ( + &BundleField { + name: lhs_name, + flipped: lhs_flipped, + ty: lhs_ty, + }, + &BundleField { + name: rhs_name, + flipped: rhs_flipped, + ty: rhs_ty, + }, + ) in self.0.fields.iter().zip(rhs.0.fields.iter()) + { + if lhs_name != rhs_name || lhs_flipped != rhs_flipped || !lhs_ty.can_connect(rhs_ty) { + return false; + } } - DynBundle { ty, fields } - } - pub fn fields(&self) -> 
&Arc<[DynCanonicalValue]> { - &self.fields - } -} - -pub trait TypeHintTrait: Send + Sync + fmt::Debug + SupportsPtrEqWithTypeId { - fn matches(&self, ty: &dyn DynType) -> Result<(), String>; -} - -impl InternedCompare for dyn TypeHintTrait { - type InternedCompareKey = PtrEqWithTypeId; - fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey { - Self::get_ptr_eq_with_type_id(this) - } - fn interned_compare_key_weak(this: &std::sync::Weak) -> Self::InternedCompareKey { - Self::get_ptr_eq_with_type_id(&*this.upgrade().unwrap()) - } -} - -pub struct TypeHint(PhantomData); - -impl TypeHint { - pub fn intern_dyn() -> Interned { - Interned::cast_unchecked( - Self(PhantomData).intern_sized(), - |v| -> &dyn TypeHintTrait { v }, - ) - } -} - -impl fmt::Debug for TypeHint { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "TypeHint<{}>", std::any::type_name::()) - } -} - -impl Hash for TypeHint { - fn hash(&self, _state: &mut H) {} -} - -impl Eq for TypeHint {} - -impl PartialEq for TypeHint { - fn eq(&self, _other: &Self) -> bool { true } } -impl Clone for TypeHint { - fn clone(&self) -> Self { - *self - } -} - -impl Copy for TypeHint {} - -impl TypeHintTrait for TypeHint { - fn matches(&self, ty: &dyn DynType) -> Result<(), String> { - match ty.downcast::() { - Ok(_) => Ok(()), - Err(_) => Err(format!("can't cast {ty:?} to {self:?}")), - } - } -} - -#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] -pub struct FieldsHint { - pub known_fields: Interned<[FieldType>]>, - pub more_fields: bool, -} - -impl FieldsHint { - pub fn new( - known_fields: impl IntoIterator>>, - more_fields: bool, - ) -> Self { - let known_fields = Intern::intern_owned(Vec::from_iter(known_fields)); - Self { - known_fields, - more_fields, - } - } - pub fn check_field(self, index: usize, field: FieldType<&dyn DynType>) -> Result<(), String> { - let Some(&known_field) = self.known_fields.get(index) else { - return if self.more_fields { - Ok(()) - } else { - Err(format!( - "too many fields: name={:?} index={index}", - field.name - )) - }; - }; - let FieldType { - name: known_name, - flipped: known_flipped, - ty: type_hint, - } = known_field; - let FieldType { name, flipped, ty } = field; - if name != known_name { - Err(format!( - "wrong field name {name:?}, expected {known_name:?}" - )) - } else if flipped != known_flipped { - Err(format!( - "wrong field direction: flipped={flipped:?}, expected flipped={known_flipped}" - )) - } else { - type_hint.matches(ty) - } - } -} - -pub trait BundleType: - Type + TypeWithDeref -where - Self::Value: BundleValue + ToExpr, -{ - type Builder; - fn builder() -> Self::Builder; - fn fields(&self) -> Interned<[FieldType>]>; - fn fields_hint() -> FieldsHint; -} - -pub trait BundleValue: Value -where - ::Type: BundleType, -{ - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - #[derive(Hash, Eq, PartialEq)] - struct ToBitsMemoize(PhantomData); - impl Clone for ToBitsMemoize { - fn clone(&self) -> Self { - *self - } - } - impl Copy for ToBitsMemoize {} - impl>> Memoize for ToBitsMemoize { - type Input = T; - type InputOwned = T; - type Output = Result, HitUndeducedType>; - - fn inner(self, input: &Self::Input) -> Self::Output { - let input = input.to_canonical(); - let mut bits = BitVec::with_capacity(input.ty.bit_width().unwrap_or(0)); - for field in input.fields.iter() { - bits.extend_from_bitslice(&field.to_bits()?); - } - Ok(Intern::intern_owned(bits)) - } - } - ToBitsMemoize::(PhantomData).get(this) - } -} - -impl Connect for DynBundleType {} - 
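(Reviewer note, not part of this patch.) A minimal, hypothetical sketch of exercising the new `Bundle`/`BundleField` API introduced above; the field names and the choice of `Bool` are invented for illustration, and it assumes the `TypeProperties` fields are publicly readable:

```rust
use fayalite::{
    bundle::{Bundle, BundleField},
    int::Bool,
    intern::{Intern, InternSlice},
    ty::Type,
};

fn bundle_sketch() {
    // two 1-bit fields, with the second one flipped
    let fields = [
        BundleField { name: "data".intern(), flipped: false, ty: Bool.canonical() },
        BundleField { name: "ready".intern(), flipped: true, ty: Bool.canonical() },
    ]
    .intern_slice();
    let bundle = Bundle::new(fields);
    // BundleTypePropertiesBuilder folds the per-field properties:
    // a flipped field clears is_passive, and field bit widths are summed.
    assert!(!bundle.type_properties().is_passive);
    assert_eq!(bundle.type_properties().bit_width, 2);
    // a bundle can always connect to an identically shaped bundle
    assert!(bundle.can_connect(bundle));
}
```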
-pub struct DynBundleMatch; - -impl Type for DynBundleType { - type CanonicalType = DynBundleType; - type Value = DynBundle; - type CanonicalValue = DynBundle; - type MaskType = DynBundleType; - type MaskValue = DynBundle; - type MatchVariant = DynBundleMatch; - type MatchActiveScope = (); - type MatchVariantAndInactiveScope = MatchVariantWithoutScope; - type MatchVariantsIter = std::iter::Once; - - fn match_variants( - this: Expr, - module_builder: &mut ModuleBuilder, - source_location: SourceLocation, - ) -> Self::MatchVariantsIter - where - IO::Type: BundleType, - { - let _ = this; - let _ = module_builder; - let _ = source_location; - std::iter::once(MatchVariantWithoutScope(DynBundleMatch)) - } - +impl Type for Bundle { + type BaseType = Bundle; + type MaskType = Bundle; + type SimValue = OpaqueSimValue; + impl_match_variant_as_self!(); fn mask_type(&self) -> Self::MaskType { - #[derive(Copy, Clone, Eq, PartialEq, Hash)] - struct Impl; - - impl Memoize for Impl { - type Input = DynBundleType; - type InputOwned = DynBundleType; - type Output = DynBundleType; - - fn inner(self, input: &Self::Input) -> Self::Output { - DynBundleType::new(Intern::intern_owned(Vec::from_iter( - input - .fields() - .iter() - .map(|&FieldType { name, flipped, ty }| FieldType { - name, - flipped, - ty: ty.mask_type().canonical(), - }), - ))) - } - } - Impl.get(self) + Self::new(Interned::from_iter(self.0.fields.into_iter().map( + |BundleField { name, flipped, ty }| BundleField { + name, + flipped, + ty: ty.mask_type(), + }, + ))) } - - fn canonical(&self) -> Self::CanonicalType { - *self + fn canonical(&self) -> CanonicalType { + CanonicalType::Bundle(*self) } - - fn source_location(&self) -> SourceLocation { + #[track_caller] + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::Bundle(bundle) = canonical_type else { + panic!("expected bundle"); + }; + bundle + } + fn source_location() -> SourceLocation { SourceLocation::builtin() } - - fn type_enum(&self) -> TypeEnum { - TypeEnum::BundleType(*self) + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(self.type_properties().size(), opaque.size()); + opaque.to_owned() } - - fn from_canonical_type(t: Self::CanonicalType) -> Self { - t + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(self.type_properties().size(), opaque.size()); + assert_eq!(value.size(), opaque.size()); + value.clone_from_slice(opaque); } - - fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> { - Some(this) + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(self.type_properties().size(), writer.size()); + assert_eq!(value.size(), writer.size()); + writer.fill_cloned_from_slice(value.as_slice()) } } +pub trait BundleType: Type { + type Builder: Default; + type FilledBuilder: ToExpr; + fn fields(&self) -> Interned<[BundleField]>; +} + +pub struct BundleSimValueFromOpaque<'a> { + fields: std::slice::Iter<'static, BundleField>, + opaque: OpaqueSimValueSlice<'a>, +} + +impl<'a> BundleSimValueFromOpaque<'a> { + #[track_caller] + pub fn new(bundle_ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self { + let fields = bundle_ty.fields(); + assert_eq!( + opaque.size(), + fields + .iter() + .map(|BundleField { ty, .. 
}| ty.size()) + .sum::() + ); + Self { + fields: Interned::into_inner(fields).iter(), + opaque, + } + } + #[track_caller] + fn field_ty_and_opaque(&mut self) -> (T, OpaqueSimValueSlice<'a>) { + let Some(&BundleField { + name: _, + flipped: _, + ty, + }) = self.fields.next() + else { + panic!("tried to read too many fields from BundleSimValueFromBits"); + }; + let (field_opaque, rest) = self.opaque.split_at(ty.size()); + self.opaque = rest; + (T::from_canonical(ty), field_opaque) + } + #[track_caller] + pub fn field_from_opaque(&mut self) -> SimValue { + let (field_ty, field_opaque) = self.field_ty_and_opaque::(); + SimValue::from_opaque(field_ty, field_opaque.to_owned()) + } + #[track_caller] + pub fn field_clone_from_opaque(&mut self, field_value: &mut SimValue) { + let (field_ty, field_opaque) = self.field_ty_and_opaque::(); + assert_eq!(field_ty, SimValue::ty(field_value)); + SimValue::opaque_mut(field_value).clone_from_slice(field_opaque); + } +} + +pub struct BundleSimValueToOpaque<'a> { + fields: std::slice::Iter<'static, BundleField>, + writer: OpaqueSimValueWriter<'a>, +} + +impl<'a> BundleSimValueToOpaque<'a> { + #[track_caller] + pub fn new(bundle_ty: T, writer: OpaqueSimValueWriter<'a>) -> Self { + let fields = bundle_ty.fields(); + assert_eq!( + writer.size(), + fields + .iter() + .map(|BundleField { ty, .. }| ty.size()) + .sum::() + ); + Self { + fields: Interned::into_inner(fields).iter(), + writer, + } + } + #[track_caller] + pub fn field(&mut self, field_value: &SimValue) { + let Some(&BundleField { + name: _, + flipped: _, + ty, + }) = self.fields.next() + else { + panic!("tried to write too many fields with BundleSimValueToOpaque"); + }; + assert_eq!(T::from_canonical(ty), SimValue::ty(field_value)); + self.writer.fill_prefix_with(ty.size(), |writer| { + writer.fill_cloned_from_slice(SimValue::opaque(field_value).as_slice()) + }); + } + #[track_caller] + pub fn finish(mut self) -> OpaqueSimValueWritten<'a> { + assert_eq!( + self.fields.next(), + None, + "wrote too few fields with BundleSimValueToOpaque" + ); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } +} + +#[derive(Default)] pub struct NoBuilder; -impl TypeWithDeref for DynBundleType { - fn expr_deref(this: &Expr) -> &Self::MatchVariant { - let _ = this; - &DynBundleMatch +pub struct Unfilled(PhantomData); + +impl Default for Unfilled { + fn default() -> Self { + Self(PhantomData) } } -impl Connect for DynBundleType {} - -impl BundleType for DynBundleType { +impl BundleType for Bundle { type Builder = NoBuilder; - - fn builder() -> Self::Builder { - NoBuilder - } - - fn fields(&self) -> Interned<[FieldType>]> { + type FilledBuilder = Expr; + fn fields(&self) -> Interned<[BundleField]> { self.0.fields } +} - fn fields_hint() -> FieldsHint { - FieldsHint { - known_fields: [][..].intern(), - more_fields: true, +#[derive(Default)] +pub struct TupleBuilder(T); + +macro_rules! 
impl_tuple_builder_fields { + ( + @impl + { } - } -} - -impl CanonicalType for DynBundleType { - const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::BundleType; -} - -impl ToExpr for DynBundle { - type Type = DynBundleType; - - fn ty(&self) -> Self::Type { - self.ty - } - - fn to_expr(&self) -> Expr<::Value> { - Expr::from_value(self) - } -} - -impl Value for DynBundle { - fn to_canonical(&self) -> ::CanonicalValue { - self.clone() - } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - BundleValue::to_bits_impl(this) - } -} - -impl BundleValue for DynBundle {} - -impl CanonicalValue for DynBundle { - fn value_enum_impl(this: &Self) -> ValueEnum { - ValueEnum::Bundle(this.clone()) - } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - BundleValue::to_bits_impl(this) - } -} - -macro_rules! impl_tuple_builder { - ($builder:ident, [ - $(($before_Ts:ident $before_fields:ident $before_members:literal))* - ] [ - ($T:ident $field:ident $m:literal) - $(($after_Ts:ident $after_fields:ident $after_members:literal))* - ]) => { - impl_tuple_builder!($builder, [ - $(($before_Ts $before_fields $before_members))* - ($T $field $m) - ] [ - $(($after_Ts $after_fields $after_members))* - ]); - - impl $builder< - Phantom, - $($before_Ts,)* - (), - $($after_Ts,)* - > { - pub fn $field<$T: ToExpr>(self, $field: $T) -> $builder< - Phantom, - $($before_Ts,)* - Expr<<$T::Type as Type>::Value>, - $($after_Ts,)* - > { - let Self { - $($before_fields,)* - $field: _, - $($after_fields, )* - _phantom: _, - } = self; - let $field = $field.to_expr(); - $builder { - $($before_fields,)* - $field, - $($after_fields,)* - _phantom: PhantomData, - } + [ + $({ + #[type_var($head_type_var:ident)] + #[field($head_field:ident)] + #[var($head_var:ident)] + })* + ] + { + #[type_var($cur_type_var:ident)] + #[field($cur_field:ident)] + #[var($cur_var:ident)] + } + [ + $({ + #[type_var($tail_type_var:ident)] + #[field($tail_field:ident)] + #[var($tail_var:ident)] + })* + ] + ) => { + impl< + $($head_type_var,)* + $cur_type_var: Type, + $($tail_type_var,)* + > TupleBuilder<( + $($head_type_var,)* + Unfilled<$cur_type_var>, + $($tail_type_var,)* + )> + { + pub fn $cur_field(self, $cur_var: impl ToExpr) -> TupleBuilder<( + $($head_type_var,)* + Expr<$cur_type_var>, + $($tail_type_var,)* + )> + { + let ($($head_var,)* _, $($tail_var,)*) = self.0; + TupleBuilder(($($head_var,)* $cur_var.to_expr(), $($tail_var,)*)) } } }; - ($builder:ident, [$($before:tt)*] []) => {}; -} - -macro_rules! into_unit { - ($($tt:tt)*) => { - () + ($global:tt [$($head:tt)*] $cur:tt [$next:tt $($tail:tt)*]) => { + impl_tuple_builder_fields!(@impl $global [$($head)*] $cur [$next $($tail)*]); + impl_tuple_builder_fields!($global [$($head)* $cur] $next [$($tail)*]); }; + ($global:tt [$($head:tt)*] $cur:tt []) => { + impl_tuple_builder_fields!(@impl $global [$($head)*] $cur []); + }; + ($global:tt [$cur:tt $($tail:tt)*]) => { + impl_tuple_builder_fields!($global [] $cur [$($tail)*]); + }; + ($global:tt []) => {}; } -macro_rules! impl_tuple { - ($builder:ident, $(($T:ident $T2:ident $field:ident $m:tt)),*) => { - pub struct $builder { - $($field: $T,)* - _phantom: PhantomData, +macro_rules! impl_tuples { + ( + [$({ + #[ + num = $num:tt, + field = $field:ident, + ty = $ty_var:ident: $Ty:ident, + lhs = $lhs_var:ident: $Lhs:ident, + rhs = $rhs_var:ident: $Rhs:ident + ] + $var:ident: $T:ident + })*] + [] + ) => { + impl_tuple_builder_fields! 
{ + {} + [$({ + #[type_var($T)] + #[field($field)] + #[var($var)] + })*] } - - impl_tuple_builder!($builder, [] [$(($T $field $m))*]); - - impl<$($T: Value),*> $builder<($($T,)*), $(Expr<$T>,)*> - where - $($T::Type: Type,)* - { - pub fn build(self) -> Expr<($($T,)*)> { - let Self { - $($field,)* - _phantom: _, - } = self; - BundleLiteral::new_unchecked( - [$($field.to_canonical_dyn()),*][..].intern(), - ($($field.ty(),)*), - ).to_expr() - } - } - - impl<$($T: ToExpr,)*> ToExpr for ($($T,)*) { - type Type = ($($T::Type,)*); - - #[allow(clippy::unused_unit)] - fn ty(&self) -> Self::Type { - let ($($field,)*) = self; - ($($field.ty(),)*) - } - - fn to_expr(&self) -> Expr<::Value> { - let ($($field,)*) = self; - $(let $field = $field.to_expr();)* - BundleLiteral::new_unchecked( - [$($field.to_canonical_dyn()),*][..].intern(), - ($($field.ty(),)*), - ).to_expr() - } - } - - impl<$($T,)*> Connect for ($($T,)*) {} - - impl<$($T, $T2,)*> Connect<($($T2,)*)> for ($($T,)*) - where - $($T: Connect<$T2>,)* - { - } - - impl<$($T: Type,)*> Type for ($($T,)*) - where - $($T::Value: Value,)* - { - type CanonicalType = DynBundleType; - type Value = ($($T::Value,)*); - type CanonicalValue = DynBundle; + impl<$($T: Type,)*> Type for ($($T,)*) { + type BaseType = Bundle; type MaskType = ($($T::MaskType,)*); - type MaskValue = ($($T::MaskValue,)*); - type MatchVariant = ($(Expr<$T::Value>,)*); + type SimValue = ($(SimValue<$T>,)*); + type MatchVariant = ($(Expr<$T>,)*); type MatchActiveScope = (); type MatchVariantAndInactiveScope = MatchVariantWithoutScope; type MatchVariantsIter = std::iter::Once; - - fn match_variants( - this: Expr, - module_builder: &mut ModuleBuilder, + fn match_variants( + this: Expr, source_location: SourceLocation, - ) -> Self::MatchVariantsIter - where - IO::Type: BundleType, - { + ) -> Self::MatchVariantsIter { let _ = this; - let _ = module_builder; let _ = source_location; - std::iter::once(MatchVariantWithoutScope(($(this.field(stringify!($m)),)*))) + std::iter::once(MatchVariantWithoutScope(($(Expr::field(this, stringify!($num)),)*))) } - - #[allow(clippy::unused_unit)] fn mask_type(&self) -> Self::MaskType { - let ($($field,)*) = self; - ($($field.mask_type(),)*) + #![allow(clippy::unused_unit)] + let ($($var,)*) = self; + ($($var.mask_type(),)*) } - - fn canonical(&self) -> Self::CanonicalType { - DynBundleType::new(self.fields()) + fn canonical(&self) -> CanonicalType { + Bundle::new(self.fields()).canonical() } - - fn source_location(&self) -> SourceLocation { + #[track_caller] + fn from_canonical(canonical_type: CanonicalType) -> Self { + #![allow(clippy::unused_unit)] + let CanonicalType::Bundle(bundle) = canonical_type else { + panic!("expected bundle"); + }; + let [$($var,)*] = *bundle.fields() else { + panic!("bundle has wrong number of fields"); + }; + $(let BundleField { name, flipped, ty } = $var; + assert_eq!(&*name, stringify!($num)); + assert!(!flipped); + let $var = $T::from_canonical(ty);)* + ($($var,)*) + } + fn source_location() -> SourceLocation { SourceLocation::builtin() } - - fn type_enum(&self) -> TypeEnum { - TypeEnum::BundleType(self.canonical()) + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueFromOpaque::new(*self, opaque); + $(let $var = v.field_from_opaque();)* + ($($var,)*) } - - #[allow(clippy::unused_unit)] - fn from_canonical_type(t: Self::CanonicalType) -> Self { - let [$($field),*] = *t.fields() else { - panic!("wrong number of fields"); 
- }; - ($($field.from_canonical_type_helper(stringify!($m), false),)*) - } - } - - impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) - where - $($T::Value: Value,)* - { - fn expr_deref( - this: &::fayalite::expr::Expr<::Value>, - ) -> &::MatchVariant { - let _ = this; - Interned::<_>::into_inner( - Intern::intern_sized(( - $(this.field(stringify!($m)),)* - )), - ) - } - } - - impl<$($T: Type,)*> BundleType for ($($T,)*) - where - $($T::Value: Value,)* - { - type Builder = $builder<($($T::Value,)*), $(into_unit!($T),)*>; - fn builder() -> Self::Builder { - $builder { - $($field: (),)* - _phantom: PhantomData, - } - } - fn fields( + fn sim_value_clone_from_opaque( &self, - ) -> Interned<[FieldType>]> { - [ - $(FieldType { - name: stringify!($m).intern(), - flipped: false, - ty: self.$m.canonical_dyn(), - },)* - ][..].intern() + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueFromOpaque::new(*self, opaque); + let ($($var,)*) = value; + $(v.field_clone_from_opaque($var);)* } - fn fields_hint() -> FieldsHint { - FieldsHint::new([ - $(FieldType { - name: stringify!($m).intern(), - flipped: false, - ty: TypeHint::<$T>::intern_dyn(), - },)* - ], false) + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueToOpaque::new(*self, writer); + let ($($var,)*) = value; + $(v.field($var);)* + v.finish() } } - - impl<$($T: StaticType,)*> StaticType for ($($T,)*) - where - $($T::Value: Value,)* - { - #[allow(clippy::unused_unit)] - fn static_type() -> Self { - ($($T::static_type(),)*) + impl<$($T: Type,)*> BundleType for ($($T,)*) { + type Builder = TupleBuilder<($(Unfilled<$T>,)*)>; + type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>; + fn fields(&self) -> Interned<[BundleField]> { + let ($($var,)*) = self; + [$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice() } } + impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) { + fn expr_deref(this: &Expr) -> &Self::MatchVariant { + let _ = this; + Interned::into_inner(($(Expr::field(*this, stringify!($num)),)*).intern_sized()) + } + } + impl<$($T: StaticType,)*> StaticType for ($($T,)*) { + const TYPE: Self = ($($T::TYPE,)*); + const MASK_TYPE: Self::MaskType = ($($T::MASK_TYPE,)*); + const TYPE_PROPERTIES: TypeProperties = { + let builder = BundleTypePropertiesBuilder::new(); + $(let builder = builder.field(false, $T::TYPE_PROPERTIES);)* + builder.finish() + }; + const MASK_TYPE_PROPERTIES: TypeProperties = { + let builder = BundleTypePropertiesBuilder::new(); + $(let builder = builder.field(false, $T::MASK_TYPE_PROPERTIES);)* + builder.finish() + }; + } + impl<$($T: ToExpr,)*> ToExpr for ($($T,)*) { + type Type = ($($T::Type,)*); - impl<$($T: Value,)*> Value for ($($T,)*) - where - $($T::Type: Type,)* - { - fn to_canonical(&self) -> ::CanonicalValue { - let ty = self.ty().canonical(); - DynBundle::new( - ty, - Arc::new([ - $(self.$m.to_canonical_dyn(),)* - ]), + fn to_expr(&self) -> Expr { + let ($($var,)*) = self; + $(let $var = $var.to_expr();)* + let ty = ($(Expr::ty($var),)*); + let field_values = [$(Expr::canonical($var)),*]; + BundleLiteral::new(ty, field_values.intern_slice()).to_expr() + } + } + impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> { + type Type = ($($T,)*); + + fn to_expr(&self) -> Expr { + let ($($var,)*) = self.0; + let ty = 
($(Expr::ty($var),)*); + let field_values = [$(Expr::canonical($var)),*]; + BundleLiteral::new(ty, field_values.intern_slice()).to_expr() + } + } + impl<$($T: ToSimValueWithType,)*> ToSimValueWithType for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue { + SimValue::into_canonical(ToSimValueWithType::::to_sim_value_with_type(self, Bundle::from_canonical(ty))) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue + { + SimValue::into_canonical(ToSimValueWithType::::into_sim_value_with_type(self, Bundle::from_canonical(ty))) + } + } + impl<$($T: ToSimValueWithType,)*> ToSimValueWithType for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue { + let ($($var,)*) = self; + let [$($ty_var,)*] = *ty.fields() else { + panic!("bundle has wrong number of fields"); + }; + $(let $var = $var.to_sim_value_with_type($ty_var.ty);)* + ToSimValueWithType::into_sim_value_with_type(($($var,)*), ty) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: Bundle) -> SimValue { + #![allow(unused_mut)] + #![allow(clippy::unused_unit)] + let ($($var,)*) = self; + let [$($ty_var,)*] = *ty.fields() else { + panic!("bundle has wrong number of fields"); + }; + let mut opaque = OpaqueSimValue::empty(); + $(let $var = $var.into_sim_value_with_type($ty_var.ty); + assert_eq!(SimValue::ty(&$var), $ty_var.ty); + opaque.extend_from_slice(SimValue::opaque(&$var).as_slice()); + )* + SimValue::from_opaque(ty, opaque) + } + } + impl<$($T: ToSimValueWithType<$Ty>, $Ty: Type,)*> ToSimValueWithType<($($Ty,)*)> for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> { + let ($($var,)*) = self; + let ($($ty_var,)*) = ty; + $(let $var = $var.to_sim_value_with_type($ty_var);)* + SimValue::from_value(ty, ($($var,)*)) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> { + let ($($var,)*) = self; + let ($($ty_var,)*) = ty; + $(let $var = $var.into_sim_value_with_type($ty_var);)* + SimValue::from_value(ty, ($($var,)*)) + } + } + impl<$($T: ToSimValue,)*> ToSimValue for ($($T,)*) { + type Type = ($($T::Type,)*); + #[track_caller] + fn to_sim_value(&self) -> SimValue { + let ($($var,)*) = self; + $(let $var = $var.to_sim_value();)* + SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*)) + } + #[track_caller] + fn into_sim_value(self) -> SimValue { + let ($($var,)*) = self; + $(let $var = $var.to_sim_value();)* + SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*)) + } + } + impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) { + fn cmp_eq(lhs: Expr, rhs: Expr<($($Rhs,)*)>) -> Expr { + let ($($lhs_var,)*) = *lhs; + let ($($rhs_var,)*) = *rhs; + ArrayLiteral::::new( + Bool, + FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]), ) + .cast_to_bits() + .all_one_bits() } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - BundleValue::to_bits_impl(this) - } - } - impl<$($T: Value,)*> BundleValue for ($($T,)*) - where - $($T::Type: Type,)* - { + fn cmp_ne(lhs: Expr, rhs: Expr<($($Rhs,)*)>) -> Expr { + let ($($lhs_var,)*) = *lhs; + let ($($rhs_var,)*) = *rhs; + ArrayLiteral::::new( + Bool, + FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]), + ) + .cast_to_bits() + .any_one_bits() + } } + impl<$($Lhs: SimValuePartialEq<$Rhs>, $Rhs: Type,)*> SimValuePartialEq<($($Rhs,)*)> 
for ($($Lhs,)*) { + fn sim_value_eq(lhs: &SimValue, rhs: &SimValue<($($Rhs,)*)>) -> bool { + let ($($lhs_var,)*) = &**lhs; + let ($($rhs_var,)*) = &**rhs; + let retval = true; + $(let retval = retval && $lhs_var == $rhs_var;)* + retval + } + } + }; + ([$($lhs:tt)*] [$rhs_first:tt $($rhs:tt)*]) => { + impl_tuples!([$($lhs)*] []); + impl_tuples!([$($lhs)* $rhs_first] [$($rhs)*]); }; } -impl_tuple!(TupleBuilder0,); -impl_tuple!(TupleBuilder1, (A A2 field_0 0)); -impl_tuple!(TupleBuilder2, (A A2 field_0 0), (B B2 field_1 1)); -impl_tuple!(TupleBuilder3, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2)); -impl_tuple!(TupleBuilder4, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3)); -impl_tuple!(TupleBuilder5, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4)); -impl_tuple!(TupleBuilder6, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5)); -impl_tuple!(TupleBuilder7, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6)); -impl_tuple!(TupleBuilder8, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6), (H H2 field_7 7)); -impl_tuple!(TupleBuilder9, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6), (H H2 field_7 7), (I I2 field_8 8)); -impl_tuple!(TupleBuilder10, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6), (H H2 field_7 7), (I I2 field_8 8), (J J2 field_9 9)); -impl_tuple!(TupleBuilder11, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6), (H H2 field_7 7), (I I2 field_8 8), (J J2 field_9 9), (K K2 field_10 10)); -impl_tuple!(TupleBuilder12, (A A2 field_0 0), (B B2 field_1 1), (C C2 field_2 2), (D D2 field_3 3), (E E2 field_4 4), (F F2 field_5 5), (G G2 field_6 6), (H H2 field_7 7), (I I2 field_8 8), (J J2 field_9 9), (K K2 field_10 10), (L L2 field_11 11)); +impl_tuples! 
{ + [] [ + {#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0} + {#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1} + {#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2} + {#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3} + {#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4} + {#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5} + {#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6} + {#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7} + {#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8} + {#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9} + {#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10} + {#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11} + ] +} + +impl Type for PhantomData { + type BaseType = Bundle; + type MaskType = (); + type SimValue = PhantomData; + type MatchVariant = PhantomData; + type MatchActiveScope = (); + type MatchVariantAndInactiveScope = MatchVariantWithoutScope; + type MatchVariantsIter = std::iter::Once; + fn match_variants( + this: Expr, + source_location: SourceLocation, + ) -> Self::MatchVariantsIter { + let _ = this; + let _ = source_location; + std::iter::once(MatchVariantWithoutScope(PhantomData)) + } + fn mask_type(&self) -> Self::MaskType { + () + } + fn canonical(&self) -> CanonicalType { + Bundle::new(self.fields()).canonical() + } + #[track_caller] + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::Bundle(bundle) = canonical_type else { + panic!("expected bundle"); + }; + assert!( + bundle.fields().is_empty(), + "bundle has wrong number of fields" + ); + PhantomData + } + fn source_location() -> SourceLocation { + SourceLocation::builtin() + } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert!(opaque.is_empty()); + *self + } + fn sim_value_clone_from_opaque( + &self, + _value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert!(opaque.is_empty()); + } + fn sim_value_to_opaque<'w>( + &self, + _value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } +} + +pub struct PhantomDataBuilder(PhantomData); + +impl Default for PhantomDataBuilder { + fn default() -> Self { + Self(PhantomData) + } +} + +impl ToExpr for PhantomDataBuilder { + type Type = PhantomData; + + fn to_expr(&self) -> Expr { + PhantomData.to_expr() + } +} + +impl BundleType for PhantomData { + type Builder = PhantomDataBuilder; + type FilledBuilder = PhantomDataBuilder; + fn fields(&self) -> Interned<[BundleField]> { + Interned::default() + } +} + +impl TypeWithDeref for PhantomData { + fn expr_deref(_this: &Expr) -> &Self::MatchVariant { + &PhantomData + } +} + +impl StaticType for PhantomData { + const TYPE: Self = PhantomData; + const MASK_TYPE: Self::MaskType = (); + const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES; + const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES; +} + +impl ToExpr for PhantomData { + type Type = PhantomData; + + fn to_expr(&self) -> Expr { + BundleLiteral::new(PhantomData, 
Interned::default()).to_expr() + } +} + +impl ToSimValue for PhantomData { + type Type = PhantomData; + + #[track_caller] + fn to_sim_value(&self) -> SimValue { + SimValue::from_value(*self, *self) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Self) -> SimValue { + SimValue::from_value(ty, *self) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue { + assert!(ty.fields().is_empty()); + SimValue::from_opaque(ty, OpaqueSimValue::empty()) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, canonical_ty: CanonicalType) -> SimValue { + let ty = Bundle::from_canonical(canonical_ty); + assert!(ty.fields().is_empty()); + SimValue::from_opaque(canonical_ty, OpaqueSimValue::empty()) + } +} diff --git a/crates/fayalite/src/cli.rs b/crates/fayalite/src/cli.rs deleted file mode 100644 index 412e565..0000000 --- a/crates/fayalite/src/cli.rs +++ /dev/null @@ -1,326 +0,0 @@ -use crate::{ - bundle::{BundleType, BundleValue, DynBundle}, - firrtl, - intern::Interned, - module::Module, -}; -use clap::{ - builder::{OsStringValueParser, TypedValueParser}, - Args, Parser, Subcommand, ValueEnum, ValueHint, -}; -use eyre::{eyre, Report}; -use std::{error, ffi::OsString, fmt, io, path::PathBuf, process}; - -pub type Result = std::result::Result; - -pub struct CliError(Report); - -impl fmt::Debug for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Display for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl error::Error for CliError {} - -impl From for CliError { - fn from(value: io::Error) -> Self { - CliError(Report::new(value)) - } -} - -pub trait RunPhase { - type Output; - fn run(&self, arg: Arg) -> Result; -} - -#[derive(Args, Debug)] -#[non_exhaustive] -pub struct BaseArgs { - /// the directory to put the generated main output file and associated files in - #[arg(short, long, value_hint = ValueHint::DirPath)] - pub output: PathBuf, - /// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo - #[arg(long)] - pub file_stem: Option, -} - -impl BaseArgs { - pub fn to_firrtl_file_backend(&self) -> firrtl::FileBackend { - firrtl::FileBackend { - dir_path: self.output.clone(), - top_fir_file_stem: self.file_stem.clone(), - } - } -} - -#[derive(Args, Debug)] -#[non_exhaustive] -pub struct FirrtlArgs { - #[command(flatten)] - pub base: BaseArgs, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct FirrtlOutput { - pub file_stem: String, -} - -impl FirrtlOutput { - pub fn firrtl_file(&self, args: &FirrtlArgs) -> PathBuf { - let mut retval = args.base.output.join(&self.file_stem); - retval.set_extension("fir"); - retval - } -} - -impl FirrtlArgs { - fn run_impl(&self, top_module: Module) -> Result { - let firrtl::FileBackend { - top_fir_file_stem, .. 
- } = firrtl::export(self.base.to_firrtl_file_backend(), &top_module)?; - Ok(FirrtlOutput { - file_stem: top_fir_file_stem.expect( - "export is known to set the file stem from the circuit name if not provided", - ), - }) - } -} - -impl RunPhase> for FirrtlArgs -where - T::Type: BundleType, -{ - type Output = FirrtlOutput; - fn run(&self, top_module: Module) -> Result { - self.run_impl(top_module.canonical()) - } -} - -impl RunPhase>> for FirrtlArgs -where - T::Type: BundleType, -{ - type Output = FirrtlOutput; - fn run(&self, top_module: Interned>) -> Result { - self.run(*top_module) - } -} - -/// based on [LLVM Circt's recommended lowering options -/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target) -#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)] -#[non_exhaustive] -pub enum VerilogDialect { - Questa, - Spyglass, - Verilator, - Vivado, - Yosys, -} - -impl VerilogDialect { - pub fn firtool_extra_args(self) -> &'static [&'static str] { - match self { - VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], - VerilogDialect::Spyglass => { - &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] - } - VerilogDialect::Verilator => &[ - "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", - ], - VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], - VerilogDialect::Yosys => { - &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] - } - } - } -} - -#[derive(Args, Debug)] -#[non_exhaustive] -pub struct VerilogArgs { - #[command(flatten)] - pub firrtl: FirrtlArgs, - #[arg( - long, - default_value = "firtool", - env = "FIRTOOL", - value_hint = ValueHint::CommandName, - value_parser = OsStringValueParser::new().try_map(which::which) - )] - pub firtool: PathBuf, - #[arg(long)] - pub firtool_extra_args: Vec, - /// adapt the generated Verilog for a particular toolchain - #[arg(long)] - pub verilog_dialect: Option, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct VerilogOutput { - pub firrtl: FirrtlOutput, -} - -impl VerilogOutput { - pub fn verilog_file(&self, args: &VerilogArgs) -> PathBuf { - let mut retval = args.firrtl.base.output.join(&self.firrtl.file_stem); - retval.set_extension("v"); - retval - } -} - -impl VerilogArgs { - fn run_impl(&self, firrtl_output: FirrtlOutput) -> Result { - let output = VerilogOutput { - firrtl: firrtl_output, - }; - let mut cmd = process::Command::new(&self.firtool); - cmd.arg(output.firrtl.firrtl_file(&self.firrtl)); - cmd.arg("-o"); - cmd.arg(output.verilog_file(self)); - if let Some(dialect) = self.verilog_dialect { - cmd.args(dialect.firtool_extra_args()); - } - cmd.args(&self.firtool_extra_args); - cmd.current_dir(&self.firrtl.base.output); - let status = cmd.status()?; - if status.success() { - Ok(output) - } else { - Err(CliError(eyre!( - "running {} failed: {status}", - self.firtool.display() - ))) - } - } -} - -impl RunPhase for VerilogArgs -where - FirrtlArgs: RunPhase, -{ - type Output = VerilogOutput; - fn run(&self, arg: Arg) -> Result { - let firrtl_output = self.firrtl.run(arg)?; - self.run_impl(firrtl_output) - } -} - -#[derive(Subcommand, Debug)] -enum CliCommand { - /// Generate FIRRTL - Firrtl(FirrtlArgs), - /// Generate Verilog - Verilog(VerilogArgs), -} - -/// a simple CLI -/// -/// Use like: -/// -/// ```no_run -/// # use fayalite::hdl_module; -/// # #[hdl_module] -/// # fn my_module() {} -/// use fayalite::cli; -/// -/// fn main() -> cli::Result { -/// 
cli::Cli::parse().run(my_module()) -/// } -/// ``` -/// -/// You can also use it with a larger [`clap`]-based CLI like so: -/// -/// ```no_run -/// # use fayalite::hdl_module; -/// # #[hdl_module] -/// # fn my_module() {} -/// use clap::{Subcommand, Parser}; -/// use fayalite::cli; -/// -/// #[derive(Subcommand)] -/// pub enum Cmd { -/// #[command(flatten)] -/// Fayalite(cli::Cli), -/// MySpecialCommand { -/// #[arg(long)] -/// foo: bool, -/// }, -/// } -/// -/// #[derive(Parser)] -/// pub struct Cli { -/// #[command(subcommand)] -/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands -/// } -/// -/// fn main() -> cli::Result { -/// match Cli::parse().cmd { -/// Cmd::Fayalite(v) => v.run(my_module())?, -/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"), -/// } -/// Ok(()) -/// } -/// ``` -#[derive(Parser, Debug)] -// clear things that would be crate-specific -#[command(name = "Fayalite Simple CLI", about = None, long_about = None)] -pub struct Cli { - #[command(subcommand)] - subcommand: CliCommand, -} - -impl clap::Subcommand for Cli { - fn augment_subcommands(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands(cmd) - } - - fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands_for_update(cmd) - } - - fn has_subcommand(name: &str) -> bool { - CliCommand::has_subcommand(name) - } -} - -impl RunPhase for Cli -where - FirrtlArgs: RunPhase, -{ - type Output = (); - fn run(&self, arg: T) -> Result { - match &self.subcommand { - CliCommand::Firrtl(c) => { - c.run(arg)?; - } - CliCommand::Verilog(c) => { - c.run(arg)?; - } - } - Ok(()) - } -} - -impl Cli { - /// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`] - pub fn parse() -> Self { - clap::Parser::parse() - } - /// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`] - pub fn run(&self, top_module: T) -> Result<()> - where - Self: RunPhase, - { - RunPhase::run(self, top_module) - } -} diff --git a/crates/fayalite/src/clock.rs b/crates/fayalite/src/clock.rs index 48c1a3c..909edbd 100644 --- a/crates/fayalite/src/clock.rs +++ b/crates/fayalite/src/clock.rs @@ -2,114 +2,92 @@ // See Notices.txt for copyright information use crate::{ expr::{Expr, ToExpr}, - int::{UInt, UIntType}, - intern::Interned, - reset::Reset, + hdl, + int::Bool, + reset::{Reset, ResetType}, source_location::SourceLocation, ty::{ - impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, - DynCanonicalType, StaticType, Type, TypeEnum, Value, ValueEnum, + CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self, }, - type_deduction::{HitUndeducedType, UndeducedType}, - util::interned_bit, }; -use bitvec::slice::BitSlice; +use bitvec::{bits, order::Lsb0}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] -pub struct ClockType; +pub struct Clock; -impl ClockType { - pub const fn new() -> Self { - Self - } -} +impl Type for Clock { + type BaseType = Clock; + type MaskType = Bool; + type SimValue = bool; -impl Connect for ClockType {} - -impl Type for ClockType { - type Value = Clock; - type CanonicalType = ClockType; - type CanonicalValue = Clock; - type MaskType = UIntType<1>; - type MaskValue = UInt<1>; - - impl_match_values_as_self!(); + impl_match_variant_as_self!(); fn mask_type(&self) -> Self::MaskType { - UIntType::new() + Bool } - fn type_enum(&self) -> 
TypeEnum { - TypeEnum::Clock(*self) + fn canonical(&self) -> CanonicalType { + CanonicalType::Clock(*self) } - fn from_canonical_type(t: Self::CanonicalType) -> Self { - t - } - - fn canonical(&self) -> Self::CanonicalType { - *self - } - - fn source_location(&self) -> SourceLocation { + fn source_location() -> SourceLocation { SourceLocation::builtin() } - fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> { - Some(this) + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::Clock(retval) = canonical_type else { + panic!("expected Clock"); + }; + retval + } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + opaque.bits()[0] + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + *value = opaque.bits()[0]; + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1)); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice( + [bits![0], bits![1]][*value as usize], + )) } } -impl Connect for ClockType {} - -impl CanonicalType for ClockType { - const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Clock; -} - -impl StaticType for ClockType { - fn static_type() -> Self { - Self +impl Clock { + pub fn type_properties(self) -> TypeProperties { + Self::TYPE_PROPERTIES + } + pub fn can_connect(self, _rhs: Self) -> bool { + true } } -#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] -pub struct Clock(pub bool); - -impl ToExpr for Clock { - type Type = ClockType; - - fn ty(&self) -> Self::Type { - ClockType - } - - fn to_expr(&self) -> Expr { - Expr::from_value(self) - } -} - -impl Value for Clock { - fn to_canonical(&self) -> ::CanonicalValue { - *self - } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - Ok(interned_bit(this.0)) - } -} - -impl CanonicalValue for Clock { - fn value_enum_impl(this: &Self) -> ValueEnum { - ValueEnum::Clock(*this) - } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - Ok(interned_bit(this.0)) - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Value)] -#[hdl(static, outline_generated)] -pub struct ClockDomain { - pub clk: Clock, - pub rst: Reset, +impl StaticType for Clock { + const TYPE: Self = Self; + const MASK_TYPE: Self::MaskType = Bool; + const TYPE_PROPERTIES: TypeProperties = TypeProperties { + is_passive: true, + is_storable: false, + is_castable_from_bits: true, + bit_width: 1, + sim_only_values_len: 0, + }; + const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES; } pub trait ToClock { @@ -140,10 +118,10 @@ impl ToClock for Expr { } } -impl ToClock for Clock { - fn to_clock(&self) -> Expr { - self.to_expr() - } +#[hdl] +pub struct ClockDomain { + pub clk: Clock, + pub rst: R, } impl ToClock for bool { @@ -151,9 +129,3 @@ impl ToClock for bool { self.to_expr().to_clock() } } - -impl ToClock for UInt<1> { - fn to_clock(&self) -> Expr { - self.to_expr().to_clock() - } -} diff --git a/crates/fayalite/src/enum_.rs b/crates/fayalite/src/enum_.rs index 1e6180d..083072b 100644 --- a/crates/fayalite/src/enum_.rs +++ b/crates/fayalite/src/enum_.rs @@ -1,43 +1,49 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information 
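(Reviewer note, not part of this patch.) A small, hypothetical sketch of the `ToClock` path kept in the clock.rs hunk above: a plain `bool` is lifted to an expression by `ToExpr` and then cast to a clock, so that chain still works with the new `Clock` type. The function name and the `Expr<Clock>` return type are my reading of the surrounding impls:

```rust
use fayalite::{
    clock::{Clock, ToClock},
    expr::Expr,
};

// A literal bool is first converted to an expression by ToExpr, then the
// ToClock impl for expressions casts that to an Expr<Clock>.
fn constant_clock() -> Expr<Clock> {
    true.to_clock()
}
```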
-#![allow(clippy::type_complexity)] + use crate::{ - bundle::{BundleValue, TypeHintTrait}, - expr::{ops::VariantAccess, Expr, NotALiteralExpr, ToExpr}, - int::{UInt, UIntType}, - intern::{Intern, Interned, MemoizeGeneric}, - module::{ - EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, ModuleBuilder, - NormalModule, Scope, + expr::{ + Expr, ToExpr, + ops::{ExprPartialEq, VariantAccess}, }, + hdl, + int::{Bool, UIntValue}, + intern::{Intern, Interned}, + module::{ + EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, connect, + enum_match_variants_helper, incomplete_wire, wire, + }, + sim::value::{SimValue, SimValuePartialEq}, source_location::SourceLocation, ty::{ - CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, DynCanonicalType, - DynCanonicalValue, DynType, MatchVariantAndInactiveScope, Type, TypeEnum, Value, ValueEnum, + CanonicalType, MatchVariantAndInactiveScope, OpaqueSimValue, OpaqueSimValueSize, + OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type, + TypeProperties, }, - type_deduction::{HitUndeducedType, UndeducedType}, -}; -use bitvec::{order::Lsb0, slice::BitSlice, vec::BitVec, view::BitView}; -use hashbrown::HashMap; -use std::{ - borrow::Cow, - fmt, - hash::{Hash, Hasher}, - iter::FusedIterator, - marker::PhantomData, + util::HashMap, }; +use bitvec::{order::Lsb0, slice::BitSlice, view::BitView}; +use serde::{Deserialize, Serialize}; +use std::{convert::Infallible, fmt, iter::FusedIterator, sync::Arc}; -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub struct VariantType { +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct EnumVariant { pub name: Interned, - pub ty: Option, + pub ty: Option, } -pub struct FmtDebugInEnum<'a, T>(&'a VariantType); +impl EnumVariant { + pub fn fmt_debug_in_enum(self) -> FmtDebugInEnum { + FmtDebugInEnum(self) + } +} -impl fmt::Debug for FmtDebugInEnum<'_, T> { +#[derive(Copy, Clone)] +pub struct FmtDebugInEnum(EnumVariant); + +impl fmt::Debug for FmtDebugInEnum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let VariantType { name, ref ty } = *self.0; + let EnumVariant { name, ty } = self.0; if let Some(ty) = ty { write!(f, "{name}({ty:?})") } else { @@ -46,76 +52,22 @@ impl fmt::Debug for FmtDebugInEnum<'_, T> { } } -impl fmt::Display for FmtDebugInEnum<'_, T> { +impl fmt::Display for FmtDebugInEnum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(self, f) } } -impl VariantType { - pub fn map_opt_ty) -> Option>(self, f: F) -> VariantType { - let Self { name, ty } = self; - VariantType { name, ty: f(ty) } - } - pub fn map_ty U>(self, f: F) -> VariantType { - let Self { name, ty } = self; - VariantType { - name, - ty: ty.map(f), - } - } - pub fn as_ref_ty(&self) -> VariantType<&T> { - VariantType { - name: self.name, - ty: self.ty.as_ref(), - } - } - pub fn fmt_debug_in_enum(&self) -> FmtDebugInEnum { - FmtDebugInEnum(self) - } -} - -impl VariantType { - pub fn canonical(&self) -> VariantType { - self.as_ref_ty().map_ty(T::canonical) - } - pub fn to_dyn(&self) -> VariantType> { - self.as_ref_ty().map_ty(T::to_dyn) - } - pub fn canonical_dyn(&self) -> VariantType> { - self.as_ref_ty().map_ty(T::canonical_dyn) - } -} - -impl VariantType> { - pub fn from_canonical_type_helper_has_value(self, expected_name: &str) -> T { - assert_eq!(&*self.name, expected_name, "variant name doesn't match"); - let Some(ty) = self.ty else { - panic!("variant {expected_name} has no value but a value is 
expected"); - }; - T::from_dyn_canonical_type(ty) - } - pub fn from_canonical_type_helper_no_value(self, expected_name: &str) { - assert_eq!(&*self.name, expected_name, "variant name doesn't match"); - assert!( - self.ty.is_none(), - "variant {expected_name} has a value but is expected to have no value" - ); - } -} - #[derive(Clone, Eq)] -struct DynEnumTypeImpl { - variants: Interned<[VariantType>]>, +struct EnumImpl { + variants: Interned<[EnumVariant]>, name_indexes: HashMap, usize>, - bit_width: Result, - is_storable: Result, - is_castable_from_bits: Result, + type_properties: TypeProperties, } -impl fmt::Debug for DynEnumTypeImpl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "DynEnumType ")?; +impl std::fmt::Debug for EnumImpl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("Enum ")?; f.debug_set() .entries( self.variants @@ -126,68 +78,143 @@ impl fmt::Debug for DynEnumTypeImpl { } } -impl PartialEq for DynEnumTypeImpl { +impl std::hash::Hash for EnumImpl { + fn hash(&self, state: &mut H) { + self.variants.hash(state); + } +} + +impl PartialEq for EnumImpl { fn eq(&self, other: &Self) -> bool { self.variants == other.variants } } -impl Hash for DynEnumTypeImpl { - fn hash(&self, state: &mut H) { - self.variants.hash(state); - } -} +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Enum(Interned); -#[derive(Copy, Clone, Hash, PartialEq, Eq)] -pub struct DynEnumType(Interned); - -impl fmt::Debug for DynEnumType { +impl fmt::Debug for Enum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } -fn discriminant_bit_width_impl(variant_count: usize) -> usize { - variant_count - .next_power_of_two() - .checked_ilog2() - .unwrap_or(0) as usize +const fn discriminant_bit_width_impl(variant_count: usize) -> usize { + match variant_count.next_power_of_two().checked_ilog2() { + Some(x) => x as usize, + None => 0, + } } -impl DynEnumType { +#[derive(Clone)] +pub struct EnumTypePropertiesBuilder { + type_properties: TypeProperties, + variant_count: usize, +} + +impl EnumTypePropertiesBuilder { + #[must_use] + pub const fn new() -> Self { + Self { + type_properties: TypeProperties { + is_passive: true, + is_storable: true, + is_castable_from_bits: true, + bit_width: 0, + sim_only_values_len: 0, + }, + variant_count: 0, + } + } + pub const fn clone(&self) -> Self { + Self { ..*self } + } + #[must_use] + pub const fn variant(self, field_props: Option) -> Self { + let Self { + mut type_properties, + variant_count, + } = self; + if let Some(TypeProperties { + is_passive, + is_storable, + is_castable_from_bits, + bit_width, + sim_only_values_len, + }) = field_props + { + assert!(is_passive, "variant type must be a passive type"); + assert!( + sim_only_values_len == 0, + "can't have `SimOnlyValue`s in an Enum" + ); + type_properties = TypeProperties { + is_passive: true, + is_storable: type_properties.is_storable & is_storable, + is_castable_from_bits: type_properties.is_castable_from_bits + & is_castable_from_bits, + bit_width: if type_properties.bit_width < bit_width { + bit_width + } else { + type_properties.bit_width + }, + sim_only_values_len: 0, + }; + } + Self { + type_properties, + variant_count: variant_count + 1, + } + } + #[must_use] + pub fn variants(self, variants: impl IntoIterator) -> Self { + variants.into_iter().fold(self, |this, variant| { + this.variant(variant.ty.map(CanonicalType::type_properties)) + }) + } + pub const fn finish(self) -> TypeProperties { + assert!( + self.variant_count != 
0, + "zero-variant enums aren't yet supported: \ + https://github.com/chipsalliance/firrtl-spec/issues/208", + ); + let Some(bit_width) = self + .type_properties + .bit_width + .checked_add(discriminant_bit_width_impl(self.variant_count)) + else { + panic!("enum is too big: bit-width overflowed"); + }; + TypeProperties { + bit_width, + ..self.type_properties + } + } +} + +impl Default for EnumTypePropertiesBuilder { + fn default() -> Self { + Self::new() + } +} + +impl Enum { #[track_caller] - pub fn new(variants: Interned<[VariantType>]>) -> Self { - assert!(!variants.is_empty(), "zero-variant enums aren't yet supported: https://github.com/chipsalliance/firrtl-spec/issues/208"); - let mut name_indexes = HashMap::with_capacity(variants.len()); - let mut body_bit_width = Ok(0usize); - let mut is_storable = Ok(true); - let mut is_castable_from_bits = Ok(true); - for (index, &VariantType { name, ty }) in variants.iter().enumerate() { - if let Some(old_index) = name_indexes.insert(name, index) { + pub fn new(variants: Interned<[EnumVariant]>) -> Self { + let mut name_indexes = + HashMap::with_capacity_and_hasher(variants.len(), Default::default()); + let mut type_props_builder = EnumTypePropertiesBuilder::new(); + for (index, EnumVariant { name, ty }) in variants.iter().enumerate() { + if let Some(old_index) = name_indexes.insert(*name, index) { panic!("duplicate variant name {name:?}: at both index {old_index} and {index}"); } - if let Some(ty) = ty { - assert!( - ty.is_passive().unwrap_or(true), - "variant type must be a passive type: {ty:?}" - ); - body_bit_width = body_bit_width.and_then(|v| Ok(v.max(ty.bit_width()?))); - is_storable = HitUndeducedType::reduce_and(is_storable, ty.is_storable()); - is_castable_from_bits = - HitUndeducedType::reduce_and(is_castable_from_bits, ty.is_castable_from_bits()); - } + type_props_builder = type_props_builder.variant(ty.map(CanonicalType::type_properties)); } - let bit_width = body_bit_width.map(|v| { - v.checked_add(discriminant_bit_width_impl(variants.len())) - .unwrap_or_else(|| panic!("enum is too big: bit-width overflowed")) - }); Self( - DynEnumTypeImpl { + EnumImpl { variants, name_indexes, - bit_width, - is_storable, - is_castable_from_bits, + type_properties: type_props_builder.finish(), } .intern_sized(), ) @@ -195,240 +222,63 @@ impl DynEnumType { pub fn discriminant_bit_width(self) -> usize { discriminant_bit_width_impl(self.variants().len()) } - pub fn is_passive(self) -> Result { - Ok(true) - } - pub fn is_storable(self) -> Result { - self.0.is_storable - } - pub fn is_castable_from_bits(self) -> Result { - self.0.is_castable_from_bits - } - pub fn bit_width(self) -> Result { - self.0.bit_width + pub fn type_properties(self) -> TypeProperties { + self.0.type_properties } pub fn name_indexes(&self) -> &HashMap, usize> { &self.0.name_indexes } -} - -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct DynEnum { - ty: DynEnumType, - variant_index: usize, - variant_value: Option, -} - -impl DynEnum { - #[track_caller] - pub fn new_by_index( - ty: DynEnumType, - variant_index: usize, - variant_value: Option, - ) -> Self { - let variant = ty.variants()[variant_index]; - assert_eq!( - variant_value.as_ref().map(|v| v.ty()), - variant.ty, - "variant value doesn't match type" - ); - Self { - ty, - variant_index, - variant_value, + pub fn can_connect(self, rhs: Self) -> bool { + if self.0.variants.len() != rhs.0.variants.len() { + return false; } - } - #[track_caller] - pub fn new_by_name( - ty: DynEnumType, - variant_name: Interned, - 
variant_value: Option, - ) -> Self { - let variant_index = ty.name_indexes()[&variant_name]; - Self::new_by_index(ty, variant_index, variant_value) - } - pub fn variant_index(&self) -> usize { - self.variant_index - } - pub fn variant_value(&self) -> &Option { - &self.variant_value - } - pub fn variant_with_type(&self) -> VariantType> { - self.ty.variants()[self.variant_index] - } - pub fn variant_name(&self) -> Interned { - self.variant_with_type().name - } - pub fn variant_type(&self) -> Option> { - self.variant_with_type().ty - } - pub fn variant_with_value(&self) -> VariantType<&DynCanonicalValue> { - self.variant_with_type() - .map_opt_ty(|_| self.variant_value.as_ref()) - } -} - -#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] -pub struct VariantsHint { - pub known_variants: Interned<[VariantType>]>, - pub more_variants: bool, -} - -impl VariantsHint { - pub fn new( - known_variants: impl IntoIterator>>, - more_variants: bool, - ) -> Self { - let known_variants = Intern::intern_owned(Vec::from_iter(known_variants)); - Self { - known_variants, - more_variants, - } - } - pub fn check_variant( - self, - index: usize, - variant: VariantType<&dyn DynType>, - ) -> Result<(), String> { - let Some(&known_variant) = self.known_variants.get(index) else { - return if self.more_variants { - Ok(()) - } else { - Err(format!( - "too many variants: name={:?} index={index}", - variant.name - )) - }; - }; - let VariantType { - name: known_name, - ty: type_hint, - } = known_variant; - let VariantType { name, ty } = variant; - if name != known_name { - Err(format!( - "wrong variant name {name:?}, expected {known_name:?}" - )) - } else { - match (ty, type_hint) { - (Some(ty), Some(type_hint)) => type_hint.matches(ty), - (None, None) => Ok(()), - (None, Some(_)) => Err(format!( - "expected variant {name:?} to have type, no type provided" - )), - (Some(_), None) => Err(format!( - "expected variant {name:?} to have no type, but a type was provided" - )), + for ( + &EnumVariant { + name: lhs_name, + ty: lhs_ty, + }, + &EnumVariant { + name: rhs_name, + ty: rhs_ty, + }, + ) in self.0.variants.iter().zip(rhs.0.variants.iter()) + { + if lhs_name != rhs_name { + return false; + } + match (lhs_ty, rhs_ty) { + (None, None) => {} + (None, Some(_)) | (Some(_), None) => return false, + (Some(lhs_ty), Some(rhs_ty)) => { + if !lhs_ty.can_connect(rhs_ty) { + return false; + } + } } } + true } } pub trait EnumType: Type< - CanonicalType = DynEnumType, - CanonicalValue = DynEnum, - MaskType = UIntType<1>, - MaskValue = UInt<1>, - MatchActiveScope = Scope, - MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope, - MatchVariantsIter = EnumMatchVariantsIter, -> -where - Self::Value: EnumValue + ToExpr, + BaseType = Enum, + MaskType = Bool, + MatchActiveScope = Scope, + MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope, + MatchVariantsIter = EnumMatchVariantsIter, + > { - type Builder; + type SimBuilder: From; + fn variants(&self) -> Interned<[EnumVariant]>; fn match_activate_scope( v: Self::MatchVariantAndInactiveScope, ) -> (Self::MatchVariant, Self::MatchActiveScope); - fn builder() -> Self::Builder; - fn variants(&self) -> Interned<[VariantType>]>; - fn variants_hint() -> VariantsHint; - #[allow(clippy::result_unit_err)] - fn variant_to_bits( - &self, - variant_index: usize, - variant_value: Option<&VariantValue>, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - #[derive(Hash, Eq, PartialEq)] - struct VariantToBitsMemoize(PhantomData<(E, V)>); - impl Clone for VariantToBitsMemoize { - fn 
clone(&self) -> Self { - *self - } - } - impl Copy for VariantToBitsMemoize {} - impl< - E: EnumType>, - V: ToExpr + Eq + Hash + Send + Sync + 'static + Clone, - > MemoizeGeneric for VariantToBitsMemoize - { - type InputRef<'a> = (&'a E, usize, Option<&'a V>); - type InputOwned = (E, usize, Option); - type InputCow<'a> = (Cow<'a, E>, usize, Option>); - type Output = Result, HitUndeducedType>, NotALiteralExpr>; - - fn input_borrow(input: &Self::InputOwned) -> Self::InputRef<'_> { - (&input.0, input.1, input.2.as_ref()) - } - fn input_eq(a: Self::InputRef<'_>, b: Self::InputRef<'_>) -> bool { - a == b - } - fn input_cow_into_owned(input: Self::InputCow<'_>) -> Self::InputOwned { - (input.0.into_owned(), input.1, input.2.map(Cow::into_owned)) - } - fn input_cow_borrow<'a>(input: &'a Self::InputCow<'_>) -> Self::InputRef<'a> { - (&input.0, input.1, input.2.as_deref()) - } - fn input_cow_from_owned<'a>(input: Self::InputOwned) -> Self::InputCow<'a> { - (Cow::Owned(input.0), input.1, input.2.map(Cow::Owned)) - } - fn input_cow_from_ref(input: Self::InputRef<'_>) -> Self::InputCow<'_> { - (Cow::Borrowed(input.0), input.1, input.2.map(Cow::Borrowed)) - } - fn inner(self, input: Self::InputRef<'_>) -> Self::Output { - let (ty, variant_index, variant_value) = input; - let ty = ty.canonical(); - let bit_width = match ty.bit_width() { - Ok(v) => v, - Err(e) => return Ok(Err(e)), - }; - let mut bits = BitVec::with_capacity(bit_width); - bits.extend_from_bitslice( - &variant_index.view_bits::()[..ty.discriminant_bit_width()], - ); - if let Some(variant_value) = variant_value { - match variant_value.to_expr().to_literal_bits()? { - Ok(variant_value) => bits.extend_from_bitslice(&variant_value), - Err(e) => return Ok(Err(e)), - } - } - bits.resize(bit_width, false); - Ok(Ok(Intern::intern_owned(bits))) - } - } - VariantToBitsMemoize::(PhantomData).get(( - self, - variant_index, - variant_value, - )) - } } -pub trait EnumValue: Value -where - ::Type: EnumType, -{ -} +pub struct EnumMatchVariantAndInactiveScope(EnumMatchVariantAndInactiveScopeImpl); -pub struct EnumMatchVariantAndInactiveScope(EnumMatchVariantAndInactiveScopeImpl) -where - T::Value: EnumValue; - -impl MatchVariantAndInactiveScope for EnumMatchVariantAndInactiveScope -where - T::Value: EnumValue, -{ +impl MatchVariantAndInactiveScope for EnumMatchVariantAndInactiveScope { type MatchVariant = T::MatchVariant; type MatchActiveScope = Scope; @@ -437,36 +287,22 @@ where } } -impl EnumMatchVariantAndInactiveScope -where - T::Value: EnumValue, -{ - pub fn variant_access(&self) -> Interned>> { +impl EnumMatchVariantAndInactiveScope { + pub fn variant_access(&self) -> VariantAccess { self.0.variant_access() } - pub fn activate( - self, - ) -> ( - Interned>>, - Scope, - ) { + pub fn activate(self) -> (VariantAccess, Scope) { self.0.activate() } } #[derive(Clone)] -pub struct EnumMatchVariantsIter -where - T::Value: EnumValue, -{ +pub struct EnumMatchVariantsIter { pub(crate) inner: EnumMatchVariantsIterImpl, pub(crate) variant_index: std::ops::Range, } -impl Iterator for EnumMatchVariantsIter -where - T::Value: EnumValue, -{ +impl Iterator for EnumMatchVariantsIter { type Item = EnumMatchVariantAndInactiveScope; fn next(&mut self) -> Option { @@ -480,21 +316,15 @@ where } } -impl ExactSizeIterator for EnumMatchVariantsIter -where - T::Value: EnumValue, -{ +impl ExactSizeIterator for EnumMatchVariantsIter { fn len(&self) -> usize { self.variant_index.len() } } -impl FusedIterator for EnumMatchVariantsIter where T::Value: EnumValue {} +impl 
FusedIterator for EnumMatchVariantsIter {} -impl DoubleEndedIterator for EnumMatchVariantsIter -where - T::Value: EnumValue, -{ +impl DoubleEndedIterator for EnumMatchVariantsIter { fn next_back(&mut self) -> Option { self.variant_index.next_back().map(|variant_index| { EnumMatchVariantAndInactiveScope(self.inner.for_variant_index(variant_index)) @@ -502,131 +332,790 @@ where } } -impl Connect for DynEnumType {} +pub struct NoBuilder { + _ty: Enum, +} -impl Type for DynEnumType { - type CanonicalType = DynEnumType; - type Value = DynEnum; - type CanonicalValue = DynEnum; - type MaskType = UIntType<1>; - type MaskValue = UInt<1>; - type MatchVariant = Option>; - type MatchActiveScope = Scope; - type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope; - type MatchVariantsIter = EnumMatchVariantsIter; - - fn match_variants( - this: Expr, - module_builder: &mut ModuleBuilder, - source_location: SourceLocation, - ) -> Self::MatchVariantsIter - where - IO::Type: crate::bundle::BundleType, - { - module_builder.enum_match_variants_helper(this, source_location) - } - - fn mask_type(&self) -> Self::MaskType { - UIntType::new() - } - - fn canonical(&self) -> Self::CanonicalType { - *self - } - - fn source_location(&self) -> SourceLocation { - SourceLocation::builtin() - } - - fn type_enum(&self) -> TypeEnum { - TypeEnum::EnumType(*self) - } - - fn from_canonical_type(t: Self::CanonicalType) -> Self { - t - } - - fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> { - Some(this) +impl From for NoBuilder { + fn from(_ty: Enum) -> Self { + Self { _ty } } } -impl Connect for DynEnumType {} - -pub struct NoBuilder; - -impl EnumType for DynEnumType { - type Builder = NoBuilder; - +impl EnumType for Enum { + type SimBuilder = NoBuilder; fn match_activate_scope( v: Self::MatchVariantAndInactiveScope, ) -> (Self::MatchVariant, Self::MatchActiveScope) { let (expr, scope) = v.0.activate(); - (expr.variant_type().ty.map(|_| expr.to_expr()), scope) + (expr.variant_type().map(|_| expr.to_expr()), scope) } - - fn builder() -> Self::Builder { - NoBuilder - } - - fn variants(&self) -> Interned<[VariantType>]> { + fn variants(&self) -> Interned<[EnumVariant]> { self.0.variants } +} - fn variants_hint() -> VariantsHint { - VariantsHint { - known_variants: [][..].intern(), - more_variants: true, +impl Type for Enum { + type BaseType = Enum; + type MaskType = Bool; + type SimValue = OpaqueSimValue; + type MatchVariant = Option>; + type MatchActiveScope = Scope; + type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope; + type MatchVariantsIter = EnumMatchVariantsIter; + + fn match_variants( + this: Expr, + source_location: SourceLocation, + ) -> Self::MatchVariantsIter { + enum_match_variants_helper(this, source_location) + } + + fn mask_type(&self) -> Self::MaskType { + Bool + } + + fn canonical(&self) -> CanonicalType { + CanonicalType::Enum(*self) + } + + #[track_caller] + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::Enum(retval) = canonical_type else { + panic!("expected enum"); + }; + retval + } + fn source_location() -> SourceLocation { + SourceLocation::builtin() + } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(self.type_properties().size(), opaque.size()); + opaque.to_owned() + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(self.type_properties().size(), opaque.size()); + 
assert_eq!(value.size(), opaque.size()); + value.clone_from_slice(opaque); + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(self.type_properties().size(), writer.size()); + assert_eq!(value.size(), writer.size()); + writer.fill_cloned_from_slice(value.as_slice()) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)] +pub struct EnumPaddingSimValue { + bits: Option, +} + +impl EnumPaddingSimValue { + pub const fn new() -> Self { + Self { bits: None } + } + pub fn bit_width(&self) -> Option { + self.bits.as_ref().map(UIntValue::width) + } + pub fn bits(&self) -> &Option { + &self.bits + } + pub fn bits_mut(&mut self) -> &mut Option { + &mut self.bits + } + pub fn into_bits(self) -> Option { + self.bits + } + pub fn from_bits(bits: Option) -> Self { + Self { bits } + } + pub fn from_bitslice(v: &BitSlice) -> Self { + Self { + bits: Some(UIntValue::new(Arc::new(v.to_bitvec()))), } } } -impl CanonicalType for DynEnumType { - const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::EnumType; +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct UnknownVariantSimValue { + discriminant: usize, + body_bits: UIntValue, } -impl ToExpr for DynEnum { - type Type = DynEnumType; - - fn ty(&self) -> Self::Type { - self.ty +impl UnknownVariantSimValue { + pub fn discriminant(&self) -> usize { + self.discriminant } - - fn to_expr(&self) -> Expr<::Value> { - Expr::from_value(self) + pub fn body_bits(&self) -> &UIntValue { + &self.body_bits + } + pub fn body_bits_mut(&mut self) -> &mut UIntValue { + &mut self.body_bits + } + pub fn into_body_bits(self) -> UIntValue { + self.body_bits + } + pub fn into_parts(self) -> (usize, UIntValue) { + (self.discriminant, self.body_bits) + } + pub fn new(discriminant: usize, body_bits: UIntValue) -> Self { + Self { + discriminant, + body_bits, + } } } -impl Value for DynEnum { - fn to_canonical(&self) -> ::CanonicalValue { - self.clone() +pub struct EnumSimValueFromOpaque<'a> { + variants: Interned<[EnumVariant]>, + discriminant: usize, + body_bits: &'a BitSlice, +} + +impl<'a> EnumSimValueFromOpaque<'a> { + #[track_caller] + pub fn new(ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self { + let variants = ty.variants(); + let size = EnumTypePropertiesBuilder::new() + .variants(variants) + .finish() + .size(); + assert!(size.only_bit_width().is_some()); + assert_eq!(size, opaque.size()); + let (discriminant_bits, body_bits) = opaque + .bits() + .split_at(discriminant_bit_width_impl(variants.len())); + let mut discriminant = 0usize; + discriminant.view_bits_mut::()[..discriminant_bits.len()] + .copy_from_bitslice(discriminant_bits); + Self { + variants, + discriminant, + body_bits, + } } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - this.ty - .variant_to_bits(this.variant_index, this.variant_value.as_ref()) - .unwrap() + pub fn discriminant(&self) -> usize { + self.discriminant + } + #[track_caller] + #[cold] + fn usage_error(&self, clone: bool) -> ! { + let clone = if clone { "clone_" } else { "" }; + match self.variants.get(self.discriminant) { + None => { + panic!("should have called EnumSimValueFromBits::unknown_variant_{clone}from_bits"); + } + Some(EnumVariant { ty: None, .. }) => { + panic!( + "should have called EnumSimValueFromBits::variant_no_field_{clone}from_bits" + ); + } + Some(EnumVariant { ty: Some(_), .. 
}) => { + panic!( + "should have called EnumSimValueFromBits::variant_with_field_{clone}from_bits" + ); + } + } + } + #[track_caller] + fn known_variant(&self, clone: bool) -> (Option, &'a BitSlice, &'a BitSlice) { + let Some(EnumVariant { ty, .. }) = self.variants.get(self.discriminant) else { + self.usage_error(clone); + }; + let variant_bit_width = ty.map_or(0, CanonicalType::bit_width); + let (variant_bits, padding_bits) = self.body_bits.split_at(variant_bit_width); + (*ty, variant_bits, padding_bits) + } + #[track_caller] + pub fn unknown_variant_from_opaque(self) -> UnknownVariantSimValue { + let None = self.variants.get(self.discriminant) else { + self.usage_error(false); + }; + UnknownVariantSimValue::new( + self.discriminant, + UIntValue::new(Arc::new(self.body_bits.to_bitvec())), + ) + } + #[track_caller] + pub fn unknown_variant_clone_from_opaque(self, value: &mut UnknownVariantSimValue) { + let None = self.variants.get(self.discriminant) else { + self.usage_error(true); + }; + value.discriminant = self.discriminant; + assert_eq!(value.body_bits.width(), self.body_bits.len()); + value + .body_bits + .bits_mut() + .copy_from_bitslice(self.body_bits); + } + #[track_caller] + pub fn variant_no_field_from_opaque(self) -> EnumPaddingSimValue { + let (None, _variant_bits, padding_bits) = self.known_variant(false) else { + self.usage_error(false); + }; + EnumPaddingSimValue::from_bitslice(padding_bits) + } + #[track_caller] + pub fn variant_with_field_from_opaque(self) -> (SimValue, EnumPaddingSimValue) { + let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(false) else { + self.usage_error(false); + }; + ( + SimValue::from_bitslice(T::from_canonical(variant_ty), variant_bits), + EnumPaddingSimValue::from_bitslice(padding_bits), + ) + } + #[track_caller] + fn clone_padding_from_bits(padding: &mut EnumPaddingSimValue, padding_bits: &BitSlice) { + match padding.bits_mut() { + None => *padding = EnumPaddingSimValue::from_bitslice(padding_bits), + Some(padding) => { + assert_eq!(padding.width(), padding_bits.len()); + padding.bits_mut().copy_from_bitslice(padding_bits); + } + } + } + #[track_caller] + pub fn variant_no_field_clone_from_opaque(self, padding: &mut EnumPaddingSimValue) { + let (None, _variant_bits, padding_bits) = self.known_variant(true) else { + self.usage_error(true); + }; + Self::clone_padding_from_bits(padding, padding_bits); + } + #[track_caller] + pub fn variant_with_field_clone_from_opaque( + self, + value: &mut SimValue, + padding: &mut EnumPaddingSimValue, + ) { + let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(true) else { + self.usage_error(true); + }; + assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty)); + SimValue::bits_mut(value) + .bits_mut() + .copy_from_bitslice(variant_bits); + Self::clone_padding_from_bits(padding, padding_bits); } } -impl EnumValue for DynEnum {} +pub struct EnumSimValueToOpaque<'a> { + variants: Interned<[EnumVariant]>, + bit_width: usize, + discriminant_bit_width: usize, + writer: OpaqueSimValueWriter<'a>, +} -impl CanonicalValue for DynEnum { - fn value_enum_impl(this: &Self) -> ValueEnum { - ValueEnum::Enum(this.clone()) +impl<'a> EnumSimValueToOpaque<'a> { + #[track_caller] + pub fn new(ty: T, writer: OpaqueSimValueWriter<'a>) -> Self { + let variants = ty.variants(); + let size = EnumTypePropertiesBuilder::new() + .variants(variants) + .finish() + .size(); + assert_eq!(size, writer.size()); + Self { + variants, + bit_width: size + .only_bit_width() + .expect("enums should only 
contain bits"), + discriminant_bit_width: discriminant_bit_width_impl(variants.len()), + writer, + } } - fn to_bits_impl(this: &Self) -> Result, HitUndeducedType> { - this.ty - .variant_to_bits(this.variant_index, this.variant_value.as_ref()) - .unwrap() + #[track_caller] + fn write_discriminant(&mut self, mut discriminant: usize) { + let orig_discriminant = discriminant; + let discriminant_bits = + &mut discriminant.view_bits_mut::()[..self.discriminant_bit_width]; + self.writer.fill_prefix_with( + OpaqueSimValueSize::from_bit_width(self.discriminant_bit_width), + |writer| { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(discriminant_bits)) + }, + ); + discriminant_bits.fill(false); + assert!( + discriminant == 0, + "{orig_discriminant:#x} is too big to fit in enum discriminant bits", + ); + } + #[track_caller] + pub fn unknown_variant_to_opaque( + mut self, + value: &UnknownVariantSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.write_discriminant(value.discriminant); + let None = self.variants.get(value.discriminant) else { + panic!("can't use UnknownVariantSimValue to set known discriminant"); + }; + assert_eq!( + self.bit_width - self.discriminant_bit_width, + value.body_bits.width() + ); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.body_bits.bits())) + } + #[track_caller] + fn known_variant( + mut self, + discriminant: usize, + value: Option<&OpaqueSimValue>, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.write_discriminant(discriminant); + let variant_ty = self.variants[discriminant].ty; + let variant_size = variant_ty.map_or(OpaqueSimValueSize::empty(), CanonicalType::size); + if let Some(value) = value { + if variant_ty.is_none() { + panic!("expected variant to have no field"); + } + self.writer.fill_prefix_with(variant_size, |writer| { + writer.fill_cloned_from_slice(value.as_slice()) + }); + } else if variant_ty.is_some() { + panic!("expected variant to have a field"); + } + if let Some(padding) = padding.bits() { + assert_eq!(padding.ty().type_properties().size(), self.writer.size()); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(padding.bits())) + } else { + self.writer.fill_with_zeros() + } + } + #[track_caller] + pub fn variant_no_field_to_opaque( + self, + discriminant: usize, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.known_variant(discriminant, None, padding) + } + #[track_caller] + pub fn variant_with_field_to_opaque( + self, + discriminant: usize, + value: &SimValue, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + let Some(variant_ty) = self.variants[discriminant].ty else { + panic!("expected variant to have no field"); + }; + assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty)); + self.known_variant(discriminant, Some(SimValue::opaque(value)), padding) } } -mod impl_option { - #[allow(dead_code)] - #[derive(crate::ty::Value)] - #[hdl(target(std::option::Option), connect_inexact, outline_generated)] - pub enum Option { - None, - Some(T), +#[doc(hidden)] +pub fn assert_is_enum_type(v: T) -> T { + v +} + +#[doc(hidden)] +pub fn enum_type_to_sim_builder(v: T) -> T::SimBuilder { + v.into() +} + +#[hdl] +pub enum HdlOption { + HdlNone, + HdlSome(T), +} + +impl, Rhs: Type> ExprPartialEq> for HdlOption { + #[hdl] + fn cmp_eq(lhs: Expr, rhs: Expr>) -> Expr { + #[hdl] + let cmp_eq = wire(); + #[hdl] + match lhs { + HdlSome(lhs) => + { + #[hdl] + match rhs { + HdlSome(rhs) => connect(cmp_eq, 
ExprPartialEq::cmp_eq(lhs, rhs)), + HdlNone => connect(cmp_eq, false), + } + } + HdlNone => + { + #[hdl] + match rhs { + HdlSome(_) => connect(cmp_eq, false), + HdlNone => connect(cmp_eq, true), + } + } + } + cmp_eq + } + + #[hdl] + fn cmp_ne(lhs: Expr, rhs: Expr>) -> Expr { + #[hdl] + let cmp_ne = wire(); + #[hdl] + match lhs { + HdlSome(lhs) => + { + #[hdl] + match rhs { + HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)), + HdlNone => connect(cmp_ne, true), + } + } + HdlNone => + { + #[hdl] + match rhs { + HdlSome(_) => connect(cmp_ne, true), + HdlNone => connect(cmp_ne, false), + } + } + } + cmp_ne + } +} + +impl, Rhs: Type> SimValuePartialEq> for HdlOption { + fn sim_value_eq(this: &SimValue, other: &SimValue>) -> bool { + type SimValueMatch = ::SimValue; + match (&**this, &**other) { + (SimValueMatch::::HdlNone(_), SimValueMatch::>::HdlNone(_)) => { + true + } + (SimValueMatch::::HdlSome(..), SimValueMatch::>::HdlNone(_)) + | (SimValueMatch::::HdlNone(_), SimValueMatch::>::HdlSome(..)) => { + false + } + ( + SimValueMatch::::HdlSome(l, _), + SimValueMatch::>::HdlSome(r, _), + ) => l == r, + } + } +} + +#[allow(non_snake_case)] +pub fn HdlNone() -> Expr> { + HdlOption[T::TYPE].HdlNone() +} + +#[allow(non_snake_case)] +pub fn HdlSome(value: impl ToExpr) -> Expr> { + let value = value.to_expr(); + HdlOption[Expr::ty(value)].HdlSome(value) +} + +impl HdlOption { + #[track_caller] + pub fn try_map( + expr: Expr, + f: impl FnOnce(Expr) -> Result, E>, + ) -> Result>, E> { + Self::try_and_then(expr, |v| Ok(HdlSome(f(v)?))) + } + #[track_caller] + pub fn map( + expr: Expr, + f: impl FnOnce(Expr) -> Expr, + ) -> Expr> { + Self::and_then(expr, |v| HdlSome(f(v))) + } + #[hdl] + #[track_caller] + pub fn try_and_then( + expr: Expr, + f: impl FnOnce(Expr) -> Result>, E>, + ) -> Result>, E> { + // manually run match steps so we can extract the return type to construct HdlNone + type Wrap = T; + #[hdl] + let mut and_then_out = incomplete_wire(); + let mut iter = Self::match_variants(expr, SourceLocation::caller()); + let none = iter.next().unwrap(); + let some = iter.next().unwrap(); + assert!(iter.next().is_none()); + let (Wrap::<::MatchVariant>::HdlSome(value), some_scope) = + Self::match_activate_scope(some) + else { + unreachable!(); + }; + let value = f(value).inspect_err(|_| { + and_then_out.complete(()); // avoid error + })?; + let and_then_out = and_then_out.complete(Expr::ty(value)); + connect(and_then_out, value); + drop(some_scope); + let (Wrap::<::MatchVariant>::HdlNone, none_scope) = + Self::match_activate_scope(none) + else { + unreachable!(); + }; + connect(and_then_out, Expr::ty(and_then_out).HdlNone()); + drop(none_scope); + Ok(and_then_out) + } + #[track_caller] + pub fn and_then( + expr: Expr, + f: impl FnOnce(Expr) -> Expr>, + ) -> Expr> { + match Self::try_and_then(expr, |v| Ok::<_, Infallible>(f(v))) { + Ok(v) => v, + Err(e) => match e {}, + } + } + #[hdl] + #[track_caller] + pub fn and(expr: Expr, opt_b: Expr>) -> Expr> { + #[hdl] + let and_out = wire(Expr::ty(opt_b)); + connect(and_out, Expr::ty(opt_b).HdlNone()); + #[hdl] + if let HdlSome(_) = expr { + connect(and_out, opt_b); + } + and_out + } + #[hdl] + #[track_caller] + pub fn try_filter( + expr: Expr, + f: impl FnOnce(Expr) -> Result, E>, + ) -> Result, E> { + #[hdl] + let filtered = wire(Expr::ty(expr)); + connect(filtered, Expr::ty(expr).HdlNone()); + let mut f = Some(f); + #[hdl] + if let HdlSome(v) = expr { + #[hdl] + if f.take().unwrap()(v)? 
{ + connect(filtered, HdlSome(v)); + } + } + Ok(filtered) + } + #[hdl] + #[track_caller] + pub fn filter(expr: Expr, f: impl FnOnce(Expr) -> Expr) -> Expr { + match Self::try_filter(expr, |v| Ok::<_, Infallible>(f(v))) { + Ok(v) => v, + Err(e) => match e {}, + } + } + #[hdl] + #[track_caller] + pub fn try_inspect( + expr: Expr, + f: impl FnOnce(Expr) -> Result<(), E>, + ) -> Result, E> { + let mut f = Some(f); + #[hdl] + if let HdlSome(v) = expr { + f.take().unwrap()(v)?; + } + Ok(expr) + } + #[hdl] + #[track_caller] + pub fn inspect(expr: Expr, f: impl FnOnce(Expr)) -> Expr { + let mut f = Some(f); + #[hdl] + if let HdlSome(v) = expr { + f.take().unwrap()(v); + } + expr + } + #[hdl] + #[track_caller] + pub fn is_none(expr: Expr) -> Expr { + #[hdl] + let is_none_out: Bool = wire(); + connect(is_none_out, false); + #[hdl] + if let HdlNone = expr { + connect(is_none_out, true); + } + is_none_out + } + #[hdl] + #[track_caller] + pub fn is_some(expr: Expr) -> Expr { + #[hdl] + let is_some_out: Bool = wire(); + connect(is_some_out, false); + #[hdl] + if let HdlSome(_) = expr { + connect(is_some_out, true); + } + is_some_out + } + #[hdl] + #[track_caller] + pub fn map_or( + expr: Expr, + default: Expr, + f: impl FnOnce(Expr) -> Expr, + ) -> Expr { + #[hdl] + let mapped = wire(Expr::ty(default)); + let mut f = Some(f); + #[hdl] + match expr { + HdlSome(v) => connect(mapped, f.take().unwrap()(v)), + HdlNone => connect(mapped, default), + } + mapped + } + #[hdl] + #[track_caller] + pub fn map_or_else( + expr: Expr, + default: impl FnOnce() -> Expr, + f: impl FnOnce(Expr) -> Expr, + ) -> Expr { + #[hdl] + let mut mapped = incomplete_wire(); + let mut default = Some(default); + let mut f = Some(f); + let mut retval = None; + #[hdl] + match expr { + HdlSome(v) => { + let v = f.take().unwrap()(v); + let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v))); + connect(mapped, v); + } + HdlNone => { + let v = default.take().unwrap()(); + let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v))); + connect(mapped, v); + } + } + retval.unwrap() + } + #[hdl] + #[track_caller] + pub fn or(expr: Expr, opt_b: Expr) -> Expr { + #[hdl] + let or_out = wire(Expr::ty(expr)); + connect(or_out, opt_b); + #[hdl] + if let HdlSome(_) = expr { + connect(or_out, expr); + } + or_out + } + #[hdl] + #[track_caller] + pub fn or_else(expr: Expr, f: impl FnOnce() -> Expr) -> Expr { + #[hdl] + let or_else_out = wire(Expr::ty(expr)); + connect(or_else_out, f()); + #[hdl] + if let HdlSome(_) = expr { + connect(or_else_out, expr); + } + or_else_out + } + #[hdl] + #[track_caller] + pub fn unwrap_or(expr: Expr, default: Expr) -> Expr { + #[hdl] + let unwrap_or_else_out = wire(Expr::ty(default)); + connect(unwrap_or_else_out, default); + #[hdl] + if let HdlSome(v) = expr { + connect(unwrap_or_else_out, v); + } + unwrap_or_else_out + } + #[hdl] + #[track_caller] + pub fn unwrap_or_else(expr: Expr, f: impl FnOnce() -> Expr) -> Expr { + #[hdl] + let unwrap_or_else_out = wire(Expr::ty(expr).HdlSome); + connect(unwrap_or_else_out, f()); + #[hdl] + if let HdlSome(v) = expr { + connect(unwrap_or_else_out, v); + } + unwrap_or_else_out + } + #[hdl] + #[track_caller] + pub fn xor(expr: Expr, opt_b: Expr) -> Expr { + #[hdl] + let xor_out = wire(Expr::ty(expr)); + #[hdl] + if let HdlSome(_) = expr { + #[hdl] + if let HdlNone = opt_b { + connect(xor_out, expr); + } else { + connect(xor_out, Expr::ty(expr).HdlNone()); + } + } else { + connect(xor_out, opt_b); + } + xor_out + } + #[hdl] + #[track_caller] + pub fn 
zip(expr: Expr, other: Expr>) -> Expr> { + #[hdl] + let zip_out = wire(HdlOption[(Expr::ty(expr).HdlSome, Expr::ty(other).HdlSome)]); + connect(zip_out, Expr::ty(zip_out).HdlNone()); + #[hdl] + if let HdlSome(l) = expr { + #[hdl] + if let HdlSome(r) = other { + connect(zip_out, HdlSome((l, r))); + } + } + zip_out + } +} + +impl HdlOption> { + #[hdl] + #[track_caller] + pub fn flatten(expr: Expr) -> Expr> { + #[hdl] + let flattened = wire(Expr::ty(expr).HdlSome); + #[hdl] + match expr { + HdlSome(v) => connect(flattened, v), + HdlNone => connect(flattened, Expr::ty(expr).HdlSome.HdlNone()), + } + flattened + } +} + +impl HdlOption<(T, U)> { + #[hdl] + #[track_caller] + pub fn unzip(expr: Expr) -> Expr<(HdlOption, HdlOption)> { + let (t, u) = Expr::ty(expr).HdlSome; + #[hdl] + let unzipped = wire((HdlOption[t], HdlOption[u])); + connect(unzipped, (HdlOption[t].HdlNone(), HdlOption[u].HdlNone())); + #[hdl] + if let HdlSome(v) = expr { + connect(unzipped.0, HdlSome(v.0)); + connect(unzipped.1, HdlSome(v.1)); + } + unzipped } } diff --git a/crates/fayalite/src/expr.rs b/crates/fayalite/src/expr.rs index 5bc6c63..89e60cd 100644 --- a/crates/fayalite/src/expr.rs +++ b/crates/fayalite/src/expr.rs @@ -1,96 +1,104 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information + use crate::{ - array::ArrayType, - bundle::{BundleType, BundleValue, DynBundle, DynBundleType, FieldType}, - enum_::{DynEnumType, EnumType, EnumValue}, - int::{DynSIntType, DynUInt, DynUIntType, IntValue, StaticOrDynIntType, UInt, UIntType}, - intern::{Intern, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId}, + array::{Array, ArrayType}, + bundle::{Bundle, BundleType}, + enum_::{Enum, EnumType}, + expr::{ + ops::ExprCastTo, + target::{GetTarget, Target}, + }, + int::{Bool, DynSize, IntType, SIntType, SIntValue, Size, SizeType, UInt, UIntType, UIntValue}, + intern::{Intern, Interned}, memory::{DynPortType, MemPort, PortType}, module::{ + Instance, ModuleIO, transform::visit::{Fold, Folder, Visit, Visitor}, - Instance, ModuleIO, TargetName, }, + phantom_const::PhantomConst, reg::Reg, - source_location::SourceLocation, - ty::{ - DynCanonicalType, DynCanonicalValue, DynType, DynValue, DynValueTrait, Type, TypeWithDeref, - Value, - }, - type_deduction::HitUndeducedType, - util::ConstBool, - valueless::Valueless, + reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset}, + ty::{CanonicalType, StaticType, Type, TypeWithDeref}, wire::Wire, }; use bitvec::slice::BitSlice; -use std::{any::Any, convert::Infallible, fmt, hash::Hash, marker::PhantomData, ops::Deref}; +use std::{convert::Infallible, fmt, ops::Deref}; pub mod ops; +pub mod target; macro_rules! 
expr_enum { ( pub enum $ExprEnum:ident { - $($Variant:ident($VariantTy:ty),)+ + $($Variant:ident($VariantTy:ty),)* } ) => { - #[derive(Copy, Clone, Eq, PartialEq, Hash)] + #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub enum $ExprEnum { - $($Variant(Interned<$VariantTy>),)+ + $($Variant($VariantTy),)* } impl fmt::Debug for $ExprEnum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - $(Self::$Variant(v) => v.fmt(f),)+ + $(Self::$Variant(v) => v.fmt(f),)* } } } - impl $ExprEnum { - pub fn target(self) -> Option> { + $(impl From<$VariantTy> for $ExprEnum { + fn from(v: $VariantTy) -> Self { + Self::$Variant(v) + } + })* + + impl Fold for $ExprEnum { + fn fold(self, state: &mut State) -> Result { + state.fold_expr_enum(self) + } + + fn default_fold(self, state: &mut State) -> Result { match self { - $(Self::$Variant(v) => v.target(),)+ + $(Self::$Variant(v) => Fold::fold(v, state).map(Self::$Variant),)* } } - pub fn to_literal_bits(&self) -> Result, HitUndeducedType>, NotALiteralExpr> { + } + + impl Visit for $ExprEnum { + fn visit(&self, state: &mut State) -> Result<(), State::Error> { + state.visit_expr_enum(self) + } + + fn default_visit(&self, state: &mut State) -> Result<(), State::Error> { match self { - $(Self::$Variant(v) => v.to_literal_bits(),)+ + $(Self::$Variant(v) => Visit::visit(v, state),)* } } } impl ToExpr for $ExprEnum { - type Type = Interned; + type Type = CanonicalType; - fn ty(&self) -> Self::Type { + fn to_expr(&self) -> Expr { match self { - $(Self::$Variant(v) => v.ty().canonical_dyn(),)+ - } - } - - fn to_expr(&self) -> Expr { - Expr::new_unchecked(*self) - } - } - - impl Fold for $ExprEnum { - fn fold(self, state: &mut State) -> Result { - state.fold_expr_enum(self) - } - fn default_fold(self, state: &mut State) -> Result { - match self { - $(Self::$Variant(v) => Fold::fold(v, state).map(Self::$Variant),)+ + $(Self::$Variant(v) => Expr::canonical(v.to_expr()),)* } } } - impl Visit for $ExprEnum { - fn visit(&self, state: &mut State) -> Result<(), State::Error> { - state.visit_expr_enum(self) - } - fn default_visit(&self, state: &mut State) -> Result<(), State::Error> { + impl GetTarget for $ExprEnum { + fn target(&self) -> Option> { match self { - $(Self::$Variant(v) => Visit::visit(v, state),)+ + $(Self::$Variant(v) => v.target(),)* + } + } + } + + impl ToLiteralBits for $ExprEnum { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + match self { + $(Self::$Variant(v) => v.to_literal_bits(),)* } } } @@ -99,84 +107,131 @@ macro_rules! expr_enum { expr_enum! 
{ pub enum ExprEnum { - Literal(Literal>), - ArrayLiteral(ops::ArrayLiteral>), - BundleLiteral(ops::BundleLiteral), - EnumLiteral(ops::EnumLiteral), - NotU(ops::Not), - NotS(ops::Not), - Neg(ops::Neg), - BitAndU(ops::BitAnd), - BitAndS(ops::BitAnd), - BitOrU(ops::BitOr), - BitOrS(ops::BitOr), - BitXorU(ops::BitXor), - BitXorS(ops::BitXor), - AddU(ops::Add), - AddS(ops::Add), - SubU(ops::Sub), - SubS(ops::Sub), - MulU(ops::Mul), - MulS(ops::Mul), - DynShlU(ops::DynShl), - DynShlS(ops::DynShl), - DynShrU(ops::DynShr), - DynShrS(ops::DynShr), - FixedShlU(ops::FixedShl), - FixedShlS(ops::FixedShl), - FixedShrU(ops::FixedShr), - FixedShrS(ops::FixedShr), - CmpLtU(ops::CmpLt), - CmpLtS(ops::CmpLt), - CmpLeU(ops::CmpLe), - CmpLeS(ops::CmpLe), - CmpGtU(ops::CmpGt), - CmpGtS(ops::CmpGt), - CmpGeU(ops::CmpGe), - CmpGeS(ops::CmpGe), - CmpEqU(ops::CmpEq), - CmpEqS(ops::CmpEq), - CmpNeU(ops::CmpNe), - CmpNeS(ops::CmpNe), - CastUIntToUInt(ops::CastInt), - CastUIntToSInt(ops::CastInt), - CastSIntToUInt(ops::CastInt), - CastSIntToSInt(ops::CastInt), - SliceUInt(ops::Slice), - SliceSInt(ops::Slice), - ReduceBitAnd(ops::ReduceBitAnd>), - ReduceBitOr(ops::ReduceBitOr>), - ReduceBitXor(ops::ReduceBitXor>), - FieldAccess(ops::FieldAccess>), - VariantAccess(ops::VariantAccess>), - ArrayIndex(ops::ArrayIndex>), - DynArrayIndex(ops::DynArrayIndex>), - CastToBits(ops::CastToBits), - CastBitsTo(ops::CastBitsTo>), - CastBitToClock(ops::CastBitToClock), - CastBitToSyncReset(ops::CastBitToSyncReset), - CastBitToAsyncReset(ops::CastBitToAsyncReset), + UIntLiteral(Interned), + SIntLiteral(Interned), + BoolLiteral(bool), + PhantomConst(PhantomConst), + BundleLiteral(ops::BundleLiteral), + ArrayLiteral(ops::ArrayLiteral), + EnumLiteral(ops::EnumLiteral), + Uninit(ops::Uninit), + NotU(ops::NotU), + NotS(ops::NotS), + NotB(ops::NotB), + Neg(ops::Neg), + BitAndU(ops::BitAndU), + BitAndS(ops::BitAndS), + BitAndB(ops::BitAndB), + BitOrU(ops::BitOrU), + BitOrS(ops::BitOrS), + BitOrB(ops::BitOrB), + BitXorU(ops::BitXorU), + BitXorS(ops::BitXorS), + BitXorB(ops::BitXorB), + AddU(ops::AddU), + AddS(ops::AddS), + SubU(ops::SubU), + SubS(ops::SubS), + MulU(ops::MulU), + MulS(ops::MulS), + DivU(ops::DivU), + DivS(ops::DivS), + RemU(ops::RemU), + RemS(ops::RemS), + DynShlU(ops::DynShlU), + DynShlS(ops::DynShlS), + DynShrU(ops::DynShrU), + DynShrS(ops::DynShrS), + FixedShlU(ops::FixedShlU), + FixedShlS(ops::FixedShlS), + FixedShrU(ops::FixedShrU), + FixedShrS(ops::FixedShrS), + CmpLtB(ops::CmpLtB), + CmpLeB(ops::CmpLeB), + CmpGtB(ops::CmpGtB), + CmpGeB(ops::CmpGeB), + CmpEqB(ops::CmpEqB), + CmpNeB(ops::CmpNeB), + CmpLtU(ops::CmpLtU), + CmpLeU(ops::CmpLeU), + CmpGtU(ops::CmpGtU), + CmpGeU(ops::CmpGeU), + CmpEqU(ops::CmpEqU), + CmpNeU(ops::CmpNeU), + CmpLtS(ops::CmpLtS), + CmpLeS(ops::CmpLeS), + CmpGtS(ops::CmpGtS), + CmpGeS(ops::CmpGeS), + CmpEqS(ops::CmpEqS), + CmpNeS(ops::CmpNeS), + CastUIntToUInt(ops::CastUIntToUInt), + CastUIntToSInt(ops::CastUIntToSInt), + CastSIntToUInt(ops::CastSIntToUInt), + CastSIntToSInt(ops::CastSIntToSInt), + CastBoolToUInt(ops::CastBoolToUInt), + CastBoolToSInt(ops::CastBoolToSInt), + CastUIntToBool(ops::CastUIntToBool), + CastSIntToBool(ops::CastSIntToBool), + CastBoolToSyncReset(ops::CastBoolToSyncReset), + CastUIntToSyncReset(ops::CastUIntToSyncReset), + CastSIntToSyncReset(ops::CastSIntToSyncReset), + CastBoolToAsyncReset(ops::CastBoolToAsyncReset), + CastUIntToAsyncReset(ops::CastUIntToAsyncReset), + CastSIntToAsyncReset(ops::CastSIntToAsyncReset), + CastSyncResetToBool(ops::CastSyncResetToBool), + 
CastSyncResetToUInt(ops::CastSyncResetToUInt), + CastSyncResetToSInt(ops::CastSyncResetToSInt), CastSyncResetToReset(ops::CastSyncResetToReset), + CastAsyncResetToBool(ops::CastAsyncResetToBool), + CastAsyncResetToUInt(ops::CastAsyncResetToUInt), + CastAsyncResetToSInt(ops::CastAsyncResetToSInt), CastAsyncResetToReset(ops::CastAsyncResetToReset), - CastClockToBit(ops::CastClockToBit), - CastSyncResetToBit(ops::CastSyncResetToBit), - CastAsyncResetToBit(ops::CastAsyncResetToBit), - CastResetToBit(ops::CastResetToBit), - ModuleIO(ModuleIO>), - Instance(Instance), - Wire(Wire>), - Reg(Reg>), + CastResetToBool(ops::CastResetToBool), + CastResetToUInt(ops::CastResetToUInt), + CastResetToSInt(ops::CastResetToSInt), + CastBoolToClock(ops::CastBoolToClock), + CastUIntToClock(ops::CastUIntToClock), + CastSIntToClock(ops::CastSIntToClock), + CastClockToBool(ops::CastClockToBool), + CastClockToUInt(ops::CastClockToUInt), + CastClockToSInt(ops::CastClockToSInt), + FieldAccess(ops::FieldAccess), + VariantAccess(ops::VariantAccess), + ArrayIndex(ops::ArrayIndex), + DynArrayIndex(ops::DynArrayIndex), + ReduceBitAndU(ops::ReduceBitAndU), + ReduceBitAndS(ops::ReduceBitAndS), + ReduceBitOrU(ops::ReduceBitOrU), + ReduceBitOrS(ops::ReduceBitOrS), + ReduceBitXorU(ops::ReduceBitXorU), + ReduceBitXorS(ops::ReduceBitXorS), + SliceUInt(ops::SliceUInt), + SliceSInt(ops::SliceSInt), + CastToBits(ops::CastToBits), + CastBitsTo(ops::CastBitsTo), + ModuleIO(ModuleIO), + Instance(Instance), + Wire(Wire), + Reg(Reg), + RegSync(Reg), + RegAsync(Reg), MemPort(MemPort), } } -pub struct Expr { - /// use weird names to help work around rust-analyzer bug - __enum: ExprEnum, - __phantom: PhantomData, +impl From for ExprEnum { + fn from(value: UIntValue) -> Self { + ExprEnum::UIntLiteral(Intern::intern_sized(value)) + } } -#[derive(Debug, Copy, Clone)] +impl From for ExprEnum { + fn from(value: SIntValue) -> Self { + ExprEnum::SIntLiteral(Intern::intern_sized(value)) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[non_exhaustive] pub struct NotALiteralExpr; @@ -188,161 +243,161 @@ impl fmt::Display for NotALiteralExpr { impl std::error::Error for NotALiteralExpr {} -impl Expr { - pub fn expr_enum(self) -> ExprEnum { - self.__enum +pub trait ToLiteralBits { + fn to_literal_bits(&self) -> Result, NotALiteralExpr>; +} + +impl ToLiteralBits for Result, NotALiteralExpr> { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + *self } - pub fn new_unchecked(expr_enum: ExprEnum) -> Self { - Self { - __enum: expr_enum, - __phantom: PhantomData, +} + +impl ToLiteralBits for Interned { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Ok(*self) + } +} + +impl ToLiteralBits for NotALiteralExpr { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Err(*self) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Expr { + __enum: Interned, + __ty: T, + __flow: Flow, +} + +impl fmt::Debug for Expr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + #[cfg(debug_assertions)] + { + let Self { + __enum, + __ty, + __flow, + } = self; + let expr_ty = __ty.canonical(); + let enum_ty = __enum.to_expr().__ty; + assert_eq!( + expr_ty, enum_ty, + "expr ty mismatch:\nExpr {{\n__enum: {__enum:?},\n__ty: {__ty:?},\n__flow: {__flow:?}\n}}" + ); + } + self.__enum.fmt(f) + } +} + +impl Expr { + pub fn expr_enum(this: Self) -> Interned { + this.__enum + } + pub fn ty(this: Self) -> T { + this.__ty + } + pub fn flow(this: Self) -> Flow { + this.__flow + } + pub fn canonical(this: Self) -> Expr { + 
Expr { + __enum: this.__enum, + __ty: this.__ty.canonical(), + __flow: this.__flow, } } - pub fn canonical(self) -> Expr<::CanonicalValue> + pub fn from_canonical(this: Expr) -> Self { + Expr { + __enum: this.__enum, + __ty: T::from_canonical(this.__ty), + __flow: this.__flow, + } + } + pub fn from_dyn_int(this: Expr) -> Self where - T: Value, + T: IntType, { Expr { - __enum: self.expr_enum(), - __phantom: PhantomData, + __enum: this.__enum, + __ty: T::from_dyn_int(this.__ty), + __flow: this.__flow, } } - pub fn dyn_canonical_type(self) -> Interned { - self.expr_enum().ty() - } - pub fn canonical_type(self) -> ::CanonicalType + pub fn as_dyn_int(this: Self) -> Expr where - T: Value, + T: IntType, { - ::CanonicalType::from_dyn_canonical_type(self.dyn_canonical_type()) + Expr { + __enum: this.__enum, + __ty: this.__ty.as_dyn_int(), + __flow: this.__flow, + } } - pub fn valueless(self) -> Valueless + pub fn as_bundle(this: Self) -> Expr where - T: Value>, + T: BundleType, { - Valueless { ty: self.ty() } + Expr { + __enum: this.__enum, + __ty: Bundle::new(this.__ty.fields()), + __flow: this.__flow, + } } - pub fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - self.expr_enum().to_literal_bits() + pub fn from_bundle(this: Expr) -> Self + where + T: BundleType, + { + Expr { + __enum: this.__enum, + __ty: T::from_canonical(CanonicalType::Bundle(this.__ty)), + __flow: this.__flow, + } } #[track_caller] - pub fn with_type>>(self) -> Expr + pub fn field(this: Self, name: &str) -> Expr where - T: Value>, + T: BundleType, { - let retval = Expr::::new_unchecked(self.expr_enum()); - let _ = retval.ty(); // check that the type is correct - retval + ops::FieldAccess::new_by_name(Self::as_bundle(this), name.intern()).to_expr() } - pub fn to_dyn(self) -> Expr { - Expr::new_unchecked(self.expr_enum()) - } - pub fn to_canonical_dyn(self) -> Expr { - Expr::new_unchecked(self.expr_enum()) - } - #[track_caller] - pub fn from_value(value: &T) -> Self + pub fn as_enum(this: Self) -> Expr where - T: Value>, + T: EnumType, { - Literal::::new_unchecked(value.to_canonical()).to_expr() + Expr { + __enum: this.__enum, + __ty: Enum::new(this.__ty.variants()), + __flow: this.__flow, + } } - pub fn target(self) -> Option> { - self.expr_enum().target() - } - pub fn flow(self) -> Flow { - self.target().map(|v| v.flow()).unwrap_or(Flow::Source) + pub fn from_enum(this: Expr) -> Self + where + T: EnumType, + { + Expr { + __enum: this.__enum, + __ty: T::from_canonical(CanonicalType::Enum(this.__ty)), + __flow: this.__flow, + } } } -impl Copy for Expr {} - -impl Clone for Expr { - fn clone(&self) -> Self { - *self +impl ToLiteralBits for Expr { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + self.__enum.to_literal_bits() } } -impl fmt::Debug for Expr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - __enum, - __phantom: _, - } = self; - __enum.fmt(f) +impl GetTarget for Expr { + fn target(&self) -> Option> { + self.__enum.target() } } -impl>> sealed::Sealed for Expr {} - -impl PartialEq for Expr { - fn eq(&self, other: &Self) -> bool { - let Self { - __enum, - __phantom: _, - } = self; - *__enum == other.__enum - } -} - -impl Eq for Expr {} - -impl Hash for Expr { - fn hash(&self, state: &mut H) { - let Self { - __enum, - __phantom: _, - } = self; - __enum.hash(state); - } -} - -impl Expr> -where - T: StaticOrDynIntType< - 1, - Signed = ConstBool, - CanonicalType = DynUIntType, - CanonicalValue = DynUInt, - >, -{ - pub fn as_bool(self) -> Expr> { - 
assert_eq!(self.canonical_type().width, 1); - Expr::new_unchecked(self.expr_enum()) - } -} - -impl>> Expr { - pub fn field>>( - self, - name: &str, - ) -> Expr { - ops::FieldAccess::::new_unchecked( - self.canonical(), - name.intern(), - ) - .to_expr() - } -} - -impl>> ToExpr for Expr { - type Type = T::Type; - - fn ty(&self) -> T::Type { - T::Type::from_dyn_canonical_type(self.dyn_canonical_type()) - } - - fn to_expr(&self) -> Expr { - *self - } -} - -impl, T> Deref for Expr -where - T: TypeWithDeref, -{ +impl Deref for Expr { type Target = T::MatchVariant; fn deref(&self) -> &Self::Target { @@ -350,27 +405,115 @@ where } } -impl>, State: ?Sized + Folder> Fold for Expr { +impl Expr> { + pub fn as_dyn_array(this: Self) -> Expr { + Expr { + __enum: this.__enum, + __ty: this.__ty.as_dyn_array(), + __flow: this.__flow, + } + } +} + +impl Fold for Expr { fn fold(self, state: &mut State) -> Result { state.fold_expr(self) } + fn default_fold(self, state: &mut State) -> Result { - Ok(Expr::::new_unchecked(self.expr_enum().fold(state)?).with_type()) + Ok(Expr::from_canonical(self.__enum.fold(state)?.to_expr())) } } -impl>, State: ?Sized + Visitor> Visit for Expr { +impl Visit for Expr { fn visit(&self, state: &mut State) -> Result<(), State::Error> { state.visit_expr(self) } + fn default_visit(&self, state: &mut State) -> Result<(), State::Error> { - self.expr_enum().visit(state) + self.__enum.visit(state) } } -pub trait ExprTraitBase: - fmt::Debug + Any + SupportsPtrEqWithTypeId + Send + Sync + sealed::Sealed + ToExpr -{ +pub trait ToExpr { + type Type: Type; + fn to_expr(&self) -> Expr; +} + +impl ToExpr for Expr { + type Type = T; + + fn to_expr(&self) -> Expr { + *self + } +} + +impl ToExpr for &'_ T { + type Type = T::Type; + + fn to_expr(&self) -> Expr { + T::to_expr(self) + } +} + +impl ToExpr for &'_ mut T { + type Type = T::Type; + + fn to_expr(&self) -> Expr { + T::to_expr(self) + } +} + +impl ToExpr for Box { + type Type = T::Type; + + fn to_expr(&self) -> Expr { + T::to_expr(self) + } +} + +impl ToExpr for Interned { + type Type = T::Type; + + fn to_expr(&self) -> Expr { + T::to_expr(self) + } +} + +impl ToExpr for UIntValue { + type Type = UIntType; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::UIntLiteral(self.clone().as_dyn_int().intern()).intern(), + __ty: self.ty(), + __flow: Flow::Source, + } + } +} + +impl ToExpr for SIntValue { + type Type = SIntType; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::SIntLiteral(self.clone().as_dyn_int().intern()).intern(), + __ty: self.ty(), + __flow: Flow::Source, + } + } +} + +impl ToExpr for bool { + type Type = Bool; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::BoolLiteral(*self).intern(), + __ty: Bool, + __flow: Flow::Source, + } + } } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] @@ -389,733 +532,263 @@ impl Flow { } } pub const fn flip_if(self, flipped: bool) -> Flow { - if flipped { - self.flip() - } else { - self - } + if flipped { self.flip() } else { self } } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TargetPathBundleField { - pub name: Interned, -} - -impl fmt::Display for TargetPathBundleField { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, ".{}", self.name) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TargetPathArrayElement { - pub index: usize, -} - -impl fmt::Display for TargetPathArrayElement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "[{}]", self.index) - } -} - 
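// Usage sketch for the reworked `Expr` API above (assumed, not shown in this
// patch; import paths are illustrative). Type, flow, and enum accessors are now
// associated functions taking the expression by value, e.g. `Expr::ty(e)`
// rather than `e.ty()`, presumably so they don't clash with the `Deref`-based
// member access also defined here.
fn expr_api_sketch() {
    use fayalite::prelude::*; // assumed re-exports of Expr, ToExpr, Bool, ...

    let e = true.to_expr();          // `impl ToExpr for bool` -> `Expr<Bool>`
    let _ty: Bool = Expr::ty(e);     // previously a `ty()` method
    let _flow = Expr::flow(e);       // literals carry `Flow::Source`
    let _canon = Expr::canonical(e); // erase the static type to `CanonicalType`
}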
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TargetPathDynArrayElement {} - -impl fmt::Display for TargetPathDynArrayElement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "[]") - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TargetPathElement { - BundleField(TargetPathBundleField), - ArrayElement(TargetPathArrayElement), - DynArrayElement(TargetPathDynArrayElement), -} - -impl From for TargetPathElement { - fn from(value: TargetPathBundleField) -> Self { - Self::BundleField(value) - } -} - -impl From for TargetPathElement { - fn from(value: TargetPathArrayElement) -> Self { - Self::ArrayElement(value) - } -} - -impl From for TargetPathElement { - fn from(value: TargetPathDynArrayElement) -> Self { - Self::DynArrayElement(value) - } -} - -impl fmt::Display for TargetPathElement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::BundleField(v) => v.fmt(f), - Self::ArrayElement(v) => v.fmt(f), - Self::DynArrayElement(v) => v.fmt(f), - } - } -} - -impl TargetPathElement { - pub fn canonical_ty(&self, parent: Interned) -> Interned { - match self { - &Self::BundleField(TargetPathBundleField { name }) => { - let parent_ty = parent - .canonical_ty() - .type_enum() - .bundle_type() - .expect("parent type is known to be a bundle"); - let field = parent_ty - .field_by_name(name) - .expect("field name is known to be a valid field of parent type"); - field.ty - } - &Self::ArrayElement(TargetPathArrayElement { index }) => { - let parent_ty = parent - .canonical_ty() - .type_enum() - .array_type() - .expect("parent type is known to be an array"); - assert!(index < parent_ty.len()); - *parent_ty.element() - } - Self::DynArrayElement(_) => { - let parent_ty = parent - .canonical_ty() - .type_enum() - .array_type() - .expect("parent type is known to be an array"); - *parent_ty.element() - } - } - } - pub fn flow(&self, parent: Interned) -> Flow { - match self { - Self::BundleField(v) => { - let parent_ty = parent - .canonical_ty() - .type_enum() - .bundle_type() - .expect("parent type is known to be a bundle"); - let field = parent_ty - .field_by_name(v.name) - .expect("field name is known to be a valid field of parent type"); - parent.flow().flip_if(field.flipped) - } - Self::ArrayElement(_) => parent.flow(), - Self::DynArrayElement(_) => parent.flow(), - } - } - pub fn is_static(&self) -> bool { - match self { - Self::BundleField(_) | Self::ArrayElement(_) => true, - Self::DynArrayElement(_) => false, - } - } -} - -macro_rules! 
impl_target_base { - ( - $(#[$enum_meta:meta])* - $enum_vis:vis enum $TargetBase:ident { - $( - #[is = $is_fn:ident] - #[to = $to_fn:ident] - $(#[$variant_meta:meta])* - $Variant:ident($VariantTy:ty), - )* - } - ) => { - $(#[$enum_meta])* - $enum_vis enum $TargetBase { - $( - $(#[$variant_meta])* - $Variant($VariantTy), - )* - } - - impl fmt::Debug for $TargetBase { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - $(Self::$Variant(v) => v.fmt(f),)* - } - } - } - - $( - impl From<$VariantTy> for $TargetBase { - fn from(value: $VariantTy) -> Self { - Self::$Variant(value) - } - } - - impl From<$VariantTy> for Target { - fn from(value: $VariantTy) -> Self { - $TargetBase::$Variant(value).into() - } - } - )* - - impl $TargetBase { - $( - $enum_vis fn $is_fn(&self) -> bool { - self.$to_fn().is_some() - } - $enum_vis fn $to_fn(&self) -> Option<&$VariantTy> { - if let Self::$Variant(retval) = self { - Some(retval) - } else { - None - } - } - )* - $enum_vis fn must_connect_to(&self) -> bool { - match self { - $(Self::$Variant(v) => v.must_connect_to(),)* - } - } - $enum_vis fn flow(&self) -> Flow { - match self { - $(Self::$Variant(v) => v.flow(),)* - } - } - $enum_vis fn source_location(&self) -> SourceLocation { - match self { - $(Self::$Variant(v) => v.source_location(),)* - } - } - } - }; -} - -impl_target_base! { - #[derive(Clone, PartialEq, Eq, Hash)] - pub enum TargetBase { - #[is = is_module_io] - #[to = module_io] - ModuleIO(ModuleIO>), - #[is = is_mem_port] - #[to = mem_port] - MemPort(MemPort), - #[is = is_reg] - #[to = reg] - Reg(Reg>), - #[is = is_wire] - #[to = wire] - Wire(Wire>), - #[is = is_instance] - #[to = instance] - Instance(Instance), - } -} - -impl fmt::Display for TargetBase { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self.target_name()) - } -} - -impl TargetBase { - pub fn target_name(&self) -> TargetName { - match self { - TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None), - TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())), - TargetBase::Reg(v) => TargetName(v.scoped_name(), None), - TargetBase::Wire(v) => TargetName(v.scoped_name(), None), - TargetBase::Instance(v) => TargetName(v.scoped_name(), None), - } - } - pub fn canonical_ty(&self) -> Interned { - match self { - TargetBase::ModuleIO(v) => v.ty(), - TargetBase::MemPort(v) => v.ty().canonical_dyn(), - TargetBase::Reg(v) => v.ty(), - TargetBase::Wire(v) => v.ty(), - TargetBase::Instance(v) => v.ty().canonical_dyn(), - } - } -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash)] -pub struct TargetChild { - parent: Interned, - path_element: Interned, - canonical_ty: Interned, - flow: Flow, -} - -impl fmt::Debug for TargetChild { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - parent, - path_element, - canonical_ty: _, - flow: _, - } = self; - parent.fmt(f)?; - fmt::Display::fmt(path_element, f) - } -} - -impl fmt::Display for TargetChild { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - parent, - path_element, - canonical_ty: _, - flow: _, - } = self; - parent.fmt(f)?; - path_element.fmt(f) - } -} - -impl TargetChild { - pub fn new(parent: Interned, path_element: Interned) -> Self { - Self { - parent, - path_element, - canonical_ty: path_element.canonical_ty(parent), - flow: path_element.flow(parent), - } - } - pub fn parent(self) -> Interned { - self.parent - } - pub fn path_element(self) -> Interned { - self.path_element - } - pub fn canonical_ty(self) -> Interned { 
- self.canonical_ty - } - pub fn flow(self) -> Flow { - self.flow - } - pub fn bundle_field(self) -> Option>> { - if let TargetPathElement::BundleField(TargetPathBundleField { name }) = *self.path_element { - let parent_ty = self - .parent - .canonical_ty() - .type_enum() - .bundle_type() - .expect("parent known to be bundle"); - Some( - parent_ty - .field_by_name(name) - .expect("field name known to be a valid field of parent"), - ) - } else { - None - } - } -} - -#[derive(Clone, PartialEq, Eq, Hash)] -pub enum Target { - Base(Interned), - Child(TargetChild), -} - -impl From for Target { - fn from(value: TargetBase) -> Self { - Self::Base(Intern::intern_sized(value)) - } -} - -impl From for Target { - fn from(value: TargetChild) -> Self { - Self::Child(value) - } -} - -impl From> for Target { - fn from(value: Interned) -> Self { - Self::Base(value) - } -} - -impl Target { - pub fn base(&self) -> Interned { - let mut target = self; - loop { - match target { - Self::Base(v) => break *v, - Self::Child(v) => target = &v.parent, - } - } - } - pub fn child(&self) -> Option { - match *self { - Target::Base(_) => None, - Target::Child(v) => Some(v), - } - } - pub fn is_static(&self) -> bool { - let mut target = self; - loop { - match target { - Self::Base(_) => return true, - Self::Child(v) if !v.path_element().is_static() => return false, - Self::Child(v) => target = &v.parent, - } - } - } - #[must_use] - pub fn join(&self, path_element: Interned) -> Self { - TargetChild::new(self.intern(), path_element).into() - } - pub fn flow(&self) -> Flow { - match self { - Self::Base(v) => v.flow(), - Self::Child(v) => v.flow(), - } - } - pub fn canonical_ty(&self) -> Interned { - match self { - Target::Base(v) => v.canonical_ty(), - Target::Child(v) => v.canonical_ty(), - } - } -} - -impl fmt::Display for Target { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Base(v) => v.fmt(f), - Self::Child(v) => v.fmt(f), - } - } -} - -impl fmt::Debug for Target { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Base(v) => v.fmt(f), - Self::Child(v) => v.fmt(f), - } - } -} - -pub trait ExprTrait: ExprTraitBase { - fn expr_enum(&self) -> ExprEnum; - fn target(&self) -> Option>; - fn valueless(&self) -> Valueless { - Valueless { ty: self.ty() } - } - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr>; -} - -impl ExprTraitBase for T {} - -impl InternedCompare for dyn ExprTrait { - type InternedCompareKey = PtrEqWithTypeId; - fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey { - Self::get_ptr_eq_with_type_id(this) - } - fn interned_compare_key_weak(this: &std::sync::Weak) -> Self::InternedCompareKey { - Self::get_ptr_eq_with_type_id(&*this.upgrade().unwrap()) - } -} - -pub struct SimState {} - -mod sealed { - pub trait Sealed {} -} - -pub trait ToExpr { - type Type: Type; - fn ty(&self) -> Self::Type; - fn to_expr(&self) -> Expr<::Value>; -} - -impl ToExpr for &'_ T { - type Type = T::Type; - - fn ty(&self) -> Self::Type { - (**self).ty() - } - - fn to_expr(&self) -> Expr<::Value> { - (**self).to_expr() - } -} - -impl ToExpr for &'_ mut T { - type Type = T::Type; - - fn ty(&self) -> Self::Type { - (**self).ty() - } - - fn to_expr(&self) -> Expr<::Value> { - (**self).to_expr() - } -} - -impl ToExpr for Box { - type Type = T::Type; - - fn ty(&self) -> Self::Type { - (**self).ty() - } - - fn to_expr(&self) -> Expr<::Value> { - (**self).to_expr() - } -} - -#[derive(Clone, Eq, PartialEq, Hash)] -pub struct 
Literal { - value: T::CanonicalValue, -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.value.fmt(f) - } -} - -impl Literal { - #[track_caller] - pub fn new_unchecked(value: T::CanonicalValue) -> Self { - assert!( - value.ty().is_passive().unwrap_or(true), - "can't have a literal with flipped fields" - ); - Self { value } - } - pub fn value(&self) -> &T::CanonicalValue { - &self.value - } - pub fn canonical(&self) -> Literal { - Literal { - value: self.value.clone(), - } - } -} - -impl sealed::Sealed for Literal {} - -impl ToExpr for Literal { - type Type = T; - - fn ty(&self) -> Self::Type { - Self::Type::from_canonical_type(self.value.ty()) - } - - fn to_expr(&self) -> Expr<::Value> { - Expr::new_unchecked(self.expr_enum()) - } -} - -impl ExprTrait for Literal { - fn expr_enum(&self) -> ExprEnum { - ExprEnum::Literal( - Literal { - value: self.value.to_canonical_dyn(), - } - .intern_sized(), - ) - } - - fn target(&self) -> Option> { - None - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - Ok(self.value.to_bits()) - } -} - -impl Fold for Literal -where - T::CanonicalValue: Fold, -{ - fn fold(self, state: &mut State) -> Result { - state.fold_literal(self) - } - fn default_fold(self, state: &mut State) -> Result { - Ok(Literal { - value: self.value.fold(state)?, - }) - } -} - -impl Visit for Literal -where - T::CanonicalValue: Visit, -{ - fn visit(&self, state: &mut State) -> Result<(), ::Error> { - state.visit_literal(self) - } - fn default_visit(&self, state: &mut State) -> Result<(), ::Error> { - self.value.visit(state) - } -} - -impl sealed::Sealed for ModuleIO {} - impl ToExpr for ModuleIO { type Type = T; - fn ty(&self) -> Self::Type { - self.field_type().ty.clone() - } - - fn to_expr(&self) -> Expr<::Value> { - Expr::new_unchecked(self.expr_enum()) + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::ModuleIO(self.canonical()).intern_sized(), + __ty: self.ty(), + __flow: self.flow(), + } } } -impl ExprTrait for ModuleIO { - fn expr_enum(&self) -> ExprEnum { - ExprEnum::ModuleIO(self.to_canonical_dyn_module_io().intern_sized()) - } - - fn target(&self) -> Option> { - Some(Intern::intern_sized( - self.to_canonical_dyn_module_io().into(), - )) - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { +impl ToLiteralBits for ModuleIO { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { Err(NotALiteralExpr) } } -impl sealed::Sealed for Instance where T::Type: BundleType {} - -impl ToExpr for Instance -where - T::Type: BundleType, -{ - type Type = T::Type; - - fn ty(&self) -> Self::Type { - (*self.instantiated().io_ty()).clone() - } - - fn to_expr(&self) -> Expr<::Value> { - Expr::new_unchecked(self.expr_enum()) - } -} - -impl ExprTrait for Instance -where - T::Type: BundleType, -{ - fn expr_enum(&self) -> ExprEnum { - ExprEnum::Instance(self.canonical().intern_sized()) - } - +impl GetTarget for ModuleIO { fn target(&self) -> Option> { Some(Intern::intern_sized(self.canonical().into())) } +} - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { +impl ToExpr for Instance { + type Type = T; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::Instance(self.canonical()).intern_sized(), + __ty: self.ty(), + __flow: self.flow(), + } + } +} + +impl ToLiteralBits for Instance { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { Err(NotALiteralExpr) } } -impl sealed::Sealed for Wire {} - -impl ExprTrait for Wire { 
- fn expr_enum(&self) -> ExprEnum { - ExprEnum::Wire(self.to_dyn_canonical_wire().intern_sized()) - } - - fn target(&self) -> Option> { - Some(Intern::intern_sized(self.to_dyn_canonical_wire().into())) - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - Err(NotALiteralExpr) - } -} - -impl sealed::Sealed for Reg {} - -impl ExprTrait for Reg { - fn expr_enum(&self) -> ExprEnum { - ExprEnum::Reg(self.to_dyn_canonical_reg().intern_sized()) - } - - fn target(&self) -> Option> { - Some(Intern::intern_sized(self.to_dyn_canonical_reg().into())) - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - Err(NotALiteralExpr) - } -} - -#[doc(hidden)] -pub fn value_from_expr_type(_expr: Expr, infallible: Infallible) -> V { - match infallible {} -} - -#[doc(hidden)] -pub fn check_match_expr(_expr: Expr, _check_fn: impl FnOnce(V, Infallible)) {} - -#[doc(hidden)] -#[inline] -pub fn make_enum_expr( - _check_fn: impl FnOnce(Infallible) -> V, - build: impl FnOnce(::Builder) -> Expr, -) -> Expr -where - V::Type: EnumType, -{ - build(V::Type::builder()) -} - -#[doc(hidden)] -#[inline] -pub fn make_bundle_expr( - _check_fn: impl FnOnce(Infallible) -> V, - build: impl FnOnce(::Builder) -> Expr, -) -> Expr -where - V::Type: BundleType, -{ - build(V::Type::builder()) -} - -impl sealed::Sealed for MemPort where Self: ToExpr {} - -impl ExprTrait for MemPort -where - Self: ToExpr, -{ - fn expr_enum(&self) -> ExprEnum { - ExprEnum::MemPort(self.canonical().intern_sized()) - } +impl GetTarget for Instance { fn target(&self) -> Option> { Some(Intern::intern_sized(self.canonical().into())) } - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { +} + +impl ToExpr for Wire { + type Type = T; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::Wire(self.canonical()).intern_sized(), + __ty: self.ty(), + __flow: self.flow(), + } + } +} + +impl ToLiteralBits for Wire { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { Err(NotALiteralExpr) } } + +impl GetTarget for Wire { + fn target(&self) -> Option> { + Some(Intern::intern_sized(self.canonical().into())) + } +} + +impl ToExpr for Reg { + type Type = T; + + fn to_expr(&self) -> Expr { + struct Dispatch; + impl ResetTypeDispatch for Dispatch { + type Input = Reg; + type Output = ExprEnum; + + fn reset(self, input: Self::Input) -> Self::Output { + ExprEnum::Reg(input) + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + ExprEnum::RegSync(input) + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + ExprEnum::RegAsync(input) + } + } + Expr { + __enum: R::dispatch(self.canonical(), Dispatch).intern_sized(), + __ty: self.ty(), + __flow: self.flow(), + } + } +} + +impl ToLiteralBits for Reg { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Err(NotALiteralExpr) + } +} + +impl GetTarget for Reg { + fn target(&self) -> Option> { + Some(Intern::intern_sized(self.canonical().into())) + } +} + +impl ToExpr for MemPort { + type Type = T::Port; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::MemPort(self.canonical()).intern_sized(), + __ty: self.ty(), + __flow: self.flow(), + } + } +} + +impl ToLiteralBits for MemPort { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Err(NotALiteralExpr) + } +} + +impl GetTarget for MemPort { + fn target(&self) -> Option> { + Some(Intern::intern_sized(self.canonical().into())) + } +} + +pub trait HdlPartialEq { + fn cmp_eq(self, rhs: Rhs) -> Expr; + fn cmp_ne(self, rhs: 
Rhs) -> Expr; +} + +pub trait HdlPartialOrd: HdlPartialEq { + fn cmp_lt(self, rhs: Rhs) -> Expr; + fn cmp_le(self, rhs: Rhs) -> Expr; + fn cmp_gt(self, rhs: Rhs) -> Expr; + fn cmp_ge(self, rhs: Rhs) -> Expr; +} + +pub trait ReduceBits { + type UIntOutput; + type BoolOutput; + fn reduce_bitand(self) -> Self::UIntOutput; + fn reduce_bitor(self) -> Self::UIntOutput; + fn reduce_bitxor(self) -> Self::UIntOutput; + fn any_one_bits(self) -> Self::BoolOutput; + fn any_zero_bits(self) -> Self::BoolOutput; + fn all_one_bits(self) -> Self::BoolOutput; + fn all_zero_bits(self) -> Self::BoolOutput; + fn parity_odd(self) -> Self::BoolOutput; + fn parity_even(self) -> Self::BoolOutput; +} + +pub trait CastToBits { + fn cast_to_bits(&self) -> Expr; +} + +impl CastToBits for T { + fn cast_to_bits(&self) -> Expr { + ops::CastToBits::new(Expr::canonical(self.to_expr())).to_expr() + } +} + +pub trait CastBitsTo { + #[track_caller] + fn cast_bits_to(&self, ty: T) -> Expr; +} + +impl> + ?Sized, Width: Size> CastBitsTo for T { + fn cast_bits_to(&self, ty: ToType) -> Expr { + ops::CastBitsTo::new(Expr::as_dyn_int(self.to_expr()), ty).to_expr() + } +} + +pub trait CastTo: ToExpr { + fn cast_to(&self, to_type: ToType) -> Expr + where + Self::Type: ExprCastTo, + { + ExprCastTo::cast_to(self.to_expr(), to_type) + } + fn cast_to_static(&self) -> Expr + where + Self::Type: ExprCastTo, + { + ExprCastTo::cast_to(self.to_expr(), ToType::TYPE) + } +} + +impl CastTo for T {} + +#[doc(hidden)] +pub fn check_match_expr( + _expr: Expr, + _check_fn: impl FnOnce(T::MatchVariant, Infallible), +) { +} + +pub trait MakeUninitExpr: Type { + fn uninit(self) -> Expr; +} + +impl MakeUninitExpr for T { + fn uninit(self) -> Expr { + ops::Uninit::new(self).to_expr() + } +} + +pub fn repeat( + element: impl ToExpr, + len: L, +) -> Expr> { + let element = element.to_expr(); + let canonical_element = Expr::canonical(element); + ops::ArrayLiteral::new( + Expr::ty(element), + std::iter::repeat(canonical_element) + .take(L::Size::as_usize(len)) + .collect(), + ) + .to_expr() +} + +impl ToExpr for PhantomConst { + type Type = Self; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::PhantomConst(self.canonical_phantom_const()).intern_sized(), + __ty: *self, + __flow: Flow::Source, + } + } +} + +impl GetTarget for PhantomConst { + fn target(&self) -> Option> { + None + } +} + +impl ToLiteralBits for PhantomConst { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Ok(Interned::default()) + } +} diff --git a/crates/fayalite/src/expr/ops.rs b/crates/fayalite/src/expr/ops.rs index 6f1523f..b10e3ae 100644 --- a/crates/fayalite/src/expr/ops.rs +++ b/crates/fayalite/src/expr/ops.rs @@ -1,1099 +1,2321 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information + use crate::{ - array::{Array, ArrayType, ArrayTypeTrait, ValueArrayOrSlice}, - bundle::{BundleType, BundleValue, DynBundleType, FieldType}, - clock::{Clock, ClockType, ToClock}, - enum_::{DynEnumType, EnumType, EnumValue, VariantType}, + array::{Array, ArrayType}, + bundle::{Bundle, BundleField, BundleType}, + clock::{Clock, ToClock}, + enum_::{Enum, EnumType, EnumVariant}, expr::{ - sealed, Expr, ExprEnum, ExprTrait, NotALiteralExpr, Target, TargetPathArrayElement, - TargetPathBundleField, TargetPathDynArrayElement, TargetPathElement, ToExpr, + CastBitsTo as _, CastTo, CastToBits as _, Expr, ExprEnum, Flow, HdlPartialEq, + HdlPartialOrd, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits, + target::{ + GetTarget, Target, 
TargetPathArrayElement, TargetPathBundleField, + TargetPathDynArrayElement, TargetPathElement, + }, }, int::{ - DynInt, DynIntType, DynSInt, DynSIntType, DynUInt, DynUIntType, Int, IntCmp, IntType, - IntTypeTrait, IntValue, StaticOrDynIntType, UInt, UIntType, + Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt, + UIntType, UIntValue, }, intern::{Intern, Interned}, + phantom_const::{PhantomConst, PhantomConstValue}, reset::{ - AsyncReset, AsyncResetType, Reset, ResetType, SyncReset, SyncResetType, ToAsyncReset, - ToReset, ToSyncReset, + AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset, + ToSyncReset, }, - ty::{ - CanonicalType, CanonicalValue, DynCanonicalType, DynCanonicalValue, DynType, Type, Value, - }, - type_deduction::HitUndeducedType, - util::{interned_bit, ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool}, - valueless::{Valueless, ValuelessTr}, + ty::{CanonicalType, StaticType, Type}, + util::ConstUsize, }; -use bitvec::{slice::BitSlice, vec::BitVec}; -use num_traits::ToPrimitive; +use bitvec::{order::Lsb0, slice::BitSlice, vec::BitVec, view::BitView}; +use num_bigint::BigInt; +use num_traits::{ToPrimitive, Zero}; use std::{ fmt, - hash::{Hash, Hasher}, - ops::{self, Index, Range, RangeBounds}, + marker::PhantomData, + ops::{ + Add, BitAnd, BitOr, BitXor, Div, Index, Mul, Neg as StdNeg, Not, Range, RangeBounds, Rem, + Shl, Shr, Sub, + }, }; -fn unary_literal_bits>>( - v: Expr, - f: impl FnOnce(Interned) -> Interned, -) -> Result, HitUndeducedType>, NotALiteralExpr> { - Ok(v.to_literal_bits()?.map(f)) -} - -fn try_unary_literal_bits>>( - v: Expr, - f: impl FnOnce(Interned) -> Result, HitUndeducedType>, -) -> Result, HitUndeducedType>, NotALiteralExpr> { - Ok(v.to_literal_bits()?.and_then(f)) -} - -fn binary_literal_bits>, R: Value>>( - l: Expr, - r: Expr, - f: impl FnOnce(Interned, Interned) -> Interned, -) -> Result, HitUndeducedType>, NotALiteralExpr> { - let l = l.to_literal_bits()?; - let r = r.to_literal_bits()?; - Ok((|| Ok(f(l?, r?)))()) -} - -fn try_binary_literal_bits>, R: Value>>( - l: Expr, - r: Expr, - f: impl FnOnce( - Interned, - Interned, - ) -> Result, HitUndeducedType>, -) -> Result, HitUndeducedType>, NotALiteralExpr> { - let l = l.to_literal_bits()?; - let r = r.to_literal_bits()?; - Ok((|| f(l?, r?))()) -} - -macro_rules! fixed_ary_op { +macro_rules! forward_value_to_expr_unary_op_trait { ( - pub struct $name:ident<$($T:ident,)* $(#[const] $C:ident: $CTy:ty,)*> + #[generics($($generics:tt)*)] + #[value($Value:ty)] + $Trait:ident::$method:ident + ) => { + impl<$($generics)*> $Trait for $Value where - ($($where:tt)*) + Expr<<$Value as ToExpr>::Type>: $Trait, { - $($arg_vis:vis $arg:ident: $Arg:ty,)+ - $(#[cache] - $cache_before_ty:ident: $CacheBeforeTy:ty = $cache_before_ty_expr:expr,)* - #[type$(($ty_arg:ident))?] - $ty_vis:vis $ty_name:ident: $Ty:ty = $ty_expr:expr, - $(#[cache] - $cache_after_ty:ident: $CacheAfterTy:ty = $cache_after_ty_expr:expr,)* - $(#[target] - $target_name:ident: Option> = $target_expr:expr,)? - fn simulate(&$simulate_self:ident, $sim_state:ident: &mut SimState) -> _ { - $($simulate_body:tt)+ - } + type Output = ::Type> as $Trait>::Output; - fn expr_enum(&$expr_enum_self:ident) -> _ { - $($expr_enum_body:tt)+ + fn $method(self) -> Self::Output { + $Trait::$method(self.to_expr()) } + } + }; +} - fn to_literal_bits( - &$to_literal_bits_self:ident, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - $($to_literal_bits_body:tt)+ - } +macro_rules! 
impl_unary_op_trait { + ( + #[generics($($generics:tt)*)] + fn $Trait:ident::$method:ident($arg:ident: $Arg:ty) -> $Output:ty { + $($body:tt)* } ) => { - pub struct $name<$($T,)* $(const $C: $CTy,)*> - where - $($where)* + impl<$($generics)*> $Trait for Expr<$Arg> { - $($arg_vis $arg: $Arg,)+ - $($cache_before_ty: $CacheBeforeTy,)* - $ty_vis $ty_name: $Ty, - $($cache_after_ty: $CacheAfterTy,)* - $($target_name: Option>,)? - } + type Output = Expr<$Output>; - impl<$($T,)* $(const $C: $CTy,)*> $name<$($T,)* $($C,)*> - where - $($where)* - { - pub fn new_unchecked($($arg: $Arg,)* $($ty_arg: $Ty,)?) -> Self { - $(let $cache_before_ty: $CacheBeforeTy = $cache_before_ty_expr;)* - let $ty_name: $Ty = $ty_expr; - $(let $cache_after_ty: $CacheAfterTy = $cache_after_ty_expr;)* - $(let $target_name: Option> = $target_expr;)? - Self { - $($arg,)* - $($cache_before_ty,)* - $ty_name, - $($cache_after_ty,)* - $($target_name,)? - } - } - pub fn canonical(&self) -> $name<$($T::CanonicalType,)* $($C,)*> { - $name { - $($arg: self.$arg.clone(),)* - $($cache_before_ty: self.$cache_before_ty.clone(),)* - $ty_name: self.$ty_name.canonical(), - $($cache_after_ty: self.$cache_after_ty.clone(),)* - $($target_name: self.$target_name,)? - } + fn $method(self) -> Self::Output { + let $arg = self; + $($body)* } } + }; +} - impl<$($T,)* $(const $C: $CTy,)*> Clone for $name<$($T,)* $($C,)*> - where - $($where)* - { - fn clone(&self) -> Self { - Self { - $($arg: self.$arg.clone(),)* - $($cache_before_ty: self.$cache_before_ty.clone(),)* - $ty_name: self.$ty_name.clone(), - $($cache_after_ty: self.$cache_after_ty.clone(),)* - $($target_name: self.$target_name,)? - } - } - } - - impl<$($T,)* $(const $C: $CTy,)*> PartialEq for $name<$($T,)* $($C,)*> - where - $($where)* - { - fn eq(&self, rhs: &Self) -> bool { - $(self.$arg == rhs.$arg &&)* self.$ty_name == rhs.$ty_name - } - } - - impl<$($T,)* $(const $C: $CTy,)*> Eq for $name<$($T,)* $($C,)*> - where - $($where)* - { - } - - impl<$($T,)* $(const $C: $CTy,)*> Hash for $name<$($T,)* $($C,)*> - where - $($where)* - { - fn hash(&self, state: &mut H) { - $(self.$arg.hash(state);)* - self.$ty_name.hash(state); - } - } - - impl<$($T,)* $(const $C: $CTy,)*> fmt::Debug for $name<$($T,)* $($C,)*> - where - $($where)* - { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_tuple(stringify!($name))$(.field(&self.$arg))*.finish() - } - } - - impl<$($T,)* $(const $C: $CTy,)*> ToExpr for $name<$($T,)* $($C,)*> - where - $($where)* - { - type Type = $Ty; - - fn ty(&self) -> Self::Type { - self.$ty_name.clone() - } - fn to_expr(&self) -> Expr<::Value> { - Expr::new_unchecked(self.expr_enum()) - } - } - - impl<$($T,)* $(const $C: $CTy,)*> ExprTrait for $name<$($T,)* $($C,)*> - where - $($where)* - { - fn expr_enum(&$expr_enum_self) -> ExprEnum { - $($expr_enum_body)+ - } +macro_rules! impl_get_target_none { + ([$($generics:tt)*] $ty:ty) => { + impl<$($generics)*> GetTarget for $ty { fn target(&self) -> Option> { - ($(self.$target_name,)? None::>,).0 - } - fn to_literal_bits( - &$to_literal_bits_self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - $($to_literal_bits_body)+ - } - } - - impl<$($T,)* $(const $C: $CTy,)*> sealed::Sealed for $name<$($T,)* $($C,)*> - where - $($where)* - { - } - }; -} - -macro_rules! unary_op { - ( - #[method = $op:ident] - impl<$T:ident> $Op:ident for _ where ($($where:tt)*) { - fn expr_enum(&$expr_enum_self:ident) -> _ { - $($expr_enum_body:tt)+ - } - } - ) => { - fixed_ary_op! 
{ - pub struct $Op<$T,> - where ( - $($where)* - ) - { - pub arg: Expr<$T::CanonicalValue>, - #[type] - ty: < as ops::$Op>::Output as ValuelessTr>::Type = ops::$Op::$op( - Valueless::<$T>::from_canonical(arg.valueless()), - ).ty, - #[cache] - literal_bits: Result, HitUndeducedType>, NotALiteralExpr> = { - unary_literal_bits(arg, |v| ops::$Op::$op($T::CanonicalValue::from_bit_slice(&v)).to_bits()) - }, - - fn simulate(&self, sim_state: &mut SimState) -> _ { - ops::$Op::$op(self.arg.simulate(sim_state)) - } - - fn expr_enum(&$expr_enum_self) -> _ { - $($expr_enum_body)+ - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - self.literal_bits - } - } - } - - impl<$T, V: Value> ops::$Op for Expr - where - $($where)* - { - type Output = Expr<< as ops::$Op>::Output as ValuelessTr>::Value>; - - fn $op(self) -> Self::Output { - $Op::<$T>::new_unchecked(self.canonical()).to_expr() + None } } }; } -unary_op! { - #[method = not] - impl Not for _ - where ( - T: IntTypeTrait< - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - ) - { - fn expr_enum(&self) -> _ { - struct Tag; - impl ConstBoolDispatchTag for Tag { - type Type = Not>; - } - match ConstBoolDispatch::new::(self.canonical()) { - ConstBoolDispatch::False(v) => ExprEnum::NotU(v.intern_sized()), - ConstBoolDispatch::True(v) => ExprEnum::NotS(v.intern_sized()), - } +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct NotU { + arg: Expr>, + literal_bits: Result, NotALiteralExpr>, +} + +impl NotU { + pub fn new(arg: Expr>) -> Self { + Self { + arg, + literal_bits: arg + .to_literal_bits() + .map(|bits| Intern::intern_owned(bits.to_bitvec().not())), + } + } + pub fn arg(self) -> Expr> { + self.arg + } +} + +impl ToExpr for NotU { + type Type = UIntType; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::NotU(NotU { + arg: Expr::as_dyn_int(self.arg), + literal_bits: self.literal_bits, + }) + .intern(), + __ty: self.arg.__ty, + __flow: Flow::Source, } } } -unary_op! { - #[method = neg] - impl Neg for _ - where ( - T: IntTypeTrait< - Signed = ConstBool, - CanonicalType = DynSIntType, - CanonicalValue = DynSInt, - >, - ) - { - fn expr_enum(&self) -> _ { - ExprEnum::Neg(self.canonical().intern_sized()) +impl ToLiteralBits for NotU { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + self.literal_bits + } +} + +impl_get_target_none!([Width: Size] NotU); + +impl_unary_op_trait! { + #[generics(Width: Size)] + fn Not::not(arg: UIntType) -> UIntType { + NotU::new(arg).to_expr() + } +} + +forward_value_to_expr_unary_op_trait! { + #[generics(Width: Size)] + #[value(UIntValue)] + Not::not +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct NotS { + arg: Expr>, + literal_bits: Result, NotALiteralExpr>, +} + +impl NotS { + pub fn new(arg: Expr>) -> Self { + Self { + arg, + literal_bits: arg + .to_literal_bits() + .map(|bits| Intern::intern_owned(bits.to_bitvec().not())), + } + } + pub fn arg(self) -> Expr> { + self.arg + } +} + +impl ToExpr for NotS { + type Type = UIntType; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::NotS(NotS { + arg: Expr::as_dyn_int(self.arg), + literal_bits: self.literal_bits, + }) + .intern(), + __ty: self.arg.__ty.as_same_width_uint(), + __flow: Flow::Source, } } } -macro_rules! binary_op { +impl ToLiteralBits for NotS { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + self.literal_bits + } +} + +impl_get_target_none!([Width: Size] NotS); + +impl_unary_op_trait! 
{ + #[generics(Width: Size)] + fn Not::not(arg: SIntType) -> UIntType { + NotS::new(arg).to_expr() + } +} + +forward_value_to_expr_unary_op_trait! { + #[generics(Width: Size)] + #[value(SIntValue)] + Not::not +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct NotB { + arg: Expr, + literal_bits: Result, NotALiteralExpr>, +} + +impl NotB { + pub fn new(arg: Expr) -> Self { + Self { + arg, + literal_bits: arg + .to_literal_bits() + .map(|bits| Intern::intern_owned(bits.to_bitvec().not())), + } + } + pub fn arg(self) -> Expr { + self.arg + } +} + +impl ToExpr for NotB { + type Type = Bool; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::NotB(NotB { + arg: self.arg, + literal_bits: self.literal_bits, + }) + .intern(), + __ty: self.arg.__ty, + __flow: Flow::Source, + } + } +} + +impl ToLiteralBits for NotB { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + self.literal_bits + } +} + +impl_get_target_none!([] NotB); + +impl_unary_op_trait! { + #[generics()] + fn Not::not(arg: Bool) -> Bool { + NotB::new(arg).to_expr() + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct Neg { + arg: Expr, + literal_bits: Result, NotALiteralExpr>, +} + +impl Neg { + pub fn new(arg: Expr) -> Self { + let mut retval = Self { + arg, + literal_bits: Err(NotALiteralExpr), + }; + let result_ty = retval.ty(); + retval.literal_bits = arg.to_literal_bits().map(|bits| { + Intern::intern_owned(result_ty.bits_from_bigint_wrapping(&-SInt::bits_to_bigint(&bits))) + }); + retval + } + pub fn ty(self) -> SInt { + SInt::new_dyn( + Expr::ty(self.arg) + .width() + .checked_add(1) + .expect("width too big"), + ) + } + pub fn arg(self) -> Expr { + self.arg + } +} + +impl ToExpr for Neg { + type Type = SInt; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::Neg(*self).intern(), + __ty: self.ty(), + __flow: Flow::Source, + } + } +} + +impl ToLiteralBits for Neg { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + self.literal_bits + } +} + +impl_get_target_none!([] Neg); + +impl_unary_op_trait! { + #[generics(Width: Size)] + fn StdNeg::neg(arg: SIntType) -> SInt { + Neg::new(Expr::as_dyn_int(arg)).to_expr() + } +} + +forward_value_to_expr_unary_op_trait! { + #[generics(Width: Size)] + #[value(SIntValue)] + StdNeg::neg +} + +macro_rules! impl_binary_op_trait { ( - #[ - method = $op:ident, - rhs_to_canonical_dyn = $rhs_to_canonical_dyn:ident, - expr_enum_u = $expr_enum_u:ident, - expr_enum_s = $expr_enum_s:ident - ] - impl<$LhsType:ident, $RhsType:ident> $Op:ident for _ where $($where:tt)* + #[generics($($generics:tt)*)] + fn $Trait:ident::$method:ident($lhs:ident: $Lhs:ty, $rhs:ident: $Rhs:ty) -> $Output:ty { + $($body:tt)* + } ) => { - fixed_ary_op! 
{ - pub struct $Op<$LhsType, $RhsType,> - where ( - $($where)* - ) - { - pub lhs: Expr<$LhsType::CanonicalValue>, - pub rhs: Expr<$RhsType::CanonicalValue>, - #[type] - ty: < - as ops::$Op>>::Output - as ValuelessTr - >::Type = ops::$Op::$op( - Valueless::<$LhsType>::from_canonical(lhs.valueless()), - Valueless::<$RhsType>::from_canonical(rhs.valueless()), - ).ty, - #[cache] - literal_bits: Result, HitUndeducedType>, NotALiteralExpr> = { - binary_literal_bits(lhs, rhs, |lhs, rhs| { - ops::$Op::$op( - $LhsType::CanonicalValue::from_bit_slice(&lhs), - $RhsType::CanonicalValue::from_bit_slice(&rhs), - ) - .to_bits() - }) - }, - fn simulate(&self, sim_state: &mut SimState) -> _ { - ops::$Op::$op(self.lhs.simulate(sim_state), self.rhs.simulate(sim_state)) - } - fn expr_enum(&self) -> _ { - struct Tag; - impl ConstBoolDispatchTag for Tag { - type Type = - $Op, DynIntType, DynUIntType>; - } - match ConstBoolDispatch::new::(self.canonical()) { - ConstBoolDispatch::False(v) => ExprEnum::$CmpOpU(v.intern_sized()), - ConstBoolDispatch::True(v) => ExprEnum::$CmpOpS(v.intern_sized()), - } - } - - fn to_literal_bits( - &self, - ) -> Result, HitUndeducedType>, NotALiteralExpr> { - self.literal_bits + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::$name(*self).intern(), + __ty: Bool, + __flow: Flow::Source, } } })* - impl< - LhsType: IntTypeTrait< - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - RhsType: IntTypeTrait< - Signed = LhsType::Signed, - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - Rhs: ToExpr, - Lhs: Value, - > IntCmp for Expr - { - type Output = Expr>; - - $(fn $fn(self, rhs: Rhs) -> Self::Output { - $CmpOp::>::new_unchecked( - self.canonical(), - rhs.to_expr().canonical(), - ).to_expr() - })* - } - - impl< - LhsType: IntTypeTrait< - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - RhsType: IntTypeTrait< - Signed = LhsType::Signed, - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - Rhs: ToExpr, - > IntCmp for IntValue { - type Output = Expr>; - - $(fn $fn(self, rhs: Rhs) -> Self::Output { - self.to_expr().$fn(rhs) - })* - } - - impl< - LhsType: IntTypeTrait, - RhsType: IntTypeTrait, - > IntCmp> for Valueless { - type Output = Valueless>; - - $(fn $fn(self, _rhs: Valueless) -> Self::Output { - Valueless { ty: UIntType::new() } + impl$(<$LhsWidth: Size, $RhsWidth: Size>)? $Trait<$Rhs> for $Lhs { + $(fn $method($lhs: Expr, $rhs: Expr<$Rhs>) -> Expr { + $name::new($dyn_lhs, $dyn_rhs).to_expr() })* } }; } -cmp_op! { - CmpLt, CmpLtU, CmpLtS, cmp_lt, PartialOrd::lt; - CmpLe, CmpLeU, CmpLeS, cmp_le, PartialOrd::le; - CmpGt, CmpGtU, CmpGtS, cmp_gt, PartialOrd::gt; - CmpGe, CmpGeU, CmpGeS, cmp_ge, PartialOrd::ge; - CmpEq, CmpEqU, CmpEqS, cmp_eq, PartialEq::eq; - CmpNe, CmpNeU, CmpNeS, cmp_ne, PartialEq::ne; +impl_compare_op! { + #[dyn_type(Bool)] + #[to_dyn_type(lhs => lhs, rhs => rhs)] + #[type(Bool, Bool)] + #[trait(ExprPartialEq)] + struct CmpEqB; fn cmp_eq(); PartialEq::eq(); + struct CmpNeB; fn cmp_ne(); PartialEq::ne(); } -fixed_ary_op! 
{ - pub struct CastInt - where ( - FromType: IntTypeTrait< - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - ToType: IntTypeTrait< - CanonicalType = DynIntType<::Signed>, - CanonicalValue = DynInt<::Signed>, - >, - ) - { - pub value: Expr, - #[type(ty)] - pub ty: ToType = ty, - #[cache] - literal_bits: Result, HitUndeducedType>, NotALiteralExpr> = { - unary_literal_bits(value, |literal_bits| { - let mut bits = literal_bits.to_bitvec(); - let fill = FromType::Signed::VALUE - && bits.len().checked_sub(1).map(|i| bits[i]).unwrap_or(false); - bits.resize(ty.width(), fill); - Intern::intern_owned(bits) - }) - }, +impl_compare_op! { + #[dyn_type(Bool)] + #[to_dyn_type(lhs => lhs, rhs => rhs)] + #[type(Bool, Bool)] + #[trait(ExprPartialOrd)] + struct CmpLtB; fn cmp_lt(); PartialOrd::lt(); + struct CmpLeB; fn cmp_le(); PartialOrd::le(); + struct CmpGtB; fn cmp_gt(); PartialOrd::gt(); + struct CmpGeB; fn cmp_ge(); PartialOrd::ge(); +} - fn simulate(&self, sim_state: &mut SimState) -> _ { - self.value.simulate(sim_state).cast_as_type(self.ty.canonical()) - } +impl_compare_op! { + #[width(LhsWidth, RhsWidth)] + #[dyn_type(UInt)] + #[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))] + #[type(UIntType, UIntType)] + #[trait(ExprPartialEq)] + struct CmpEqU; fn cmp_eq(); PartialEq::eq(); + struct CmpNeU; fn cmp_ne(); PartialEq::ne(); +} - fn expr_enum(&self) -> _ { - struct Tag1; - impl ConstBoolDispatchTag for Tag1 { - type Type = ConstBoolDispatch< - CastInt, DynUIntType>, - CastInt, DynSIntType>, - >; - } - struct Tag2(FromSigned); - impl ConstBoolDispatchTag for Tag2 { - type Type = - CastInt, DynIntType