diff --git a/.forgejo/workflows/deps.yml b/.forgejo/workflows/deps.yml deleted file mode 100644 index ffaca53..0000000 --- a/.forgejo/workflows/deps.yml +++ /dev/null @@ -1,77 +0,0 @@ -# SPDX-License-Identifier: LGPL-3.0-or-later -# See Notices.txt for copyright information -on: - workflow_call: - outputs: - cache-primary-key: - value: ${{ jobs.deps.outputs.cache-primary-key }} - -jobs: - deps: - runs-on: debian-12 - outputs: - cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }} - steps: - - uses: https://code.forgejo.org/actions/checkout@v3 - with: - fetch-depth: 0 - - uses: https://code.forgejo.org/actions/cache/restore@v3 - id: restore-deps - with: - path: deps - key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }} - lookup-only: true - - name: Install Apt packages - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - apt-get update -qq - apt-get install -qq \ - bison \ - build-essential \ - ccache \ - clang \ - cvc5 \ - flex \ - gawk \ - g++ \ - git \ - libboost-filesystem-dev \ - libboost-python-dev \ - libboost-system-dev \ - libffi-dev \ - libreadline-dev \ - lld \ - pkg-config \ - python3 \ - python3-click \ - tcl-dev \ - zlib1g-dev - - name: Install Firtool - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - mkdir -p deps - wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz - sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz' - tar -C deps -xvaf deps/firrtl.tar.gz - rm -rf deps/firtool - mv deps/firtool-1.86.0 deps/firtool - - name: Get SymbiYosys - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --branch=yosys-0.45 https://github.com/YosysHQ/sby.git deps/sby - - name: Build Z3 - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --recursive --branch=z3-4.13.3 https://github.com/Z3Prover/z3.git deps/z3 - (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local) - make -C deps/z3/build -j"$(nproc)" - - name: Build Yosys - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --recursive --branch=0.45 https://github.com/YosysHQ/yosys.git deps/yosys - make -C deps/yosys -j"$(nproc)" - - uses: https://code.forgejo.org/actions/cache/save@v3 - if: steps.restore-deps.outputs.cache-hit != 'true' - with: - path: deps - key: ${{ steps.restore-deps.outputs.cache-primary-key }} diff --git a/.forgejo/workflows/test.yml b/.forgejo/workflows/test.yml index 969d691..001168f 100644 --- a/.forgejo/workflows/test.yml +++ b/.forgejo/workflows/test.yml @@ -3,58 +3,23 @@ on: [push, pull_request] jobs: - deps: - uses: ./.forgejo/workflows/deps.yml test: runs-on: debian-12 - needs: deps + container: + image: git.libre-chip.org/libre-chip/fayalite-deps:latest steps: - - uses: https://code.forgejo.org/actions/checkout@v3 + - uses: actions/checkout@v3 with: fetch-depth: 0 - run: | scripts/check-copyright.sh - - run: | - apt-get update -qq - apt-get install -qq \ - bison \ - build-essential \ - ccache \ - clang \ - cvc5 \ - flex \ - gawk \ - git \ - libboost-filesystem-dev \ - libboost-python-dev \ - libboost-system-dev \ - libffi-dev \ - libreadline-dev \ - lld \ - pkg-config \ - python3 \ - python3-click \ - tcl-dev \ - z3 \ - zlib1g-dev - - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.80.1 - source "$HOME/.cargo/env" - echo "$PATH" >> "$GITHUB_PATH" 
- - uses: https://code.forgejo.org/actions/cache/restore@v3 - with: - path: deps - key: ${{ needs.deps.outputs.cache-primary-key }} - fail-on-cache-miss: true - - run: | - make -C deps/z3/build install - make -C deps/sby install - make -C deps/yosys install - export PATH="$(realpath deps/firtool/bin):$PATH" - echo "$PATH" >> "$GITHUB_PATH" - - uses: https://github.com/Swatinem/rust-cache@v2 + - uses: https://git.libre-chip.org/mirrors/rust-cache@v2 with: save-if: ${{ github.ref == 'refs/heads/master' }} - run: cargo test - run: cargo build --tests --features=unstable-doc + - run: cargo test --doc --features=unstable-doc - run: cargo doc --features=unstable-doc + - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher + - run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out + - run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out diff --git a/Cargo.lock b/Cargo.lock index 500bd34..be5f3bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,18 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 - -[[package]] -name = "ahash" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] +version = 4 [[package]] name = "allocator-api2" @@ -37,9 +25,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" @@ -93,6 +81,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "basic-toml" version = "0.1.8" @@ -161,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" +checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" dependencies = [ "clap_builder", "clap_derive", @@ -171,9 +165,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" +checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" dependencies = [ "anstream", "anstyle", @@ -182,10 +176,19 @@ dependencies = [ ] [[package]] -name = "clap_derive" -version = "4.5.8" +name = "clap_complete" +version = "4.5.58" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "4.5.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" dependencies = [ "heck", "proc-macro2", @@ -195,9 +198,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" @@ -301,11 +304,13 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fayalite" -version = "0.2.1" +version = "0.3.0" dependencies = [ + "base64", "bitvec", "blake3", "clap", + "clap_complete", "ctor", "eyre", "fayalite-proc-macros", @@ -314,24 +319,26 @@ dependencies = [ "jobslot", "num-bigint", "num-traits", - "os_pipe", + "ordered-float", + "petgraph", "serde", "serde_json", "tempfile", "trybuild", + "vec_map", "which", ] [[package]] name = "fayalite-proc-macros" -version = "0.2.1" +version = "0.3.0" dependencies = [ "fayalite-proc-macros-impl", ] [[package]] name = "fayalite-proc-macros-impl" -version = "0.2.1" +version = "0.3.0" dependencies = [ "base16ct", "num-bigint", @@ -345,7 +352,7 @@ dependencies = [ [[package]] name = "fayalite-visit-gen" -version = "0.2.1" +version = "0.3.0" dependencies = [ "indexmap", "prettyplease", @@ -357,6 +364,18 @@ dependencies = [ "thiserror", ] +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "funty" version = "2.0.0" @@ -375,12 +394,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", + "r-efi", "wasi", ] @@ -392,12 +412,13 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ - "ahash", "allocator-api2", + "equivalent", + "foldhash", ] [[package]] @@ -423,9 +444,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "2.2.6" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", "hashbrown", @@ 
-446,23 +467,23 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobslot" -version = "0.2.19" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d" +checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c" dependencies = [ "cfg-if", "derive_destructure2", "getrandom", "libc", "scopeguard", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "libc" -version = "0.2.153" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "linux-raw-sys" @@ -472,11 +493,10 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -506,13 +526,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] -name = "os_pipe" -version = "1.2.1" +name = "ordered-float" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" +checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d" dependencies = [ - "libc", - "windows-sys 0.59.0", + "num-traits", + "rand", + "serde", +] + +[[package]] +name = "petgraph" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06" +dependencies = [ + "fixedbitset", + "hashbrown", + "indexmap", + "serde", ] [[package]] @@ -527,9 +560,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.83" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -543,12 +576,37 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "radium" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core", + "serde", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "serde", +] + [[package]] name = "rustix" version = "0.38.31" @@ -631,9 +689,9 @@ checksum = 
"7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.66" +version = "2.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058" dependencies = [ "proc-macro2", "quote", @@ -720,6 +778,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + [[package]] name = "version_check" version = "0.9.4" @@ -728,9 +792,21 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.14.7+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] [[package]] name = "which" @@ -775,6 +851,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.52.0" @@ -786,11 +868,11 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets", + "windows-link", ] [[package]] @@ -863,6 +945,12 @@ version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + [[package]] name = "wyz" version = "0.5.1" @@ -871,23 +959,3 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" dependencies = [ "tap", ] - -[[package]] -name = "zerocopy" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] diff --git a/Cargo.toml b/Cargo.toml index b6b8616..2380ea7 100644 --- a/Cargo.toml +++ b/Cargo.toml 
@@ -5,38 +5,42 @@ resolver = "2"
 members = ["crates/*"]
 
 [workspace.package]
-version = "0.2.1"
+version = "0.3.0"
 license = "LGPL-3.0-or-later"
-edition = "2021"
+edition = "2024"
 repository = "https://git.libre-chip.org/libre-chip/fayalite"
 keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
 categories = ["simulation", "development-tools", "compilers"]
-rust-version = "1.80.1"
+rust-version = "1.89.0"
 
 [workspace.dependencies]
-fayalite-proc-macros = { version = "=0.2.1", path = "crates/fayalite-proc-macros" }
-fayalite-proc-macros-impl = { version = "=0.2.1", path = "crates/fayalite-proc-macros-impl" }
-fayalite-visit-gen = { version = "=0.2.1", path = "crates/fayalite-visit-gen" }
+fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
+fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
+fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
 base16ct = "0.2.0"
+base64 = "0.22.1"
 bitvec = { version = "1.0.1", features = ["serde"] }
 blake3 = { version = "1.5.4", features = ["serde"] }
 clap = { version = "4.5.9", features = ["derive", "env", "string"] }
+clap_complete = "4.5.58"
 ctor = "0.2.8"
 eyre = "0.6.12"
-hashbrown = "0.14.3"
-indexmap = { version = "2.2.6", features = ["serde"] }
-jobslot = "0.2.19"
-num-bigint = "0.4.4"
+hashbrown = "0.15.2"
+indexmap = { version = "2.5.0", features = ["serde"] }
+jobslot = "0.2.23"
+num-bigint = "0.4.6"
 num-traits = "0.2.16"
-os_pipe = "1.2.1"
+ordered-float = { version = "5.1.0", features = ["serde"] }
+petgraph = "0.8.1"
 prettyplease = "0.2.20"
 proc-macro2 = "1.0.83"
 quote = "1.0.36"
 serde = { version = "1.0.202", features = ["derive"] }
 serde_json = { version = "1.0.117", features = ["preserve_order"] }
 sha2 = "0.10.8"
-syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
+syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
 tempfile = "3.10.1"
 thiserror = "1.0.61"
 trybuild = "1.0"
+vec_map = "0.8.2"
 which = "6.0.1"
diff --git a/README.md b/README.md
index 438550e..18cd78c 100644
--- a/README.md
+++ b/README.md
@@ -7,3 +7,78 @@ See Notices.txt for copyright information
 Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).
 
 [FIRRTL]: https://github.com/chipsalliance/firrtl-spec
+
+# Building the [Blinky example] for the Arty A7 100T on Linux
+
+[Blinky example]: crates/fayalite/examples/blinky.rs
+
+This uses the container image that contains all the external programs and files Fayalite needs to build for FPGAs; the sources for the container image are in
+
+Steps:
+
+Install podman (or docker).
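+
+Optionally, pull the container image first to confirm that podman can reach the registry (a suggested extra check, not required; the `podman run` command below pulls the image automatically if it isn't present):
+```bash
+podman pull git.libre-chip.org/libre-chip/fayalite-deps:latest
+```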
+
+Run:
+```bash
+podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
+```
+
+To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:
+
+[openFPGALoader]: https://github.com/trabucayre/openFPGALoader
+
+On Debian 12:
+```bash
+sudo apt update && sudo apt install openfpgaloader
+```
+
+Then program the FPGA:
+```bash
+sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
+```
+
+This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.
+
+To also program the Flash, so the design stays programmed when the board is power-cycled:
+
+```bash
+sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
+```
+
+# Building the [Transmit-only UART example] for the Arty A7 100T on Linux
+
+[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs
+
+Follow the steps above for building the Blinky example, but replace `blinky` with `tx_only_uart`.
+
+View the output using [tio](https://github.com/tio/tio), which you can install on Debian using `apt`.
+
+Find the correct USB device:
+```bash
+sudo tio --list
+```
+
+You want the device with a name like the following (note the `if01` suffix; `if00` is presumably the JTAG port):
+`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0`
+
+Connect to the serial port:
+```bash
+sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here
+```
+
+You'll see (repeating endlessly):
+```text
+Hello World from Fayalite!!!
+Hello World from Fayalite!!!
+Hello World from Fayalite!!!
+```
+
+Press Ctrl+T then `q` to exit tio.
+
+# Funding
+
+## NLnet Grants
+
+* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md)
+
+This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement № [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI).
diff --git a/crates/fayalite-proc-macros-impl/src/fold.rs b/crates/fayalite-proc-macros-impl/src/fold.rs index 49cc8c1..22e7b82 100644 --- a/crates/fayalite-proc-macros-impl/src/fold.rs +++ b/crates/fayalite-proc-macros-impl/src/fold.rs @@ -220,6 +220,7 @@ forward_fold!(syn::ExprArray => fold_expr_array); forward_fold!(syn::ExprCall => fold_expr_call); forward_fold!(syn::ExprIf => fold_expr_if); forward_fold!(syn::ExprMatch => fold_expr_match); +forward_fold!(syn::ExprMethodCall => fold_expr_method_call); forward_fold!(syn::ExprPath => fold_expr_path); forward_fold!(syn::ExprRepeat => fold_expr_repeat); forward_fold!(syn::ExprStruct => fold_expr_struct); diff --git a/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs b/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs index 79326e2..e8dc51b 100644 --- a/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs +++ b/crates/fayalite-proc-macros-impl/src/hdl_bundle.rs @@ -1,21 +1,22 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ + Errors, HdlAttr, PairsIterExt, hdl_type_common::{ - common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, - ParsedFieldsNamed, ParsedGenerics, SplitForImpl, TypesParser, WrappedInConst, + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, ParsedFieldsNamed, ParsedGenerics, + SplitForImpl, TypesParser, WrappedInConst, common_derives, get_target, }, - kw, Errors, HdlAttr, PairsIterExt, + kw, }; use proc_macro2::TokenStream; -use quote::{format_ident, quote_spanned, ToTokens}; +use quote::{ToTokens, format_ident, quote_spanned}; use syn::{ - parse_quote, parse_quote_spanned, + AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed, + GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, parse_quote, + parse_quote_spanned, punctuated::{Pair, Punctuated}, spanned::Spanned, token::Brace, - AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed, - GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, }; #[derive(Clone, Debug)] @@ -30,7 +31,9 @@ pub(crate) struct ParsedBundle { pub(crate) field_flips: Vec>>, pub(crate) mask_type_ident: Ident, pub(crate) mask_type_match_variant_ident: Ident, + pub(crate) mask_type_sim_value_ident: Ident, pub(crate) match_variant_ident: Ident, + pub(crate) sim_value_ident: Ident, pub(crate) builder_ident: Ident, pub(crate) mask_type_builder_ident: Ident, } @@ -83,7 +86,12 @@ impl ParsedBundle { custom_bounds, no_static: _, no_runtime_generics: _, + cmp_eq: _, + ref get, } = options.body; + if let Some((get, ..)) = get { + errors.error(get, "#[hdl(get(...))] is not allowed on structs"); + } let mut fields = match fields { syn::Fields::Named(fields) => fields, syn::Fields::Unnamed(fields) => { @@ -124,7 +132,9 @@ impl ParsedBundle { field_flips, mask_type_ident: format_ident!("__{}__MaskType", ident), mask_type_match_variant_ident: format_ident!("__{}__MaskType__MatchVariant", ident), + mask_type_sim_value_ident: format_ident!("__{}__MaskType__SimValue", ident), match_variant_ident: format_ident!("__{}__MatchVariant", ident), + sim_value_ident: format_ident!("__{}__SimValue", ident), mask_type_builder_ident: format_ident!("__{}__MaskType__Builder", ident), builder_ident: format_ident!("__{}__Builder", ident), ident, @@ -339,7 +349,6 @@ impl ToTokens for Builder { } })); quote_spanned! 
{self.ident.span()=> - #[automatically_derived] #[allow(non_camel_case_types, non_snake_case, dead_code)] impl #impl_generics #unfilled_ty #where_clause @@ -426,7 +435,9 @@ impl ToTokens for ParsedBundle { field_flips, mask_type_ident, mask_type_match_variant_ident, + mask_type_sim_value_ident, match_variant_ident, + sim_value_ident, builder_ident, mask_type_builder_ident, } = self; @@ -437,6 +448,8 @@ impl ToTokens for ParsedBundle { custom_bounds: _, no_static, no_runtime_generics, + cmp_eq, + get: _, } = &options.body; let target = get_target(target, ident); let mut item_attrs = attrs.clone(); @@ -521,7 +534,7 @@ impl ToTokens for ParsedBundle { semi_token: None, } .to_tokens(tokens); - let mut mask_type_match_variant_fields = mask_type_fields; + let mut mask_type_match_variant_fields = mask_type_fields.clone(); for Field { ty, .. } in &mut mask_type_match_variant_fields.named { *ty = parse_quote_spanned! {span=> ::fayalite::expr::Expr<#ty> @@ -563,6 +576,58 @@ impl ToTokens for ParsedBundle { semi_token: None, } .to_tokens(tokens); + let mut mask_type_sim_value_fields = mask_type_fields; + for Field { ty, .. } in &mut mask_type_sim_value_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }; + } + ItemStruct { + attrs: vec![ + parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }, + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: mask_type_sim_value_ident.clone(), + generics: generics.into(), + fields: Fields::Named(mask_type_sim_value_fields), + semi_token: None, + } + .to_tokens(tokens); + let mut sim_value_fields = FieldsNamed::from(fields.clone()); + for Field { ty, .. } in &mut sim_value_fields.named { + *ty = parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }; + } + ItemStruct { + attrs: vec![ + parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }, + parse_quote_spanned! {span=> + #[allow(non_camel_case_types, dead_code)] + }, + ], + vis: vis.clone(), + struct_token: *struct_token, + ident: sim_value_ident.clone(), + generics: generics.into(), + fields: Fields::Named(sim_value_fields), + semi_token: None, + } + .to_tokens(tokens); let this_token = Ident::new("__this", span); let fields_token = Ident::new("__fields", span); let self_token = Token![self](span); @@ -613,6 +678,32 @@ impl ToTokens for ParsedBundle { } }, )); + let sim_value_from_opaque_fields = + Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + #ident: v.field_from_opaque(), + } + })); + let sim_value_clone_from_opaque_fields = + Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + v.field_clone_from_opaque(&mut value.#ident); + } + })); + let sim_value_to_opaque_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! {span=> + v.field(&value.#ident); + } + })); + let to_sim_value_fields = Vec::from_iter(fields.named().into_iter().map(|field| { + let ident: &Ident = field.ident().as_ref().unwrap(); + quote_spanned! 
{span=> + #ident: ::fayalite::sim::value::SimValue::ty(&self.#ident), + } + })); let fields_len = fields.named().into_iter().len(); quote_spanned! {span=> #[automatically_derived] @@ -621,6 +712,7 @@ impl ToTokens for ParsedBundle { { type BaseType = ::fayalite::bundle::Bundle; type MaskType = #mask_type_ident #type_generics; + type SimValue = #mask_type_sim_value_ident #type_generics; type MatchVariant = #mask_type_match_variant_ident #type_generics; type MatchActiveScope = (); type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope< @@ -658,6 +750,35 @@ impl ToTokens for ParsedBundle { fn source_location() -> ::fayalite::source_location::SourceLocation { ::fayalite::source_location::SourceLocation::caller() } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #mask_type_sim_value_ident { + #(#sim_value_from_opaque_fields)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #(#sim_value_clone_from_opaque_fields)* + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer); + #(#sim_value_to_opaque_fields)* + v.finish() + } } #[automatically_derived] impl #impl_generics ::fayalite::bundle::BundleType for #mask_type_ident #type_generics @@ -689,11 +810,57 @@ impl ToTokens for ParsedBundle { } } #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValue for #mask_type_sim_value_ident #type_generics + #where_clause + { + type Type = #mask_type_ident #type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #mask_type_ident { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #mask_type_ident { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#mask_type_ident #type_generics> + for #mask_type_sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #mask_type_ident #type_generics, + ) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #mask_type_ident #type_generics, + ) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] impl #impl_generics ::fayalite::ty::Type for #target #type_generics #where_clause { type BaseType = ::fayalite::bundle::Bundle; type MaskType = #mask_type_ident #type_generics; + type SimValue = #sim_value_ident #type_generics; type MatchVariant = #match_variant_ident #type_generics; type MatchActiveScope = (); type MatchVariantAndInactiveScope = 
::fayalite::ty::MatchVariantWithoutScope< @@ -733,6 +900,35 @@ impl ToTokens for ParsedBundle { fn source_location() -> ::fayalite::source_location::SourceLocation { ::fayalite::source_location::SourceLocation::caller() } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #sim_value_ident { + #(#sim_value_from_opaque_fields)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque); + #(#sim_value_clone_from_opaque_fields)* + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + #![allow(unused_mut, unused_variables)] + let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer); + #(#sim_value_to_opaque_fields)* + v.finish() + } } #[automatically_derived] impl #impl_generics ::fayalite::bundle::BundleType for #target #type_generics @@ -763,8 +959,144 @@ impl ToTokens for ParsedBundle { ::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval)) } } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValue for #sim_value_ident #type_generics + #where_clause + { + type Type = #target #type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #target { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + let ty = #target { + #(#to_sim_value_fields)* + }; + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics> + for #sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } } .to_tokens(tokens); + if let Some((cmp_eq,)) = cmp_eq { + let mut expr_where_clause = + Generics::from(generics) + .where_clause + .unwrap_or_else(|| syn::WhereClause { + where_token: Token![where](span), + predicates: Punctuated::new(), + }); + let mut sim_value_where_clause = expr_where_clause.clone(); + let mut fields_sim_value_eq = vec![]; + let mut fields_cmp_eq = vec![]; + let mut fields_cmp_ne = vec![]; + for field in fields.named() { + let field_ident = field.ident(); + let field_ty = field.ty(); + expr_where_clause + .predicates + .push(parse_quote_spanned! {cmp_eq.span=> + #field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty> + }); + sim_value_where_clause + .predicates + .push(parse_quote_spanned! {cmp_eq.span=> + #field_ty: ::fayalite::sim::value::SimValuePartialEq<#field_ty> + }); + fields_sim_value_eq.push(quote_spanned! 
{span=> + ::fayalite::sim::value::SimValuePartialEq::sim_value_eq(&__lhs.#field_ident, &__rhs.#field_ident) + }); + fields_cmp_eq.push(quote_spanned! {span=> + ::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident) + }); + fields_cmp_ne.push(quote_spanned! {span=> + ::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident) + }); + } + let sim_value_eq_body; + let cmp_eq_body; + let cmp_ne_body; + if fields_len == 0 { + sim_value_eq_body = quote_spanned! {span=> + true + }; + cmp_eq_body = quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&true) + }; + cmp_ne_body = quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&false) + }; + } else { + sim_value_eq_body = quote_spanned! {span=> + #(#fields_sim_value_eq)&&* + }; + cmp_eq_body = quote_spanned! {span=> + #(#fields_cmp_eq)&* + }; + cmp_ne_body = quote_spanned! {span=> + #(#fields_cmp_ne)|* + }; + }; + quote_spanned! {span=> + #[automatically_derived] + impl #impl_generics ::fayalite::expr::ops::ExprPartialEq for #target #type_generics + #expr_where_clause + { + fn cmp_eq( + __lhs: ::fayalite::expr::Expr, + __rhs: ::fayalite::expr::Expr, + ) -> ::fayalite::expr::Expr<::fayalite::int::Bool> { + #cmp_eq_body + } + fn cmp_ne( + __lhs: ::fayalite::expr::Expr, + __rhs: ::fayalite::expr::Expr, + ) -> ::fayalite::expr::Expr<::fayalite::int::Bool> { + #cmp_ne_body + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::SimValuePartialEq for #target #type_generics + #sim_value_where_clause + { + fn sim_value_eq( + __lhs: &::fayalite::sim::value::SimValue, + __rhs: &::fayalite::sim::value::SimValue, + ) -> bool { + #sim_value_eq_body + } + } + } + .to_tokens(tokens); + } if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) { let static_generics = generics.clone().for_static_type(); let (static_impl_generics, static_type_generics, static_where_clause) = @@ -800,6 +1132,14 @@ impl ToTokens for ParsedBundle { } })); quote_spanned! 
{span=> + #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default for #mask_type_ident #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } #[automatically_derived] impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics #static_where_clause @@ -822,6 +1162,15 @@ impl ToTokens for ParsedBundle { }; } #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default + for #target #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } + #[automatically_derived] impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics #static_where_clause { diff --git a/crates/fayalite-proc-macros-impl/src/hdl_enum.rs b/crates/fayalite-proc-macros-impl/src/hdl_enum.rs index 1d16177..885cf87 100644 --- a/crates/fayalite-proc-macros-impl/src/hdl_enum.rs +++ b/crates/fayalite-proc-macros-impl/src/hdl_enum.rs @@ -1,20 +1,20 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ + Errors, HdlAttr, PairsIterExt, hdl_type_common::{ - common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, - ParsedType, SplitForImpl, TypesParser, WrappedInConst, + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, SplitForImpl, + TypesParser, WrappedInConst, common_derives, get_target, }, - kw, Errors, HdlAttr, PairsIterExt, + kw, }; use proc_macro2::TokenStream; -use quote::{format_ident, quote_spanned, ToTokens}; +use quote::{ToTokens, format_ident, quote_spanned}; use syn::{ - parse_quote_spanned, + Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident, + ItemEnum, ItemStruct, Token, Type, Variant, Visibility, parse_quote_spanned, punctuated::{Pair, Punctuated}, token::{Brace, Paren}, - Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident, - ItemEnum, ItemStruct, Token, Type, Variant, Visibility, }; crate::options! 
{ @@ -129,6 +129,9 @@ pub(crate) struct ParsedEnum { pub(crate) brace_token: Brace, pub(crate) variants: Punctuated, pub(crate) match_variant_ident: Ident, + pub(crate) sim_value_ident: Ident, + pub(crate) sim_builder_ident: Ident, + pub(crate) sim_builder_ty_field_ident: Ident, } impl ParsedEnum { @@ -155,7 +158,15 @@ impl ParsedEnum { custom_bounds, no_static: _, no_runtime_generics: _, + cmp_eq, + ref get, } = options.body; + if let Some((cmp_eq,)) = cmp_eq { + errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums"); + } + if let Some((get, ..)) = get { + errors.error(get, "#[hdl(get(...))] is not allowed on enums"); + } attrs.retain(|attr| { if attr.path().is_ident("repr") { errors.error(attr, "#[repr] is not supported on #[hdl] enums"); @@ -186,6 +197,9 @@ impl ParsedEnum { brace_token, variants, match_variant_ident: format_ident!("__{}__MatchVariant", ident), + sim_value_ident: format_ident!("__{}__SimValue", ident), + sim_builder_ident: format_ident!("__{}__SimBuilder", ident), + sim_builder_ty_field_ident: format_ident!("__ty", span = ident.span()), ident, }) } @@ -203,6 +217,9 @@ impl ToTokens for ParsedEnum { brace_token, variants, match_variant_ident, + sim_value_ident, + sim_builder_ident, + sim_builder_ty_field_ident, } = self; let span = ident.span(); let ItemOptions { @@ -211,6 +228,8 @@ impl ToTokens for ParsedEnum { custom_bounds: _, no_static, no_runtime_generics, + cmp_eq: _, // TODO: implement cmp_eq for enums + get: _, } = &options.body; let target = get_target(target, ident); let mut struct_attrs = attrs.clone(); @@ -404,11 +423,137 @@ impl ToTokens for ParsedEnum { )), } .to_tokens(tokens); + let mut struct_attrs = attrs.clone(); + struct_attrs.push(parse_quote_spanned! {span=> + #[allow(dead_code, non_camel_case_types)] + }); + ItemStruct { + attrs: struct_attrs, + vis: vis.clone(), + struct_token: Token![struct](enum_token.span), + ident: sim_builder_ident.clone(), + generics: generics.into(), + fields: FieldsNamed { + brace_token: *brace_token, + named: Punctuated::from_iter([Field { + attrs: vec![], + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: Some(sim_builder_ty_field_ident.clone()), + colon_token: Some(Token![:](span)), + ty: parse_quote_spanned! {span=> + #target #type_generics + }, + }]), + } + .into(), + semi_token: None, + } + .to_tokens(tokens); + let mut enum_attrs = attrs.clone(); + enum_attrs.push(parse_quote_spanned! {span=> + #[::fayalite::__std::prelude::v1::derive( + ::fayalite::__std::fmt::Debug, + ::fayalite::__std::clone::Clone, + )] + }); + enum_attrs.push(parse_quote_spanned! {span=> + #[allow(dead_code, non_camel_case_types)] + }); + let sim_value_has_unknown_variant = !variants.len().is_power_of_two(); + let sim_value_unknown_variant_name = sim_value_has_unknown_variant.then(|| { + let mut name = String::new(); + let unknown = "Unknown"; + loop { + let orig_len = name.len(); + name.push_str(unknown); + if variants.iter().all(|v| v.ident != name) { + break Ident::new(&name, span); + } + name.truncate(orig_len); + name.push('_'); + } + }); + let sim_value_unknown_variant = + sim_value_unknown_variant_name + .as_ref() + .map(|unknown_variant_name| { + Pair::End(parse_quote_spanned! 
{span=> + #unknown_variant_name(::fayalite::enum_::UnknownVariantSimValue) + }) + }); + ItemEnum { + attrs: enum_attrs, + vis: vis.clone(), + enum_token: *enum_token, + ident: sim_value_ident.clone(), + generics: generics.into(), + brace_token: *brace_token, + variants: Punctuated::from_iter( + variants + .pairs() + .map_pair_value_ref( + |ParsedVariant { + attrs, + options: _, + ident, + field, + }| Variant { + attrs: attrs.clone(), + ident: ident.clone(), + fields: match field { + Some(ParsedVariantField { + paren_token, + attrs, + options: _, + ty, + comma_token, + }) => Fields::Unnamed(FieldsUnnamed { + paren_token: *paren_token, + unnamed: Punctuated::from_iter([ + Pair::new( + Field { + attrs: attrs.clone(), + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: parse_quote_spanned! {span=> + ::fayalite::sim::value::SimValue<#ty> + }, + }, + Some(comma_token.unwrap_or(Token![,](ident.span()))), + ), + Pair::new( + Field { + attrs: vec![], + vis: Visibility::Inherited, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: parse_quote_spanned! {span=> + ::fayalite::enum_::EnumPaddingSimValue + }, + }, + None, + ), + ]), + }), + None => Fields::Unnamed(parse_quote_spanned! {span=> + (::fayalite::enum_::EnumPaddingSimValue) + }), + }, + discriminant: None, + }, + ) + .chain(sim_value_unknown_variant), + ), + } + .to_tokens(tokens); let self_token = Token![self](span); for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() { if let Some(ParsedVariantField { ty, .. }) = field { quote_spanned! {span=> - #[automatically_derived] impl #impl_generics #target #type_generics #where_clause { @@ -430,10 +575,27 @@ impl ToTokens for ParsedEnum { ) } } + impl #impl_generics #sim_builder_ident #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident<__V: ::fayalite::sim::value::ToSimValueWithType<#ty>>( + #self_token, + v: __V, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + let v = ::fayalite::sim::value::ToSimValueWithType::into_sim_value_with_type( + v, + #self_token.#sim_builder_ty_field_ident.#ident, + ); + ::fayalite::sim::value::SimValue::from_value( + #self_token.#sim_builder_ty_field_ident, + #sim_value_ident::#ident(v, ::fayalite::enum_::EnumPaddingSimValue::new()), + ) + } + } } } else { quote_spanned! {span=> - #[automatically_derived] impl #impl_generics #target #type_generics #where_clause { @@ -448,6 +610,17 @@ impl ToTokens for ParsedEnum { ) } } + impl #impl_generics #sim_builder_ident #type_generics + #where_clause + { + #[allow(non_snake_case, dead_code)] + #vis fn #ident(#self_token) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value( + #self_token.#sim_builder_ty_field_ident, + #sim_value_ident::#ident(::fayalite::enum_::EnumPaddingSimValue::new()), + ) + } + } } } .to_tokens(tokens); @@ -529,6 +702,142 @@ impl ToTokens for ParsedEnum { } }, )); + let sim_value_from_opaque_unknown_match_arm = if let Some(sim_value_unknown_variant_name) = + &sim_value_unknown_variant_name + { + quote_spanned! {span=> + _ => #sim_value_ident::#sim_value_unknown_variant_name(v.unknown_variant_from_opaque()), + } + } else { + quote_spanned! 
{span=> + _ => ::fayalite::__std::unreachable!(), + } + }; + let sim_value_from_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #index => { + let (field, padding) = v.variant_with_field_from_opaque(); + #sim_value_ident::#ident(field, padding) + } + } + } else { + quote_spanned! {span=> + #index => #sim_value_ident::#ident( + v.variant_no_field_from_opaque(), + ), + } + } + }, + ) + .chain([sim_value_from_opaque_unknown_match_arm]), + ); + let sim_value_clone_from_opaque_unknown_match_arm = + if let Some(sim_value_unknown_variant_name) = &sim_value_unknown_variant_name { + quote_spanned! {span=> + _ => if let #sim_value_ident::#sim_value_unknown_variant_name(value) = value { + v.unknown_variant_clone_from_opaque(value); + } else { + *value = #sim_value_ident::#sim_value_unknown_variant_name( + v.unknown_variant_from_opaque(), + ); + }, + } + } else { + quote_spanned! {span=> + _ => ::fayalite::__std::unreachable!(), + } + }; + let sim_value_clone_from_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #index => if let #sim_value_ident::#ident(field, padding) = value { + v.variant_with_field_clone_from_opaque(field, padding); + } else { + let (field, padding) = v.variant_with_field_from_opaque(); + *value = #sim_value_ident::#ident(field, padding); + }, + } + } else { + quote_spanned! {span=> + #index => if let #sim_value_ident::#ident(padding) = value { + v.variant_no_field_clone_from_opaque(padding); + } else { + *value = #sim_value_ident::#ident( + v.variant_no_field_from_opaque(), + ); + }, + } + } + }, + ) + .chain([sim_value_clone_from_opaque_unknown_match_arm]), + ); + let sim_value_to_opaque_match_arms = Vec::from_iter( + variants + .iter() + .enumerate() + .map( + |( + index, + ParsedVariant { + attrs: _, + options: _, + ident, + field, + }, + )| { + if let Some(_) = field { + quote_spanned! {span=> + #sim_value_ident::#ident(field, padding) => { + v.variant_with_field_to_opaque(#index, field, padding) + } + } + } else { + quote_spanned! {span=> + #sim_value_ident::#ident(padding) => { + v.variant_no_field_to_opaque(#index, padding) + } + } + } + }, + ) + .chain(sim_value_unknown_variant_name.as_ref().map( + |sim_value_unknown_variant_name| { + quote_spanned! {span=> + #sim_value_ident::#sim_value_unknown_variant_name(value) => { + v.unknown_variant_to_opaque(value) + } + } + }, + )), + ); let variants_len = variants.len(); quote_spanned! 
{span=> #[automatically_derived] @@ -537,6 +846,7 @@ impl ToTokens for ParsedEnum { { type BaseType = ::fayalite::enum_::Enum; type MaskType = ::fayalite::int::Bool; + type SimValue = #sim_value_ident #type_generics; type MatchVariant = #match_variant_ident #type_generics; type MatchActiveScope = ::fayalite::module::Scope; type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope; @@ -569,11 +879,41 @@ impl ToTokens for ParsedEnum { fn source_location() -> ::fayalite::source_location::SourceLocation { ::fayalite::source_location::SourceLocation::caller() } + fn sim_value_from_opaque( + &self, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) -> ::SimValue { + let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque); + match v.discriminant() { + #(#sim_value_from_opaque_match_arms)* + } + } + fn sim_value_clone_from_opaque( + &self, + value: &mut ::SimValue, + opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>, + ) { + let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque); + match v.discriminant() { + #(#sim_value_clone_from_opaque_match_arms)* + } + } + fn sim_value_to_opaque<'__w>( + &self, + value: &::SimValue, + writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>, + ) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> { + let v = ::fayalite::enum_::EnumSimValueToOpaque::new(*self, writer); + match value { + #(#sim_value_to_opaque_match_arms)* + } + } } #[automatically_derived] impl #impl_generics ::fayalite::enum_::EnumType for #target #type_generics #where_clause { + type SimBuilder = #sim_builder_ident #type_generics; fn match_activate_scope( v: ::MatchVariantAndInactiveScope, ) -> (::MatchVariant, ::MatchActiveScope) { @@ -592,6 +932,33 @@ impl ToTokens for ParsedEnum { ][..]) } } + #[automatically_derived] + impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics> + for #sim_value_ident #type_generics + #where_clause + { + fn to_sim_value_with_type( + &self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self)) + } + fn into_sim_value_with_type( + self, + ty: #target #type_generics, + ) -> ::fayalite::sim::value::SimValue<#target #type_generics> { + ::fayalite::sim::value::SimValue::from_value(ty, self) + } + } + #[automatically_derived] + impl #impl_generics ::fayalite::__std::convert::From<#target #type_generics> + for #sim_builder_ident #type_generics + #where_clause + { + fn from(#sim_builder_ty_field_ident: #target #type_generics) -> Self { + Self { #sim_builder_ty_field_ident } + } + } } .to_tokens(tokens); if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) { @@ -629,6 +996,15 @@ impl ToTokens for ParsedEnum { } })); quote_spanned! 
{span=> + #[automatically_derived] + impl #static_impl_generics ::fayalite::__std::default::Default + for #target #static_type_generics + #static_where_clause + { + fn default() -> Self { + ::TYPE + } + } #[automatically_derived] impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics @@ -647,6 +1023,34 @@ impl ToTokens for ParsedEnum { const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties = <::fayalite::int::Bool as ::fayalite::ty::StaticType>::TYPE_PROPERTIES; } + #[automatically_derived] + impl #static_impl_generics ::fayalite::sim::value::ToSimValue + for #sim_value_ident #static_type_generics + #static_where_clause + { + type Type = #target #static_type_generics; + + fn to_sim_value( + &self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + ::fayalite::sim::value::SimValue::from_value( + ::fayalite::ty::StaticType::TYPE, + ::fayalite::__std::clone::Clone::clone(self), + ) + } + fn into_sim_value( + self, + ) -> ::fayalite::sim::value::SimValue< + ::Type, + > { + ::fayalite::sim::value::SimValue::from_value( + ::fayalite::ty::StaticType::TYPE, + self, + ) + } + } } .to_tokens(tokens); } diff --git a/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs b/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs index e5d5f7b..8235366 100644 --- a/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs +++ b/crates/fayalite-proc-macros-impl/src/hdl_type_alias.rs @@ -1,30 +1,356 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ + Errors, HdlAttr, hdl_type_common::{ - get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, - TypesParser, + ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, TypesParser, + WrappedInConst, common_derives, get_target, known_items, }, - kw, Errors, HdlAttr, + kw, }; use proc_macro2::TokenStream; -use quote::ToTokens; -use syn::{parse_quote_spanned, Attribute, Generics, Ident, ItemType, Token, Type, Visibility}; +use quote::{ToTokens, format_ident, quote_spanned}; +use syn::{ + AngleBracketedGenericArguments, Attribute, Expr, Fields, GenericArgument, GenericParam, + Generics, Ident, ItemStruct, ItemType, Path, PathArguments, Token, TraitBound, + TraitBoundModifier, Type, TypeGroup, TypeParam, TypeParamBound, TypeParen, Visibility, + parse_quote_spanned, punctuated::Pair, token::Paren, +}; #[derive(Clone, Debug)] -pub(crate) struct ParsedTypeAlias { - pub(crate) attrs: Vec, - pub(crate) options: HdlAttr, - pub(crate) vis: Visibility, - pub(crate) type_token: Token![type], - pub(crate) ident: Ident, - pub(crate) generics: MaybeParsed, - pub(crate) eq_token: Token![=], - pub(crate) ty: MaybeParsed, - pub(crate) semi_token: Token![;], +pub(crate) struct PhantomConstGetBound { + pub(crate) phantom_const_get: known_items::PhantomConstGet, + pub(crate) colon2_token: Option, + pub(crate) lt_token: Token![<], + pub(crate) ty: Type, + pub(crate) comma_token: Option, + pub(crate) gt_token: Token![>], +} + +impl From for Path { + fn from(value: PhantomConstGetBound) -> Self { + let PhantomConstGetBound { + phantom_const_get, + colon2_token, + lt_token, + ty, + comma_token, + gt_token, + } = value; + let mut path = phantom_const_get.path; + path.segments.last_mut().expect("known to exist").arguments = + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token, + lt_token, + args: FromIterator::from_iter([Pair::new(GenericArgument::Type(ty), comma_token)]), + gt_token, + }); + path + } +} + +impl From for 
TraitBound { + fn from(value: PhantomConstGetBound) -> Self { + let path = Path::from(value); + TraitBound { + paren_token: None, + modifier: TraitBoundModifier::None, + lifetimes: None, + path, + } + } +} + +impl From for TypeParamBound { + fn from(value: PhantomConstGetBound) -> Self { + TraitBound::from(value).into() + } +} + +impl PhantomConstGetBound { + fn parse_opt(bound: TypeParamBound) -> Option { + let TypeParamBound::Trait(TraitBound { + paren_token: None, + modifier: TraitBoundModifier::None, + lifetimes: None, + path, + }) = bound + else { + return None; + }; + let Ok(( + phantom_const_get, + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token, + lt_token, + args, + gt_token, + }), + )) = known_items::PhantomConstGet::parse_path_with_arguments(path) + else { + return None; + }; + let mut args = args.into_pairs(); + let (GenericArgument::Type(ty), comma_token) = args.next()?.into_tuple() else { + return None; + }; + let None = args.next() else { + return None; + }; + Some(Self { + phantom_const_get, + colon2_token, + lt_token, + ty, + comma_token, + gt_token, + }) + } +} + +#[derive(Clone, Debug)] +pub(crate) struct PhantomConstAccessorTypeParam { + attrs: Vec, + ident: Ident, + colon_token: Token![:], + phantom_const_get_bound: PhantomConstGetBound, + plus_token: Option, +} + +impl From for TypeParam { + fn from(value: PhantomConstAccessorTypeParam) -> Self { + let PhantomConstAccessorTypeParam { + attrs, + ident, + colon_token, + phantom_const_get_bound, + plus_token, + } = value; + TypeParam { + attrs, + ident, + colon_token: Some(colon_token), + bounds: FromIterator::from_iter([Pair::new( + phantom_const_get_bound.into(), + plus_token, + )]), + eq_token: None, + default: None, + } + } +} + +impl From for GenericParam { + fn from(value: PhantomConstAccessorTypeParam) -> Self { + TypeParam::from(value).into() + } +} + +impl PhantomConstAccessorTypeParam { + fn parse_opt(generic_param: GenericParam) -> Option { + let GenericParam::Type(TypeParam { + attrs, + ident, + colon_token, + bounds, + eq_token: None, + default: None, + }) = generic_param + else { + return None; + }; + let colon_token = colon_token.unwrap_or(Token![:](ident.span())); + let mut bounds = bounds.into_pairs(); + let (bound, plus_token) = bounds.next()?.into_tuple(); + let phantom_const_get_bound = PhantomConstGetBound::parse_opt(bound)?; + let None = bounds.next() else { + return None; + }; + Some(Self { + attrs, + ident, + colon_token, + phantom_const_get_bound, + plus_token, + }) + } +} + +#[derive(Clone, Debug)] +pub(crate) struct PhantomConstAccessorGenerics { + lt_token: Token![<], + type_param: PhantomConstAccessorTypeParam, + comma_token: Option, + gt_token: Token![>], +} + +impl From for Generics { + fn from(value: PhantomConstAccessorGenerics) -> Self { + let PhantomConstAccessorGenerics { + lt_token, + type_param, + comma_token, + gt_token, + } = value; + Generics { + lt_token: Some(lt_token), + params: FromIterator::from_iter([Pair::new(type_param.into(), comma_token)]), + gt_token: Some(gt_token), + where_clause: None, + } + } +} + +impl<'a> From<&'a PhantomConstAccessorGenerics> for Generics { + fn from(value: &'a PhantomConstAccessorGenerics) -> Self { + value.clone().into() + } +} + +impl PhantomConstAccessorGenerics { + fn parse_opt(generics: Generics) -> Option { + let Generics { + lt_token, + params, + gt_token, + where_clause: None, + } = generics + else { + return None; + }; + let mut params = params.into_pairs(); + let (generic_param, comma_token) = 
params.next()?.into_tuple(); + let type_param = PhantomConstAccessorTypeParam::parse_opt(generic_param)?; + let span = type_param.ident.span(); + let lt_token = lt_token.unwrap_or(Token![<](span)); + let gt_token = gt_token.unwrap_or(Token![>](span)); + let None = params.next() else { + return None; + }; + Some(Self { + lt_token, + type_param, + comma_token, + gt_token, + }) + } +} + +#[derive(Clone, Debug)] +pub(crate) enum ParsedTypeAlias { + TypeAlias { + attrs: Vec, + options: HdlAttr, + vis: Visibility, + type_token: Token![type], + ident: Ident, + generics: MaybeParsed, + eq_token: Token![=], + ty: MaybeParsed, + semi_token: Token![;], + }, + PhantomConstAccessor { + attrs: Vec, + options: HdlAttr, + get: (kw::get, Paren, Expr), + vis: Visibility, + type_token: Token![type], + ident: Ident, + generics: PhantomConstAccessorGenerics, + eq_token: Token![=], + ty: Type, + ty_is_dyn_size: Option, + semi_token: Token![;], + }, } impl ParsedTypeAlias { + fn ty_is_dyn_size(ty: &Type) -> Option { + match ty { + Type::Group(TypeGroup { + group_token: _, + elem, + }) => Self::ty_is_dyn_size(elem), + Type::Paren(TypeParen { + paren_token: _, + elem, + }) => Self::ty_is_dyn_size(elem), + Type::Path(syn::TypePath { qself: None, path }) => { + known_items::DynSize::parse_path(path.clone()).ok() + } + _ => None, + } + } + fn parse_phantom_const_accessor( + item: ItemType, + mut errors: Errors, + options: HdlAttr, + get: (kw::get, Paren, Expr), + ) -> syn::Result { + let ItemType { + attrs, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } = item; + let ItemOptions { + outline_generated: _, + ref target, + custom_bounds, + no_static, + no_runtime_generics, + cmp_eq, + get: _, + } = options.body; + if let Some((no_static,)) = no_static { + errors.error(no_static, "no_static is not valid on type aliases"); + } + if let Some((target, ..)) = target { + errors.error( + target, + "target is not implemented on PhantomConstGet type aliases", + ); + } + if let Some((no_runtime_generics,)) = no_runtime_generics { + errors.error( + no_runtime_generics, + "no_runtime_generics is not implemented on PhantomConstGet type aliases", + ); + } + if let Some((cmp_eq,)) = cmp_eq { + errors.error(cmp_eq, "cmp_eq is not valid on type aliases"); + } + if let Some((custom_bounds,)) = custom_bounds { + errors.error( + custom_bounds, + "custom_bounds is not implemented on PhantomConstGet type aliases", + ); + } + let Some(generics) = PhantomConstAccessorGenerics::parse_opt(generics) else { + errors.error(ident, "#[hdl(get(...))] type alias must be of the form:\ntype MyTypeGetter> = RetType;"); + errors.finish()?; + unreachable!(); + }; + errors.finish()?; + let ty_is_dyn_size = Self::ty_is_dyn_size(&ty); + Ok(Self::PhantomConstAccessor { + attrs, + options, + get, + vis, + type_token, + ident, + generics, + eq_token, + ty: *ty, + ty_is_dyn_size, + semi_token, + }) + } fn parse(item: ItemType) -> syn::Result { let ItemType { mut attrs, @@ -49,10 +375,32 @@ impl ParsedTypeAlias { custom_bounds, no_static, no_runtime_generics: _, + cmp_eq, + ref mut get, } = options.body; + if let Some(get) = get.take() { + return Self::parse_phantom_const_accessor( + ItemType { + attrs, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + }, + errors, + options, + get, + ); + } if let Some((no_static,)) = no_static { errors.error(no_static, "no_static is not valid on type aliases"); } + if let Some((cmp_eq,)) = cmp_eq { + errors.error(cmp_eq, "cmp_eq is not valid on type aliases"); + } let 
generics = if custom_bounds.is_some() { MaybeParsed::Unrecognized(generics) } else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) { @@ -62,7 +410,7 @@ impl ParsedTypeAlias { }; let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors); errors.finish()?; - Ok(Self { + Ok(Self::TypeAlias { attrs, options, vis, @@ -78,53 +426,155 @@ impl ParsedTypeAlias { impl ToTokens for ParsedTypeAlias { fn to_tokens(&self, tokens: &mut TokenStream) { - let Self { - attrs, - options, - vis, - type_token, - ident, - generics, - eq_token, - ty, - semi_token, - } = self; - let ItemOptions { - outline_generated: _, - target, - custom_bounds: _, - no_static: _, - no_runtime_generics, - } = &options.body; - let target = get_target(target, ident); - let mut type_attrs = attrs.clone(); - type_attrs.push(parse_quote_spanned! {ident.span()=> - #[allow(type_alias_bounds)] - }); - ItemType { - attrs: type_attrs, - vis: vis.clone(), - type_token: *type_token, - ident: ident.clone(), - generics: generics.into(), - eq_token: *eq_token, - ty: Box::new(ty.clone().into()), - semi_token: *semi_token, - } - .to_tokens(tokens); - if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) = - (generics, ty, no_runtime_generics) - { - generics.make_runtime_generics(tokens, vis, ident, &target, |context| { - ty.make_hdl_type_expr(context) - }) + match self { + Self::TypeAlias { + attrs, + options, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } => { + let ItemOptions { + outline_generated: _, + target, + custom_bounds: _, + no_static: _, + no_runtime_generics, + cmp_eq: _, + get: _, + } = &options.body; + let target = get_target(target, ident); + let mut type_attrs = attrs.clone(); + type_attrs.push(parse_quote_spanned! {ident.span()=> + #[allow(type_alias_bounds)] + }); + ItemType { + attrs: type_attrs, + vis: vis.clone(), + type_token: *type_token, + ident: ident.clone(), + generics: generics.into(), + eq_token: *eq_token, + ty: Box::new(ty.clone().into()), + semi_token: *semi_token, + } + .to_tokens(tokens); + if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) = + (generics, ty, no_runtime_generics) + { + generics.make_runtime_generics(tokens, vis, ident, &target, |context| { + ty.make_hdl_type_expr(context) + }) + } + } + Self::PhantomConstAccessor { + attrs, + options, + get: (_get_kw, _get_paren, get_expr), + vis, + type_token, + ident, + generics, + eq_token, + ty, + ty_is_dyn_size, + semi_token, + } => { + let ItemOptions { + outline_generated: _, + target: _, + custom_bounds: _, + no_static: _, + no_runtime_generics: _, + cmp_eq: _, + get: _, + } = &options.body; + let span = ident.span(); + let mut type_attrs = attrs.clone(); + type_attrs.push(parse_quote_spanned! {span=> + #[allow(type_alias_bounds)] + }); + let type_param_ident = &generics.type_param.ident; + let syn_generics = Generics::from(generics); + ItemType { + attrs: type_attrs, + vis: vis.clone(), + type_token: *type_token, + ident: ident.clone(), + generics: syn_generics.clone(), + eq_token: *eq_token, + ty: parse_quote_spanned! {span=> + <#ty as ::fayalite::phantom_const::ReturnSelfUnchanged<#type_param_ident>>::Type + }, + semi_token: *semi_token, + } + .to_tokens(tokens); + let generics_accumulation_ident = + format_ident!("__{}__GenericsAccumulation", ident); + ItemStruct { + attrs: vec![ + common_derives(span), + parse_quote_spanned! 
{span=> + #[allow(non_camel_case_types)] + }, + ], + vis: vis.clone(), + struct_token: Token![struct](span), + ident: generics_accumulation_ident.clone(), + generics: Generics::default(), + fields: Fields::Unnamed(parse_quote_spanned! {span=> + (()) + }), + semi_token: Some(Token![;](span)), + } + .to_tokens(tokens); + quote_spanned! {span=> + #[allow(non_upper_case_globals, dead_code)] + #vis const #ident: #generics_accumulation_ident = #generics_accumulation_ident(()); + } + .to_tokens(tokens); + let mut wrapped_in_const = WrappedInConst::new(tokens, span); + let tokens = wrapped_in_const.inner(); + let (impl_generics, _type_generics, where_clause) = syn_generics.split_for_impl(); + let phantom_const_get_ty = &generics.type_param.phantom_const_get_bound.ty; + let index_output = if let Some(ty_is_dyn_size) = ty_is_dyn_size { + known_items::usize(ty_is_dyn_size.span).to_token_stream() + } else { + ty.to_token_stream() + }; + quote_spanned! {span=> + #[allow(non_upper_case_globals)] + #[automatically_derived] + impl #impl_generics ::fayalite::__std::ops::Index<#type_param_ident> + for #generics_accumulation_ident + #where_clause + { + type Output = #index_output; + + fn index(&self, __param: #type_param_ident) -> &Self::Output { + ::fayalite::phantom_const::type_alias_phantom_const_get_helper::<#phantom_const_get_ty, #index_output>( + __param, + #get_expr, + ) + } + } + } + .to_tokens(tokens); + } } } } pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result { let item = ParsedTypeAlias::parse(item)?; - let outline_generated = item.options.body.outline_generated; + let outline_generated = match &item { + ParsedTypeAlias::TypeAlias { options, .. } + | ParsedTypeAlias::PhantomConstAccessor { options, .. } => options.body.outline_generated, + }; let mut contents = item.to_token_stream(); if outline_generated.is_some() { contents = crate::outline_generated(contents, "hdl-type-alias-"); diff --git a/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs b/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs index 3f3f817..73acc39 100644 --- a/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs +++ b/crates/fayalite-proc-macros-impl/src/hdl_type_common.rs @@ -1,21 +1,21 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use crate::{fold::impl_fold, kw, Errors, HdlAttr, PairsIterExt}; +use crate::{Errors, HdlAttr, PairsIterExt, fold::impl_fold, kw}; use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote_spanned, ToTokens}; +use quote::{ToTokens, format_ident, quote_spanned}; use std::{collections::HashMap, fmt, mem}; use syn::{ - parse::{Parse, ParseStream}, - parse_quote, parse_quote_spanned, - punctuated::{Pair, Punctuated}, - spanned::Spanned, - token::{Brace, Bracket, Paren}, AngleBracketedGenericArguments, Attribute, Block, ConstParam, Expr, ExprBlock, ExprGroup, ExprIndex, ExprParen, ExprPath, ExprTuple, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, ImplGenerics, Index, ItemStruct, Path, PathArguments, PathSegment, PredicateType, QSelf, Stmt, Token, Turbofish, Type, TypeGenerics, TypeGroup, TypeParam, TypeParen, TypePath, TypeTuple, Visibility, WhereClause, WherePredicate, + parse::{Parse, ParseStream}, + parse_quote, parse_quote_spanned, + punctuated::{Pair, Punctuated}, + spanned::Spanned, + token::{Brace, Bracket, Paren}, }; crate::options! { @@ -26,6 +26,8 @@ crate::options! 
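// A hedged sketch of the alias form the PhantomConstAccessor arm above
// accepts (all names and the getter expression are illustrative): a single
// type parameter bounded by `PhantomConstGet<_>`, plus a `get(...)` option
// carrying the accessor expression. Per the generated `Index` impl, a
// right-hand side of `DynSize` yields `usize`; any other type is returned
// unchanged.
//
//     #[hdl(get(my_getter_expr))]
//     type MyFieldGetter<P: ::fayalite::phantom_const::PhantomConstGet<MyConstTy>> = DynSize;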
{ CustomBounds(custom_bounds), NoStatic(no_static), NoRuntimeGenerics(no_runtime_generics), + CmpEq(cmp_eq), + Get(get, Expr), } } @@ -298,7 +300,7 @@ impl ParseTypes for ParsedExpr { return Ok(ParsedExpr::Delimited(ParsedExprDelimited { delim: ExprDelimiter::Group(*group_token), expr: parser.parse(expr)?, - })) + })); } Expr::Paren(ExprParen { attrs, @@ -308,7 +310,7 @@ impl ParseTypes for ParsedExpr { return Ok(ParsedExpr::Delimited(ParsedExprDelimited { delim: ExprDelimiter::Paren(*paren_token), expr: parser.parse(expr)?, - })) + })); } Expr::Path(ExprPath { attrs, @@ -1901,8 +1903,8 @@ pub(crate) mod known_items { use proc_macro2::{Ident, Span, TokenStream}; use quote::ToTokens; use syn::{ - parse::{Parse, ParseStream}, Path, PathArguments, PathSegment, Token, + parse::{Parse, ParseStream}, }; macro_rules! impl_known_item_body { @@ -2044,6 +2046,8 @@ pub(crate) mod known_items { impl_known_item!(::fayalite::int::Size); impl_known_item!(::fayalite::int::UInt); impl_known_item!(::fayalite::int::UIntType); + impl_known_item!(::fayalite::phantom_const::PhantomConstGet); + impl_known_item!(::fayalite::reset::ResetType); impl_known_item!(::fayalite::ty::CanonicalType); impl_known_item!(::fayalite::ty::StaticType); impl_known_item!(::fayalite::ty::Type); @@ -2068,11 +2072,16 @@ macro_rules! impl_bounds { $( $Variant:ident, )* + $( + #[unknown] + $Unknown:ident, + )? } ) => { #[derive(Clone, Debug)] $vis enum $enum_type { $($Variant(known_items::$Variant),)* + $($Unknown(syn::TypeParamBound),)? } $(impl From for $enum_type { @@ -2085,28 +2094,54 @@ macro_rules! impl_bounds { fn to_tokens(&self, tokens: &mut TokenStream) { match self { $(Self::$Variant(v) => v.to_tokens(tokens),)* + $(Self::$Unknown(v) => v.to_tokens(tokens),)? } } } impl $enum_type { $vis fn parse_path(path: Path) -> Result { + #![allow(unreachable_code)] $(let path = match known_items::$Variant::parse_path(path) { Ok(v) => return Ok(Self::$Variant(v)), Err(path) => path, };)* + $(return Ok(Self::$Unknown(syn::TraitBound { + paren_token: None, + modifier: syn::TraitBoundModifier::None, + lifetimes: None, + path, + }.into()));)? Err(path) } + $vis fn parse_type_param_bound(mut type_param_bound: syn::TypeParamBound) -> Result { + #![allow(unreachable_code)] + if let syn::TypeParamBound::Trait(mut trait_bound) = type_param_bound { + if let syn::TraitBound { + paren_token: _, + modifier: syn::TraitBoundModifier::None, + lifetimes: None, + path: _, + } = trait_bound { + match Self::parse_path(trait_bound.path) { + Ok(retval) => return Ok(retval), + Err(path) => trait_bound.path = path, + } + } + type_param_bound = trait_bound.into(); + } + $(return Ok(Self::$Unknown(type_param_bound));)? + Err(type_param_bound) + } } impl Parse for $enum_type { fn parse(input: ParseStream) -> syn::Result { - Self::parse_path(Path::parse_mod_style(input)?).map_err(|path| { - syn::Error::new_spanned( - path, + Self::parse_type_param_bound(input.parse()?) + .map_err(|type_param_bound| syn::Error::new_spanned( + type_param_bound, format_args!("expected one of: {}", [$(stringify!($Variant)),*].join(", ")), - ) - }) + )) } } @@ -2114,6 +2149,7 @@ macro_rules! impl_bounds { #[allow(non_snake_case)] $vis struct $struct_type { $($vis $Variant: Option,)* + $($vis $Unknown: Vec,)? } impl ToTokens for $struct_type { @@ -2125,42 +2161,63 @@ macro_rules! 
impl_bounds { separator = Some(::default()); v.to_tokens(tokens); })* + $(for v in &self.$Unknown { + separator.to_tokens(tokens); + separator = Some(::default()); + v.to_tokens(tokens); + })* } } const _: () = { #[derive(Clone, Debug)] - $vis struct Iter($vis $struct_type); + #[allow(non_snake_case)] + $vis struct Iter { + $($Variant: Option,)* + $($Unknown: std::vec::IntoIter,)? + } impl IntoIterator for $struct_type { type Item = $enum_type; type IntoIter = Iter; fn into_iter(self) -> Self::IntoIter { - Iter(self) + Iter { + $($Variant: self.$Variant,)* + $($Unknown: self.$Unknown.into_iter(),)? + } } } impl Iterator for Iter { type Item = $enum_type; - fn next(&mut self) -> Option { $( - if let Some(value) = self.0.$Variant.take() { + if let Some(value) = self.$Variant.take() { return Some($enum_type::$Variant(value)); } )* + $( + if let Some(value) = self.$Unknown.next() { + return Some($enum_type::$Unknown(value)); + } + )? None } #[allow(unused_mut, unused_variables)] fn fold B>(mut self, mut init: B, mut f: F) -> B { $( - if let Some(value) = self.0.$Variant.take() { + if let Some(value) = self.$Variant.take() { init = f(init, $enum_type::$Variant(value)); } )* + $( + if let Some(value) = self.$Unknown.next() { + init = f(init, $enum_type::$Unknown(value)); + } + )? init } } @@ -2172,6 +2229,9 @@ macro_rules! impl_bounds { $($enum_type::$Variant(v) => { self.$Variant = Some(v); })* + $($enum_type::$Unknown(v) => { + self.$Unknown.push(v); + })? }); } } @@ -2190,6 +2250,7 @@ macro_rules! impl_bounds { $(if let Some(v) = v.$Variant { self.$Variant = Some(v); })* + $(self.$Unknown.extend(v.$Unknown);)* }); } } @@ -2239,9 +2300,12 @@ impl_bounds! { EnumType, IntType, KnownSize, + ResetType, Size, StaticType, Type, + #[unknown] + Unknown, } } @@ -2252,8 +2316,11 @@ impl_bounds! 
{ BundleType, EnumType, IntType, + ResetType, StaticType, Type, + #[unknown] + Unknown, } } @@ -2264,8 +2331,10 @@ impl From for ParsedBound { ParsedTypeBound::BundleType(v) => ParsedBound::BundleType(v), ParsedTypeBound::EnumType(v) => ParsedBound::EnumType(v), ParsedTypeBound::IntType(v) => ParsedBound::IntType(v), + ParsedTypeBound::ResetType(v) => ParsedBound::ResetType(v), ParsedTypeBound::StaticType(v) => ParsedBound::StaticType(v), ParsedTypeBound::Type(v) => ParsedBound::Type(v), + ParsedTypeBound::Unknown(v) => ParsedBound::Unknown(v), } } } @@ -2277,8 +2346,10 @@ impl From for ParsedBounds { BundleType, EnumType, IntType, + ResetType, StaticType, Type, + Unknown, } = value; Self { BoolOrIntType, @@ -2286,9 +2357,11 @@ impl From for ParsedBounds { EnumType, IntType, KnownSize: None, + ResetType, Size: None, StaticType, Type, + Unknown, } } } @@ -2314,11 +2387,17 @@ impl ParsedTypeBound { ParsedTypeBound::BoolOrIntType(known_items::BoolOrIntType(span)), ParsedTypeBound::Type(known_items::Type(span)), ]), + Self::ResetType(v) => ParsedTypeBounds::from_iter([ + ParsedTypeBound::from(v), + ParsedTypeBound::StaticType(known_items::StaticType(span)), + ParsedTypeBound::Type(known_items::Type(span)), + ]), Self::StaticType(v) => ParsedTypeBounds::from_iter([ ParsedTypeBound::from(v), ParsedTypeBound::Type(known_items::Type(span)), ]), Self::Type(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::from(v)]), + Self::Unknown(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::Unknown(v)]), } } } @@ -2349,9 +2428,11 @@ impl From for ParsedBounds { EnumType: None, IntType: None, KnownSize, + ResetType: None, Size, StaticType: None, Type: None, + Unknown: vec![], } } } @@ -2379,6 +2460,7 @@ impl ParsedBounds { fn categorize(self, errors: &mut Errors, span: Span) -> ParsedBoundsCategory { let mut type_bounds = None; let mut size_type_bounds = None; + let mut unknown_bounds = vec![]; self.into_iter().for_each(|bound| match bound.categorize() { ParsedBoundCategory::Type(bound) => { type_bounds @@ -2390,15 +2472,37 @@ impl ParsedBounds { .get_or_insert_with(ParsedSizeTypeBounds::default) .extend([bound]); } + ParsedBoundCategory::Unknown(bound) => unknown_bounds.push(bound), }); - match (type_bounds, size_type_bounds) { - (None, None) => ParsedBoundsCategory::Type(ParsedTypeBounds { + match (type_bounds, size_type_bounds, unknown_bounds.is_empty()) { + (None, None, true) => ParsedBoundsCategory::Type(ParsedTypeBounds { Type: Some(known_items::Type(span)), ..Default::default() }), - (None, Some(bounds)) => ParsedBoundsCategory::SizeType(bounds), - (Some(bounds), None) => ParsedBoundsCategory::Type(bounds), - (Some(type_bounds), Some(size_type_bounds)) => { + (None, None, false) => { + errors.error( + unknown_bounds.remove(0), + "unknown bounds: must use at least one known bound (such as `Type`) with any unknown bounds", + ); + ParsedBoundsCategory::Type(ParsedTypeBounds { + Unknown: unknown_bounds, + ..Default::default() + }) + } + (None, Some(bounds), true) => ParsedBoundsCategory::SizeType(bounds), + (None, Some(bounds), false) => { + // TODO: implement + errors.error( + unknown_bounds.remove(0), + "unknown bounds with `Size` bounds are not implemented", + ); + ParsedBoundsCategory::SizeType(bounds) + } + (Some(bounds), None, _) => ParsedBoundsCategory::Type(ParsedTypeBounds { + Unknown: unknown_bounds, + ..bounds + }), + (Some(type_bounds), Some(size_type_bounds), _) => { errors.error( size_type_bounds .Size @@ -2415,6 +2519,7 @@ impl ParsedBounds { pub(crate) enum ParsedBoundCategory { 
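// A hedged sketch of what the widened bound handling above is meant to allow
// in `#[hdl]` generics (whether these exact items compile depends on macro
// paths not shown in this hunk): `ResetType` is now a recognized bound that
// implies `StaticType + Type`, and bounds the macro does not recognize are
// carried through as `Unknown`, provided at least one known bound (such as
// `Type`) accompanies them.
//
//     #[hdl]
//     struct ResetWrapper<R: ResetType> { rst: R }
//
//     #[hdl]
//     struct Tagged<T: Type + ::std::fmt::Debug> { value: T }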
Type(ParsedTypeBound), SizeType(ParsedSizeTypeBound), + Unknown(syn::TypeParamBound), } impl ParsedBound { @@ -2425,15 +2530,18 @@ impl ParsedBound { Self::EnumType(v) => ParsedBoundCategory::Type(ParsedTypeBound::EnumType(v)), Self::IntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::IntType(v)), Self::KnownSize(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::KnownSize(v)), + Self::ResetType(v) => ParsedBoundCategory::Type(ParsedTypeBound::ResetType(v)), Self::Size(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::Size(v)), Self::StaticType(v) => ParsedBoundCategory::Type(ParsedTypeBound::StaticType(v)), Self::Type(v) => ParsedBoundCategory::Type(ParsedTypeBound::Type(v)), + Self::Unknown(v) => ParsedBoundCategory::Unknown(v), } } fn implied_bounds(self) -> ParsedBounds { match self.categorize() { ParsedBoundCategory::Type(v) => v.implied_bounds().into(), ParsedBoundCategory::SizeType(v) => v.implied_bounds().into(), + ParsedBoundCategory::Unknown(v) => ParsedBounds::from_iter([ParsedBound::Unknown(v)]), } } } @@ -3310,8 +3418,9 @@ impl ParsedGenerics { ParsedTypeBound::BoolOrIntType(_) | ParsedTypeBound::BundleType(_) | ParsedTypeBound::EnumType(_) - | ParsedTypeBound::IntType(_) => { - errors.error(bound, "bound on mask type not implemented"); + | ParsedTypeBound::IntType(_) + | ParsedTypeBound::ResetType(_) => { + errors.error(bound, "bounds on mask types are not implemented"); } ParsedTypeBound::StaticType(bound) => { if bounds.StaticType.is_none() { @@ -3323,6 +3432,12 @@ impl ParsedGenerics { } } ParsedTypeBound::Type(_) => {} + ParsedTypeBound::Unknown(_) => { + errors.error( + bound, + "unknown bounds on mask types are not implemented", + ); + } } } bounds.add_implied_bounds(); @@ -3648,7 +3763,10 @@ pub(crate) trait AsTurbofish { } impl AsTurbofish for TypeGenerics<'_> { - type Turbofish<'a> = Turbofish<'a> where Self: 'a; + type Turbofish<'a> + = Turbofish<'a> + where + Self: 'a; fn as_turbofish(&self) -> Self::Turbofish<'_> { TypeGenerics::as_turbofish(self) @@ -3656,7 +3774,8 @@ impl AsTurbofish for TypeGenerics<'_> { } impl AsTurbofish for ParsedGenericsTypeGenerics<'_> { - type Turbofish<'a> = ParsedGenericsTurbofish<'a> + type Turbofish<'a> + = ParsedGenericsTurbofish<'a> where Self: 'a; @@ -3707,15 +3826,18 @@ impl SplitForImpl for Generics { } impl SplitForImpl for ParsedGenerics { - type ImplGenerics<'a> = ParsedGenericsImplGenerics<'a> + type ImplGenerics<'a> + = ParsedGenericsImplGenerics<'a> where Self: 'a; - type TypeGenerics<'a> = ParsedGenericsTypeGenerics<'a> + type TypeGenerics<'a> + = ParsedGenericsTypeGenerics<'a> where Self: 'a; - type WhereClause<'a> = ParsedGenericsWhereClause<'a> + type WhereClause<'a> + = ParsedGenericsWhereClause<'a> where Self: 'a; @@ -3932,7 +4054,8 @@ impl ToTokens for MaybeParsed { } impl AsTurbofish for MaybeParsed { - type Turbofish<'a> = MaybeParsed, U::Turbofish<'a>> + type Turbofish<'a> + = MaybeParsed, U::Turbofish<'a>> where Self: 'a; @@ -3945,13 +4068,16 @@ impl AsTurbofish for MaybeParsed { } impl SplitForImpl for MaybeParsed { - type ImplGenerics<'a> = MaybeParsed, U::ImplGenerics<'a>> + type ImplGenerics<'a> + = MaybeParsed, U::ImplGenerics<'a>> where Self: 'a; - type TypeGenerics<'a> = MaybeParsed, U::TypeGenerics<'a>> + type TypeGenerics<'a> + = MaybeParsed, U::TypeGenerics<'a>> where Self: 'a; - type WhereClause<'a> = MaybeParsed, U::WhereClause<'a>> + type WhereClause<'a> + = MaybeParsed, U::WhereClause<'a>> where Self: 'a; diff --git a/crates/fayalite-proc-macros-impl/src/lib.rs 
b/crates/fayalite-proc-macros-impl/src/lib.rs index 0ffd4d4..13ec7a2 100644 --- a/crates/fayalite-proc-macros-impl/src/lib.rs +++ b/crates/fayalite-proc-macros-impl/src/lib.rs @@ -2,15 +2,20 @@ // See Notices.txt for copyright information #![cfg_attr(test, recursion_limit = "512")] use proc_macro2::{Span, TokenStream}; -use quote::{quote, ToTokens}; -use std::io::{ErrorKind, Write}; +use quote::{ToTokens, quote}; +use std::{ + collections::{HashMap, hash_map::Entry}, + io::{ErrorKind, Write}, +}; use syn::{ - bracketed, parenthesized, + AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed, + ext::IdentExt, + parenthesized, parse::{Parse, ParseStream, Parser}, parse_quote, - punctuated::Pair, + punctuated::{Pair, Punctuated}, spanned::Spanned, - AttrStyle, Attribute, Error, Item, ItemFn, Token, + token::{Bracket, Paren}, }; mod fold; @@ -19,6 +24,7 @@ mod hdl_enum; mod hdl_type_alias; mod hdl_type_common; mod module; +mod process_cfg; pub(crate) trait CustomToken: Copy @@ -59,14 +65,22 @@ mod kw { }; } + custom_keyword!(__evaluated_cfgs); + custom_keyword!(add_platform_io); + custom_keyword!(all); + custom_keyword!(any); + custom_keyword!(cfg); + custom_keyword!(cfg_attr); custom_keyword!(clock_domain); + custom_keyword!(cmp_eq); custom_keyword!(connect_inexact); custom_keyword!(custom_bounds); custom_keyword!(flip); + custom_keyword!(get); custom_keyword!(hdl); custom_keyword!(hdl_module); - custom_keyword!(input); custom_keyword!(incomplete_wire); + custom_keyword!(input); custom_keyword!(instance); custom_keyword!(m); custom_keyword!(memory); @@ -75,10 +89,12 @@ mod kw { custom_keyword!(no_reset); custom_keyword!(no_runtime_generics); custom_keyword!(no_static); + custom_keyword!(not); custom_keyword!(outline_generated); custom_keyword!(output); custom_keyword!(reg_builder); custom_keyword!(reset); + custom_keyword!(sim); custom_keyword!(skip); custom_keyword!(target); custom_keyword!(wire); @@ -901,15 +917,346 @@ fn hdl_module_impl(item: ItemFn) -> syn::Result { Ok(contents) } -pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result { - let kw = kw::hdl_module::default(); - hdl_module_impl(syn::parse2(quote! { #[#kw(#attr)] #item })?) +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) enum CfgExpr { + Option { + ident: Ident, + value: Option<(Token![=], LitStr)>, + }, + All { + all: kw::all, + paren: Paren, + exprs: Punctuated, + }, + Any { + any: kw::any, + paren: Paren, + exprs: Punctuated, + }, + Not { + not: kw::not, + paren: Paren, + expr: Box, + trailing_comma: Option, + }, } -pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result { - let kw = kw::hdl::default(); - let item = quote! 
{ #[#kw(#attr)] #item }; - let item = syn::parse2::(item)?; +impl Parse for CfgExpr { + fn parse(input: ParseStream) -> syn::Result { + match input.cursor().ident() { + Some((_, cursor)) if cursor.eof() => { + return Ok(CfgExpr::Option { + ident: input.call(Ident::parse_any)?, + value: None, + }); + } + _ => {} + } + if input.peek(Ident::peek_any) && input.peek2(Token![=]) { + return Ok(CfgExpr::Option { + ident: input.call(Ident::parse_any)?, + value: Some((input.parse()?, input.parse()?)), + }); + } + let contents; + if input.peek(kw::all) { + Ok(CfgExpr::All { + all: input.parse()?, + paren: parenthesized!(contents in input), + exprs: contents.call(Punctuated::parse_terminated)?, + }) + } else if input.peek(kw::any) { + Ok(CfgExpr::Any { + any: input.parse()?, + paren: parenthesized!(contents in input), + exprs: contents.call(Punctuated::parse_terminated)?, + }) + } else if input.peek(kw::not) { + Ok(CfgExpr::Not { + not: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + trailing_comma: contents.parse()?, + }) + } else { + Err(input.error("expected cfg-pattern")) + } + } +} + +impl ToTokens for CfgExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + CfgExpr::Option { ident, value } => { + ident.to_tokens(tokens); + if let Some((eq, value)) = value { + eq.to_tokens(tokens); + value.to_tokens(tokens); + } + } + CfgExpr::All { all, paren, exprs } => { + all.to_tokens(tokens); + paren.surround(tokens, |tokens| exprs.to_tokens(tokens)); + } + CfgExpr::Any { any, paren, exprs } => { + any.to_tokens(tokens); + paren.surround(tokens, |tokens| exprs.to_tokens(tokens)); + } + CfgExpr::Not { + not, + paren, + expr, + trailing_comma, + } => { + not.to_tokens(tokens); + paren.surround(tokens, |tokens| { + expr.to_tokens(tokens); + trailing_comma.to_tokens(tokens); + }); + } + } + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct Cfg { + cfg: kw::cfg, + paren: Paren, + expr: CfgExpr, + trailing_comma: Option, +} + +impl Cfg { + fn parse_meta(meta: &Meta) -> syn::Result { + syn::parse2(meta.to_token_stream()) + } +} + +impl ToTokens for Cfg { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + cfg, + paren, + expr, + trailing_comma, + } = self; + cfg.to_tokens(tokens); + paren.surround(tokens, |tokens| { + expr.to_tokens(tokens); + trailing_comma.to_tokens(tokens); + }); + } +} + +impl Parse for Cfg { + fn parse(input: ParseStream) -> syn::Result { + let contents; + Ok(Self { + cfg: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + trailing_comma: contents.parse()?, + }) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct CfgAttr { + cfg_attr: kw::cfg_attr, + paren: Paren, + expr: CfgExpr, + comma: Token![,], + attrs: Punctuated, +} + +impl CfgAttr { + pub(crate) fn to_cfg(&self) -> Cfg { + Cfg { + cfg: kw::cfg(self.cfg_attr.span), + paren: self.paren, + expr: self.expr.clone(), + trailing_comma: None, + } + } + fn parse_meta(meta: &Meta) -> syn::Result { + syn::parse2(meta.to_token_stream()) + } +} + +impl Parse for CfgAttr { + fn parse(input: ParseStream) -> syn::Result { + let contents; + Ok(Self { + cfg_attr: input.parse()?, + paren: parenthesized!(contents in input), + expr: contents.parse()?, + comma: contents.parse()?, + attrs: contents.call(Punctuated::parse_terminated)?, + }) + } +} + +pub(crate) struct CfgAndValue { + cfg: Cfg, + eq_token: Token![=], + value: LitBool, +} + +impl Parse for CfgAndValue { + fn parse(input: ParseStream) -> 
syn::Result { + Ok(Self { + cfg: input.parse()?, + eq_token: input.parse()?, + value: input.parse()?, + }) + } +} + +pub(crate) struct Cfgs { + pub(crate) bracket: Bracket, + pub(crate) cfgs_map: HashMap, + pub(crate) cfgs_list: Vec, +} + +impl Default for Cfgs { + fn default() -> Self { + Self { + bracket: Default::default(), + cfgs_map: Default::default(), + cfgs_list: Default::default(), + } + } +} + +impl Cfgs { + fn insert_cfg(&mut self, cfg: Cfg, value: T) { + match self.cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + self.cfgs_list.push(entry.key().clone()); + entry.insert(value); + } + } + } +} + +impl Parse for Cfgs { + fn parse(input: ParseStream) -> syn::Result { + let contents; + let bracket = bracketed!(contents in input); + let mut cfgs_map = HashMap::new(); + let mut cfgs_list = Vec::new(); + for CfgAndValue { + cfg, + eq_token, + value, + } in contents.call(Punctuated::::parse_terminated)? + { + let _ = eq_token; + match cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + cfgs_list.push(entry.key().clone()); + entry.insert(value.value); + } + } + } + Ok(Self { + bracket, + cfgs_map, + cfgs_list, + }) + } +} + +impl Parse for Cfgs<()> { + fn parse(input: ParseStream) -> syn::Result { + let contents; + let bracket = bracketed!(contents in input); + let mut cfgs_map = HashMap::new(); + let mut cfgs_list = Vec::new(); + for cfg in contents.call(Punctuated::::parse_terminated)? { + match cfgs_map.entry(cfg) { + Entry::Occupied(_) => {} + Entry::Vacant(entry) => { + cfgs_list.push(entry.key().clone()); + entry.insert(()); + } + } + } + Ok(Self { + bracket, + cfgs_map, + cfgs_list, + }) + } +} + +impl ToTokens for Cfgs<()> { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + bracket, + cfgs_map: _, + cfgs_list, + } = self; + bracket.surround(tokens, |tokens| { + for cfg in cfgs_list { + cfg.to_tokens(tokens); + ::default().to_tokens(tokens); + } + }); + } +} + +fn hdl_main( + kw: impl CustomToken, + attr: TokenStream, + item: TokenStream, +) -> syn::Result { + fn parse_evaluated_cfgs_attr( + input: ParseStream, + parse_inner: impl FnOnce(ParseStream) -> syn::Result, + ) -> syn::Result { + let _: Token![#] = input.parse()?; + let bracket_content; + bracketed!(bracket_content in input); + let _: kw::__evaluated_cfgs = bracket_content.parse()?; + let paren_content; + parenthesized!(paren_content in bracket_content); + parse_inner(&paren_content) + } + let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2( + |input: ParseStream| { + let peek = input.fork(); + if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() { + let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::::parse)?; + Ok((Some(evaluated_cfgs), input.parse()?)) + } else { + Ok((None, input.parse()?)) + } + }, + item, + )?; + let cfgs = if let Some(cfgs) = evaluated_cfgs { + cfgs + } else { + let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?; + if cfgs.cfgs_list.is_empty() { + Cfgs::default() + } else { + return Ok(quote! { + ::fayalite::__cfg_expansion_helper! { + [] + #cfgs + {#[::fayalite::#kw(#attr)]} { #item } + } + }); + } + }; + let item = syn::parse2(quote! { #[#kw(#attr)] #item })?; + let Some(item) = process_cfg::process_cfgs(item, cfgs)? 
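// A hedged sketch of the two-pass cfg flow implemented here: on the first
// expansion, `hdl_main` collects every `cfg`/`cfg_attr` predicate in the item
// and defers to the crate-side `__cfg_expansion_helper!` macro, which is
// expected to evaluate them and re-invoke the attribute with an
// `#[__evaluated_cfgs(...)]` prefix; the second expansion reads that prefix
// and strips or keeps the guarded code before normal `#[hdl]` processing
// runs. The feature name and port below are illustrative:
//
//     #[hdl_module]
//     fn top() {
//         #[cfg(feature = "debug-port")]
//         #[hdl] let dbg: UInt<8> = m.output();
//     }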
else { + return Ok(TokenStream::new()); + }; match item { Item::Enum(item) => hdl_enum::hdl_enum(item), Item::Struct(item) => hdl_bundle::hdl_bundle(item), @@ -921,3 +1268,11 @@ pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result syn::Result { + hdl_main(kw::hdl_module::default(), attr, item) +} + +pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result { + hdl_main(kw::hdl::default(), attr, item) +} diff --git a/crates/fayalite-proc-macros-impl/src/module.rs b/crates/fayalite-proc-macros-impl/src/module.rs index 0852f58..5628ff9 100644 --- a/crates/fayalite-proc-macros-impl/src/module.rs +++ b/crates/fayalite-proc-macros-impl/src/module.rs @@ -1,19 +1,20 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ + Errors, HdlAttr, PairsIterExt, hdl_type_common::{ParsedGenerics, SplitForImpl}, kw, - module::transform_body::{HdlLet, HdlLetKindIO}, - options, Errors, HdlAttr, PairsIterExt, + module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO}, + options, }; use proc_macro2::TokenStream; -use quote::{format_ident, quote, quote_spanned, ToTokens}; +use quote::{ToTokens, format_ident, quote, quote_spanned}; use std::collections::HashSet; use syn::{ - parse_quote, - visit::{visit_pat, Visit}, Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct, LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate, + parse_quote, + visit::{Visit, visit_pat}, }; mod transform_body; @@ -38,7 +39,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res if name == "m" { Err(Error::new_spanned( name, - "name conflicts with implicit `m: &mut ModuleBuilder<_>`", + "name conflicts with implicit `m: &ModuleBuilder`", )) } else { Ok(()) @@ -66,7 +67,7 @@ struct ModuleFnModule { vis: Visibility, sig: Signature, block: Box, - struct_generics: ParsedGenerics, + struct_generics: Option, the_struct: TokenStream, } @@ -289,7 +290,7 @@ impl ModuleFn { paren_token, body, } => { - debug_assert!(io.is_empty()); + debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty())); return Ok(Self(ModuleFnImpl::Fn { attrs, config_options: HdlAttr { @@ -321,6 +322,21 @@ impl ModuleFn { body, }, }; + let io = match io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => io, + ModuleIOOrAddPlatformIO::AddPlatformIO => { + return Ok(Self(ModuleFnImpl::Module(ModuleFnModule { + attrs, + config_options, + module_kind: module_kind.unwrap(), + vis, + sig, + block, + struct_generics: None, + the_struct: TokenStream::new(), + }))); + } + }; let (_struct_impl_generics, _struct_type_generics, struct_where_clause) = struct_generics.split_for_impl(); let struct_where_clause: Option = parse_quote! { #struct_where_clause }; @@ -363,7 +379,7 @@ impl ModuleFn { vis, sig, block, - struct_generics, + struct_generics: Some(struct_generics), the_struct, }))) } @@ -377,7 +393,7 @@ impl ModuleFn { module_kind, vis, sig, - block, + mut block, struct_generics, the_struct, } = match self.0 { @@ -432,13 +448,24 @@ impl ModuleFn { ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal }, }; let fn_name = &outer_sig.ident; - let (_struct_impl_generics, struct_type_generics, _struct_where_clause) = - struct_generics.split_for_impl(); - let struct_ty = quote! 
{#fn_name #struct_type_generics}; + let struct_ty = match struct_generics { + Some(struct_generics) => { + let (_struct_impl_generics, struct_type_generics, _struct_where_clause) = + struct_generics.split_for_impl(); + quote! {#fn_name #struct_type_generics} + } + None => quote! {::fayalite::bundle::Bundle}, + }; body_sig.ident = parse_quote! {__body}; body_sig .inputs .insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder }); + block.stmts.insert( + 0, + parse_quote! { + let _ = m; + }, + ); let body_fn = ItemFn { attrs: vec![], vis: Visibility::Inherited, diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body.rs index 6e99e87..7b41f5e 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body.rs @@ -1,36 +1,45 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - fold::{impl_fold, DoFold}, + Errors, HdlAttr, + fold::{DoFold, impl_fold}, hdl_type_common::{ - known_items, ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, + ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, known_items, }, is_hdl_attr, kw, - module::{check_name_conflicts_with_module_builder, ModuleIO, ModuleIOKind, ModuleKind}, - options, Errors, HdlAttr, + module::{ModuleIO, ModuleIOKind, ModuleKind, check_name_conflicts_with_module_builder}, + options, }; use num_bigint::BigInt; use proc_macro2::{Span, TokenStream}; -use quote::{quote, quote_spanned, ToTokens}; +use quote::{ToTokens, quote, quote_spanned}; use std::{borrow::Borrow, convert::Infallible}; use syn::{ - fold::{fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt, Fold}, + Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary, + GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp, + fold::{Fold, fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt}, parenthesized, - parse::{Nothing, Parse, ParseStream}, + parse::{Parse, ParseStream}, parse_quote, parse_quote_spanned, spanned::Spanned, token::Paren, - Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary, - GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp, }; mod expand_aggregate_literals; mod expand_match; +options! { + #[options = ExprOptions] + pub(crate) enum ExprOption { + Sim(sim), + } +} + options! { pub(crate) enum LetFnKind { Input(input), Output(output), + AddPlatformIO(add_platform_io), Instance(instance), RegBuilder(reg_builder), Wire(wire), @@ -208,6 +217,49 @@ impl HdlLetKindToTokens for HdlLetKindInstance { } } +#[derive(Clone, Debug)] +pub(crate) struct HdlLetKindAddPlatformIO { + pub(crate) m: kw::m, + pub(crate) dot_token: Token![.], + pub(crate) add_platform_io: kw::add_platform_io, + pub(crate) paren: Paren, + pub(crate) platform_io_builder: Box, +} + +impl ParseTypes for HdlLetKindAddPlatformIO { + fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result { + Ok(input.clone()) + } +} + +impl_fold! 
{ + struct HdlLetKindAddPlatformIO<> { + m: kw::m, + dot_token: Token![.], + add_platform_io: kw::add_platform_io, + paren: Paren, + platform_io_builder: Box, + } +} + +impl HdlLetKindToTokens for HdlLetKindAddPlatformIO { + fn ty_to_tokens(&self, _tokens: &mut TokenStream) {} + + fn expr_to_tokens(&self, tokens: &mut TokenStream) { + let Self { + m, + dot_token, + add_platform_io, + paren, + platform_io_builder, + } = self; + m.to_tokens(tokens); + dot_token.to_tokens(tokens); + add_platform_io.to_tokens(tokens); + paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens)); + } +} + #[derive(Clone, Debug)] pub(crate) struct RegBuilderClockDomain { pub(crate) dot_token: Token![.], @@ -703,6 +755,7 @@ impl HdlLetKindMemory { #[derive(Clone, Debug)] pub(crate) enum HdlLetKind { IO(HdlLetKindIO), + AddPlatformIO(HdlLetKindAddPlatformIO), Incomplete(HdlLetKindIncomplete), Instance(HdlLetKindInstance), RegBuilder(HdlLetKindRegBuilder), @@ -713,6 +766,7 @@ pub(crate) enum HdlLetKind { impl_fold! { enum HdlLetKind { IO(HdlLetKindIO), + AddPlatformIO(HdlLetKindAddPlatformIO), Incomplete(HdlLetKindIncomplete), Instance(HdlLetKindInstance), RegBuilder(HdlLetKindRegBuilder), @@ -728,6 +782,9 @@ impl, I> ParseTypes> for HdlLetKind { ) -> Result { match input { HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO), + HdlLetKind::AddPlatformIO(input) => { + ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO) + } HdlLetKind::Incomplete(input) => { ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete) } @@ -853,6 +910,23 @@ impl HdlLetKindParse for HdlLetKind { ModuleIOKind::Output(output), ) .map(Self::IO), + LetFnKind::AddPlatformIO((add_platform_io,)) => { + if let Some(parsed_ty) = parsed_ty { + return Err(Error::new_spanned( + parsed_ty.1, + "type annotation not allowed for instance", + )); + } + let (m, dot_token) = unwrap_m_dot(m_dot, kind)?; + let paren_contents; + Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO { + m, + dot_token, + add_platform_io, + paren: parenthesized!(paren_contents in input), + platform_io_builder: paren_contents.call(parse_single_fn_arg)?, + })) + } LetFnKind::Instance((instance,)) => { if let Some(parsed_ty) = parsed_ty { return Err(Error::new_spanned( @@ -928,6 +1002,7 @@ impl HdlLetKindToTokens for HdlLetKind { fn ty_to_tokens(&self, tokens: &mut TokenStream) { match self { HdlLetKind::IO(v) => v.ty_to_tokens(tokens), + HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens), HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens), HdlLetKind::Instance(v) => v.ty_to_tokens(tokens), HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens), @@ -939,6 +1014,7 @@ impl HdlLetKindToTokens for HdlLetKind { fn expr_to_tokens(&self, tokens: &mut TokenStream) { match self { HdlLetKind::IO(v) => v.expr_to_tokens(tokens), + HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens), HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens), HdlLetKind::Instance(v) => v.expr_to_tokens(tokens), HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens), @@ -952,7 +1028,7 @@ with_debug_clone_and_fold! { #[allow(dead_code)] pub(crate) struct HdlLet { pub(crate) attrs: Vec, - pub(crate) hdl_attr: HdlAttr, + pub(crate) hdl_attr: HdlAttr, pub(crate) let_token: Token![let], pub(crate) mut_token: Option, pub(crate) name: Ident, @@ -1109,7 +1185,7 @@ fn parse_quote_let_pat>( } } -fn wrap_ty_with_expr(ty: impl ToTokens) -> Type { +pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type { parse_quote_spanned! 
{ty.span()=> ::fayalite::expr::Expr<#ty> } @@ -1141,7 +1217,7 @@ impl ToTokens for ImplicitName { struct Visitor<'a> { module_kind: Option, errors: Errors, - io: Vec, + io: ModuleIOOrAddPlatformIO, block_depth: usize, parsed_generics: &'a ParsedGenerics, } @@ -1173,7 +1249,7 @@ impl Visitor<'_> { Some(_) => {} } } - fn process_hdl_if(&mut self, hdl_attr: HdlAttr, expr_if: ExprIf) -> Expr { + fn process_hdl_if(&mut self, hdl_attr: HdlAttr, expr_if: ExprIf) -> Expr { let ExprIf { attrs, if_token, @@ -1181,10 +1257,10 @@ impl Visitor<'_> { then_branch, else_branch, } = expr_if; - self.require_normal_module_or_fn(if_token); - let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr { - Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if), - expr => expr, + let (else_token, else_expr) = else_branch.unzip(); + let else_expr = else_expr.map(|else_expr| match *else_expr { + Expr::If(expr_if) => Box::new(self.process_hdl_if(hdl_attr.clone(), expr_if)), + _ => else_expr, }); if let Expr::Let(ExprLet { attrs: let_attrs, @@ -1206,7 +1282,19 @@ impl Visitor<'_> { }, ); } - if let Some(else_expr) = else_expr { + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + ExprIf { + attrs, + if_token, + cond: parse_quote_spanned! {if_token.span=> + *::fayalite::sim::value::SimValue::<::fayalite::int::Bool>::value(&::fayalite::sim::value::ToSimValue::into_sim_value(#cond)) + }, + then_branch, + else_branch: else_token.zip(else_expr), + } + .into() + } else if let Some(else_expr) = else_expr { parse_quote_spanned! {if_token.span=> #(#attrs)* { @@ -1269,7 +1357,81 @@ impl Visitor<'_> { }), semi_token: hdl_let.semi_token, }; - self.io.push(hdl_let); + match &mut self.io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let), + ModuleIOOrAddPlatformIO::AddPlatformIO => { + self.errors.error( + kind, + "can't have other inputs/outputs in a module using m.add_platform_io()", + ); + } + } + let_stmt + } + fn process_hdl_let_add_platform_io( + &mut self, + hdl_let: HdlLet, + ) -> Local { + let HdlLet { + mut attrs, + hdl_attr: _, + let_token, + mut_token, + ref name, + eq_token, + kind: + HdlLetKindAddPlatformIO { + m, + dot_token, + add_platform_io, + paren, + platform_io_builder, + }, + semi_token, + } = hdl_let; + let mut expr = quote! {#m #dot_token #add_platform_io}; + paren.surround(&mut expr, |expr| { + let name_str = ImplicitName { + name, + span: name.span(), + }; + quote_spanned! {name.span()=> + #name_str, #platform_io_builder + } + .to_tokens(expr); + }); + self.require_module(add_platform_io); + attrs.push(parse_quote_spanned! {let_token.span=> + #[allow(unused_variables)] + }); + let let_stmt = Local { + attrs, + let_token, + pat: parse_quote! { #mut_token #name }, + init: Some(LocalInit { + eq_token, + expr: parse_quote! { #expr }, + diverge: None, + }), + semi_token, + }; + match &mut self.io { + ModuleIOOrAddPlatformIO::ModuleIO(io) => { + for io in io { + self.errors.error( + io.kind.kind, + "can't have other inputs/outputs in a module using m.add_platform_io()", + ); + } + } + ModuleIOOrAddPlatformIO::AddPlatformIO => { + self.errors.error( + add_platform_io, + "can't use m.add_platform_io() more than once in a single module", + ); + } + } + self.io = ModuleIOOrAddPlatformIO::AddPlatformIO; let_stmt } fn process_hdl_let_instance(&mut self, hdl_let: HdlLet) -> Local { @@ -1490,6 +1652,7 @@ impl Visitor<'_> { } the_match! 
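// A hedged usage sketch for the `add_platform_io` let-kind wired up above
// (the builder type and how it reaches the body are illustrative): a module
// either declares its own `m.input()`/`m.output()` ports or hands everything
// to the platform via `m.add_platform_io(...)`, never both, and the call may
// appear at most once per module.
//
//     #[hdl_module]
//     fn top(platform_io_builder: SomePlatformIoBuilder) {
//         #[hdl] let io = m.add_platform_io(platform_io_builder);
//         // ... drive/read fields of `io` ...
//     }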
{ IO => process_hdl_let_io, + AddPlatformIO => process_hdl_let_add_platform_io, Incomplete => process_hdl_let_incomplete, Instance => process_hdl_let_instance, RegBuilder => process_hdl_let_reg_builder, @@ -1586,7 +1749,7 @@ impl Visitor<'_> { } } -fn empty_let() -> Local { +pub(crate) fn empty_let() -> Local { Local { attrs: vec![], let_token: Default::default(), @@ -1668,20 +1831,42 @@ impl Fold for Visitor<'_> { Repeat => process_hdl_repeat, Struct => process_hdl_struct, Tuple => process_hdl_tuple, + MethodCall => process_hdl_method_call, + Call => process_hdl_call, } } } - fn fold_local(&mut self, let_stmt: Local) -> Local { + fn fold_local(&mut self, mut let_stmt: Local) -> Local { match self .errors - .ok(HdlAttr::::parse_and_leave_attr( + .ok(HdlAttr::::parse_and_leave_attr( &let_stmt.attrs, )) { None => return empty_let(), Some(None) => return fold_local(self, let_stmt), Some(Some(HdlAttr { .. })) => {} }; + let mut pat = &let_stmt.pat; + if let Pat::Type(pat_type) = pat { + pat = &pat_type.pat; + } + let Pat::Ident(syn::PatIdent { + attrs: _, + by_ref: None, + mutability: _, + ident: _, + subpat: None, + }) = pat + else { + let hdl_attr = + HdlAttr::::parse_and_take_attr(&mut let_stmt.attrs) + .ok() + .flatten() + .expect("already checked above"); + let let_stmt = fold_local(self, let_stmt); + return self.process_hdl_let_pat(hdl_attr, let_stmt); + }; let hdl_let = syn::parse2::>>(let_stmt.into_token_stream()); let Some(hdl_let) = self.errors.ok(hdl_let) else { return empty_let(); @@ -1711,15 +1896,20 @@ impl Fold for Visitor<'_> { } } +pub(crate) enum ModuleIOOrAddPlatformIO { + ModuleIO(Vec), + AddPlatformIO, +} + pub(crate) fn transform_body( module_kind: Option, mut body: Box, parsed_generics: &ParsedGenerics, -) -> syn::Result<(Box, Vec)> { +) -> syn::Result<(Box, ModuleIOOrAddPlatformIO)> { let mut visitor = Visitor { module_kind, errors: Errors::new(), - io: vec![], + io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]), block_depth: 0, parsed_generics, }; diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs index b5a0ad3..1aabb19 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_aggregate_literals.rs @@ -1,45 +1,102 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use crate::{kw, module::transform_body::Visitor, HdlAttr}; + +use crate::{ + HdlAttr, kw, + module::transform_body::{ + ExprOptions, Visitor, + expand_match::{EnumPath, parse_enum_path}, + }, +}; use quote::{format_ident, quote_spanned}; +use std::mem; use syn::{ - parse::Nothing, parse_quote, parse_quote_spanned, spanned::Spanned, Expr, ExprArray, ExprPath, - ExprRepeat, ExprStruct, ExprTuple, FieldValue, TypePath, + Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, + ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned, + punctuated::Punctuated, spanned::Spanned, token::Paren, }; impl Visitor<'_> { pub(crate) fn process_hdl_array( &mut self, - hdl_attr: HdlAttr, + hdl_attr: HdlAttr, mut expr_array: ExprArray, ) -> Expr { - self.require_normal_module_or_fn(hdl_attr); - for elem in &mut expr_array.elems { - *elem = parse_quote_spanned! 
{elem.span()=> - ::fayalite::expr::ToExpr::to_expr(&(#elem)) - }; + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + if sim.is_some() { + for elem in &mut expr_array.elems { + *elem = parse_quote_spanned! {elem.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem)) + }; + } + parse_quote_spanned! {span=> + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array) + } + } else { + for elem in &mut expr_array.elems { + *elem = parse_quote_spanned! {elem.span()=> + ::fayalite::expr::ToExpr::to_expr(&(#elem)) + }; + } + parse_quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&#expr_array) + } } - parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)} } pub(crate) fn process_hdl_repeat( &mut self, - hdl_attr: HdlAttr, + hdl_attr: HdlAttr, mut expr_repeat: ExprRepeat, ) -> Expr { - self.require_normal_module_or_fn(hdl_attr); let repeated_value = &expr_repeat.expr; - *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> - ::fayalite::expr::ToExpr::to_expr(&(#repeated_value)) - }; - parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_repeat)} + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + if sim.is_some() { + *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value)) + }; + parse_quote_spanned! {span=> + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat) + } + } else { + *expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=> + ::fayalite::expr::ToExpr::to_expr(&(#repeated_value)) + }; + parse_quote_spanned! {span=> + ::fayalite::expr::ToExpr::to_expr(&#expr_repeat) + } + } } pub(crate) fn process_hdl_struct( &mut self, - hdl_attr: HdlAttr, - expr_struct: ExprStruct, + hdl_attr: HdlAttr, + mut expr_struct: ExprStruct, ) -> Expr { - self.require_normal_module_or_fn(&hdl_attr); let name_span = expr_struct.path.segments.last().unwrap().ident.span(); + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + let ty_path = TypePath { + qself: expr_struct.qself.take(), + path: expr_struct.path, + }; + expr_struct.path = parse_quote_spanned! {name_span=> + __SimValue::<#ty_path> + }; + for field in &mut expr_struct.fields { + let expr = &field.expr; + field.expr = parse_quote_spanned! {field.member.span()=> + ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)) + }; + } + return parse_quote_spanned! {name_span=> + { + type __SimValue = ::SimValue; + let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct); + value + } + }; + } let builder_ident = format_ident!("__builder", span = name_span); let empty_builder = if expr_struct.qself.is_some() || expr_struct @@ -91,12 +148,126 @@ impl Visitor<'_> { } pub(crate) fn process_hdl_tuple( &mut self, - hdl_attr: HdlAttr, - expr_tuple: ExprTuple, + hdl_attr: HdlAttr, + mut expr_tuple: ExprTuple, ) -> Expr { - self.require_normal_module_or_fn(hdl_attr); - parse_quote_spanned! {expr_tuple.span()=> - ::fayalite::expr::ToExpr::to_expr(&#expr_tuple) + let ExprOptions { sim } = hdl_attr.body; + if sim.is_some() { + for element in &mut expr_tuple.elems { + *element = parse_quote_spanned! {element.span()=> + &(#element) + }; + } + parse_quote_spanned! {expr_tuple.span()=> + ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple) + } + } else { + parse_quote_spanned! 
{expr_tuple.span()=> + ::fayalite::expr::ToExpr::to_expr(&#expr_tuple) + } } } + pub(crate) fn process_hdl_call( + &mut self, + hdl_attr: HdlAttr, + mut expr_call: ExprCall, + ) -> Expr { + let span = hdl_attr.kw.span; + let mut func = &mut *expr_call.func; + let EnumPath { + variant_path: _, + enum_path, + variant_name, + } = loop { + match func { + Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => { + func = &mut **expr; + } + Expr::Path(_) => { + let Expr::Path(ExprPath { attrs, qself, path }) = + mem::replace(func, Expr::PLACEHOLDER) + else { + unreachable!(); + }; + match parse_enum_path(TypePath { qself, path }) { + Ok(path) => break path, + Err(path) => { + self.errors.error(&path, "unsupported enum variant path"); + let TypePath { qself, path } = path; + *func = ExprPath { attrs, qself, path }.into(); + return expr_call.into(); + } + } + } + _ => { + self.errors.error( + &expr_call.func, + "#[hdl] function call -- function must be a possibly-parenthesized path", + ); + return expr_call.into(); + } + } + }; + self.process_hdl_method_call( + hdl_attr, + ExprMethodCall { + attrs: expr_call.attrs, + receiver: parse_quote_spanned! {span=> + <#enum_path as ::fayalite::ty::StaticType>::TYPE + }, + dot_token: Token![.](span), + method: variant_name, + turbofish: None, + paren_token: expr_call.paren_token, + args: expr_call.args, + }, + ) + } + pub(crate) fn process_hdl_method_call( + &mut self, + hdl_attr: HdlAttr, + mut expr_method_call: ExprMethodCall, + ) -> Expr { + let ExprOptions { sim } = hdl_attr.body; + let span = hdl_attr.kw.span; + // remove any number of groups and up to one paren + let mut receiver = &mut *expr_method_call.receiver; + let mut has_group = false; + let receiver = loop { + match receiver { + Expr::Group(ExprGroup { expr, .. }) => { + has_group = true; + receiver = expr; + } + Expr::Paren(ExprParen { expr, .. }) => break &mut **expr, + receiver @ Expr::Path(_) => break receiver, + _ => { + if !has_group { + self.errors.error( + &expr_method_call.receiver, + "#[hdl] on a method call needs parenthesized receiver", + ); + } + break &mut *expr_method_call.receiver; + } + } + }; + let func = if sim.is_some() { + parse_quote_spanned! {span=> + ::fayalite::enum_::enum_type_to_sim_builder + } + } else { + parse_quote_spanned! 
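// A hedged sketch of the `#[hdl(sim)]` expression forms rewritten above
// (types and bindings are illustrative): with the `sim` option, aggregate
// literals are built as `SimValue`s through `ToSimValue` instead of as
// hardware expressions through `ToExpr`, enum construction goes through the
// sim builder, and `#[hdl(sim)] if` branches on the boolean's `SimValue`.
//
//     let arr  = #[hdl(sim)] [a, b, c];
//     let pair = #[hdl(sim)] (x, y);
//     let s    = #[hdl(sim)] MyStruct { field: x };
//     let e    = #[hdl(sim)] MyEnum.Variant(x);
//     #[hdl(sim)] if cond { /* ... */ } else { /* ... */ }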
{span=> + ::fayalite::enum_::assert_is_enum_type + } + }; + *expr_method_call.receiver = ExprCall { + attrs: vec![], + func, + paren_token: Paren(span), + args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]), + } + .into(); + expr_method_call.into() + } } diff --git a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs index 1d53104..069f00d 100644 --- a/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs +++ b/crates/fayalite-proc-macros-impl/src/module/transform_body/expand_match.rs @@ -1,24 +1,121 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - fold::{impl_fold, DoFold}, - kw, - module::transform_body::{with_debug_clone_and_fold, Visitor}, Errors, HdlAttr, PairsIterExt, + fold::{DoFold, impl_fold}, + kw, + module::transform_body::{ + ExprOptions, Visitor, empty_let, with_debug_clone_and_fold, wrap_ty_with_expr, + }, }; use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt}; +use quote::{ToTokens, TokenStreamExt, format_ident, quote_spanned}; +use std::collections::BTreeSet; use syn::{ - fold::{fold_arm, fold_expr_match, fold_pat, Fold}, - parse::Nothing, + Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr, + PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment, + Token, TypePath, + fold::{Fold, fold_arm, fold_expr_match, fold_local, fold_pat}, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::{Brace, Paren}, - Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Member, Pat, PatIdent, PatOr, PatParen, - PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, PathSegment, Token, TypePath, }; +macro_rules! visit_trait { + ( + $($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)* + ) => { + trait VisitMatchPat<'a> { + $(fn $fn(&mut self, $value: &'a $Value) { + $fn(self, $value); + })* + } + + $($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)* + }; +} + +visit_trait! 
{ + fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) { + let MatchPatBinding { ident: _ } = v; + } + fn visit_match_pat_wild(_state: _, v: &MatchPatWild) { + let MatchPatWild { underscore_token: _ } = v; + } + fn visit_match_pat_rest(_state: _, v: &MatchPatRest) { + let MatchPatRest { dot2_token: _ } = v; + } + fn visit_match_pat_paren(state: _, v: &MatchPatParen) { + let MatchPatParen { paren_token: _, pat } = v; + state.visit_match_pat(pat); + } + fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen) { + let MatchPatParen { paren_token: _, pat } = v; + state.visit_match_pat_simple(pat); + } + fn visit_match_pat_or(state: _, v: &MatchPatOr) { + let MatchPatOr { leading_vert: _, cases } = v; + for v in cases { + state.visit_match_pat(v); + } + } + fn visit_match_pat_or_simple(state: _, v: &MatchPatOr) { + let MatchPatOr { leading_vert: _, cases } = v; + for v in cases { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) { + let MatchPatStructField { field_name: _, colon_token: _, pat } = v; + state.visit_match_pat_simple(pat); + } + fn visit_match_pat_struct(state: _, v: &MatchPatStruct) { + let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v; + for v in fields { + state.visit_match_pat_struct_field(v); + } + } + fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) { + let MatchPatTuple { paren_token: _, fields } = v; + for v in fields { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) { + let MatchPatEnumVariant { + match_span:_, + sim:_, + variant_path: _, + enum_path: _, + variant_name: _, + field, + } = v; + if let Some((_, v)) = field { + state.visit_match_pat_simple(v); + } + } + fn visit_match_pat_simple(state: _, v: &MatchPatSimple) { + match v { + MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v), + MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v), + MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v), + MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v), + MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v), + } + } + fn visit_match_pat(state: _, v: &MatchPat) { + match v { + MatchPat::Simple(v) => state.visit_match_pat_simple(v), + MatchPat::Or(v) => state.visit_match_pat_or(v), + MatchPat::Paren(v) => state.visit_match_pat_paren(v), + MatchPat::Struct(v) => state.visit_match_pat_struct(v), + MatchPat::Tuple(v) => state.visit_match_pat_tuple(v), + MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v), + } + } +} + with_debug_clone_and_fold! { struct MatchPatBinding<> { ident: Ident, @@ -53,6 +150,15 @@ with_debug_clone_and_fold! { } } +impl
<P>
MatchPatOr<P>
{ + /// returns the first `|` between two patterns + fn first_inner_vert(&self) -> Option { + let mut pairs = self.cases.pairs(); + pairs.next_back(); + pairs.next().and_then(|v| v.into_tuple().1.copied()) + } +} + impl ToTokens for MatchPatOr
<P>
{ fn to_tokens(&self, tokens: &mut TokenStream) { let Self { @@ -77,6 +183,19 @@ impl ToTokens for MatchPatWild { } } +with_debug_clone_and_fold! { + struct MatchPatRest<> { + dot2_token: Token![..], + } +} + +impl ToTokens for MatchPatRest { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { dot2_token } = self; + dot2_token.to_tokens(tokens); + } +} + with_debug_clone_and_fold! { struct MatchPatStructField<> { field_name: Ident, @@ -159,9 +278,29 @@ impl ToTokens for MatchPatStruct { } } +with_debug_clone_and_fold! { + struct MatchPatTuple<> { + paren_token: Paren, + fields: Punctuated, + } +} + +impl ToTokens for MatchPatTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + paren_token, + fields, + } = self; + paren_token.surround(tokens, |tokens| { + fields.to_tokens(tokens); + }) + } +} + with_debug_clone_and_fold! { struct MatchPatEnumVariant<> { match_span: Span, + sim: Option<(kw::sim,)>, variant_path: Path, enum_path: Path, variant_name: Ident, @@ -173,6 +312,7 @@ impl ToTokens for MatchPatEnumVariant { fn to_tokens(&self, tokens: &mut TokenStream) { let Self { match_span, + sim, variant_path: _, enum_path, variant_name, @@ -182,7 +322,28 @@ impl ToTokens for MatchPatEnumVariant { __MatchTy::<#enum_path>::#variant_name } .to_tokens(tokens); - if let Some((paren_token, field)) = field { + if sim.is_some() { + if let Some((paren_token, field)) = field { + paren_token.surround(tokens, |tokens| { + field.to_tokens(tokens); + match field { + MatchPatSimple::Paren(_) + | MatchPatSimple::Or(_) + | MatchPatSimple::Binding(_) + | MatchPatSimple::Wild(_) => quote_spanned! {*match_span=> + , _ + } + .to_tokens(tokens), + MatchPatSimple::Rest(_) => {} + } + }); + } else { + quote_spanned! {*match_span=> + (_) + } + .to_tokens(tokens); + } + } else if let Some((paren_token, field)) = field { paren_token.surround(tokens, |tokens| field.to_tokens(tokens)); } } @@ -194,6 +355,7 @@ enum MatchPatSimple { Or(MatchPatOr), Binding(MatchPatBinding), Wild(MatchPatWild), + Rest(MatchPatRest), } impl_fold! { @@ -202,6 +364,7 @@ impl_fold! 
{ Or(MatchPatOr), Binding(MatchPatBinding), Wild(MatchPatWild), + Rest(MatchPatRest), } } @@ -212,17 +375,18 @@ impl ToTokens for MatchPatSimple { Self::Paren(v) => v.to_tokens(tokens), Self::Binding(v) => v.to_tokens(tokens), Self::Wild(v) => v.to_tokens(tokens), + Self::Rest(v) => v.to_tokens(tokens), } } } -struct EnumPath { - variant_path: Path, - enum_path: Path, - variant_name: Ident, +pub(crate) struct EnumPath { + pub(crate) variant_path: Path, + pub(crate) enum_path: Path, + pub(crate) variant_name: Ident, } -fn parse_enum_path(variant_path: TypePath) -> Result { +pub(crate) fn parse_enum_path(variant_path: TypePath) -> Result { let TypePath { qself: None, path: variant_path, @@ -278,8 +442,9 @@ trait ParseMatchPat: Sized { fn or(v: MatchPatOr) -> Self; fn paren(v: MatchPatParen) -> Self; fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result; + fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result; fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant) - -> Result; + -> Result; fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result { match pat { Pat::Ident(PatIdent { @@ -313,6 +478,7 @@ trait ParseMatchPat: Sized { state, MatchPatEnumVariant { match_span: state.match_span, + sim: state.sim, variant_path, enum_path, variant_name, @@ -359,6 +525,7 @@ trait ParseMatchPat: Sized { state, MatchPatEnumVariant { match_span: state.match_span, + sim: state.sim, variant_path, enum_path, variant_name, @@ -443,6 +610,7 @@ trait ParseMatchPat: Sized { state, MatchPatEnumVariant { match_span: state.match_span, + sim: state.sim, variant_path, enum_path, variant_name, @@ -462,7 +630,34 @@ trait ParseMatchPat: Sized { }) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild { underscore_token, }))), - Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => { + Pat::Tuple(PatTuple { + attrs: _, + paren_token, + elems, + }) => { + let fields = elems + .into_pairs() + .filter_map_pair_value(|field_pat| { + if let Pat::Rest(PatRest { + attrs: _, + dot2_token, + }) = field_pat + { + Some(MatchPatSimple::Rest(MatchPatRest { dot2_token })) + } else { + MatchPatSimple::parse(state, field_pat).ok() + } + }) + .collect(); + Self::tuple( + state, + MatchPatTuple { + paren_token, + fields, + }, + ) + } + Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => { state .errors .error(pat, "not yet implemented in #[hdl] patterns"); @@ -497,6 +692,14 @@ impl ParseMatchPat for MatchPatSimple { Err(()) } + fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result { + state.errors.push(syn::Error::new( + v.paren_token.span.open(), + "matching tuples is not yet implemented inside structs/enums in #[hdl] patterns", + )); + Err(()) + } + fn enum_variant( state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant, @@ -515,6 +718,7 @@ enum MatchPat { Or(MatchPatOr), Paren(MatchPatParen), Struct(MatchPatStruct), + Tuple(MatchPatTuple), EnumVariant(MatchPatEnumVariant), } @@ -524,6 +728,7 @@ impl_fold! 
{ Or(MatchPatOr), Paren(MatchPatParen), Struct(MatchPatStruct), + Tuple(MatchPatTuple), EnumVariant(MatchPatEnumVariant), } } @@ -545,6 +750,10 @@ impl ParseMatchPat for MatchPat { Ok(Self::Struct(v)) } + fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result { + Ok(Self::Tuple(v)) + } + fn enum_variant( _state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant, @@ -560,6 +769,7 @@ impl ToTokens for MatchPat { Self::Or(v) => v.to_tokens(tokens), Self::Paren(v) => v.to_tokens(tokens), Self::Struct(v) => v.to_tokens(tokens), + Self::Tuple(v) => v.to_tokens(tokens), Self::EnumVariant(v) => v.to_tokens(tokens), } } @@ -622,10 +832,6 @@ struct RewriteAsCheckMatch { } impl Fold for RewriteAsCheckMatch { - fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat { - i.colon_token = Some(Token![:](i.member.span())); - i - } fn fold_pat(&mut self, pat: Pat) -> Pat { match pat { Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) { @@ -740,17 +946,177 @@ impl Fold for RewriteAsCheckMatch { // don't recurse into expressions i } + fn fold_local(&mut self, mut let_stmt: Local) -> Local { + if let Some(syn::LocalInit { + eq_token, + expr: _, + diverge, + }) = let_stmt.init.take() + { + let_stmt.init = Some(syn::LocalInit { + eq_token, + expr: parse_quote_spanned! {self.span=> + __match_value + }, + diverge: diverge.map(|(else_, _expr)| { + ( + else_, + parse_quote_spanned! {self.span=> + match __infallible {} + }, + ) + }), + }); + } + fold_local(self, let_stmt) + } } struct HdlMatchParseState<'a> { + sim: Option<(kw::sim,)>, match_span: Span, errors: &'a mut Errors, } +struct HdlLetPatVisitState<'a> { + errors: &'a mut Errors, + bindings: BTreeSet<&'a Ident>, +} + +impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> { + fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) { + self.bindings.insert(&v.ident); + } + + fn visit_match_pat_or(&mut self, v: &'a MatchPatOr) { + if let Some(first_inner_vert) = v.first_inner_vert() { + self.errors.error( + first_inner_vert, + "or-patterns are not supported in let statements", + ); + } + visit_match_pat_or(self, v); + } + + fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr) { + if let Some(first_inner_vert) = v.first_inner_vert() { + self.errors.error( + first_inner_vert, + "or-patterns are not supported in let statements", + ); + } + visit_match_pat_or_simple(self, v); + } + + fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) { + self.errors.error(v, "refutable pattern in let statement"); + } +} + impl Visitor<'_> { + pub(crate) fn process_hdl_let_pat( + &mut self, + hdl_attr: HdlAttr, + mut let_stmt: Local, + ) -> Local { + let span = let_stmt.let_token.span(); + let ExprOptions { sim } = hdl_attr.body; + if let Pat::Type(pat) = &mut let_stmt.pat { + *pat.ty = wrap_ty_with_expr((*pat.ty).clone()); + } + let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone()); + let Local { + attrs: _, + let_token, + pat, + init, + semi_token, + } = let_stmt; + let Some(syn::LocalInit { + eq_token, + expr, + diverge, + }) = init + else { + self.errors + .error(let_token, "#[hdl] let must be assigned a value"); + return empty_let(); + }; + if let Some((else_, _)) = diverge { + // TODO: implement let-else + self.errors + .error(else_, "#[hdl] let ... else { ... 
} is not implemented"); + return empty_let(); + } + let Ok(pat) = MatchPat::parse( + &mut HdlMatchParseState { + sim, + match_span: span, + errors: &mut self.errors, + }, + pat, + ) else { + return empty_let(); + }; + let mut state = HdlLetPatVisitState { + errors: &mut self.errors, + bindings: BTreeSet::new(), + }; + state.visit_match_pat(&pat); + let HdlLetPatVisitState { + errors: _, + bindings, + } = state; + let retval = if sim.is_some() { + parse_quote_spanned! {span=> + let (#(#bindings,)*) = { + type __MatchTy = ::SimValue; + let __match_value = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)); + #let_token #pat #eq_token ::fayalite::sim::value::SimValue::into_value(__match_value) #semi_token + (#(#bindings,)*) + }; + } + } else { + parse_quote_spanned! {span=> + let (#(#bindings,)* __scope,) = { + type __MatchTy = ::MatchVariant; + let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); + ::fayalite::expr::check_match_expr( + __match_expr, + |__match_value, __infallible| { + #[allow(unused_variables)] + #check_let_stmt + match __infallible {} + }, + ); + let mut __match_iter = ::fayalite::module::match_(__match_expr); + let ::fayalite::__std::option::Option::Some(__match_variant) = + ::fayalite::__std::iter::Iterator::next(&mut __match_iter) + else { + ::fayalite::__std::unreachable!("#[hdl] let with uninhabited type"); + }; + let ::fayalite::__std::option::Option::None = + ::fayalite::__std::iter::Iterator::next(&mut __match_iter) + else { + ::fayalite::__std::unreachable!("#[hdl] let with refutable pattern"); + }; + let (__match_variant, __scope) = + ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( + __match_variant, + ); + #let_token #pat #eq_token __match_variant #semi_token + (#(#bindings,)* __scope,) + }; + } + }; + match retval { + syn::Stmt::Local(retval) => retval, + _ => unreachable!(), + } + } pub(crate) fn process_hdl_match( &mut self, - _hdl_attr: HdlAttr, + hdl_attr: HdlAttr, expr_match: ExprMatch, ) -> Expr { let span = expr_match.match_token.span(); @@ -762,8 +1128,9 @@ impl Visitor<'_> { brace_token: _, arms, } = expr_match; - self.require_normal_module_or_fn(match_token); + let ExprOptions { sim } = hdl_attr.body; let mut state = HdlMatchParseState { + sim, match_span: span, errors: &mut self.errors, }; @@ -771,24 +1138,36 @@ impl Visitor<'_> { arms.into_iter() .filter_map(|arm| MatchArm::parse(&mut state, arm).ok()), ); - let expr = quote_spanned! {span=> - { - type __MatchTy = ::MatchVariant; - let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); - ::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| { - #[allow(unused_variables)] - #check_match - }); - for __match_variant in ::fayalite::module::match_(__match_expr) { - let (__match_variant, __scope) = - ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( - __match_variant, - ); - #match_token __match_variant { + let expr = if sim.is_some() { + quote_spanned! {span=> + { + type __MatchTy = ::SimValue; + let __match_expr = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr)); + #match_token ::fayalite::sim::value::SimValue::into_value(__match_expr) { #(#arms)* } } } + } else { + quote_spanned! 
{span=> + { + type __MatchTy = ::MatchVariant; + let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr)); + ::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| { + #[allow(unused_variables)] + #check_match + }); + for __match_variant in ::fayalite::module::match_(__match_expr) { + let (__match_variant, __scope) = + ::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope( + __match_variant, + ); + #match_token __match_variant { + #(#arms)* + } + } + } + } }; syn::parse2(expr).unwrap() } diff --git a/crates/fayalite-proc-macros-impl/src/process_cfg.rs b/crates/fayalite-proc-macros-impl/src/process_cfg.rs new file mode 100644 index 0000000..bcf2fa1 --- /dev/null +++ b/crates/fayalite-proc-macros-impl/src/process_cfg.rs @@ -0,0 +1,2527 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{Cfg, CfgAttr, Cfgs, Errors}; +use proc_macro2::Ident; +use std::{collections::VecDeque, marker::PhantomData}; +use syn::{ + Token, + punctuated::{Pair, Punctuated}, +}; + +struct State { + cfgs: Cfgs, + errors: Errors, + _phantom: PhantomData
<P>
, +} + +impl<P: Phase> State<P>
{ + #[must_use] + fn eval_cfg(&mut self, cfg: Cfg) -> bool { + struct MyDispatch<'a> { + cfg: Cfg, + _phantom: PhantomData<&'a ()>, + } + impl<'a> PhaseDispatch for MyDispatch<'a> { + type Args = &'a mut State
<P>
; + type Output = bool; + + fn dispatch_collect( + self, + args: Self::Args, + ) -> Self::Output { + args.cfgs.insert_cfg(self.cfg, ()); + true + } + + fn dispatch_process( + self, + args: Self::Args, + ) -> Self::Output { + if let Some(&retval) = args.cfgs.cfgs_map.get(&self.cfg) { + retval + } else { + args.errors.error(self.cfg, "unrecognized cfg -- cfg wasn't evaluated when running `__cfg_expansion_helper!`"); + true + } + } + } + P::dispatch( + MyDispatch { + cfg, + _phantom: PhantomData, + }, + self, + ) + } + #[must_use] + fn eval_cfgs( + &mut self, + mut attrs: Vec, + ) -> Option, P>> { + let mut queue = VecDeque::from(attrs); + attrs = Vec::with_capacity(queue.len()); // cfg_attr is rare, and cfg can't increase length + while let Some(attr) = queue.pop_front() { + if attr.path().is_ident("cfg") { + if let Some(cfg) = self.errors.ok(Cfg::parse_meta(&attr.meta)) { + if !self.eval_cfg(cfg) { + return None; + } + continue; + } + } else if attr.path().is_ident("cfg_attr") { + if let Some(cfg_attr) = self.errors.ok(CfgAttr::parse_meta(&attr.meta)) { + if self.eval_cfg(cfg_attr.to_cfg()) { + // push onto queue since cfg_attr(, cfg_attr(, )) is valid + for meta in cfg_attr.attrs { + queue.push_front(syn::Attribute { + pound_token: attr.pound_token, + style: attr.style, + bracket_token: attr.bracket_token, + meta, + }); + } + } + continue; + } + } + attrs.push(attr); + } + Some(Output::new(attrs)) + } + fn process_qself_and_path( + &mut self, + qself: Option, + path: syn::Path, + ) -> Option<(Output, P>, Output)> { + let qself = if let Some(syn::QSelf { + lt_token, + ty, + position, + as_token, + gt_token, + }) = qself + { + ty.process(self)?.map(|ty| { + Some(syn::QSelf { + lt_token, + ty, + position, + as_token, + gt_token, + }) + }) + } else { + Output::new(None) + }; + let syn::Path { + leading_colon, + segments, + } = path; + // path segments don't get removed + let path = segments.process(self)?.map(|segments| syn::Path { + leading_colon, + segments, + }); + Some((qself, path)) + } +} + +trait PhaseDispatch { + type Args; + type Output; + fn dispatch_collect(self, args: Self::Args) + -> Self::Output; + fn dispatch_process(self, args: Self::Args) + -> Self::Output; +} + +trait Phase: Sized + 'static { + type Output; + type CfgsValue; + fn output_new(v: T) -> Output; + fn output_map U>(v: Output, f: F) -> Output; + fn output_zip(t: Output, u: Output) -> Output<(T, U), Self>; + fn dispatch(d: D, args: D::Args) -> D::Output; +} + +struct CollectCfgsPhase; + +impl Phase for CollectCfgsPhase { + type Output = (); + type CfgsValue = (); + + fn output_new(_v: T) -> Output { + Output(()) + } + + fn output_map U>(_v: Output, _f: F) -> Output { + Output(()) + } + + fn output_zip(_t: Output, _u: Output) -> Output<(T, U), Self> { + Output(()) + } + + fn dispatch(d: D, args: D::Args) -> D::Output { + d.dispatch_collect(args) + } +} + +struct ProcessCfgsPhase; + +impl Phase for ProcessCfgsPhase { + type Output = T; + type CfgsValue = bool; + + fn output_new(v: T) -> Output { + Output(v) + } + + fn output_map U>(v: Output, f: F) -> Output { + Output(f(v.0)) + } + + fn output_zip(t: Output, u: Output) -> Output<(T, U), Self> { + Output((t.0, u.0)) + } + + fn dispatch(d: D, args: D::Args) -> D::Output { + d.dispatch_process(args) + } +} + +struct Output(P::Output); + +trait OutputZip: Sized { + type Output; + fn zip(self) -> Output; + fn call R>(self, f: F) -> Output { + self.zip().map(f) + } +} + +impl OutputZip
<P>
for () { + type Output = (); + + fn zip(self) -> Output { + Output::new(()) + } +} + +impl OutputZip
<P>
for (Output,) { + type Output = (T,); + + fn zip(self) -> Output { + self.0.map(|v| (v,)) + } +} + +macro_rules! impl_zip { + ($first_arg:ident: $first_T:ident, $($arg:ident: $T:ident),* $(,)?) => { + impl_zip!(@step [], [($first_arg: $first_T) $(($arg: $T))*], (),); + }; + ( + @impl($first_arg:tt,), + $tuple_pat:tt, + ) => {}; + ( + @impl(($first_arg:ident: $first_T:ident), + $(($arg:ident: $T:ident),)*), + $tuple_pat:tt, + ) => { + impl<$first_T, $($T,)* P: Phase> OutputZip
<P>
for (Output<$first_T, P>, $(Output<$T, P>),*) { + type Output = ($first_T, $($T),*); + fn zip(self) -> Output<($first_T, $($T),*), P> { + let (tuples, $($arg),*) = self; + $(let tuples = P::output_zip(tuples, $arg);)* + tuples.map(|$tuple_pat| ($first_arg, $($arg),*)) + } + } + }; + ( + @step [$($cur:tt)*], + [], + $tuple_pat:tt, + ) => {}; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + (), + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), $next_arg,); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], $next_arg,); + }; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + $tuple_pat:tt, + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), ($tuple_pat, $next_arg),); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], ($tuple_pat, $next_arg),); + }; +} + +impl_zip!(t0: T0, t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11); + +impl Copy for Output where P::Output: Copy {} + +impl Clone for Output +where + P::Output: Clone, +{ + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Output { + fn new(v: T) -> Self { + P::output_new(v) + } + fn map U>(self, f: F) -> Output { + P::output_map(self, f) + } +} + +trait Process: Sized { + #[must_use] + fn process(self, state: &mut State
<P>
) -> Option>; +} + +impl Process
<P>
for syn::Item { + fn process(self, _state: &mut State
<P>
) -> Option> { + // don't recurse into items + Some(Output::new(self)) + } +} + +impl Process
<P>
for Vec { + fn process(self, state: &mut State
<P>
) -> Option> { + state.eval_cfgs(self) + } +} + +impl, P: Phase> Process
<P>
for Box { + fn process(self, state: &mut State
<P>
) -> Option> { + Some(T::process(*self, state)?.map(Box::new)) + } +} + +trait ProcessVecElement { + const REMOVE_ELEMENTS: bool; +} + +impl ProcessVecElement for syn::Arm { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::Stmt { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::ForeignItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::ImplItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::Item { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessVecElement for syn::TraitItem { + const REMOVE_ELEMENTS: bool = true; +} + +impl + ProcessVecElement, P: Phase> Process
<P>
for Vec { + fn process(self, state: &mut State
<P>
) -> Option> { + let mut output = Output::new(Vec::new()); + for value in self { + if let Some(value) = value.process(state) { + output = (output, value).call(|(mut output, value)| { + output.push(value); + output + }); + } else if !T::REMOVE_ELEMENTS { + return None; + } + } + Some(output) + } +} + +trait ProcessOption { + /// if a configured-off value causes this value to be `None` instead of propagating the configuring-off + const REMOVE_VALUE: bool; +} + +impl ProcessOption for syn::Abi { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Block { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::WhereClause { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Expr { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Type { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for Box { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::AngleBracketedGenericArguments { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::ImplRestriction { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::BoundLifetimes { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (Token![=], syn::Expr) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![=], syn::Type) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![if], Box) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![else], Box) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![&], Option) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Token![as], Ident) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Ident, Token![:]) { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Option, syn::Path, Token![for]) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::BareVariadic { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::Variadic { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::LocalInit { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for syn::Label { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for syn::PatRest { + const REMOVE_VALUE: bool = true; +} + +impl ProcessOption for (Box, Token![:]) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (Token![@], Box) { + const REMOVE_VALUE: bool = false; +} + +impl ProcessOption for (syn::token::Brace, Vec) { + const REMOVE_VALUE: bool = false; +} + +impl + ProcessOption, P: Phase> Process
<P>
for Option { + fn process(self, state: &mut State
<P>
) -> Option> { + if let Some(this) = self { + match this.process(state) { + Some(v) => Some(v.map(Some)), + None => { + if T::REMOVE_VALUE { + Some(Output::new(None)) + } else { + None + } + } + } + } else { + Some(Output::new(None)) + } + } +} + +trait ProcessPunctuatedElement { + const REMOVE_ELEMENTS: bool; +} + +impl + ProcessPunctuatedElement, P: Phase, Punct: Default> Process
<P>
+ for Punctuated +{ + fn process(self, state: &mut State
<P>
) -> Option> { + let mut output = Output::new(Punctuated::::new()); + for pair in self.into_pairs() { + let (value, punct) = pair.into_tuple(); + if let Some(value) = value.process(state) { + output = (output, value).call(|(mut output, value)| { + output.extend([Pair::new(value, punct)]); + output + }); + } else if !T::REMOVE_ELEMENTS { + return None; + } + } + Some(output) + } +} + +impl ProcessPunctuatedElement for syn::PathSegment { + const REMOVE_ELEMENTS: bool = false; +} + +impl ProcessPunctuatedElement for syn::Type { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Expr { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Pat { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::CapturedParam { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::GenericArgument { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::GenericParam { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Lifetime { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::WherePredicate { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Variant { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FnArg { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::BareFnArg { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::TypeParamBound { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FieldValue { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::Field { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::FieldPat { + const REMOVE_ELEMENTS: bool = true; +} + +impl ProcessPunctuatedElement for syn::UseTree { + const REMOVE_ELEMENTS: bool = true; +} + +impl, U: Process
<P>
, P: Phase> Process
<P>
for (T, U) { + fn process(self, state: &mut State
<P>
) -> Option> { + let (t, u) = self; + let t = t.process(state)?; + let u = u.process(state)?; + Some((t, u).zip()) + } +} + +impl, U: Process
<P>
, V: Process
<P>
, P: Phase> Process
<P>
for (T, U, V) { + fn process(self, state: &mut State
<P>
) -> Option> { + let (t, u, v) = self; + let t = t.process(state)?; + let u = u.process(state)?; + let v = v.process(state)?; + Some((t, u, v).zip()) + } +} + +macro_rules! process_no_op { + ($ty:ty) => { + impl Process
<P>
for $ty { + fn process(self, _state: &mut State
<P>
) -> Option> { + Some(Output::new(self)) + } + } + + impl ProcessOption for $ty { + const REMOVE_VALUE: bool = false; + } + }; +} + +process_no_op!(Token![Self]); +process_no_op!(Token![abstract]); +process_no_op!(Token![as]); +process_no_op!(Token![async]); +process_no_op!(Token![auto]); +process_no_op!(Token![await]); +process_no_op!(Token![become]); +process_no_op!(Token![box]); +process_no_op!(Token![break]); +process_no_op!(Token![const]); +process_no_op!(Token![continue]); +process_no_op!(Token![crate]); +process_no_op!(Token![default]); +process_no_op!(Token![do]); +process_no_op!(Token![dyn]); +process_no_op!(Token![else]); +process_no_op!(Token![enum]); +process_no_op!(Token![extern]); +process_no_op!(Token![final]); +process_no_op!(Token![fn]); +process_no_op!(Token![for]); +process_no_op!(Token![if]); +process_no_op!(Token![impl]); +process_no_op!(Token![in]); +process_no_op!(Token![let]); +process_no_op!(Token![loop]); +process_no_op!(Token![macro]); +process_no_op!(Token![match]); +process_no_op!(Token![mod]); +process_no_op!(Token![move]); +process_no_op!(Token![mut]); +process_no_op!(Token![override]); +process_no_op!(Token![priv]); +process_no_op!(Token![pub]); +process_no_op!(Token![raw]); +process_no_op!(Token![ref]); +process_no_op!(Token![return]); +process_no_op!(Token![self]); +process_no_op!(Token![static]); +process_no_op!(Token![struct]); +process_no_op!(Token![super]); +process_no_op!(Token![trait]); +process_no_op!(Token![try]); +process_no_op!(Token![type]); +process_no_op!(Token![typeof]); +process_no_op!(Token![union]); +process_no_op!(Token![unsafe]); +process_no_op!(Token![unsized]); +process_no_op!(Token![use]); +process_no_op!(Token![virtual]); +process_no_op!(Token![where]); +process_no_op!(Token![while]); +process_no_op!(Token![yield]); + +process_no_op!(Token![!]); +process_no_op!(Token![!=]); +process_no_op!(Token![#]); +process_no_op!(Token![$]); +process_no_op!(Token![%]); +process_no_op!(Token![%=]); +process_no_op!(Token![&]); +process_no_op!(Token![&&]); +process_no_op!(Token![&=]); +process_no_op!(Token![*]); +process_no_op!(Token![*=]); +process_no_op!(Token![+]); +process_no_op!(Token![+=]); +process_no_op!(Token![,]); +process_no_op!(Token![-]); +process_no_op!(Token![-=]); +process_no_op!(Token![->]); +process_no_op!(Token![.]); +process_no_op!(Token![..]); +process_no_op!(Token![...]); +process_no_op!(Token![..=]); +process_no_op!(Token![/]); +process_no_op!(Token![/=]); +process_no_op!(Token![:]); +process_no_op!(Token![::]); +process_no_op!(Token![;]); +process_no_op!(Token![<]); +process_no_op!(Token![<-]); +process_no_op!(Token![<<]); +process_no_op!(Token![<<=]); +process_no_op!(Token![<=]); +process_no_op!(Token![=]); +process_no_op!(Token![==]); +process_no_op!(Token![=>]); +process_no_op!(Token![>]); +process_no_op!(Token![>=]); +process_no_op!(Token![>>]); +process_no_op!(Token![>>=]); +process_no_op!(Token![?]); +process_no_op!(Token![@]); +process_no_op!(Token![^]); +process_no_op!(Token![^=]); +process_no_op!(Token![_]); +process_no_op!(Token![|]); +process_no_op!(Token![|=]); +process_no_op!(Token![||]); +process_no_op!(Token![~]); + +process_no_op!(syn::token::Brace); +process_no_op!(syn::token::Bracket); +process_no_op!(syn::token::Paren); +process_no_op!(syn::token::Group); + +process_no_op!(Ident); +process_no_op!(syn::Index); +process_no_op!(syn::Lifetime); +process_no_op!(syn::LitBool); +process_no_op!(syn::LitByte); +process_no_op!(syn::LitByteStr); +process_no_op!(syn::LitChar); +process_no_op!(syn::LitCStr); 
+process_no_op!(syn::LitFloat); +process_no_op!(syn::LitInt); +process_no_op!(syn::LitStr); +process_no_op!(proc_macro2::TokenStream); +process_no_op!(proc_macro2::Literal); + +macro_rules! process_struct { + ($ty:path { + $($field:ident,)* + }) => { + impl Process
<P>
for $ty { + fn process(self, state: &mut State
<P>
) -> Option> { + let Self { + $($field,)* + } = self; + $(let $field = $field.process(state)?;)* + Some(($($field,)*).call(|($($field,)*)| Self { + $($field,)* + })) + } + } + }; + ($ty:path { + $($fields_before:ident,)* + #[qself] + $qself:ident, + $path:ident, + $($fields_after:ident,)* + }) => { + impl Process
<P>
for $ty { + fn process(self, state: &mut State
<P>
) -> Option> { + let Self { + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + } = self; + $(let $fields_before = $fields_before.process(state)?;)* + let ($qself, $path) = state.process_qself_and_path($qself, $path)?; + $(let $fields_after = $fields_after.process(state)?;)* + Some(( + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + ).call(|( + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + )| Self { + $($fields_before,)* + $qself, + $path, + $($fields_after,)* + })) + } + } + }; +} + +process_struct! { + syn::Abi { + extern_token, + name, + } +} + +process_struct! { + syn::AngleBracketedGenericArguments { + colon2_token, + lt_token, + args, + gt_token, + } +} + +process_struct! { + syn::Arm { + attrs, + pat, + guard, + fat_arrow_token, + body, + comma, + } +} + +process_struct! { + syn::AssocConst { + ident, + generics, + eq_token, + value, + } +} + +process_struct! { + syn::AssocType { + ident, + generics, + eq_token, + ty, + } +} + +process_struct! { + syn::BareFnArg { + attrs, + name, + ty, + } +} + +process_struct! { + syn::BareVariadic { + attrs, + name, + dots, + comma, + } +} + +process_struct! { + syn::Block { + brace_token, + stmts, + } +} + +process_struct! { + syn::BoundLifetimes { + for_token, + lt_token, + lifetimes, + gt_token, + } +} + +process_struct! { + syn::ConstParam { + attrs, + const_token, + ident, + colon_token, + ty, + eq_token, + default, + } +} + +process_struct! { + syn::Constraint { + ident, + generics, + colon_token, + bounds, + } +} + +process_struct! { + syn::DataEnum { + enum_token, + brace_token, + variants, + } +} + +process_struct! { + syn::DataStruct { + struct_token, + fields, + semi_token, + } +} + +process_struct! { + syn::DataUnion { + union_token, + fields, + } +} + +process_struct! { + syn::DeriveInput { + attrs, + vis, + ident, + generics, + data, + } +} + +process_struct! { + syn::ExprArray { + attrs, + bracket_token, + elems, + } +} + +process_struct! { + syn::ExprAssign { + attrs, + left, + eq_token, + right, + } +} + +process_struct! { + syn::ExprAsync { + attrs, + async_token, + capture, + block, + } +} + +process_struct! { + syn::ExprAwait { + attrs, + base, + dot_token, + await_token, + } +} + +process_struct! { + syn::ExprBinary { + attrs, + left, + op, + right, + } +} + +process_struct! { + syn::ExprBlock { + attrs, + label, + block, + } +} + +process_struct! { + syn::ExprBreak { + attrs, + break_token, + label, + expr, + } +} + +process_struct! { + syn::ExprCall { + attrs, + func, + paren_token, + args, + } +} + +process_struct! { + syn::ExprCast { + attrs, + expr, + as_token, + ty, + } +} + +process_struct! { + syn::ExprClosure { + attrs, + lifetimes, + constness, + movability, + asyncness, + capture, + or1_token, + inputs, + or2_token, + output, + body, + } +} + +process_struct! { + syn::ExprConst { + attrs, + const_token, + block, + } +} + +process_struct! { + syn::ExprContinue { + attrs, + continue_token, + label, + } +} + +process_struct! { + syn::ExprField { + attrs, + base, + dot_token, + member, + } +} + +process_struct! { + syn::ExprForLoop { + attrs, + label, + for_token, + pat, + in_token, + expr, + body, + } +} + +process_struct! { + syn::ExprGroup { + attrs, + group_token, + expr, + } +} + +process_struct! { + syn::ExprIf { + attrs, + if_token, + cond, + then_branch, + else_branch, + } +} + +process_struct! { + syn::ExprIndex { + attrs, + expr, + bracket_token, + index, + } +} + +process_struct! { + syn::ExprInfer { + attrs, + underscore_token, + } +} + +process_struct! 
{ + syn::ExprLet { + attrs, + let_token, + pat, + eq_token, + expr, + } +} + +process_struct! { + syn::ExprLit { + attrs, + lit, + } +} + +process_struct! { + syn::ExprLoop { + attrs, + label, + loop_token, + body, + } +} + +process_struct! { + syn::ExprMacro { + attrs, + mac, + } +} + +process_struct! { + syn::ExprMatch { + attrs, + match_token, + expr, + brace_token, + arms, + } +} + +process_struct! { + syn::ExprMethodCall { + attrs, + receiver, + dot_token, + method, + turbofish, + paren_token, + args, + } +} + +process_struct! { + syn::ExprParen { + attrs, + paren_token, + expr, + } +} + +process_struct! { + syn::ExprPath { + attrs, + #[qself] + qself, + path, + } +} + +process_struct! { + syn::ExprRange { + attrs, + start, + limits, + end, + } +} + +process_struct! { + syn::ExprRawAddr { + attrs, + and_token, + raw, + mutability, + expr, + } +} + +process_struct! { + syn::ExprReference { + attrs, + and_token, + mutability, + expr, + } +} + +process_struct! { + syn::ExprRepeat { + attrs, + bracket_token, + expr, + semi_token, + len, + } +} + +process_struct! { + syn::ExprReturn { + attrs, + return_token, + expr, + } +} + +process_struct! { + syn::ExprStruct { + attrs, + #[qself] + qself, + path, + brace_token, + fields, + dot2_token, + rest, + } +} + +process_struct! { + syn::ExprTry { + attrs, + expr, + question_token, + } +} + +process_struct! { + syn::ExprTryBlock { + attrs, + try_token, + block, + } +} + +process_struct! { + syn::ExprTuple { + attrs, + paren_token, + elems, + } +} + +process_struct! { + syn::ExprUnary { + attrs, + op, + expr, + } +} + +process_struct! { + syn::ExprUnsafe { + attrs, + unsafe_token, + block, + } +} + +process_struct! { + syn::ExprWhile { + attrs, + label, + while_token, + cond, + body, + } +} + +process_struct! { + syn::ExprYield { + attrs, + yield_token, + expr, + } +} + +process_struct! { + syn::Field { + attrs, + vis, + mutability, + ident, + colon_token, + ty, + } +} + +process_struct! { + syn::FieldPat { + attrs, + member, + colon_token, + pat, + } +} + +process_struct! { + syn::FieldValue { + attrs, + member, + colon_token, + expr, + } +} + +process_struct! { + syn::FieldsNamed { + brace_token, + named, + } +} + +process_struct! { + syn::FieldsUnnamed { + paren_token, + unnamed, + } +} + +process_struct! { + syn::ForeignItemFn { + attrs, + vis, + sig, + semi_token, + } +} + +process_struct! { + syn::ForeignItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::ForeignItemStatic { + attrs, + vis, + static_token, + mutability, + ident, + colon_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ForeignItemType { + attrs, + vis, + type_token, + ident, + generics, + semi_token, + } +} + +process_struct! { + syn::Generics { + lt_token, + params, + gt_token, + where_clause, + } +} + +process_struct! { + syn::ImplItemConst { + attrs, + vis, + defaultness, + const_token, + ident, + generics, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! { + syn::ImplItemFn { + attrs, + vis, + defaultness, + sig, + block, + } +} + +process_struct! { + syn::ImplItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::ImplItemType { + attrs, + vis, + defaultness, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ItemConst { + attrs, + vis, + const_token, + ident, + generics, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! 
{ + syn::ItemEnum { + attrs, + vis, + enum_token, + ident, + generics, + brace_token, + variants, + } +} + +process_struct! { + syn::ItemExternCrate { + attrs, + vis, + extern_token, + crate_token, + ident, + rename, + semi_token, + } +} + +process_struct! { + syn::ItemFn { + attrs, + vis, + sig, + block, + } +} + +process_struct! { + syn::ItemForeignMod { + attrs, + unsafety, + abi, + brace_token, + items, + } +} + +process_struct! { + syn::ItemImpl { + attrs, + defaultness, + unsafety, + impl_token, + generics, + trait_, + self_ty, + brace_token, + items, + } +} + +process_struct! { + syn::ItemMacro { + attrs, + ident, + mac, + semi_token, + } +} + +process_struct! { + syn::ItemMod { + attrs, + vis, + unsafety, + mod_token, + ident, + content, + semi, + } +} + +process_struct! { + syn::ItemStatic { + attrs, + vis, + static_token, + mutability, + ident, + colon_token, + ty, + eq_token, + expr, + semi_token, + } +} + +process_struct! { + syn::ItemStruct { + attrs, + vis, + struct_token, + ident, + generics, + fields, + semi_token, + } +} + +process_struct! { + syn::ItemTrait { + attrs, + vis, + unsafety, + auto_token, + restriction, + trait_token, + ident, + generics, + colon_token, + supertraits, + brace_token, + items, + } +} + +process_struct! { + syn::ItemTraitAlias { + attrs, + vis, + trait_token, + ident, + generics, + eq_token, + bounds, + semi_token, + } +} + +process_struct! { + syn::ItemType { + attrs, + vis, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + } +} + +process_struct! { + syn::ItemUnion { + attrs, + vis, + union_token, + ident, + generics, + fields, + } +} + +process_struct! { + syn::ItemUse { + attrs, + vis, + use_token, + leading_colon, + tree, + semi_token, + } +} + +process_struct! { + syn::Label { + name, + colon_token, + } +} + +process_struct! { + syn::LifetimeParam { + attrs, + lifetime, + colon_token, + bounds, + } +} + +process_struct! { + syn::Local { + attrs, + let_token, + pat, + init, + semi_token, + } +} + +process_struct! { + syn::LocalInit { + eq_token, + expr, + diverge, + } +} + +process_struct! { + syn::Macro { + path, + bang_token, + delimiter, + tokens, + } +} + +process_struct! { + syn::MetaList { + path, + delimiter, + tokens, + } +} + +process_struct! { + syn::MetaNameValue { + path, + eq_token, + value, + } +} + +process_struct! { + syn::ParenthesizedGenericArguments { + paren_token, + inputs, + output, + } +} + +process_struct! { + syn::PatIdent { + attrs, + by_ref, + mutability, + ident, + subpat, + } +} + +process_struct! { + syn::PatOr { + attrs, + leading_vert, + cases, + } +} + +process_struct! { + syn::PatParen { + attrs, + paren_token, + pat, + } +} + +process_struct! { + syn::PatReference { + attrs, + and_token, + mutability, + pat, + } +} + +process_struct! { + syn::PatRest { + attrs, + dot2_token, + } +} + +process_struct! { + syn::PatSlice { + attrs, + bracket_token, + elems, + } +} + +process_struct! { + syn::PatStruct { + attrs, + #[qself] + qself, + path, + brace_token, + fields, + rest, + } +} + +process_struct! { + syn::PatTuple { + attrs, + paren_token, + elems, + } +} + +process_struct! { + syn::PatTupleStruct { + attrs, + #[qself] + qself, + path, + paren_token, + elems, + } +} + +process_struct! { + syn::PatType { + attrs, + pat, + colon_token, + ty, + } +} + +process_struct! { + syn::PatWild { + attrs, + underscore_token, + } +} + +process_struct! { + syn::Path { + leading_colon, + segments, + } +} + +process_struct! { + syn::PathSegment { + ident, + arguments, + } +} + +process_struct! 
{ + syn::PreciseCapture { + use_token, + lt_token, + params, + gt_token, + } +} + +process_struct! { + syn::PredicateLifetime { + lifetime, + colon_token, + bounds, + } +} + +process_struct! { + syn::PredicateType { + lifetimes, + bounded_ty, + colon_token, + bounds, + } +} + +process_struct! { + syn::Receiver { + attrs, + reference, + mutability, + self_token, + colon_token, + ty, + } +} + +process_struct! { + syn::Signature { + constness, + asyncness, + unsafety, + abi, + fn_token, + ident, + generics, + paren_token, + inputs, + variadic, + output, + } +} + +process_struct! { + syn::StmtMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::TraitBound { + paren_token, + modifier, + lifetimes, + path, + } +} + +process_struct! { + syn::TraitItemConst { + attrs, + const_token, + ident, + generics, + colon_token, + ty, + default, + semi_token, + } +} + +process_struct! { + syn::TraitItemFn { + attrs, + sig, + default, + semi_token, + } +} + +process_struct! { + syn::TraitItemMacro { + attrs, + mac, + semi_token, + } +} + +process_struct! { + syn::TraitItemType { + attrs, + type_token, + ident, + generics, + colon_token, + bounds, + default, + semi_token, + } +} + +process_struct! { + syn::TypeArray { + bracket_token, + elem, + semi_token, + len, + } +} + +process_struct! { + syn::TypeBareFn { + lifetimes, + unsafety, + abi, + fn_token, + paren_token, + inputs, + variadic, + output, + } +} + +process_struct! { + syn::TypeGroup { + group_token, + elem, + } +} + +process_struct! { + syn::TypeImplTrait { + impl_token, + bounds, + } +} + +process_struct! { + syn::TypeInfer { + underscore_token, + } +} + +process_struct! { + syn::TypeMacro { + mac, + } +} + +process_struct! { + syn::TypeNever { + bang_token, + } +} + +process_struct! { + syn::TypeParam { + attrs, + ident, + colon_token, + bounds, + eq_token, + default, + } +} + +process_struct! { + syn::TypeParen { + paren_token, + elem, + } +} + +process_struct! { + syn::TypePath { + #[qself] + qself, + path, + } +} + +process_struct! { + syn::TypePtr { + star_token, + const_token, + mutability, + elem, + } +} + +process_struct! { + syn::TypeReference { + and_token, + lifetime, + mutability, + elem, + } +} + +process_struct! { + syn::TypeSlice { + bracket_token, + elem, + } +} + +process_struct! { + syn::TypeTraitObject { + dyn_token, + bounds, + } +} + +process_struct! { + syn::TypeTuple { + paren_token, + elems, + } +} + +process_struct! { + syn::UseGlob { + star_token, + } +} + +process_struct! { + syn::UseGroup { + brace_token, + items, + } +} + +process_struct! { + syn::UseName { + ident, + } +} + +process_struct! { + syn::UsePath { + ident, + colon2_token, + tree, + } +} + +process_struct! { + syn::UseRename { + ident, + as_token, + rename, + } +} + +process_struct! { + syn::Variadic { + attrs, + pat, + dots, + comma, + } +} + +process_struct! { + syn::Variant { + attrs, + ident, + fields, + discriminant, + } +} + +process_struct! { + syn::VisRestricted { + pub_token, + paren_token, + in_token, + path, + } +} + +process_struct! { + syn::WhereClause { + where_token, + predicates, + } +} + +macro_rules! process_enum { + ($path:path { + $($variant:ident$(($($field:ident),* $(,)?))?,)* + }) => { + impl Process
<P>
for $path { + fn process(self, state: &mut State
<P>
) -> Option> { + match self { + $(Self::$variant$(($($field),*))? => Some(($($($field.process(state)?,)*)?).call(|($($($field,)*)?)| Self::$variant$(($($field),*))?)),)* + } + } + } + }; + ($path:path { + $($variant:ident$(($($field:ident),* $(,)?))?,)* + #[no_op] + _, + }) => { + impl Process
<P>
for $path { + fn process(self, state: &mut State
<P>
) -> Option> { + #![allow(unused_variables)] + match self { + $(Self::$variant$(($($field),*))? => Some(($($($field.process(state)?,)*)?).call(|($($($field,)*)?)| Self::$variant$(($($field),*))?)),)* + _ => Some(Output::new(self)), + } + } + } + }; +} + +process_enum! { + syn::AttrStyle { + Outer, + Inner(f0), + } +} + +process_enum! { + syn::BinOp { + Add(f0), + Sub(f0), + Mul(f0), + Div(f0), + Rem(f0), + And(f0), + Or(f0), + BitXor(f0), + BitAnd(f0), + BitOr(f0), + Shl(f0), + Shr(f0), + Eq(f0), + Lt(f0), + Le(f0), + Ne(f0), + Ge(f0), + Gt(f0), + AddAssign(f0), + SubAssign(f0), + MulAssign(f0), + DivAssign(f0), + RemAssign(f0), + BitXorAssign(f0), + BitAndAssign(f0), + BitOrAssign(f0), + ShlAssign(f0), + ShrAssign(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::CapturedParam { + Lifetime(f0), + Ident(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::Data { + Struct(f0), + Enum(f0), + Union(f0), + } +} + +process_enum! { + syn::Expr { + Array(f0), + Assign(f0), + Async(f0), + Await(f0), + Binary(f0), + Block(f0), + Break(f0), + Call(f0), + Cast(f0), + Closure(f0), + Const(f0), + Continue(f0), + Field(f0), + ForLoop(f0), + Group(f0), + If(f0), + Index(f0), + Infer(f0), + Let(f0), + Lit(f0), + Loop(f0), + Macro(f0), + Match(f0), + MethodCall(f0), + Paren(f0), + Path(f0), + Range(f0), + RawAddr(f0), + Reference(f0), + Repeat(f0), + Return(f0), + Struct(f0), + Try(f0), + TryBlock(f0), + Tuple(f0), + Unary(f0), + Unsafe(f0), + Verbatim(f0), + While(f0), + Yield(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::FieldMutability { + None, + #[no_op] + _, + } +} + +process_enum! { + syn::Fields { + Named(f0), + Unnamed(f0), + Unit, + } +} + +process_enum! { + syn::FnArg { + Receiver(f0), + Typed(f0), + } +} + +process_enum! { + syn::ForeignItem { + Fn(f0), + Static(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::GenericArgument { + Lifetime(f0), + Type(f0), + Const(f0), + AssocType(f0), + AssocConst(f0), + Constraint(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::GenericParam { + Lifetime(f0), + Type(f0), + Const(f0), + } +} + +process_enum! { + syn::ImplItem { + Const(f0), + Fn(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::ImplRestriction { + #[no_op] + _, + } +} + +process_enum! { + syn::Lit { + Str(f0), + ByteStr(f0), + CStr(f0), + Byte(f0), + Char(f0), + Int(f0), + Float(f0), + Bool(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::MacroDelimiter { + Paren(f0), + Brace(f0), + Bracket(f0), + } +} + +process_enum! { + syn::Member { + Named(f0), + Unnamed(f0), + } +} + +process_enum! { + syn::Meta { + Path(f0), + List(f0), + NameValue(f0), + } +} + +process_enum! { + syn::Pat { + Const(f0), + Ident(f0), + Lit(f0), + Macro(f0), + Or(f0), + Paren(f0), + Path(f0), + Range(f0), + Reference(f0), + Rest(f0), + Slice(f0), + Struct(f0), + Tuple(f0), + TupleStruct(f0), + Type(f0), + Verbatim(f0), + Wild(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::PathArguments { + None, + AngleBracketed(f0), + Parenthesized(f0), + } +} + +process_enum! { + syn::PointerMutability { + Const(f0), + Mut(f0), + } +} + +process_enum! { + syn::RangeLimits { + HalfOpen(f0), + Closed(f0), + } +} + +process_enum! { + syn::ReturnType { + Default, + Type(f0, f1), + } +} + +process_enum! { + syn::StaticMutability { + Mut(f0), + None, + #[no_op] + _, + } +} + +process_enum! { + syn::Stmt { + Local(f0), + Item(f0), + Expr(f0, f1), + Macro(f0), + } +} + +process_enum! 
{ + syn::TraitBoundModifier { + None, + Maybe(f0), + } +} + +process_enum! { + syn::TraitItem { + Const(f0), + Fn(f0), + Type(f0), + Macro(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::Type { + Array(f0), + BareFn(f0), + Group(f0), + ImplTrait(f0), + Infer(f0), + Macro(f0), + Never(f0), + Paren(f0), + Path(f0), + Ptr(f0), + Reference(f0), + Slice(f0), + TraitObject(f0), + Tuple(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::TypeParamBound { + Trait(f0), + Lifetime(f0), + PreciseCapture(f0), + Verbatim(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::UnOp { + Deref(f0), + Not(f0), + Neg(f0), + #[no_op] + _, + } +} + +process_enum! { + syn::UseTree { + Path(f0), + Name(f0), + Rename(f0), + Glob(f0), + Group(f0), + } +} + +process_enum! { + syn::Visibility { + Public(f0), + Restricted(f0), + Inherited, + } +} + +process_enum! { + syn::WherePredicate { + Lifetime(f0), + Type(f0), + #[no_op] + _, + } +} + +struct TopItem(syn::Item); + +impl Process
<P>
for TopItem { + fn process(self, state: &mut State
<P>
) -> Option> { + match self.0 { + syn::Item::Const(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Enum(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::ExternCrate(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Fn(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::ForeignMod(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Impl(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Macro(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Mod(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Static(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Struct(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Trait(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::TraitAlias(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Type(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Union(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + syn::Item::Use(item) => Some(item.process(state)?.map(Into::into).map(TopItem)), + _ => Some(Output::new(self)), + } + } +} + +pub(crate) fn process_cfgs(item: syn::Item, cfgs: Cfgs) -> syn::Result> { + let mut state = State:: { + cfgs, + errors: Errors::new(), + _phantom: PhantomData, + }; + let retval = TopItem(item).process(&mut state).map(|v| v.0.0); + state.errors.finish()?; + Ok(retval) +} + +pub(crate) fn collect_cfgs(item: syn::Item) -> syn::Result> { + let mut state = State:: { + cfgs: Cfgs::default(), + errors: Errors::new(), + _phantom: PhantomData, + }; + let (None | Some(Output(()))) = TopItem(item).process(&mut state); + state.errors.finish()?; + Ok(state.cfgs) +} diff --git a/crates/fayalite-visit-gen/src/lib.rs b/crates/fayalite-visit-gen/src/lib.rs index 008a4c6..81e4577 100644 --- a/crates/fayalite-visit-gen/src/lib.rs +++ b/crates/fayalite-visit-gen/src/lib.rs @@ -1,7 +1,7 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use proc_macro2::{Span, TokenStream}; -use quote::{format_ident, quote, ToTokens}; +use quote::{ToTokens, format_ident, quote}; use std::{collections::BTreeMap, fs}; use syn::{fold::Fold, parse_quote}; diff --git a/crates/fayalite/Cargo.toml b/crates/fayalite/Cargo.toml index 5724a80..fdf1c87 100644 --- a/crates/fayalite/Cargo.toml +++ b/crates/fayalite/Cargo.toml @@ -14,9 +14,11 @@ rust-version.workspace = true version.workspace = true [dependencies] +base64.workspace = true bitvec.workspace = true blake3.workspace = true clap.workspace = true +clap_complete.workspace = true ctor.workspace = true eyre.workspace = true fayalite-proc-macros.workspace = true @@ -24,20 +26,24 @@ hashbrown.workspace = true jobslot.workspace = true num-bigint.workspace = true num-traits.workspace = true -os_pipe.workspace = true +ordered-float.workspace = true +petgraph.workspace = true serde_json.workspace = true serde.workspace = true tempfile.workspace = true +vec_map.workspace = true which.workspace = true [dev-dependencies] trybuild.workspace = true +serde = { workspace = true, features = ["rc"] } [build-dependencies] fayalite-visit-gen.workspace = true [features] unstable-doc = [] +unstable-test-hasher = [] [package.metadata.docs.rs] features = ["unstable-doc"] diff --git a/crates/fayalite/build.rs 
b/crates/fayalite/build.rs index 24d8f31..c6680d5 100644 --- a/crates/fayalite/build.rs +++ b/crates/fayalite/build.rs @@ -5,6 +5,9 @@ use std::{env, fs, path::Path}; fn main() { println!("cargo::rustc-check-cfg=cfg(todo)"); + println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)"); + println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)"); + println!("cargo::rustc-cfg=cfg_true_for_tests"); let path = "visit_types.json"; println!("cargo::rerun-if-changed={path}"); println!("cargo::rerun-if-changed=build.rs"); diff --git a/crates/fayalite/examples/blinky.rs b/crates/fayalite/examples/blinky.rs index 87b77c1..75799fd 100644 --- a/crates/fayalite/examples/blinky.rs +++ b/crates/fayalite/examples/blinky.rs @@ -1,47 +1,64 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use clap::Parser; -use fayalite::{cli, prelude::*}; +use fayalite::prelude::*; #[hdl_module] -fn blinky(clock_frequency: u64) { - #[hdl] - let clk: Clock = m.input(); - #[hdl] - let rst: SyncReset = m.input(); +fn blinky(platform_io_builder: PlatformIOBuilder<'_>) { + let clk_input = + platform_io_builder.peripherals_with_type::()[0].use_peripheral(); + let rst = platform_io_builder.peripherals_with_type::()[0].use_peripheral(); let cd = #[hdl] ClockDomain { - clk, - rst: rst.to_reset(), + clk: clk_input.clk, + rst, }; - let max_value = clock_frequency / 2 - 1; + let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1; let int_ty = UInt::range_inclusive(0..=max_value); #[hdl] let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty)); #[hdl] let output_reg: Bool = reg_builder().clock_domain(cd).reset(false); #[hdl] + let rgb_output_reg = reg_builder().clock_domain(cd).reset( + #[hdl] + peripherals::RgbLed { + r: false, + g: false, + b: false, + }, + ); + #[hdl] if counter_reg.cmp_eq(max_value) { connect_any(counter_reg, 0u8); connect(output_reg, !output_reg); + connect(rgb_output_reg.r, !rgb_output_reg.r); + #[hdl] + if rgb_output_reg.r { + connect(rgb_output_reg.g, !rgb_output_reg.g); + #[hdl] + if rgb_output_reg.g { + connect(rgb_output_reg.b, !rgb_output_reg.b); + } + } } else { connect_any(counter_reg, counter_reg + 1_hdl_u1); } + for led in platform_io_builder.peripherals_with_type::() { + if let Ok(led) = led.try_use_peripheral() { + connect(led.on, output_reg); + } + } + for rgb_led in platform_io_builder.peripherals_with_type::() { + if let Ok(rgb_led) = rgb_led.try_use_peripheral() { + connect(rgb_led, rgb_output_reg); + } + } #[hdl] - let led: Bool = m.output(); - connect(led, output_reg); + let io = m.add_platform_io(platform_io_builder); } -#[derive(Parser)] -struct Cli { - /// clock frequency in hertz - #[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))] - clock_frequency: u64, - #[command(subcommand)] - cli: cli::Cli, -} - -fn main() -> cli::Result { - let cli = Cli::parse(); - cli.cli.run(blinky(cli.clock_frequency)) +fn main() { + ::main("blinky", |_, platform, _| { + Ok(JobParams::new(platform.wrap_main_module(blinky))) + }); } diff --git a/crates/fayalite/examples/tx_only_uart.rs b/crates/fayalite/examples/tx_only_uart.rs new file mode 100644 index 0000000..5c20b39 --- /dev/null +++ b/crates/fayalite/examples/tx_only_uart.rs @@ -0,0 +1,188 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +use clap::builder::TypedValueParser; +use fayalite::{ + build::{ToArgs, WriteArgs}, + platform::PeripheralRef, + prelude::*, +}; +use 
ordered_float::NotNan; + +fn pick_clock<'a>( + platform_io_builder: &PlatformIOBuilder<'a>, +) -> PeripheralRef<'a, peripherals::ClockInput> { + let mut clks = platform_io_builder.peripherals_with_type::(); + clks.sort_by_key(|clk| { + // sort clocks by preference, smaller return values means higher preference + let mut frequency = clk.ty().frequency(); + let priority; + if frequency < 10e6 { + frequency = -frequency; // prefer bigger frequencies + priority = 1; + } else if frequency > 50e6 { + // prefer smaller frequencies + priority = 2; // least preferred + } else { + priority = 0; // most preferred + frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz + } + (priority, NotNan::new(frequency).expect("should be valid")) + }); + clks[0] +} + +#[hdl_module] +fn tx_only_uart( + platform_io_builder: PlatformIOBuilder<'_>, + divisor: f64, + message: impl AsRef<[u8]>, +) { + let message = message.as_ref(); + let clk_input = pick_clock(&platform_io_builder).use_peripheral(); + let rst = platform_io_builder.peripherals_with_type::()[0].use_peripheral(); + let cd = #[hdl] + ClockDomain { + clk: clk_input.clk, + rst, + }; + let numerator = 1u128 << 16; + let denominator = (divisor * numerator as f64).round() as u128; + + #[hdl] + let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128); + + #[hdl] + let sum: UInt<128> = wire(); + connect_any(sum, remainder_reg + numerator); + + #[hdl] + let tick_reg = reg_builder().clock_domain(cd).reset(false); + connect(tick_reg, false); + + #[hdl] + let next_remainder: UInt<128> = wire(); + connect(remainder_reg, next_remainder); + + #[hdl] + if sum.cmp_ge(denominator) { + connect_any(next_remainder, sum - denominator); + connect(tick_reg, true); + } else { + connect(next_remainder, sum); + } + + #[hdl] + let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4); + #[hdl] + let next_uart_state: UInt<4> = wire(); + + connect_any(next_uart_state, uart_state_reg + 1u8); + + #[hdl] + let message_mem: Array> = wire(Array[UInt::new_static()][message.len()]); + for (message, message_mem) in message.iter().zip(message_mem) { + connect(message_mem, *message); + } + #[hdl] + let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32); + #[hdl] + let next_addr: UInt<32> = wire(); + connect(next_addr, addr_reg); + + #[hdl] + let tx = reg_builder().clock_domain(cd).reset(true); + + #[hdl] + let tx_bits: Array = wire(); + + connect(tx_bits[0], false); // start bit + connect(tx_bits[9], true); // stop bit + + for i in 0..8 { + connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits + } + + connect(tx, tx_bits[uart_state_reg]); + + #[hdl] + if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) { + connect(next_uart_state, 0_hdl_u4); + let next_addr_val = addr_reg + 1u8; + #[hdl] + if next_addr_val.cmp_lt(message.len()) { + connect_any(next_addr, next_addr_val); + } else { + connect(next_addr, 0u32); + } + } + + #[hdl] + if tick_reg { + connect(uart_state_reg, next_uart_state); + connect(addr_reg, next_addr); + } + + for uart in platform_io_builder.peripherals_with_type::() { + connect(uart.use_peripheral().tx, tx); + } + + #[hdl] + let io = m.add_platform_io(platform_io_builder); +} + +fn parse_baud_rate( + v: impl AsRef, +) -> Result, Box> { + let retval: NotNan = v + .as_ref() + .parse() + .map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?; + if *retval > 0.0 && retval.is_finite() { + Ok(retval) + } else { + Err("baud rate must be finite and positive".into()) + } +} + +#[derive(Clone, 
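The `pick_clock` heuristic above sorts candidate clocks by a two-part key: a priority bucket (10-50 MHz preferred, then slower clocks, then faster ones) and a tie-breaker within the bucket (distance from 25 MHz in the preferred bucket, negated or raw frequency otherwise), using `NotNan` so the `f64` keys are totally ordered. The same heuristic as a standalone function over raw frequencies (the example clock list below is made up):

```rust
// Standalone version of the clock-selection key used by `pick_clock` above;
// uses the ordered-float crate, which this diff already adds as a dependency.
use ordered_float::NotNan;

fn preference_key(frequency_hz: f64) -> (u8, NotNan<f64>) {
    let (priority, key) = if frequency_hz < 10e6 {
        (1, -frequency_hz) // among slow clocks, bigger is better
    } else if frequency_hz > 50e6 {
        (2, frequency_hz) // least preferred: among fast clocks, smaller is better
    } else {
        (0, (frequency_hz - 25e6).abs()) // most preferred: closest to 25 MHz
    };
    (priority, NotNan::new(key).expect("frequency should not be NaN"))
}

fn main() {
    let mut clocks = vec![100e6, 12e6, 8e6, 48e6, 27e6];
    clocks.sort_by_key(|&f| preference_key(f));
    assert_eq!(clocks[0], 27e6); // closest to 25 MHz within the 10..=50 MHz band
}
```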
PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct ExtraArgs { + #[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")] + pub baud_rate: NotNan, + #[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())] + pub message: String, +} + +impl ToArgs for ExtraArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { baud_rate, message } = self; + args.write_display_arg(format_args!("--baud-rate={baud_rate}")); + args.write_long_option_eq("message", message); + } +} + +fn main() { + type Cli = BuildCli; + Cli::main( + "tx_only_uart", + |_, platform, ExtraArgs { baud_rate, message }| { + Ok(JobParams::new(platform.try_wrap_main_module(|io| { + let clk = pick_clock(&io).ty(); + let divisor = clk.frequency() / *baud_rate; + let baud_rate_error = |msg| { + ::command() + .error(clap::error::ErrorKind::ValueValidation, msg) + }; + const HUGE_DIVISOR: f64 = u64::MAX as f64; + match divisor { + divisor if !divisor.is_finite() => { + return Err(baud_rate_error("bad baud rate")); + } + HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")), + 4.0.. => {} + _ => return Err(baud_rate_error("baud rate is too large")), + } + Ok(tx_only_uart(io, divisor, message)) + })?)) + }, + ); +} diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs index 61d29b5..229871b 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements.rs @@ -2,6 +2,7 @@ // See Notices.txt for copyright information //! ## `#[hdl] let` statements +pub mod destructuring; pub mod inputs_outputs; pub mod instances; pub mod memories; diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/destructuring.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/destructuring.rs new file mode 100644 index 0000000..1fc4705 --- /dev/null +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_let_statements/destructuring.rs @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information +//! ### Destructuring Let +//! +//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns. +//! +//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now, +//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`. +//! +//! ``` +//! # use fayalite::prelude::*; +//! #[hdl] +//! struct MyStruct { +//! a: UInt<8>, +//! b: Bool, +//! } +//! +//! #[hdl_module] +//! fn my_module() { +//! #[hdl] +//! let my_input: MyStruct = m.input(); +//! #[hdl] +//! let my_output: UInt<8> = m.input(); +//! #[hdl] +//! let MyStruct { a, b } = my_input; +//! #[hdl] +//! if b { +//! connect(my_output, a); +//! } else { +//! connect(my_output, 0_hdl_u8); +//! } +//! } +//! ``` diff --git a/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs b/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs index 9e6c511..6df70f1 100644 --- a/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs +++ b/crates/fayalite/src/_docs/modules/module_bodies/hdl_match_statements.rs @@ -7,5 +7,5 @@ //! //! `#[hdl] match` statements' bodies must evaluate to type `()` for now. //! -//! 
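The transmit clock in `tx_only_uart` above comes from a fractional-rate accumulator: every cycle adds `numerator = 1 << 16` to a remainder register and emits a tick whenever the sum reaches `denominator = round(divisor * 2^16)`, so ticks average one per `divisor` input clocks without needing an integer divider. A host-side model of that tick generator (the clock and baud values below are examples, not taken from the design):

```rust
// Host-side model of the tick generator in `tx_only_uart`: add a fixed
// numerator each clock, tick when the accumulator crosses the denominator.
fn main() {
    let clock_hz = 25_000_000.0_f64;
    let baud = 115_200.0_f64;
    let divisor = clock_hz / baud;

    let numerator: u128 = 1 << 16;
    let denominator = (divisor * numerator as f64).round() as u128;

    let mut remainder: u128 = 0;
    let mut ticks = 0u64;
    let cycles = 1_000_000u64;
    for _ in 0..cycles {
        let sum = remainder + numerator;
        if sum >= denominator {
            remainder = sum - denominator;
            ticks += 1;
        } else {
            remainder = sum;
        }
    }
    // The long-run tick period matches the requested divisor closely.
    let average_period = cycles as f64 / ticks as f64;
    println!("divisor = {divisor:.3}, measured = {average_period:.3}");
    assert!((average_period - divisor).abs() / divisor < 1e-3);
}
```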
`#[hdl] match` statements can only match one level of struct/enum pattern for now, +//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now, //! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`. diff --git a/crates/fayalite/src/annotations.rs b/crates/fayalite/src/annotations.rs index 8eff4a0..4ca84dd 100644 --- a/crates/fayalite/src/annotations.rs +++ b/crates/fayalite/src/annotations.rs @@ -12,7 +12,7 @@ use std::{ ops::Deref, }; -#[derive(Clone)] +#[derive(Clone, Debug)] struct CustomFirrtlAnnotationFieldsImpl { value: serde_json::Map, serialized: Interned, @@ -145,52 +145,73 @@ pub struct DocStringAnnotation { macro_rules! make_annotation_enum { ( + #[$non_exhaustive:ident] $(#[$meta:meta])* - $vis:vis enum $Annotation:ident { - $($Variant:ident($T:ident),)* + $vis:vis enum $AnnotationEnum:ident { + $($Variant:ident($T:ty),)* } ) => { + crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive); + + #[$non_exhaustive] $(#[$meta])* - $vis enum $Annotation { + #[derive(Clone, PartialEq, Eq, Hash)] + $vis enum $AnnotationEnum { $($Variant($T),)* } - $(impl IntoAnnotations for $T { - type IntoAnnotations = [$Annotation; 1]; - - fn into_annotations(self) -> Self::IntoAnnotations { - [$Annotation::$Variant(self)] + impl std::fmt::Debug for $AnnotationEnum { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + $(Self::$Variant(v) => v.fmt(f),)* + } } } - impl IntoAnnotations for &'_ $T { - type IntoAnnotations = [$Annotation; 1]; - - fn into_annotations(self) -> Self::IntoAnnotations { - [$Annotation::$Variant(*self)] + $(impl From<$T> for crate::annotations::Annotation { + fn from(v: $T) -> Self { + $AnnotationEnum::$Variant(v).into() } } - impl IntoAnnotations for &'_ mut $T { - type IntoAnnotations = [$Annotation; 1]; + impl crate::annotations::IntoAnnotations for $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; fn into_annotations(self) -> Self::IntoAnnotations { - [$Annotation::$Variant(*self)] + [self.into()] } } - impl IntoAnnotations for Box<$T> { - type IntoAnnotations = [$Annotation; 1]; + impl crate::annotations::IntoAnnotations for &'_ $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; fn into_annotations(self) -> Self::IntoAnnotations { - [$Annotation::$Variant(*self)] + [crate::annotations::Annotation::from(self.clone())] + } + } + + impl crate::annotations::IntoAnnotations for &'_ mut $T { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [crate::annotations::Annotation::from(self.clone())] + } + } + + impl crate::annotations::IntoAnnotations for Box<$T> { + type IntoAnnotations = [crate::annotations::Annotation; 1]; + + fn into_annotations(self) -> Self::IntoAnnotations { + [crate::annotations::Annotation::from(*self)] } })* }; + (@require_non_exhaustive non_exhaustive) => {}; } +pub(crate) use make_annotation_enum; + make_annotation_enum! { - #[derive(Clone, PartialEq, Eq, Hash, Debug)] #[non_exhaustive] pub enum Annotation { DontTouch(DontTouchAnnotation), @@ -199,6 +220,7 @@ make_annotation_enum! 
{ BlackBoxPath(BlackBoxPathAnnotation), DocString(DocStringAnnotation), CustomFirrtl(CustomFirrtlAnnotation), + Xilinx(crate::vendor::xilinx::XilinxAnnotation), } } @@ -314,10 +336,8 @@ impl> Iterator for IterIntoAnnotations { } impl< - T: FusedIterator< - Item: IntoAnnotations>, - >, - > FusedIterator for IterIntoAnnotations + T: FusedIterator>>, +> FusedIterator for IterIntoAnnotations { } diff --git a/crates/fayalite/src/array.rs b/crates/fayalite/src/array.rs index f617f91..569f2e2 100644 --- a/crates/fayalite/src/array.rs +++ b/crates/fayalite/src/array.rs @@ -2,17 +2,24 @@ // See Notices.txt for copyright information use crate::{ - expr::{ops::ArrayIndex, Expr, ToExpr}, - int::{DynSize, KnownSize, Size, SizeType, DYN_SIZE}, + expr::{ + CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr, + ops::{ArrayLiteral, ExprFromIterator, ExprIntoIterator, ExprPartialEq}, + }, + int::{Bool, DYN_SIZE, DynSize, KnownSize, Size, SizeType}, intern::{Intern, Interned, LazyInterned}, module::transform::visit::{Fold, Folder, Visit, Visitor}, + sim::value::{SimValue, SimValuePartialEq}, source_location::SourceLocation, ty::{ - CanonicalType, MatchVariantWithoutScope, StaticType, Type, TypeProperties, TypeWithDeref, + CanonicalType, MatchVariantWithoutScope, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, TypeWithDeref, + serde_impls::SerdeCanonicalType, }, util::ConstUsize, }; -use std::ops::Index; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error}; +use std::{iter::FusedIterator, ops::Index}; #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct ArrayType { @@ -41,15 +48,20 @@ impl ArrayType { is_storable, is_castable_from_bits, bit_width, + sim_only_values_len, } = element; let Some(bit_width) = bit_width.checked_mul(len) else { panic!("array too big"); }; + let Some(sim_only_values_len) = sim_only_values_len.checked_mul(len) else { + panic!("array too big"); + }; TypeProperties { is_passive, is_storable, is_castable_from_bits, bit_width, + sim_only_values_len, } } pub fn new(element: T, len: Len::SizeType) -> Self { @@ -91,6 +103,12 @@ impl> ArrayType { } } +impl Default for ArrayType { + fn default() -> Self { + Self::TYPE + } +} + impl StaticType for ArrayType { const TYPE: Self = Self { element: LazyInterned::new_lazy(&|| T::TYPE.intern_sized()), @@ -139,6 +157,7 @@ impl, Len: Size, State: Visitor + ?Sized> Visit impl Type for ArrayType { type BaseType = Array; type MaskType = ArrayType; + type SimValue = Len::ArraySimValue; type MatchVariant = Len::ArrayMatch; type MatchActiveScope = (); type MatchVariantAndInactiveScope = MatchVariantWithoutScope>; @@ -148,10 +167,8 @@ impl Type for ArrayType { this: Expr, source_location: SourceLocation, ) -> Self::MatchVariantsIter { - let base = Expr::as_dyn_array(this); - let base_ty = Expr::ty(base); let _ = source_location; - let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr())); + let retval = Vec::from_iter(this); std::iter::once(MatchVariantWithoutScope( Len::ArrayMatch::::try_from(retval) .ok() @@ -177,16 +194,106 @@ impl Type for ArrayType { Len::from_usize(array.len()), ) } + fn source_location() -> SourceLocation { SourceLocation::builtin() } + + fn sim_value_from_opaque(&self, mut opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let mut value = Vec::with_capacity(self.len()); + for _ in 0..self.len() { + let (element_opaque, rest) = 
opaque.split_at(element_size); + value.push(SimValue::from_opaque(element_ty, element_opaque.to_owned())); + opaque = rest; + } + value.try_into().ok().expect("used correct length") + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + mut opaque: OpaqueSimValueSlice<'_>, + ) { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let value = AsMut::<[SimValue]>::as_mut(value); + assert_eq!(self.len(), value.len()); + for element_value in value { + assert_eq!(SimValue::ty(element_value), element_ty); + let (element_opaque, rest) = opaque.split_at(element_size); + SimValue::opaque_mut(element_value).clone_from_slice(element_opaque); + opaque = rest; + } + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + mut writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + let element_ty = self.element(); + let element_size = element_ty.canonical().size(); + let value = AsRef::<[SimValue]>::as_ref(value); + assert_eq!(self.len(), value.len()); + for element_value in value { + assert_eq!(SimValue::ty(element_value), element_ty); + writer.fill_prefix_with(element_size, |writer| { + writer.fill_cloned_from_slice(SimValue::opaque(element_value).as_slice()) + }); + } + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } +} + +impl Serialize for ArrayType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + SerdeCanonicalType::::Array { + element: self.element(), + len: self.len(), + } + .serialize(serializer) + } +} + +impl<'de, T: Type + Deserialize<'de>, Len: Size> Deserialize<'de> for ArrayType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = |len| -> String { + if let Some(len) = len { + format!("an Array<_, {len}>") + } else { + "an Array<_>".to_string() + } + }; + match SerdeCanonicalType::::deserialize(deserializer)? 
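`sim_value_from_opaque`, `sim_value_clone_from_opaque`, and `sim_value_to_opaque` above all treat an array's opaque simulation storage as one flat buffer carved into equal, element-sized chunks, one per array element, visited in order. A standalone sketch of that split/rejoin, using plain byte slices in place of the crate's `OpaqueSimValueSlice`/`OpaqueSimValueWriter` types:

```rust
// Sketch of the flat-buffer <-> per-element round trip used by the array
// SimValue impls; `u8` stands in for whatever the opaque storage really is.
fn split_elements(opaque: &[u8], element_size: usize) -> Vec<Vec<u8>> {
    assert_eq!(opaque.len() % element_size, 0, "buffer must hold whole elements");
    opaque.chunks(element_size).map(<[u8]>::to_vec).collect()
}

fn join_elements(elements: &[Vec<u8>], element_size: usize) -> Vec<u8> {
    let mut out = Vec::with_capacity(elements.len() * element_size);
    for element in elements {
        assert_eq!(element.len(), element_size);
        out.extend_from_slice(element);
    }
    out
}

fn main() {
    let flat = vec![1, 2, 3, 4, 5, 6];
    let elements = split_elements(&flat, 2); // three 2-byte elements
    assert_eq!(elements, vec![vec![1, 2], vec![3, 4], vec![5, 6]]);
    assert_eq!(join_elements(&elements, 2), flat);
}
```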
{ + SerdeCanonicalType::Array { element, len } => { + if let Some(len) = Len::try_from_usize(len) { + Ok(Self::new(element, len)) + } else { + Err(Error::invalid_value( + serde::de::Unexpected::Other(&name(Some(len))), + &&*name(Len::KNOWN_VALUE), + )) + } + } + ty => Err(Error::invalid_value( + serde::de::Unexpected::Other(ty.as_serde_unexpected_str()), + &&*name(Len::KNOWN_VALUE), + )), + } + } } impl TypeWithDeref for ArrayType { fn expr_deref(this: &Expr) -> &Self::MatchVariant { - let base = Expr::as_dyn_array(*this); - let base_ty = Expr::ty(base); - let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr())); + let retval = Vec::from_iter(*this); Interned::into_inner(Intern::intern_sized( Len::ArrayMatch::::try_from(retval) .ok() @@ -218,3 +325,143 @@ impl Index for ArrayWithoutLen { Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len))) } } + +impl ExprPartialEq> for ArrayType +where + Lhs: ExprPartialEq, +{ + fn cmp_eq(lhs: Expr, rhs: Expr>) -> Expr { + let lhs_ty = Expr::ty(lhs); + let rhs_ty = Expr::ty(rhs); + assert_eq!(lhs_ty.len(), rhs_ty.len()); + lhs.into_iter() + .zip(rhs) + .map(|(l, r)| l.cmp_eq(r)) + .collect::>>() + .cast_to_bits() + .all_one_bits() + } + + fn cmp_ne(lhs: Expr, rhs: Expr>) -> Expr { + let lhs_ty = Expr::ty(lhs); + let rhs_ty = Expr::ty(rhs); + assert_eq!(lhs_ty.len(), rhs_ty.len()); + lhs.into_iter() + .zip(rhs) + .map(|(l, r)| l.cmp_ne(r)) + .collect::>>() + .cast_to_bits() + .any_one_bits() + } +} + +impl SimValuePartialEq> for ArrayType +where + Lhs: SimValuePartialEq, +{ + fn sim_value_eq(this: &SimValue, other: &SimValue>) -> bool { + AsRef::<[_]>::as_ref(&**this) + .iter() + .zip(AsRef::<[_]>::as_ref(&**other)) + .all(|(l, r)| SimValuePartialEq::sim_value_eq(l, r)) + } +} + +impl ExprIntoIterator for ArrayType { + type Item = T; + type ExprIntoIter = ExprArrayIter; + + fn expr_into_iter(e: Expr) -> Self::ExprIntoIter { + ExprArrayIter { + base: e, + indexes: 0..Expr::ty(e).len(), + } + } +} + +#[derive(Clone, Debug)] +pub struct ExprArrayIter { + base: Expr>, + indexes: std::ops::Range, +} + +impl ExprArrayIter { + pub fn base(&self) -> Expr> { + self.base + } + pub fn indexes(&self) -> std::ops::Range { + self.indexes.clone() + } +} + +impl Iterator for ExprArrayIter { + type Item = Expr; + + fn next(&mut self) -> Option { + self.indexes.next().map(|i| self.base[i]) + } + + fn size_hint(&self) -> (usize, Option) { + self.indexes.size_hint() + } + + fn count(self) -> usize { + self.indexes.count() + } + + fn last(mut self) -> Option { + self.next_back() + } + + fn nth(&mut self, n: usize) -> Option { + self.indexes.nth(n).map(|i| self.base[i]) + } + + fn fold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes.fold(init, |b, i| f(b, self.base[i])) + } +} + +impl DoubleEndedIterator for ExprArrayIter { + fn next_back(&mut self) -> Option { + self.indexes.next_back().map(|i| self.base[i]) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.indexes.nth_back(n).map(|i| self.base[i]) + } + + fn rfold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes.rfold(init, |b, i| f(b, self.base[i])) + } +} + +impl ExactSizeIterator for ExprArrayIter { + fn len(&self) -> usize { + self.indexes.len() + } +} + +impl FusedIterator for ExprArrayIter {} + +impl ExprFromIterator> for Array { + fn expr_from_iter>>(iter: T) -> Expr { + ArrayLiteral::new( + A::TYPE, + iter.into_iter().map(|v| Expr::canonical(v)).collect(), + ) + 
.to_expr() + } +} + +impl<'a, A: StaticType> ExprFromIterator<&'a Expr> for Array { + fn expr_from_iter>>(iter: T) -> Expr { + iter.into_iter().copied().collect() + } +} diff --git a/crates/fayalite/src/build.rs b/crates/fayalite/src/build.rs new file mode 100644 index 0000000..a9e9635 --- /dev/null +++ b/crates/fayalite/src/build.rs @@ -0,0 +1,2803 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::graph::JobGraph, + bundle::{Bundle, BundleType}, + intern::{Intern, InternSlice, Interned}, + module::Module, + platform::{DynPlatform, Platform}, + util::{job_server::AcquiredJob, os_str_strip_prefix}, + vendor, +}; +use clap::ArgAction; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error as _}, + ser::Error as _, +}; +use std::{ + any::{Any, TypeId}, + borrow::Cow, + cmp::Ordering, + ffi::{OsStr, OsString}, + fmt, + hash::{Hash, Hasher}, + io::Write, + marker::PhantomData, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use tempfile::TempDir; + +pub mod external; +pub mod firrtl; +pub mod formal; +pub mod graph; +pub mod registry; +pub mod verilog; + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [DynJobKind::new(BaseJobKind)] + .into_iter() + .chain(firrtl::built_in_job_kinds()) + .chain(formal::built_in_job_kinds()) + .chain(vendor::built_in_job_kinds()) + .chain(verilog::built_in_job_kinds()) +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug)] +#[non_exhaustive] +pub enum JobItem { + Path { + path: Interned, + }, + DynamicPaths { + paths: Vec>, + source_job_name: Interned, + }, +} + +impl JobItem { + pub fn name(&self) -> JobItemName { + match self { + &JobItem::Path { path } => JobItemName::Path { path }, + &JobItem::DynamicPaths { + paths: _, + source_job_name, + } => JobItemName::DynamicPaths { source_job_name }, + } + } +} + +#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum JobItemName { + Path { path: Interned }, + DynamicPaths { source_job_name: Interned }, +} + +impl JobItemName { + fn as_ref(&self) -> JobItemNameRef<'_> { + match self { + JobItemName::Path { path } => JobItemNameRef::Path { path }, + JobItemName::DynamicPaths { source_job_name } => { + JobItemNameRef::DynamicPaths { source_job_name } + } + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum JobItemNameRef<'a> { + Path { path: &'a Path }, + DynamicPaths { source_job_name: &'a str }, +} + +/// ordered by string contents, not by `Interned` +impl PartialOrd for JobItemName { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// ordered by string contents, not by `Interned` +impl Ord for JobItemName { + fn cmp(&self, other: &Self) -> Ordering { + if self == other { + Ordering::Equal + } else { + self.as_ref().cmp(&other.as_ref()) + } + } +} + +pub trait WriteArgs: + for<'a> Extend<&'a str> + + for<'a> Extend<&'a OsStr> + + for<'a> Extend<&'a Path> + + for<'a> Extend> + + for<'a> Extend> + + for<'a> Extend> + + Extend + + Extend + + Extend + + Extend> + + Extend> + + Extend> +{ + fn write_display_args(&mut self, args: impl IntoIterator) { + self.extend(args.into_iter().map(|v| v.to_string())); + } + fn write_owned_args(&mut self, args: impl IntoIterator>) { + self.extend(args.into_iter().map(Into::::into)) + } + fn write_args<'a>(&mut self, args: impl IntoIterator>); + fn write_interned_args(&mut self, args: impl IntoIterator>>) { + self.extend(args.into_iter().map(Into::>::into)) 
+ } + fn write_display_arg(&mut self, arg: impl fmt::Display) { + self.write_display_args([arg]); + } + fn write_owned_arg(&mut self, arg: impl Into) { + self.extend([arg.into()]); + } + fn write_arg(&mut self, arg: impl AsRef) { + self.extend([arg.as_ref()]); + } + /// writes `--{name}={value}` + fn write_long_option_eq(&mut self, name: impl AsRef, value: impl AsRef) { + let name = name.as_ref(); + let value = value.as_ref(); + let mut option = + OsString::with_capacity(name.len().saturating_add(value.len()).saturating_add(3)); + option.push("--"); + option.push(name); + option.push("="); + option.push(value); + self.write_owned_arg(option); + } + fn write_interned_arg(&mut self, arg: impl Into>) { + self.extend([arg.into()]); + } + /// finds the first option that is `--{option_name}={value}` and returns `value` + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&OsStr>; +} + +pub trait ArgsWriterArg: + AsRef + + From> + + for<'a> From> + + for<'a> From<&'a OsStr> + + From +{ +} + +impl ArgsWriterArg for Interned {} + +impl ArgsWriterArg for OsString {} + +pub struct ArgsWriter(pub Vec); + +impl Default for ArgsWriter { + fn default() -> Self { + Self(Default::default()) + } +} + +impl ArgsWriter { + fn get_long_option_eq_helper(&self, option_name: &str) -> Option<&OsStr> { + self.0.iter().find_map(|arg| { + os_str_strip_prefix(arg.as_ref(), "--") + .and_then(|arg| os_str_strip_prefix(arg, option_name)) + .and_then(|arg| os_str_strip_prefix(arg, "=")) + }) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a str> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(AsRef::::as_ref)) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a OsStr> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl<'a, A: ArgsWriterArg> Extend<&'a Path> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(AsRef::::as_ref)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(OsString::from)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(OsString::from)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(Interned::::from)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(Interned::::from)) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(|v| { + match v { + Cow::Borrowed(v) => Cow::::Borrowed(v.as_ref()), + Cow::Owned(v) => Cow::Owned(v.into()), + } + .into() + })) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(Into::into)) + } +} + +impl<'a, A: ArgsWriterArg> Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.0.extend(iter.into_iter().map(|v| { + match v { + Cow::Borrowed(v) => Cow::::Borrowed(v.as_ref()), + Cow::Owned(v) => Cow::Owned(v.into()), + } + .into() + })) + } +} + +impl WriteArgs for ArgsWriter { + fn write_args<'a>(&mut self, args: impl IntoIterator>) { + self.0.extend(args.into_iter().map(|v| 
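`write_long_option_eq` and `get_long_option_eq` above are inverses: the writer concatenates `--`, the option name, `=`, and the value into a single argument, and the reader scans the recorded arguments for that shape by peeling off the same three prefixes. A simplified round trip over `String` arguments (the real implementations work on `OsStr`/`OsString` through an `os_str_strip_prefix` helper):

```rust
// Simplified round trip for `--{name}={value}` options; str instead of OsStr.
fn write_long_option_eq(args: &mut Vec<String>, name: &str, value: &str) {
    let mut option = String::with_capacity(name.len() + value.len() + 3);
    option.push_str("--");
    option.push_str(name);
    option.push('=');
    option.push_str(value);
    args.push(option);
}

fn get_long_option_eq<'a>(args: &'a [String], name: &str) -> Option<&'a str> {
    args.iter().find_map(|arg| {
        arg.strip_prefix("--")
            .and_then(|rest| rest.strip_prefix(name))
            .and_then(|rest| rest.strip_prefix('='))
    })
}

fn main() {
    let mut args = Vec::new();
    write_long_option_eq(&mut args, "message", "Hello World from Fayalite!!!\r\n");
    write_long_option_eq(&mut args, "baud-rate", "115200");
    assert_eq!(args[1], "--baud-rate=115200");
    assert_eq!(get_long_option_eq(&args, "baud-rate"), Some("115200"));
    assert_eq!(get_long_option_eq(&args, "missing"), None);
}
```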
v.as_ref().into())) + } + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&OsStr> { + self.get_long_option_eq_helper(option_name.as_ref()) + } +} + +pub trait ToArgs: clap::Args + 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)); + fn to_interned_args(&self) -> Interned<[Interned]> { + Intern::intern_owned(self.to_interned_args_vec()) + } + fn to_interned_args_vec(&self) -> Vec> { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } + fn to_os_string_args(&self) -> Vec { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndArgs { + pub kind: K, + pub args: K::Args, +} + +impl JobKindAndArgs { + pub fn args_to_jobs( + self, + dependencies: ::KindsAndArgs, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + K::args_to_jobs( + JobArgsAndDependencies { + args: self, + dependencies, + }, + params, + global_params, + ) + } +} + +impl> Copy for JobKindAndArgs {} + +impl From> for DynJobArgs { + fn from(value: JobKindAndArgs) -> Self { + let JobKindAndArgs { kind, args } = value; + DynJobArgs::new(kind, args) + } +} + +impl TryFrom for JobKindAndArgs { + type Error = DynJobArgs; + fn try_from(value: DynJobArgs) -> Result { + value.downcast() + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndKind { + pub kind: K, + pub job: K::Job, +} + +impl> Clone for JobAndKind { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + } + } +} + +impl> Copy for JobAndKind {} + +impl From> for DynJob { + fn from(value: JobAndKind) -> Self { + let JobAndKind { kind, job } = value; + DynJob::new(kind, job) + } +} + +impl> TryFrom for JobAndKind { + type Error = DynJob; + fn try_from(value: DynJob) -> Result { + value.downcast() + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndDependencies { + pub kind: K, + pub dependencies: K::Dependencies, +} + +impl Default for JobKindAndDependencies { + fn default() -> Self { + Self::new(K::default()) + } +} + +impl JobKindAndDependencies { + pub fn new(kind: K) -> Self { + Self { + kind, + dependencies: kind.dependencies(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndDependencies { + pub job: JobAndKind, + pub dependencies: ::JobsAndKinds, +} + +impl JobAndDependencies { + pub fn get_job(&self) -> &J + where + Self: GetJob, + { + GetJob::get_job(self) + } + pub fn base_job(&self) -> &BaseJob { + self.job.kind.base_job(&self.job.job, &self.dependencies) + } +} + +impl Clone for JobAndDependencies +where + K::Job: Clone, + ::JobsAndKinds: Clone, +{ + fn clone(&self) -> Self { + Self { + job: self.job.clone(), + dependencies: self.dependencies.clone(), + } + } +} + +impl Copy for JobAndDependencies +where + K::Job: Copy, + ::JobsAndKinds: Copy, +{ +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobArgsAndDependencies { + pub args: JobKindAndArgs, + pub dependencies: ::KindsAndArgs, +} + +impl Copy for JobArgsAndDependencies +where + K::Args: Copy, + ::KindsAndArgs: Copy, +{ +} + +impl JobArgsAndDependencies { + pub fn args_to_jobs( + self, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + K::args_to_jobs(self, params, global_params) + } + pub fn base_job_args(&self) -> &BaseJobArgs { + self.args + .kind + .base_job_args(&self.args.args, &self.dependencies) + } +} + +impl>, D: JobKind> 
JobArgsAndDependencies { + pub fn args_to_jobs_simple( + self, + params: &JobParams, + global_params: &GlobalParams, + f: F, + ) -> eyre::Result> + where + F: FnOnce(K, K::Args, &mut JobAndDependencies) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind, args }, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params, global_params)?; + let job = f(kind, args, &mut dependencies)?; + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } +} + +impl>, D: JobKind> + JobArgsAndDependencies> +{ + pub fn args_to_jobs_external_simple( + self, + params: &JobParams, + global_params: &GlobalParams, + f: F, + ) -> eyre::Result<( + C::AdditionalJobData, + ::JobsAndKinds, + )> + where + F: FnOnce( + external::ExternalCommandArgs, + &mut JobAndDependencies, + ) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind: _, args }, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params, global_params)?; + let additional_job_data = f(args, &mut dependencies)?; + Ok((additional_job_data, dependencies)) + } +} + +pub trait JobDependencies: 'static + Send + Sync + Hash + Eq + fmt::Debug + Copy { + type KindsAndArgs: 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone; + type JobsAndKinds: 'static + Send + Sync + Hash + Eq + fmt::Debug; + fn kinds_dyn_extend>(self, dyn_kinds: &mut E); + fn kinds_dyn(self) -> Vec { + let mut retval = Vec::new(); + self.kinds_dyn_extend(&mut retval); + retval + } + fn into_dyn_jobs_extend>(jobs: Self::JobsAndKinds, dyn_jobs: &mut E); + fn into_dyn_jobs(jobs: Self::JobsAndKinds) -> Vec { + let mut retval = Vec::new(); + Self::into_dyn_jobs_extend(jobs, &mut retval); + retval + } + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs; + #[track_caller] + fn from_dyn_args>(args: I) -> Self::KindsAndArgs { + let mut iter = args.into_iter(); + let retval = Self::from_dyn_args_prefix(&mut iter); + if iter.next().is_some() { + panic!("wrong number of dependencies"); + } + retval + } +} + +pub trait JobDependenciesHasBase: JobDependencies { + fn base_job_args(args: &Self::KindsAndArgs) -> &BaseJobArgs; + fn base_job(jobs: &Self::JobsAndKinds) -> &BaseJob; + #[track_caller] + fn base_job_args_dyn(dependencies_args: &[DynJobArgs]) -> &BaseJobArgs; + #[track_caller] + fn base_job_dyn(dependencies: &[DynJob]) -> &BaseJob; +} + +impl JobDependencies for JobKindAndDependencies { + type KindsAndArgs = JobArgsAndDependencies; + type JobsAndKinds = JobAndDependencies; + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + let Self { kind, dependencies } = self; + dependencies.kinds_dyn_extend(dyn_kinds); + dyn_kinds.extend([DynJobKind::new(kind)]); + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + let JobAndDependencies { job, dependencies } = jobs; + K::Dependencies::into_dyn_jobs_extend(dependencies, dyn_jobs); + dyn_jobs.extend([job.into()]); + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + let dependencies = K::Dependencies::from_dyn_args_prefix(args); + let Some(args) = args.next() else { + panic!("wrong number of dependencies"); + }; + match args.downcast() { + Ok(args) => JobArgsAndDependencies { args, dependencies }, + Err(args) => { + panic!( + "wrong type of dependency, expected {} got:\n{args:?}", + std::any::type_name::() + ) + } + } + } +} + +impl JobDependenciesHasBase for JobKindAndDependencies { + fn base_job_args(args: &Self::KindsAndArgs) -> 
&BaseJobArgs { + args.base_job_args() + } + + fn base_job(jobs: &Self::JobsAndKinds) -> &BaseJob { + jobs.base_job() + } + + #[track_caller] + fn base_job_args_dyn(dependencies_args: &[DynJobArgs]) -> &BaseJobArgs { + let [dependencies_args @ .., args] = dependencies_args else { + panic!("wrong number of dependencies"); + }; + let Some((kind, args)) = args.downcast_ref::() else { + panic!( + "wrong type of dependency, expected {} got:\n{args:?}", + std::any::type_name::() + ) + }; + kind.base_job_args_dyn(args, dependencies_args) + } + + #[track_caller] + fn base_job_dyn(dependencies: &[DynJob]) -> &BaseJob { + let [dependencies @ .., job] = dependencies else { + panic!("wrong number of dependencies"); + }; + let Some((kind, job)) = job.downcast_ref::() else { + panic!( + "wrong type of dependency, expected {} got:\n{job:?}", + std::any::type_name::() + ) + }; + kind.base_job_dyn(job, dependencies) + } +} + +macro_rules! impl_job_dependencies { + (@impl $(($v:ident: $T:ident),)*) => { + impl<$($T: JobDependencies),*> JobDependencies for ($($T,)*) { + type KindsAndArgs = ($($T::KindsAndArgs,)*); + type JobsAndKinds = ($($T::JobsAndKinds,)*); + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + #![allow(unused_variables)] + let ($($v,)*) = self; + $($T::kinds_dyn_extend($v, dyn_kinds);)* + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + #![allow(unused_variables)] + let ($($v,)*) = jobs; + $($T::into_dyn_jobs_extend($v, dyn_jobs);)* + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + #![allow(unused_variables)] + $(let $v = $T::from_dyn_args_prefix(args);)* + ($($v,)*) + } + } + }; + ($($first:tt, $($rest:tt,)*)?) => { + impl_job_dependencies!(@impl $($first, $($rest,)*)?); + $(impl_job_dependencies!($($rest,)*);)? + }; +} + +impl_job_dependencies! 
{ + (v0: T0), + (v1: T1), + (v2: T2), + (v3: T3), + (v4: T4), + (v5: T5), + (v6: T6), + (v7: T7), + (v8: T8), + (v9: T9), + (v10: T10), + (v11: T11), +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobParams { + main_module: Module, +} + +impl AsRef for JobParams { + fn as_ref(&self) -> &Self { + self + } +} + +impl JobParams { + pub fn new_canonical(main_module: Module) -> Self { + Self { main_module } + } + pub fn new(main_module: impl AsRef>) -> Self { + Self::new_canonical(main_module.as_ref().canonical()) + } + pub fn main_module(&self) -> &Module { + &self.main_module + } +} + +#[derive(Clone, Debug)] +pub struct GlobalParams { + top_level_cmd: Option, + application_name: Interned, +} + +impl AsRef for GlobalParams { + fn as_ref(&self) -> &Self { + self + } +} + +impl GlobalParams { + pub fn new(top_level_cmd: Option, application_name: impl AsRef) -> Self { + Self { + top_level_cmd, + application_name: application_name.as_ref().intern(), + } + } + pub fn top_level_cmd(&self) -> Option<&clap::Command> { + self.top_level_cmd.as_ref() + } + pub fn into_top_level_cmd(self) -> Option { + self.top_level_cmd + } + pub fn extract_clap_error(&self, e: eyre::Report) -> eyre::Result { + let e = e.downcast::()?; + Ok(match &self.top_level_cmd { + Some(cmd) => e.with_cmd(cmd), + None => e, + }) + } + pub fn exit_if_clap_error(&self, e: eyre::Report) -> eyre::Report { + match self.extract_clap_error(e) { + Ok(e) => e.exit(), + Err(e) => e, + } + } + pub fn clap_error( + &self, + kind: clap::error::ErrorKind, + message: impl fmt::Display, + ) -> clap::Error { + match self.top_level_cmd.clone() { + Some(top_level_cmd) => top_level_cmd.clone().error(kind, message), + None => clap::Error::raw(kind, message), + } + } + pub fn application_name(&self) -> Interned { + self.application_name + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct CommandParams { + pub command_line: Interned<[Interned]>, + pub current_dir: Option>, +} + +impl CommandParams { + fn to_unix_shell_line( + self, + output: &mut String, + mut escape_arg: impl FnMut(&OsStr, &mut String) -> Result<(), E>, + ) -> Result<(), E> { + let Self { + command_line, + current_dir, + } = self; + let mut end = None; + let mut separator = if let Some(current_dir) = current_dir { + output.push_str("(cd "); + end = Some(")"); + if !current_dir + .as_os_str() + .as_encoded_bytes() + .first() + .is_some_and(|ch| ch.is_ascii_alphanumeric() || matches!(ch, b'/' | b'\\' | b'.')) + { + output.push_str("-- "); + } + escape_arg(current_dir.as_ref(), output)?; + "; exec -- " + } else { + "" + }; + for arg in command_line { + output.push_str(separator); + separator = " "; + escape_arg(&arg, output)?; + } + if let Some(end) = end { + output.push_str(end); + } + Ok(()) + } +} + +pub trait JobKindHelper: 'static + Send + Sync + Hash + Eq + fmt::Debug + Copy { + fn base_job_args<'a>( + self, + args: &'a ::Args, + dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs + where + Self: JobKind; + fn base_job<'a>( + self, + job: &'a ::Job, + dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob + where + Self: JobKind; + #[track_caller] + fn base_job_args_dyn<'a>( + self, + args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs + where + Self: JobKind; + #[track_caller] + fn base_job_dyn<'a>( + self, + job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob + where + Self: JobKind; +} + +impl> JobKindHelper for K { + fn 
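`CommandParams::to_unix_shell_line` above renders a job's command line for logging: with no `current_dir` it escapes and space-joins the arguments; with one it wraps everything as `(cd <dir>; exec -- <args...>)`, inserting a `--` guard when the directory does not start with an alphanumeric, `/`, `\`, or `.` character. A rough standalone rendering, with a deliberately naive single-quote escaper standing in for the caller-supplied `escape_arg` callback:

```rust
// Rough stand-in for the shell-line rendering above; `shell_quote` is a naive
// escaper, not what the crate actually uses.
fn shell_quote(arg: &str) -> String {
    if !arg.is_empty()
        && arg.chars().all(|c| c.is_ascii_alphanumeric() || "/._-=".contains(c))
    {
        arg.to_string()
    } else {
        format!("'{}'", arg.replace('\'', r"'\''"))
    }
}

fn to_unix_shell_line(command_line: &[&str], current_dir: Option<&str>) -> String {
    let mut output = String::new();
    let mut end = None;
    let mut separator = if let Some(dir) = current_dir {
        output.push_str("(cd ");
        end = Some(")");
        if !dir.starts_with(|c: char| c.is_ascii_alphanumeric() || matches!(c, '/' | '\\' | '.')) {
            output.push_str("-- ");
        }
        output.push_str(&shell_quote(dir));
        "; exec -- "
    } else {
        ""
    };
    for arg in command_line {
        output.push_str(separator);
        separator = " ";
        output.push_str(&shell_quote(arg));
    }
    if let Some(end) = end {
        output.push_str(end);
    }
    output
}

fn main() {
    // prints: (cd 'build dir'; exec -- yosys -p synth)
    println!("{}", to_unix_shell_line(&["yosys", "-p", "synth"], Some("build dir")));
}
```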
base_job_args<'a>( + self, + _args: &'a ::Args, + dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs { + K::Dependencies::base_job_args(dependencies) + } + fn base_job<'a>( + self, + _job: &'a ::Job, + dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob { + K::Dependencies::base_job(dependencies) + } + #[track_caller] + fn base_job_args_dyn<'a>( + self, + _args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs { + K::Dependencies::base_job_args_dyn(dependencies_args) + } + #[track_caller] + fn base_job_dyn<'a>( + self, + _job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob { + K::Dependencies::base_job_dyn(dependencies) + } +} + +pub trait JobKind: JobKindHelper { + type Args: ToArgs; + type Job: 'static + Send + Sync + Hash + Eq + fmt::Debug + Serialize + DeserializeOwned; + type Dependencies: JobDependencies; + fn dependencies(self) -> Self::Dependencies; + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result>; + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn name(self) -> Interned; + fn external_command_params(self, job: &Self::Job) -> Option; + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; + fn subcommand_hidden(self) -> bool { + false + } + fn external_program(self) -> Option> { + None + } +} + +trait DynJobKindTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn dependencies_kinds_dyn(&self) -> Vec; + fn args_group_id_dyn(&self) -> Option; + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command; + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command; + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result; + fn name_dyn(&self) -> Interned; + fn subcommand_hidden_dyn(&self) -> bool; + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result; + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result; +} + +impl DynJobKindTrait for K { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn dependencies_kinds_dyn(&self) -> Vec { + self.dependencies().kinds_dyn() + } + + fn args_group_id_dyn(&self) -> Option { + ::group_id() + } + + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args(cmd) + } + + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args_for_update(cmd) + } + + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + Ok(DynJobArgs::new( + *self, + ::from_arg_matches_mut(matches)?, + )) + } + + fn name_dyn(&self) -> Interned { + self.name() + } + + fn subcommand_hidden_dyn(&self) -> bool { + self.subcommand_hidden() + } + + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result { + 
Ok(DynJob::from_arc(self, serde_json::from_str(json)?)) + } + + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result { + Ok(DynJob::from_arc(self, Deserialize::deserialize(json)?)) + } +} + +#[derive(Clone)] +pub struct DynJobKind(Arc); + +impl DynJobKind { + pub fn from_arc(job_kind: Arc) -> Self { + Self(job_kind) + } + pub fn new(job_kind: K) -> Self { + Self(Arc::new(job_kind)) + } + pub fn type_id(&self) -> TypeId { + DynJobKindTrait::as_any(&*self.0).type_id() + } + pub fn downcast(&self) -> Option { + DynJobKindTrait::as_any(&*self.0).downcast_ref().copied() + } + pub fn downcast_arc(self) -> Result, Self> { + if self.downcast::().is_some() { + Ok(Arc::downcast::(self.0.as_arc_any()) + .ok() + .expect("already checked type")) + } else { + Err(self) + } + } + pub fn dependencies_kinds(&self) -> Vec { + DynJobKindTrait::dependencies_kinds_dyn(&*self.0) + } + pub fn args_group_id(&self) -> Option { + DynJobKindTrait::args_group_id_dyn(&*self.0) + } + pub fn augment_args(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_dyn(&*self.0, cmd) + } + pub fn augment_args_for_update(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_for_update_dyn(&*self.0, cmd) + } + pub fn from_arg_matches( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + DynJobKindTrait::from_arg_matches_dyn(&*self.0, matches) + } + pub fn name(&self) -> Interned { + DynJobKindTrait::name_dyn(&*self.0) + } + pub fn subcommand_hidden(&self) -> bool { + DynJobKindTrait::subcommand_hidden_dyn(&*self.0) + } + pub fn deserialize_job_from_json_str(self, json: &str) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_str(self.0, json) + } + pub fn deserialize_job_from_json_value( + self, + json: &serde_json::Value, + ) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_value(self.0, json) + } + fn make_subcommand_without_args(&self) -> clap::Command { + clap::Command::new(Interned::into_inner(self.name())).hide(self.subcommand_hidden()) + } + pub fn make_subcommand(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args(subcommand); + } + self.augment_args(subcommand) + } + pub fn make_subcommand_for_update(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args_for_update(subcommand); + } + self.augment_args_for_update(subcommand) + } +} + +impl Hash for DynJobKind { + fn hash(&self, state: &mut H) { + self.type_id().hash(state); + DynJobKindTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobKind { + fn eq(&self, other: &Self) -> bool { + DynJobKindTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobKind {} + +impl fmt::Debug for DynJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for DynJobKind { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.name().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJobKind { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = Cow::::deserialize(deserializer)?; + match Self::registry().get_by_name(&name) { + Some(retval) => Ok(retval.clone()), + None => Err(D::Error::custom(format_args!( + "unknown job kind: name not found 
in registry: {name:?}" + ))), + } + } +} + +#[derive(Copy, Clone, Debug, Default)] +pub struct DynJobKindValueParser; + +#[derive(Clone, PartialEq, Eq, Hash)] +struct DynJobKindValueEnum { + name: Interned, + job_kind: DynJobKind, +} + +impl clap::ValueEnum for DynJobKindValueEnum { + fn value_variants<'a>() -> &'a [Self] { + Interned::into_inner( + registry::JobKindRegistrySnapshot::get() + .iter_with_names() + .map(|(name, job_kind)| Self { + name, + job_kind: job_kind.clone(), + }) + .collect(), + ) + } + + fn to_possible_value(&self) -> Option { + Some(clap::builder::PossibleValue::new(Interned::into_inner( + self.name, + ))) + } +} + +impl clap::builder::TypedValueParser for DynJobKindValueParser { + type Value = DynJobKind; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> clap::error::Result { + clap::builder::EnumValueParser::::new() + .parse_ref(cmd, arg, value) + .map(|v| v.job_kind) + } + + fn possible_values( + &self, + ) -> Option + '_>> { + static ENUM_VALUE_PARSER: OnceLock> = + OnceLock::new(); + ENUM_VALUE_PARSER + .get_or_init(clap::builder::EnumValueParser::::new) + .possible_values() + } +} + +impl clap::builder::ValueParserFactory for DynJobKind { + type Parser = DynJobKindValueParser; + + fn value_parser() -> Self::Parser { + DynJobKindValueParser::default() + } +} + +trait DynExtendInternedStr { + fn extend_from_slice(&mut self, items: &[Interned]); +} + +impl Extend> for dyn DynExtendInternedStr + '_ { + fn extend>>(&mut self, iter: T) { + let mut buf = [Interned::default(); 64]; + let mut buf_len = 0; + iter.into_iter().for_each(|item| { + buf[buf_len] = item; + buf_len += 1; + if buf_len == buf.len() { + ::extend_from_slice(self, &buf); + buf_len = 0; + } + }); + if buf_len > 0 { + ::extend_from_slice( + self, + &buf[..buf_len], + ); + } + } +} + +impl>> DynExtendInternedStr for T { + fn extend_from_slice(&mut self, items: &[Interned]) { + self.extend(items.iter().copied()); + } +} + +#[derive(PartialEq, Eq, Hash, Clone)] +struct DynJobArgsInner(JobKindAndArgs); + +impl fmt::Debug for DynJobArgsInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self(JobKindAndArgs { kind, args }) = self; + f.debug_struct("DynJobArgs") + .field("kind", kind) + .field("args", args) + .finish() + } +} + +trait DynJobArgsTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn kind_type_id(&self) -> TypeId; + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn kind(&self) -> DynJobKind; + fn to_args_extend_vec(&self, args: Vec>) -> Vec>; + fn clone_into_arc(&self) -> Arc; + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()>; + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)>; + #[track_caller] + fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs; +} + +impl DynJobArgsTrait for DynJobArgsInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind(&self) -> 
DynJobKind { + DynJobKind::new(self.0.kind) + } + + fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + let mut writer = ArgsWriter(args); + self.0.args.to_args(&mut writer); + writer.0 + } + + fn clone_into_arc(&self) -> Arc { + Arc::new(self.clone()) + } + + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + clap::FromArgMatches::update_from_arg_matches_mut(&mut self.0.args, matches) + } + + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)> { + let JobAndDependencies { job, dependencies } = JobArgsAndDependencies { + args: Arc::unwrap_or_clone(self).0, + dependencies: K::Dependencies::from_dyn_args(dependencies_args), + } + .args_to_jobs(params, global_params)?; + Ok((job.into(), K::Dependencies::into_dyn_jobs(dependencies))) + } + + #[track_caller] + fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs { + self.0 + .kind + .base_job_args_dyn(&self.0.args, dependencies_args) + } +} + +#[derive(Clone)] +pub struct DynJobArgs(Arc); + +impl DynJobArgs { + pub fn new(kind: K, args: K::Args) -> Self { + Self(Arc::new(DynJobArgsInner(JobKindAndArgs { kind, args }))) + } + pub fn kind_type_id(&self) -> TypeId { + DynJobArgsTrait::kind_type_id(&*self.0) + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Args)> { + let DynJobArgsInner::(JobKindAndArgs { kind, args }) = + DynJobArgsTrait::as_any(&*self.0).downcast_ref()?; + Some((kind, args)) + } + pub fn downcast(self) -> Result, Self> { + if self.downcast_ref::().is_some() { + let this = Arc::downcast::>(self.0.as_arc_any()) + .ok() + .expect("already checked type"); + Ok(Arc::unwrap_or_clone(this).0) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobArgsTrait::kind(&*self.0) + } + pub fn to_args_vec(&self) -> Vec> { + self.to_args_extend_vec(Vec::new()) + } + pub fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + DynJobArgsTrait::to_args_extend_vec(&*self.0, args) + } + fn make_mut(&mut self) -> &mut dyn DynJobArgsTrait { + // can't just return the reference if the first get_mut returns Some since + // as of rustc 1.90.0 this causes a false-positive lifetime error. 
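`DynJobArgs::make_mut` is a hand-rolled copy-on-write over `Arc`: if the handle is uniquely owned it is mutated in place, otherwise the payload is cloned into a fresh `Arc` first (std's `Arc::make_mut` is apparently not usable here because the payload is a type-erased trait object, hence `clone_into_arc`); the comment notes it calls `Arc::get_mut` twice only to dodge a rustc 1.90 borrow-check false positive. The same pattern in miniature, for a concrete `Clone` payload where `Arc::make_mut` would also do:

```rust
use std::sync::Arc;

// Minimal version of the copy-on-write step in `DynJobArgs::make_mut`,
// specialized to a concrete Clone type instead of a dyn trait object.
fn make_mut(this: &mut Arc<Vec<String>>) -> &mut Vec<String> {
    if Arc::get_mut(this).is_none() {
        // Shared: clone the payload into a new, uniquely owned Arc.
        *this = Arc::new(this.as_ref().clone());
    }
    Arc::get_mut(this).expect("the freshly created Arc has a ref-count of 1")
}

fn main() {
    let mut args = Arc::new(vec!["--baud-rate=115200".to_string()]);
    let shared = Arc::clone(&args);
    make_mut(&mut args).push("--message=hi".to_string());
    assert_eq!(args.len(), 2);
    assert_eq!(shared.len(), 1); // the other handle still sees the old value
}
```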
+ if Arc::get_mut(&mut self.0).is_none() { + self.0 = DynJobArgsTrait::clone_into_arc(&*self.0); + } + Arc::get_mut(&mut self.0).expect("clone_into_arc returns a new arc with a ref-count of 1") + } + pub fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + DynJobArgsTrait::update_from_arg_matches(self.make_mut(), matches) + } + pub fn args_to_jobs( + self, + dependencies_args: Vec, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<(DynJob, Vec)> { + DynJobArgsTrait::args_to_jobs(self.0, dependencies_args, params, global_params) + } + #[track_caller] + pub fn base_job_args_dyn<'a>(&'a self, dependencies_args: &'a [DynJobArgs]) -> &'a BaseJobArgs { + DynJobArgsTrait::base_job_args_dyn(&*self.0, dependencies_args) + } +} + +impl Hash for DynJobArgs { + fn hash(&self, state: &mut H) { + self.kind_type_id().hash(state); + DynJobArgsTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobArgs { + fn eq(&self, other: &Self) -> bool { + DynJobArgsTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobArgs {} + +impl fmt::Debug for DynJobArgs { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +#[derive(PartialEq, Eq, Hash)] +struct DynJobInner { + kind: Arc, + job: K::Job, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + external_command_params: Option, +} + +impl> Clone for DynJobInner { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + inputs: self.inputs, + outputs: self.outputs, + external_command_params: self.external_command_params, + } + } +} + +impl fmt::Debug for DynJobInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + kind, + job, + inputs, + outputs, + external_command_params, + } = self; + f.debug_struct("DynJob") + .field("kind", kind) + .field("job", job) + .field("inputs", inputs) + .field("outputs", outputs) + .field("external_command_params", external_command_params) + .finish() + } +} + +trait DynJobTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn kind_type_id(&self) -> TypeId; + fn kind(&self) -> DynJobKind; + fn inputs(&self) -> Interned<[JobItemName]>; + fn outputs(&self) -> Interned<[JobItemName]>; + fn external_command_params(&self) -> Option; + fn serialize_to_json_ascii(&self) -> serde_json::Result; + fn serialize_to_json_value(&self) -> serde_json::Result; + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; + #[track_caller] + fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob; +} + +impl DynJobTrait for DynJobInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn kind(&self) -> DynJobKind { + DynJobKind(self.kind.clone()) + } + + fn inputs(&self) -> Interned<[JobItemName]> { + self.inputs + } + + fn outputs(&self) -> Interned<[JobItemName]> { + self.outputs + } + + fn external_command_params(&self) -> Option { + 
self.external_command_params + } + + fn serialize_to_json_ascii(&self) -> serde_json::Result { + crate::util::serialize_to_json_ascii(&self.job) + } + + fn serialize_to_json_value(&self) -> serde_json::Result { + serde_json::to_value(&self.job) + } + + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + self.kind + .run(&self.job, inputs, params, global_params, acquired_job) + } + + #[track_caller] + fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob { + self.kind.base_job_dyn(&self.job, dependencies) + } +} + +#[derive(Clone, Debug)] +pub struct DynJob(Arc); + +impl DynJob { + pub fn from_arc(job_kind: Arc, job: K::Job) -> Self { + let inputs = job_kind.inputs(&job); + let outputs = job_kind.outputs(&job); + let external_command_params = job_kind.external_command_params(&job); + Self(Arc::new(DynJobInner { + kind: job_kind, + job, + inputs, + outputs, + external_command_params, + })) + } + pub fn new(job_kind: K, job: K::Job) -> Self { + Self::from_arc(Arc::new(job_kind), job) + } + pub fn kind_type_id(&self) -> TypeId { + self.0.kind_type_id() + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Job)> { + let DynJobInner { kind, job, .. } = self.0.as_any().downcast_ref()?; + Some((kind, job)) + } + pub fn downcast>(self) -> Result, Self> { + if self.kind_type_id() == TypeId::of::() { + let DynJobInner { kind, job, .. } = Arc::unwrap_or_clone( + self.0 + .as_arc_any() + .downcast::>() + .expect("already checked type"), + ); + Ok(JobAndKind { kind: *kind, job }) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobTrait::kind(&*self.0) + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::inputs(&*self.0) + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::outputs(&*self.0) + } + pub fn serialize_to_json_ascii(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_ascii(&*self.0) + } + pub fn serialize_to_json_value(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_value(&*self.0) + } + pub fn external_command_params(&self) -> Option { + DynJobTrait::external_command_params(&*self.0) + } + #[track_caller] + pub fn internal_command_params_with_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + let mut command_line = internal_program_prefix.to_vec(); + let command_line = match RunSingleJob::try_add_subcommand(platform, self, &mut command_line) + { + Ok(()) => { + command_line.extend_from_slice(extra_args); + Intern::intern_owned(command_line) + } + Err(e) => panic!("Serializing job {:?} failed: {e}", self.kind().name()), + }; + CommandParams { + command_line, + current_dir: None, + } + } + #[track_caller] + pub fn internal_command_params( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + self.internal_command_params_with_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + #[track_caller] + pub fn command_params_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + match self.external_command_params() { + Some(v) => v, + None => self.internal_command_params_with_program_prefix( + internal_program_prefix, + platform, + extra_args, + ), + } + } + #[track_caller] + pub fn command_params( + &self, + 
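// Editor's sketch (illustrative, not part of this patch): the `Any`-based downcasting that
// `DynJob::downcast` above relies on, shown standalone with a made-up trait and type.
use std::any::Any;
use std::sync::Arc;

trait Erased: Any + Send + Sync {
    fn as_any(&self) -> &dyn Any;
    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync>;
}

struct Concrete(u32);

impl Erased for Concrete {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
        self
    }
}

// returns the concrete Arc if the erased value is a `Concrete`, otherwise hands it back
fn downcast(erased: Arc<dyn Erased>) -> Result<Arc<Concrete>, Arc<dyn Erased>> {
    if erased.as_any().is::<Concrete>() {
        Ok(erased
            .as_arc_any()
            .downcast::<Concrete>()
            .expect("already checked type"))
    } else {
        Err(erased)
    }
}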
platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> CommandParams { + self.command_params_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + DynJobTrait::run(&*self.0, inputs, params, global_params, acquired_job) + } + #[track_caller] + pub fn base_job_dyn<'a>(&'a self, dependencies: &'a [DynJob]) -> &'a BaseJob { + DynJobTrait::base_job_dyn(&*self.0, dependencies) + } +} + +impl Eq for DynJob {} + +impl PartialEq for DynJob { + fn eq(&self, other: &Self) -> bool { + DynJobTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Hash for DynJob { + fn hash(&self, state: &mut H) { + DynJobTrait::hash_dyn(&*self.0, state); + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename = "DynJob")] +struct DynJobSerde { + kind: DynJobKind, + job: serde_json::Value, +} + +impl Serialize for DynJob { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + DynJobSerde { + kind: self.kind(), + job: self.serialize_to_json_value().map_err(S::Error::custom)?, + } + .serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJob { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let DynJobSerde { kind, job } = Deserialize::deserialize(deserializer)?; + kind.deserialize_job_from_json_value(&job) + .map_err(D::Error::custom) + } +} + +pub trait RunBuild: Sized { + fn main_without_platform(application_name: impl AsRef, make_params: F) + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + let application_name = application_name.as_ref(); + match Self::try_main_without_platform(application_name, make_params) { + Ok(()) => {} + Err(e) => { + let e = GlobalParams::new(Some(Self::command()), application_name) + .exit_if_clap_error(e); + eprintln!("{e:#}"); + std::process::exit(1); + } + } + } + fn try_main_without_platform( + application_name: impl AsRef, + make_params: F, + ) -> eyre::Result<()> + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + let args = Self::parse(); + let global_params = GlobalParams::new(Some(Self::command()), application_name); + args.clone() + .run_without_platform(|extra| make_params(args, extra), &global_params) + .map_err(|e| global_params.exit_if_clap_error(e)) + } + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result; + fn get_platform(&self) -> Option<&DynPlatform>; + fn main(application_name: impl AsRef, make_params: F) + where + Self: clap::Parser + Clone, + F: FnOnce(Self, DynPlatform, Extra) -> eyre::Result, + { + let application_name = application_name.as_ref(); + match Self::try_main(application_name, make_params) { + Ok(()) => {} + Err(e) => { + let e = GlobalParams::new(Some(Self::command()), application_name) + .exit_if_clap_error(e); + eprintln!("{e:#}"); + std::process::exit(1); + } + } + } + fn try_main(application_name: impl AsRef, make_params: F) -> eyre::Result<()> + where + Self: clap::Parser + Clone, + F: FnOnce(Self, DynPlatform, Extra) -> eyre::Result, + { + let args = Self::parse(); + let global_params = GlobalParams::new(Some(Self::command()), application_name); + let Some(platform) = args.get_platform().cloned() else { + return args.handle_missing_platform(&global_params); + }; + args.clone() + .run( + |platform, extra| 
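// Editor's note (illustrative, not part of this patch): `DynJob` serializes as a pair of the
// kind's registered name plus the kind-specific job as a nested JSON value, so that
// deserialization can look the kind back up (via `DynJobKind`) and let it decode the nested
// value with its own `Job` type.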
make_params(args, platform, extra), + platform, + &global_params, + ) + .map_err(|e| global_params.exit_if_clap_error(e)) + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + global_params + .clap_error( + clap::error::ErrorKind::MissingRequiredArgument, + "--platform is required", + ) + .exit(); + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, Extra) -> eyre::Result, + { + self.run_without_platform(|extra| make_params(platform, extra), global_params) + } +} + +impl RunBuild for JobArgsAndDependencies { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let params = make_params(NoArgs)?; + self.args_to_jobs(¶ms, global_params)? + .run_without_platform(|_| Ok(params), global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.base_job_args().platform.as_ref() + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, NoArgs) -> eyre::Result, + { + let params = make_params(platform.clone(), NoArgs)?; + self.args_to_jobs(¶ms, global_params)? + .run(|_, _| Ok(params), platform, global_params) + } +} + +impl RunBuild for JobAndDependencies { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let params = make_params(NoArgs)?; + let Self { job, dependencies } = self; + let mut jobs = vec![DynJob::from(job)]; + K::Dependencies::into_dyn_jobs_extend(dependencies, &mut jobs); + let mut job_graph = JobGraph::new(); + job_graph.add_jobs(jobs); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.base_job().platform() + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, NoArgs) -> eyre::Result, + { + let params = make_params(platform, NoArgs)?; + let Self { job, dependencies } = self; + let mut jobs = vec![DynJob::from(job)]; + K::Dependencies::into_dyn_jobs_extend(dependencies, &mut jobs); + let mut job_graph = JobGraph::new(); + job_graph.add_jobs(jobs); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct RunSingleJob { + pub platform: Option, + pub job: DynJob, + pub extra: Extra, +} + +impl RunSingleJob { + pub const SUBCOMMAND_NAME: &'static str = "run-single-job"; + fn try_add_subcommand( + platform: Option<&DynPlatform>, + job: &DynJob, + subcommand_line: &mut Vec>, + ) -> serde_json::Result<()> { + let mut json = job.serialize_to_json_ascii()?; + json.insert_str(0, "--json="); + subcommand_line.push(Self::SUBCOMMAND_NAME.intern().into()); + if let Some(platform) = platform { + subcommand_line.push( + format!("--platform={}", platform.name()) + .intern_deref() + .into(), + ); + } + subcommand_line.push( + format!("--name={}", job.kind().name()) + .intern_deref() + .into(), + ); + subcommand_line.push(json.intern_deref().into()); + Ok(()) + } +} + +impl TryFrom> for RunSingleJob { + type Error = clap::Error; + + fn try_from(value: RunSingleJobClap) -> Result { + let 
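// Editor's sketch (hypothetical usage, not part of this patch): roughly what a downstream
// build binary's `main` could look like on top of the `RunBuild` entry points above; the
// generic parameters, the `NoArgs` extra-args type, the closure signature, and the
// `make_job_params()` helper are all assumptions for illustration.
use fayalite::build::{BuildCli, NoArgs};

fn main() {
    BuildCli::<NoArgs>::main("my-design-build", |_cli, _platform, NoArgs| {
        // build the `JobParams` describing the design to compile (elided here)
        make_job_params()
    });
}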
RunSingleJobClap::RunSingleJob { + platform, + name: job_kind, + json, + extra, + } = value; + let name = job_kind.name(); + job_kind + .deserialize_job_from_json_str(&json) + .map_err(|e| { + clap::Error::raw( + clap::error::ErrorKind::ValueValidation, + format_args!("failed to parse job {name} from JSON: {e}"), + ) + }) + .map(|job| Self { + platform, + job, + extra, + }) + } +} + +#[derive(clap::Subcommand)] +enum RunSingleJobClap { + #[command(name = RunSingleJob::SUBCOMMAND_NAME, hide = true)] + RunSingleJob { + #[arg(long)] + platform: Option, + #[arg(long)] + name: DynJobKind, + #[arg(long)] + json: String, + #[command(flatten)] + extra: Extra, + }, +} + +impl clap::Subcommand for RunSingleJob { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn has_subcommand(name: &str) -> bool { + RunSingleJobClap::::has_subcommand(name) + } +} + +impl clap::FromArgMatches for RunSingleJob { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches(matches)?.try_into() + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches_mut(matches)?.try_into() + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + *self = Self::from_arg_matches_mut(matches)?; + Ok(()) + } +} + +impl RunBuild for RunSingleJob { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let params = make_params(self.extra)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([self.job]); + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.platform.as_ref() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Subcommand)] +pub enum Completions { + #[non_exhaustive] + Completions { + #[arg(default_value = Self::shell_str_from_env(), required = Self::shell_from_env().is_none())] + shell: clap_complete::aot::Shell, + }, +} + +impl Completions { + pub fn new(shell: clap_complete::aot::Shell) -> Self { + Self::Completions { shell } + } + pub fn from_env() -> Option { + Some(Self::Completions { + shell: Self::shell_from_env()?, + }) + } + fn shell_from_env() -> Option { + static SHELL: OnceLock> = OnceLock::new(); + *SHELL.get_or_init(clap_complete::aot::Shell::from_env) + } + fn shell_str_from_env() -> clap::builder::Resettable { + static SHELL_STR: OnceLock> = OnceLock::new(); + SHELL_STR + .get_or_init(|| Self::shell_from_env().map(|v| v.to_string())) + .as_deref() + .map(Into::into) + .into() + } +} + +impl RunBuild for Completions { + fn run_without_platform( + self, + _make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let Self::Completions { shell } = self; + let Some(cmd) = global_params.top_level_cmd() else { + eyre::bail!("completions command requires GlobalParams::top_level_cmd() to be Some"); + }; + let bin_name = cmd.get_bin_name().map(str::intern).unwrap_or_else(|| { + program_name_for_internal_jobs() + .to_interned_str() + .expect("program name is invalid UTF-8") + 
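// Editor's note (illustrative, not part of this patch): `RunSingleJob::try_add_subcommand`
// above emits an argv tail of roughly the form
//     run-single-job --platform=<platform-name> --name=<job-kind-name> --json=<job as JSON>
// (the --platform argument only when a platform is set), which the hidden `RunSingleJobClap`
// subcommand parses back into a `RunSingleJob` on the child-process side.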
}); + clap_complete::aot::generate( + shell, + &mut cmd.clone(), + &*bin_name, + &mut std::io::BufWriter::new(std::io::stdout().lock()), + ); + Ok(()) + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + self.run_without_platform(|_| unreachable!(), global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + None + } +} + +#[derive( + clap::Args, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Debug, + Default, + Serialize, + Deserialize, +)] +pub struct NoArgs; + +impl ToArgs for NoArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Parser)] +pub enum BuildCli { + #[clap(flatten)] + Job(AnyJobSubcommand), + #[clap(flatten)] + RunSingleJob(RunSingleJob), + #[clap(flatten)] + Completions(Completions), + #[cfg(unix)] + #[clap(flatten)] + CreateUnixShellScript(CreateUnixShellScript), +} + +impl RunBuild for BuildCli { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + match self { + BuildCli::Job(v) => v.run_without_platform(make_params, global_params), + BuildCli::RunSingleJob(v) => v.run_without_platform(make_params, global_params), + BuildCli::Completions(v) => { + v.run_without_platform(|NoArgs {}| unreachable!(), global_params) + } + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => { + v.run_without_platform(make_params, global_params) + } + } + } + fn handle_missing_platform(self, global_params: &GlobalParams) -> eyre::Result<()> { + match self { + BuildCli::Job(v) => v.handle_missing_platform(global_params), + BuildCli::RunSingleJob(v) => v.handle_missing_platform(global_params), + BuildCli::Completions(v) => v.handle_missing_platform(global_params), + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.handle_missing_platform(global_params), + } + } + fn get_platform(&self) -> Option<&DynPlatform> { + match self { + BuildCli::Job(v) => v.get_platform(), + BuildCli::RunSingleJob(v) => v.get_platform(), + BuildCli::Completions(v) => v.get_platform(), + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.get_platform(), + } + } + fn run( + self, + make_params: F, + platform: DynPlatform, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(DynPlatform, Extra) -> eyre::Result, + { + match self { + BuildCli::Job(v) => v.run(make_params, platform, global_params), + BuildCli::RunSingleJob(v) => v.run(make_params, platform, global_params), + BuildCli::Completions(v) => { + v.run(|_, NoArgs {}| unreachable!(), platform, global_params) + } + #[cfg(unix)] + BuildCli::CreateUnixShellScript(v) => v.run(make_params, platform, global_params), + } + } +} + +#[cfg(unix)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Subcommand)] +enum CreateUnixShellScriptInner { + CreateUnixShellScript { + #[arg(name = "i-know-this-is-incomplete", long, required = true, action = ArgAction::SetTrue)] + _incomplete: (), + #[command(subcommand)] + inner: AnyJobSubcommand, + }, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct CreateUnixShellScript(CreateUnixShellScriptInner); + +impl RunBuild for CreateUnixShellScript { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let platform = self.get_platform().cloned(); + let CreateUnixShellScriptInner::CreateUnixShellScript { + _incomplete: (), + inner: 
+ AnyJobSubcommand { + args, + dependencies_args, + extra, + }, + } = self.0; + let extra_args = extra.to_interned_args_vec(); + let params = make_params(extra)?; + let bin_name = global_params + .top_level_cmd() + .and_then(clap::Command::get_bin_name) + .map(|v| OsStr::new(v).intern()); + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms, global_params)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); + std::io::stdout().write_all( + job_graph + .to_unix_shell_script_with_internal_program_prefix( + &[bin_name.unwrap_or_else(|| program_name_for_internal_jobs())], + platform.as_ref(), + &extra_args, + ) + .as_bytes(), + )?; + Ok(()) + } + fn get_platform(&self) -> Option<&DynPlatform> { + let CreateUnixShellScriptInner::CreateUnixShellScript { inner, .. } = &self.0; + inner.get_platform() + } +} + +impl clap::FromArgMatches for CreateUnixShellScript { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches(matches).map(Self) + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches_mut(matches).map(Self) + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + self.0.update_from_arg_matches(matches) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> Result<(), clap::Error> { + self.0.update_from_arg_matches_mut(matches) + } +} + +#[cfg(unix)] +impl clap::Subcommand for CreateUnixShellScript { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands_for_update(cmd) + } + + fn has_subcommand(name: &str) -> bool { + CreateUnixShellScriptInner::::has_subcommand(name) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct AnyJobSubcommand { + pub args: DynJobArgs, + pub dependencies_args: Vec, + pub extra: Extra, +} + +impl AnyJobSubcommand { + pub fn from_subcommand_arg_matches( + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + let dependencies = job_kind.dependencies_kinds(); + let dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + Ok(Self { + args: job_kind.clone().from_arg_matches(matches)?, + dependencies_args, + extra: Extra::from_arg_matches_mut(matches)?, + }) + } + pub fn update_from_subcommand_arg_matches( + &mut self, + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + let Self { + args, + dependencies_args, + extra, + } = self; + if *job_kind == args.kind() { + for dependency in dependencies_args { + dependency.update_from_arg_matches(matches)?; + } + args.update_from_arg_matches(matches)?; + } else { + let dependencies = job_kind.dependencies_kinds(); + let new_dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + *args = job_kind.clone().from_arg_matches(matches)?; + *dependencies_args = new_dependencies_args; + } + extra.update_from_arg_matches_mut(matches) + } +} + +impl clap::Subcommand for AnyJobSubcommand { + fn augment_subcommands(mut cmd: clap::Command) -> clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = 
cmd.subcommand(Extra::augment_args(job_kind.make_subcommand())); + } + cmd + } + + fn augment_subcommands_for_update(mut cmd: clap::Command) -> clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = cmd.subcommand(Extra::augment_args_for_update( + job_kind.make_subcommand_for_update(), + )); + } + cmd + } + + fn has_subcommand(name: &str) -> bool { + registry::JobKindRegistrySnapshot::get() + .get_by_name(name) + .is_some() + } +} + +impl clap::FromArgMatches for AnyJobSubcommand { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + Self::from_arg_matches_mut(&mut matches.clone()) + } + + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + Self::from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + Self::update_from_arg_matches_mut(self, &mut matches.clone()) + } + + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + self.update_from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } +} + +impl RunBuild for AnyJobSubcommand { + fn run_without_platform( + self, + make_params: F, + global_params: &GlobalParams, + ) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let Self { + args, + dependencies_args, + extra, + } = self; + let params = make_params(extra)?; + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms, global_params)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms, global_params) + } + fn get_platform(&self) -> Option<&DynPlatform> { + self.args + .base_job_args_dyn(&self.dependencies_args) + .platform + .as_ref() + } +} + +pub fn program_name_for_internal_jobs() -> Interned { + static PROGRAM_NAME: OnceLock> = OnceLock::new(); + *PROGRAM_NAME.get_or_init(|| { + std::env::args_os() + .next() + .expect("can't get program name") + .intern_deref() + }) +} + +#[derive(clap::Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "BaseJob")] +#[non_exhaustive] +pub struct BaseJobArgs { + /// the directory to put the generated main output file and associated files in + #[arg(short, long, value_hint = clap::ValueHint::DirPath)] + pub output: Option, + #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")] + pub keep_temp_dir: bool, + /// the stem of 
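// Editor's sketch (simplified, plain clap, not part of this patch): the shape of
// `AnyJobSubcommand::augment_subcommands` above — subcommands are generated at runtime from a
// registry snapshot instead of being listed statically in a derive.
fn augment(mut cmd: clap::Command, registered_names: &[&'static str]) -> clap::Command {
    for name in registered_names {
        cmd = cmd.subcommand(clap::Command::new(*name));
    }
    cmd
}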
the generated main output file, e.g. to get foo.v, pass --file-stem=foo + #[arg(long)] + pub file_stem: Option, + /// run commands even if their results are already cached + #[arg(long, env = Self::RUN_EVEN_IF_CACHED_ENV_NAME)] + pub run_even_if_cached: bool, + /// platform + #[arg(long)] + pub platform: Option, +} + +impl BaseJobArgs { + pub const RUN_EVEN_IF_CACHED_ENV_NAME: &'static str = "FAYALITE_RUN_EVEN_IF_CACHED"; + pub fn from_output_dir_and_env(output: PathBuf, platform: Option) -> Self { + Self { + output: Some(output), + keep_temp_dir: false, + file_stem: None, + run_even_if_cached: std::env::var_os(Self::RUN_EVEN_IF_CACHED_ENV_NAME).is_some(), + platform, + } + } +} + +impl ToArgs for BaseJobArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + output, + keep_temp_dir, + file_stem, + run_even_if_cached, + platform, + } = self; + if let Some(output) = output { + args.write_long_option_eq("output", output); + } + if *keep_temp_dir { + args.write_arg("--keep-temp-dir"); + } + if let Some(file_stem) = file_stem { + args.write_long_option_eq("file-stem", file_stem); + } + if *run_even_if_cached { + args.write_arg("--run-even-if-cached"); + } + if let Some(platform) = platform { + args.write_long_option_eq("platform", platform.name()); + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BaseJob { + output_dir: Interned, + #[serde(skip)] + temp_dir: Option>, + file_stem: Interned, + run_even_if_cached: bool, + platform: Option, +} + +impl Hash for BaseJob { + fn hash(&self, state: &mut H) { + let Self { + output_dir, + temp_dir: _, + file_stem, + run_even_if_cached, + platform, + } = self; + output_dir.hash(state); + file_stem.hash(state); + run_even_if_cached.hash(state); + platform.hash(state); + } +} + +impl Eq for BaseJob {} + +impl PartialEq for BaseJob { + fn eq(&self, other: &Self) -> bool { + let Self { + output_dir, + temp_dir: _, + file_stem, + run_even_if_cached, + ref platform, + } = *self; + output_dir == other.output_dir + && file_stem == other.file_stem + && run_even_if_cached == other.run_even_if_cached + && *platform == other.platform + } +} + +impl BaseJob { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn temp_dir(&self) -> Option<&Arc> { + self.temp_dir.as_ref() + } + pub fn file_stem(&self) -> Interned { + self.file_stem + } + pub fn file_with_ext(&self, ext: impl AsRef) -> Interned { + let mut retval = self.output_dir().join(self.file_stem()); + retval.set_extension(ext); + retval.intern_deref() + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } + pub fn platform(&self) -> Option<&DynPlatform> { + self.platform.as_ref() + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] +pub struct BaseJobKind; + +impl JobKindHelper for BaseJobKind { + fn base_job<'a>( + self, + job: &'a ::Job, + _dependencies: &'a <::Dependencies as JobDependencies>::JobsAndKinds, + ) -> &'a BaseJob { + job + } + fn base_job_args<'a>( + self, + args: &'a ::Args, + _dependencies: &'a <::Dependencies as JobDependencies>::KindsAndArgs, + ) -> &'a BaseJobArgs { + args + } + #[track_caller] + fn base_job_args_dyn<'a>( + self, + args: &'a ::Args, + dependencies_args: &'a [DynJobArgs], + ) -> &'a BaseJobArgs { + let [] = dependencies_args else { + panic!("wrong number of dependencies"); + }; + args + } + #[track_caller] + fn base_job_dyn<'a>( + self, + job: &'a ::Job, + dependencies: &'a [DynJob], + ) -> &'a BaseJob { + let [] = dependencies else { + panic!("wrong number of 
dependencies"); + }; + job + } +} + +impl JobKind for BaseJobKind { + type Args = BaseJobArgs; + type Job = BaseJob; + type Dependencies = (); + + fn dependencies(self) -> Self::Dependencies { + () + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + _global_params: &GlobalParams, + ) -> eyre::Result> { + let BaseJobArgs { + output, + keep_temp_dir, + file_stem, + run_even_if_cached, + platform, + } = args.args.args; + let (output_dir, temp_dir) = if let Some(output) = output { + (Intern::intern_owned(output), None) + } else { + // we create the temp dir here rather than in run so other + // jobs can have their paths based on the chosen temp dir + let temp_dir = TempDir::new()?; + let output_dir = temp_dir.path().intern(); + let temp_dir = if keep_temp_dir { + // use TempDir::into_path() to no longer automatically delete the temp dir + let temp_dir_path = temp_dir.into_path(); + println!("created temporary directory: {}", temp_dir_path.display()); + None + } else { + Some(Arc::new(temp_dir)) + }; + (output_dir, temp_dir) + }; + let file_stem = file_stem + .map(Intern::intern_deref) + .unwrap_or(params.main_module().name().into()); + Ok(JobAndDependencies { + job: JobAndKind { + kind: BaseJobKind, + job: BaseJob { + output_dir, + temp_dir, + file_stem, + run_even_if_cached, + platform, + }, + }, + dependencies: (), + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + Interned::default() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.output_dir, + }] + .intern_slice() + } + + fn name(self) -> Interned { + "base-job".intern() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(CommandParams { + command_line: [ + "mkdir".intern().into(), + "-p".intern().into(), + "--".intern().into(), + job.output_dir.into(), + ] + .intern_slice(), + current_dir: None, + }) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + let [] = inputs else { + panic!("invalid inputs for BaseJob"); + }; + std::fs::create_dir_all(&*job.output_dir)?; + Ok(vec![JobItem::Path { + path: job.output_dir, + }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +pub trait GetJob { + fn get_job(this: &Self) -> &J; +} + +impl> GetJob for &'_ T { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +impl> GetJob for &'_ mut T { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +impl> GetJob for Box { + fn get_job(this: &Self) -> &J { + T::get_job(this) + } +} + +pub struct GetJobPositionDependencies(PhantomData); + +impl Default for GetJobPositionDependencies { + fn default() -> Self { + Self(Default::default()) + } +} + +impl fmt::Debug for GetJobPositionDependencies { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "GetJobPositionDependencies<{}>", + std::any::type_name::() + ) + } +} + +impl Hash for GetJobPositionDependencies { + fn hash(&self, _state: &mut H) {} +} + +impl Ord for GetJobPositionDependencies { + fn cmp(&self, _other: &Self) -> Ordering { + Ordering::Equal + } +} + +impl PartialOrd for GetJobPositionDependencies { + fn partial_cmp(&self, _other: &Self) -> Option { + Some(Ordering::Equal) + } +} + +impl Eq for GetJobPositionDependencies {} + +impl PartialEq for GetJobPositionDependencies { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Clone for GetJobPositionDependencies { + fn 
clone(&self) -> Self { + Self(PhantomData) + } +} + +impl Copy for GetJobPositionDependencies {} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct GetJobPositionJob; + +impl>>> + GetJob> for JobAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobAndDependencies { + fn get_job(this: &Self) -> &K::Job { + &this.job.job + } +} + +impl>>> + GetJob> for JobArgsAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobArgsAndDependencies { + fn get_job(this: &Self) -> &K::Args { + &this.args.args + } +} + +impl>> + GetJob> for JobKindAndDependencies +{ + fn get_job(this: &Self) -> &J { + GetJob::get_job(&this.dependencies) + } +} + +impl GetJob for JobKindAndDependencies { + fn get_job(this: &Self) -> &K { + &this.kind + } +} diff --git a/crates/fayalite/src/build/external.rs b/crates/fayalite/src/build/external.rs new file mode 100644 index 0000000..1a90414 --- /dev/null +++ b/crates/fayalite/src/build/external.rs @@ -0,0 +1,1177 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + ArgsWriter, CommandParams, GlobalParams, JobAndDependencies, JobAndKind, + JobArgsAndDependencies, JobDependencies, JobDependenciesHasBase, JobItem, JobItemName, + JobKind, JobKindAndArgs, JobParams, ToArgs, WriteArgs, + }, + intern::{Intern, Interned}, + util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8}, +}; +use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD}; +use clap::builder::OsStringValueParser; +use eyre::{Context, ensure, eyre}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error}, +}; +use std::{ + borrow::Cow, + collections::BTreeMap, + ffi::{OsStr, OsString}, + fmt, + hash::{Hash, Hasher}, + io::Write, + marker::PhantomData, + path::{Path, PathBuf}, + process::ExitStatus, + sync::OnceLock, +}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum ExternalJobCacheVersion { + /// not used, used to be for `FormalCacheVersion` + V1, + V2, +} + +impl ExternalJobCacheVersion { + pub const CURRENT: Self = Self::V2; +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[non_exhaustive] +pub enum MaybeUtf8 { + Utf8(String), + Binary(Vec), +} + +impl MaybeUtf8 { + pub fn as_bytes(&self) -> &[u8] { + match self { + MaybeUtf8::Utf8(v) => v.as_bytes(), + MaybeUtf8::Binary(v) => v, + } + } + pub fn as_os_str(&self) -> &OsStr { + #![allow(unreachable_code)] + #[cfg(unix)] + { + return std::os::unix::ffi::OsStrExt::from_bytes(self.as_bytes()); + } + #[cfg(target_os = "wasi")] + { + return std::os::wasi::ffi::OsStrExt::from_bytes(self.as_bytes()); + } + // implementing WTF-8 is too much of a pain so don't have a special case for windows + if let Ok(s) = str::from_utf8(self.as_bytes()) { + return OsStr::new(s); + } + panic!("invalid UTF-8 conversion to OsStr is not implemented on this platform"); + } + pub fn as_path(&self) -> &Path { + Path::new(self.as_os_str()) + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename = "MaybeUtf8")] +enum MaybeUtf8Serde<'a> { + Utf8(Cow<'a, str>), + Binary(String), +} + +impl<'de> Deserialize<'de> for MaybeUtf8 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(match MaybeUtf8Serde::deserialize(deserializer)? 
{ + MaybeUtf8Serde::Utf8(v) => Self::Utf8(v.into_owned()), + MaybeUtf8Serde::Binary(v) => BASE64_URL_SAFE_NO_PAD + .decode(&*v) + .map_err(D::Error::custom)? + .into(), + }) + } +} + +impl Serialize for MaybeUtf8 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + MaybeUtf8::Utf8(v) => MaybeUtf8Serde::Utf8(Cow::Borrowed(v)), + MaybeUtf8::Binary(v) => MaybeUtf8Serde::Binary(BASE64_URL_SAFE_NO_PAD.encode(v)), + } + .serialize(serializer) + } +} + +impl From> for MaybeUtf8 { + fn from(value: Vec) -> Self { + match String::from_utf8(value) { + Ok(value) => Self::Utf8(value), + Err(e) => Self::Binary(e.into_bytes()), + } + } +} + +impl From for MaybeUtf8 { + fn from(value: String) -> Self { + Self::Utf8(value) + } +} + +impl From for MaybeUtf8 { + fn from(value: PathBuf) -> Self { + Self::from(value.into_os_string().into_encoded_bytes()) + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Serialize, Deserialize)] +#[serde(rename = "File")] +pub struct ExternalJobCacheV2File<'a> { + pub name: MaybeUtf8, + pub contents: Cow<'a, MaybeUtf8>, +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] +pub struct ExternalJobCacheV2Files(pub BTreeMap); + +impl Serialize for ExternalJobCacheV2Files { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.collect_seq( + self.0 + .iter() + .map(|(name, contents)| ExternalJobCacheV2File { + name: name.clone().into(), + contents: Cow::Borrowed(contents), + }), + ) + } +} + +impl<'de> Deserialize<'de> for ExternalJobCacheV2Files { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(Self( + Vec::deserialize(deserializer)? + .into_iter() + .map(|ExternalJobCacheV2File { name, contents }| { + (name.as_path().to_path_buf(), contents.into_owned()) + }) + .collect(), + )) + } +} + +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[serde(rename = "ExternalJobCache")] +pub struct ExternalJobCacheV2 { + pub version: ExternalJobCacheVersion, + pub inputs_hash: blake3::Hash, + pub stdout_stderr: String, + pub result: Result, +} + +impl ExternalJobCacheV2 { + fn read_from_file(cache_json_path: Interned) -> eyre::Result { + let cache_str = std::fs::read_to_string(&*cache_json_path) + .wrap_err_with(|| format!("can't read {cache_json_path:?}"))?; + serde_json::from_str(&cache_str) + .wrap_err_with(|| format!("can't decode {cache_json_path:?}")) + } + fn write_to_file(&self, cache_json_path: Interned) -> eyre::Result<()> { + let cache_str = serde_json::to_string_pretty(&self).expect("serialization can't fail"); + std::fs::write(&*cache_json_path, cache_str) + .wrap_err_with(|| format!("can't write {cache_json_path:?}")) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ExternalJobCaching { + cache_json_path: Interned, + run_even_if_cached: bool, +} + +#[derive(Default)] +struct JobCacheHasher(blake3::Hasher); + +impl JobCacheHasher { + fn hash_size(&mut self, size: usize) { + self.0.update(&u64::to_le_bytes( + size.try_into().expect("size should fit in u64"), + )); + } + fn hash_sized_bytes(&mut self, bytes: &[u8]) { + self.hash_size(bytes.len()); + self.0.update(bytes); + } + fn hash_sized_os_str(&mut self, s: &OsStr) { + self.hash_sized_bytes(s.as_encoded_bytes()); + } + fn hash_iter>( + &mut self, + iter: I, + mut f: F, + ) { + let iter = iter.into_iter(); + self.hash_size(iter.len()); + iter.for_each(|item| f(self, item)); + } + fn try_hash_iter< + F: FnMut(&mut Self, I::Item) -> Result<(), E>, + E, + I: 
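// Editor's sketch (illustrative, not part of this patch): how `MaybeUtf8` above looks in the
// cache JSON — valid UTF-8 is stored as text, anything else as URL-safe, unpadded base64.
fn main() -> serde_json::Result<()> {
    println!("{}", serde_json::to_string(&MaybeUtf8::from(String::from("hello")))?); // {"Utf8":"hello"}
    println!("{}", serde_json::to_string(&MaybeUtf8::from(vec![0xffu8, 0x00]))?); // {"Binary":"_wA"}
    Ok(())
}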
IntoIterator, + >( + &mut self, + iter: I, + mut f: F, + ) -> Result<(), E> { + let mut iter = iter.into_iter(); + self.hash_size(iter.len()); + iter.try_for_each(|item| f(self, item)) + } +} + +fn write_file_atomically_no_clobber C, C: AsRef<[u8]>>( + path: impl AsRef, + containing_dir: impl AsRef, + contents: F, +) -> std::io::Result<()> { + let path = path.as_ref(); + let containing_dir = containing_dir.as_ref(); + if !matches!(std::fs::exists(&path), Ok(true)) { + // use File::create_new rather than tempfile's code to get normal file permissions rather than mode 600 on Unix. + let mut file = tempfile::Builder::new() + .make_in(containing_dir, |path| std::fs::File::create_new(path))?; + file.write_all(contents().as_ref())?; // write all in one operation to avoid a bunch of tiny writes + file.into_temp_path().persist_noclobber(path)?; + } + Ok(()) +} + +impl ExternalJobCaching { + pub fn get_cache_dir_from_output_dir(output_dir: impl AsRef) -> PathBuf { + output_dir.as_ref().join(".fayalite-job-cache") + } + pub fn make_cache_dir( + cache_dir: impl AsRef, + application_name: &str, + ) -> std::io::Result<()> { + let cache_dir = cache_dir.as_ref(); + std::fs::create_dir_all(cache_dir)?; + write_file_atomically_no_clobber(cache_dir.join("CACHEDIR.TAG"), cache_dir, || { + format!( + "Signature: 8a477f597d28d172789f06886806bc55\n\ + # This file is a cache directory tag created by {application_name}.\n\ + # For information about cache directory tags see https://bford.info/cachedir/\n" + ) + })?; + write_file_atomically_no_clobber(cache_dir.join(".gitignore"), cache_dir, || { + format!( + "# This is a cache directory created by {application_name}.\n\ + # ignore all files\n\ + *\n" + ) + }) + } + pub fn new( + output_dir: impl AsRef, + application_name: &str, + json_file_stem: impl AsRef, + run_even_if_cached: bool, + ) -> std::io::Result { + let cache_dir = Self::get_cache_dir_from_output_dir(output_dir); + Self::make_cache_dir(&cache_dir, application_name)?; + let mut cache_json_path = cache_dir; + cache_json_path.push(json_file_stem.as_ref()); + cache_json_path.set_extension("json"); + Ok(Self { + cache_json_path: Path::intern_owned(cache_json_path), + run_even_if_cached, + }) + } + fn write_stdout_stderr(stdout_stderr: &str) { + if stdout_stderr == "" { + return; + } + // use print! 
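// Editor's sketch (simplified, not part of this patch): the length-prefixed hashing scheme
// `JobCacheHasher` above uses, which keeps e.g. ["ab", "c"] and ["a", "bc"] from producing
// the same inputs hash.
fn hash_sized_bytes(hasher: &mut blake3::Hasher, bytes: &[u8]) {
    hasher.update(&u64::to_le_bytes(bytes.len() as u64)); // hash the length first...
    hasher.update(bytes); // ...then the bytes themselves
}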
so output goes to Rust test output capture + if stdout_stderr.ends_with('\n') { + print!("{stdout_stderr}"); + } else { + println!("{stdout_stderr}"); + } + } + /// returns `Err(_)` if reading the cache failed, otherwise returns `Ok(_)` with the results from the cache + fn run_from_cache( + self, + inputs_hash: blake3::Hash, + output_file_paths: impl IntoIterator>, + ) -> Result, ()> { + if self.run_even_if_cached { + return Err(()); + } + let Ok(ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash: cached_inputs_hash, + stdout_stderr, + result, + }) = ExternalJobCacheV2::read_from_file(self.cache_json_path) + else { + return Err(()); + }; + if inputs_hash != cached_inputs_hash { + return Err(()); + } + match result { + Ok(outputs) => { + for output_file_path in output_file_paths { + let Some(output_data) = outputs.0.get(&*output_file_path) else { + if let Ok(true) = std::fs::exists(&*output_file_path) { + // assume the existing file is the correct one + continue; + } + return Err(()); + }; + let Ok(()) = std::fs::write(&*output_file_path, output_data.as_bytes()) else { + return Err(()); + }; + } + Self::write_stdout_stderr(&stdout_stderr); + Ok(Ok(())) + } + Err(error) => { + Self::write_stdout_stderr(&stdout_stderr); + Ok(Err(error)) + } + } + } + fn make_command( + command_line: Interned<[Interned]>, + ) -> eyre::Result { + ensure!(!command_line.is_empty(), "command line must not be empty"); + let mut cmd = std::process::Command::new(&*command_line[0]); + cmd.args(command_line[1..].iter().map(|arg| &**arg)) + .stdin(std::process::Stdio::null()); + Ok(cmd) + } + pub fn run( + self, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + exit_status_to_error: impl FnOnce(ExitStatus) -> eyre::Report, + ) -> eyre::Result<()> + where + F: FnOnce(std::process::Command) -> eyre::Result>, + { + let mut hasher = JobCacheHasher::default(); + hasher.hash_iter(command_line.iter(), |hasher, arg| { + hasher.hash_sized_os_str(arg) + }); + let mut input_file_paths = + Vec::<&Path>::from_iter(input_file_paths.into_iter().map(Interned::into_inner)); + input_file_paths.sort_unstable(); + input_file_paths.dedup(); + hasher.try_hash_iter( + &input_file_paths, + |hasher, input_file_path| -> eyre::Result<()> { + hasher.hash_sized_os_str(input_file_path.as_ref()); + hasher.hash_sized_bytes( + &std::fs::read(input_file_path).wrap_err_with(|| { + format!("can't read job input file: {input_file_path:?}") + })?, + ); + Ok(()) + }, + )?; + let inputs_hash = hasher.0.finalize(); + match self.run_from_cache(inputs_hash, output_file_paths.clone()) { + Ok(result) => return result.map_err(|e| eyre!(e)), + Err(()) => {} + } + let (pipe_reader, stdout, stderr) = std::io::pipe() + .and_then(|(r, w)| Ok((r, w.try_clone()?, w))) + .wrap_err_with(|| format!("when trying to create a pipe to run: {command_line:?}"))?; + let mut cmd = Self::make_command(command_line)?; + cmd.stdout(stdout).stderr(stderr); + let mut stdout_stderr = String::new(); + let result = std::thread::scope(|scope| { + std::thread::Builder::new() + .name(format!("stdout:{}", command_line[0].display())) + .spawn_scoped(scope, || { + let _ = streaming_read_utf8(std::io::BufReader::new(pipe_reader), |s| { + stdout_stderr.push_str(s); + // use print! 
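// Editor's note (illustrative, not part of this patch): on disk, the caching above gives each
// output directory a layout along the lines of
//     <output_dir>/.fayalite-job-cache/CACHEDIR.TAG
//     <output_dir>/.fayalite-job-cache/.gitignore
//     <output_dir>/.fayalite-job-cache/<job-kind-name>.json
// where each <job-kind-name>.json records the inputs hash, the captured stdout/stderr, and the
// output file contents, so a re-run with unchanged inputs can skip the external command.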
so output goes to Rust test output capture + print!("{s}"); + std::io::Result::Ok(()) + }); + if !stdout_stderr.is_empty() && !stdout_stderr.ends_with('\n') { + println!(); + } + }) + .expect("spawn shouldn't fail"); + run_fn(cmd) + })?; + if let Err(exit_status) = result { + // check if the user may have terminated it or something, don't cache the failure + let user_maybe_terminated; + #[cfg(unix)] + { + user_maybe_terminated = std::os::unix::process::ExitStatusExt::signal(&exit_status) + .is_some() + || exit_status.code().is_none_or(|code| code > 1); + } + #[cfg(not(unix))] + { + user_maybe_terminated = !exit_status.success(); + } + if user_maybe_terminated { + let _ = std::fs::remove_file(self.cache_json_path); + return Err(exit_status_to_error(exit_status)); + } + } + let result = result.map_err(exit_status_to_error); + ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash, + stdout_stderr, + result: match &result { + Ok(()) => Ok(ExternalJobCacheV2Files(Result::from_iter( + output_file_paths.into_iter().map( + |output_file_path: Interned| -> eyre::Result<_> { + let output_file_path = &*output_file_path; + Ok(( + PathBuf::from(output_file_path), + MaybeUtf8::from(std::fs::read(output_file_path).wrap_err_with( + || format!("can't read job output file: {output_file_path:?}"), + )?), + )) + }, + ), + )?)), + Err(e) => Err(format!("{e:#}")), + }, + } + .write_to_file(self.cache_json_path)?; + result + } + pub fn run_maybe_cached( + this: Option, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + exit_status_to_error: impl FnOnce(ExitStatus) -> eyre::Report, + ) -> eyre::Result<()> + where + F: FnOnce(std::process::Command) -> eyre::Result>, + { + match this { + Some(this) => this.run( + command_line, + input_file_paths, + output_file_paths, + run_fn, + exit_status_to_error, + ), + None => run_fn(Self::make_command(command_line)?)?.map_err(exit_status_to_error), + } + } +} + +#[derive(Clone, Eq, Hash)] +pub struct ExternalCommandJobKind(PhantomData); + +impl fmt::Debug for ExternalCommandJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ExternalCommandJobKind<{}>", std::any::type_name::()) + } +} + +impl PartialEq for ExternalCommandJobKind { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Ord for ExternalCommandJobKind { + fn cmp(&self, _other: &Self) -> std::cmp::Ordering { + std::cmp::Ordering::Equal + } +} + +impl PartialOrd for ExternalCommandJobKind { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Default for ExternalCommandJobKind { + fn default() -> Self { + Self(PhantomData) + } +} + +impl Copy for ExternalCommandJobKind {} + +impl ExternalCommandJobKind { + pub const fn new() -> Self { + Self(PhantomData) + } +} + +#[derive(Copy, Clone)] +struct ExternalProgramPathValueParser(ExternalProgram); + +fn parse_which_result( + which_result: which::Result, + program_name: impl Into, + program_path_arg_name: impl FnOnce() -> String, +) -> Result, ResolveProgramPathError> { + let which_result = match which_result { + Ok(v) => v, + Err(inner) => { + return Err(ResolveProgramPathError { + inner, + program_name: program_name.into(), + program_path_arg_name: program_path_arg_name(), + }); + } + }; + Ok(which_result.intern_deref()) +} + +impl clap::builder::TypedValueParser for ExternalProgramPathValueParser { + type Value = Interned; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: 
Option<&clap::Arg>, + value: &OsStr, + ) -> clap::error::Result { + let program_path_arg_name = self.0.program_path_arg_name; + OsStringValueParser::new() + .try_map(move |program_name| { + parse_which_result(which::which(&program_name), program_name, || { + program_path_arg_name.into() + }) + }) + .parse_ref(cmd, arg, value) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +#[group(id = T::args_group_id())] +#[non_exhaustive] +pub struct ExternalCommandArgs { + #[command(flatten)] + pub program_path: ExternalProgramPath, + #[arg( + name = Interned::into_inner(T::run_even_if_cached_arg_name()), + long = T::run_even_if_cached_arg_name(), + )] + pub run_even_if_cached: bool, + #[command(flatten)] + pub additional_args: T::AdditionalArgs, +} + +#[derive(Clone, Debug)] +pub struct ResolveProgramPathError { + inner: which::Error, + program_name: OsString, + program_path_arg_name: String, +} + +impl fmt::Display for ResolveProgramPathError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + inner, + program_name, + program_path_arg_name, + } = self; + write!( + f, + "{program_path_arg_name}: failed to resolve {program_name:?} to a valid program: {inner}", + ) + } +} + +impl std::error::Error for ResolveProgramPathError {} + +pub fn resolve_program_path( + program_name: Option<&OsStr>, + default_program_name: impl AsRef, + program_path_env_var_name: Option<&OsStr>, +) -> Result, ResolveProgramPathError> { + let default_program_name = default_program_name.as_ref(); + let owned_program_name; + let program_name = if let Some(program_name) = program_name { + program_name + } else if let Some(v) = program_path_env_var_name.and_then(std::env::var_os) { + owned_program_name = v; + &owned_program_name + } else { + default_program_name + }; + parse_which_result(which::which(program_name), program_name, || { + default_program_name.display().to_string() + }) +} + +impl ExternalCommandArgs { + pub fn with_resolved_program_path( + program_path: Interned, + additional_args: T::AdditionalArgs, + ) -> Self { + Self::new( + ExternalProgramPath::with_resolved_program_path(program_path), + additional_args, + ) + } + pub fn new( + program_path: ExternalProgramPath, + additional_args: T::AdditionalArgs, + ) -> Self { + Self { + program_path, + run_even_if_cached: false, + additional_args, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + additional_args: T::AdditionalArgs, + ) -> Result { + Ok(Self::new( + ExternalProgramPath::resolve_program_path(program_name)?, + additional_args, + )) + } +} + +impl ToArgs for ExternalCommandArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + program_path, + run_even_if_cached, + ref additional_args, + } = *self; + program_path.to_args(args); + if run_even_if_cached { + args.write_display_arg(format_args!("--{}", T::run_even_if_cached_arg_name())); + } + additional_args.to_args(args); + } +} + +#[derive(Copy, Clone)] +struct ExternalCommandJobParams { + command_params: CommandParams, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + output_paths: Interned<[Interned]>, +} + +impl ExternalCommandJobParams { + fn new(job: &ExternalCommandJob) -> Self { + let output_paths = T::output_paths(job); + let mut command_line = ArgsWriter(vec![job.program_path.as_interned_os_str()]); + T::command_line_args(job, &mut command_line); + Self { + command_params: CommandParams { + command_line: Intern::intern_owned(command_line.0), + current_dir: T::current_dir(job), + }, + inputs: 
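// Editor's sketch (simplified, not part of this patch): the lookup order implemented by
// `resolve_program_path` above — explicit value, then the program's environment variable,
// then the default program name — resolved to an absolute path with the `which` crate.
// The "MY_TOOL" / "my-tool" names are placeholders.
fn find_program(explicit: Option<&std::ffi::OsStr>) -> which::Result<std::path::PathBuf> {
    let name = explicit
        .map(|s| s.to_os_string())
        .or_else(|| std::env::var_os("MY_TOOL"))
        .unwrap_or_else(|| "my-tool".into());
    which::which(name)
}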
T::inputs(job), + outputs: output_paths + .iter() + .map(|&path| JobItemName::Path { path }) + .collect(), + output_paths, + } + } +} + +#[derive(Deserialize, Serialize)] +pub struct ExternalCommandJob { + additional_job_data: T::AdditionalJobData, + program_path: Interned, + output_dir: Interned, + run_even_if_cached: bool, + #[serde(skip)] + params_cache: OnceLock, +} + +impl Eq for ExternalCommandJob {} + +impl> Clone for ExternalCommandJob { + fn clone(&self) -> Self { + let Self { + ref additional_job_data, + program_path, + output_dir, + run_even_if_cached, + ref params_cache, + } = *self; + Self { + additional_job_data: additional_job_data.clone(), + program_path, + output_dir, + run_even_if_cached, + params_cache: params_cache.clone(), + } + } +} + +impl fmt::Debug for ExternalCommandJob { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + write!(f, "ExternalCommandJob<{}>", std::any::type_name::())?; + f.debug_struct("") + .field("additional_job_data", additional_job_data) + .field("program_path", program_path) + .field("output_dir", output_dir) + .field("run_even_if_cached", run_even_if_cached) + .finish() + } +} + +impl PartialEq for ExternalCommandJob { + fn eq(&self, other: &Self) -> bool { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + *additional_job_data == other.additional_job_data + && *program_path == other.program_path + && *output_dir == other.output_dir + && *run_even_if_cached == other.run_even_if_cached + } +} + +impl Hash for ExternalCommandJob { + fn hash(&self, state: &mut H) { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + additional_job_data.hash(state); + program_path.hash(state); + output_dir.hash(state); + run_even_if_cached.hash(state); + } +} + +impl ExternalCommandJob { + pub fn additional_job_data(&self) -> &T::AdditionalJobData { + &self.additional_job_data + } + pub fn program_path(&self) -> Interned { + self.program_path + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } + fn params(&self) -> &ExternalCommandJobParams { + self.params_cache + .get_or_init(|| ExternalCommandJobParams::new(self)) + } + pub fn command_params(&self) -> CommandParams { + self.params().command_params + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + self.params().inputs + } + pub fn output_paths(&self) -> Interned<[Interned]> { + self.params().output_paths + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + self.params().outputs + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ExternalProgramPath { + program_path: Interned, + _phantom: PhantomData, +} + +impl ExternalProgramPath { + pub fn with_resolved_program_path(program_path: Interned) -> Self { + Self { + program_path, + _phantom: PhantomData, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + ) -> Result { + let ExternalProgram { + default_program_name, + program_path_arg_name: _, + program_path_arg_value_name: _, + program_path_env_var_name, + } = ExternalProgram::new::(); + Ok(Self { + program_path: resolve_program_path( + program_name, + default_program_name, + program_path_env_var_name.as_ref().map(OsStr::new), + )?, + _phantom: PhantomData, + }) + } + pub fn program_path(&self) -> Interned { + 
self.program_path + } +} + +impl fmt::Debug for ExternalProgramPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + program_path, + _phantom: _, + } = self; + write!(f, "ExternalProgramPath<{}>", std::any::type_name::())?; + f.debug_tuple("").field(program_path).finish() + } +} + +impl clap::FromArgMatches for ExternalProgramPath { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + let id = Interned::into_inner(ExternalProgram::new::().program_path_arg_name); + // don't remove argument so later instances of Self can use it too + let program_path = *matches.get_one(id).expect("arg should always be present"); + Ok(Self { + program_path, + _phantom: PhantomData, + }) + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } +} + +impl clap::Args for ExternalProgramPath { + fn augment_args(cmd: clap::Command) -> clap::Command { + let external_program @ ExternalProgram { + default_program_name, + program_path_arg_name, + program_path_arg_value_name, + program_path_env_var_name, + } = ExternalProgram::new::(); + let arg = cmd + .get_arguments() + .find(|arg| *arg.get_id().as_str() == *program_path_arg_name); + if let Some(arg) = arg { + // don't insert duplicate arguments. + // check that the previous argument actually matches this argument: + assert!(!arg.is_required_set()); + assert!(matches!(arg.get_action(), clap::ArgAction::Set)); + assert_eq!(arg.get_long(), Some(&*program_path_arg_name)); + assert_eq!( + arg.get_value_names(), + Some(&[clap::builder::Str::from(program_path_arg_value_name)][..]) + ); + assert_eq!( + arg.get_env(), + program_path_env_var_name.as_ref().map(OsStr::new) + ); + assert_eq!( + arg.get_default_values(), + &[OsStr::new(&default_program_name)] + ); + assert_eq!(arg.get_value_hint(), clap::ValueHint::CommandName); + cmd + } else { + cmd.arg( + clap::Arg::new(Interned::into_inner(program_path_arg_name)) + .required(false) + .value_parser(ExternalProgramPathValueParser(external_program)) + .action(clap::ArgAction::Set) + .long(program_path_arg_name) + .value_name(program_path_arg_value_name) + .env(program_path_env_var_name.map(Interned::into_inner)) + .default_value(default_program_name) + .value_hint(clap::ValueHint::CommandName), + ) + } + } + + fn augment_args_for_update(cmd: clap::Command) -> clap::Command { + Self::augment_args(cmd) + } +} + +impl ToArgs for ExternalProgramPath { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let ExternalProgram { + program_path_arg_name, + .. 
+ } = ExternalProgram::new::(); + let Self { + program_path, + _phantom: _, + } = self; + if args.get_long_option_eq(program_path_arg_name) != Some(program_path.as_os_str()) { + args.write_long_option_eq(program_path_arg_name, program_path); + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub struct ExternalProgram { + default_program_name: Interned, + program_path_arg_name: Interned, + program_path_arg_value_name: Interned, + program_path_env_var_name: Option>, +} + +impl ExternalProgram { + pub fn new() -> Self { + Self { + default_program_name: T::default_program_name(), + program_path_arg_name: T::program_path_arg_name(), + program_path_arg_value_name: T::program_path_arg_value_name(), + program_path_env_var_name: T::program_path_env_var_name(), + } + } + pub fn default_program_name(&self) -> Interned { + self.default_program_name + } + pub fn program_path_arg_name(&self) -> Interned { + self.program_path_arg_name + } + pub fn program_path_arg_value_name(&self) -> Interned { + self.program_path_arg_value_name + } + pub fn program_path_env_var_name(&self) -> Option> { + self.program_path_env_var_name + } +} + +impl From for ExternalProgram { + fn from(_value: T) -> Self { + Self::new::() + } +} + +impl From for Interned { + fn from(_value: T) -> Self { + ExternalProgram::new::().intern_sized() + } +} + +pub trait ExternalProgramTrait: + 'static + Send + Sync + Hash + Ord + fmt::Debug + Default + Copy +{ + fn program_path_arg_name() -> Interned { + Self::default_program_name() + } + fn program_path_arg_value_name() -> Interned { + Intern::intern_owned(Self::program_path_arg_name().to_uppercase()) + } + fn default_program_name() -> Interned; + fn program_path_env_var_name() -> Option> { + Some(Intern::intern_owned( + Self::program_path_arg_name() + .to_uppercase() + .replace('-', "_"), + )) + } +} + +pub trait ExternalCommand: 'static + Send + Sync + Hash + Eq + fmt::Debug + Sized + Clone { + type AdditionalArgs: ToArgs; + type AdditionalJobData: 'static + + Send + + Sync + + Hash + + Eq + + fmt::Debug + + Serialize + + DeserializeOwned; + type BaseJobPosition; + type Dependencies: JobDependenciesHasBase; + type ExternalProgram: ExternalProgramTrait; + fn dependencies() -> Self::Dependencies; + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )>; + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]>; + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]>; + fn command_line_args(job: &ExternalCommandJob, args: &mut W); + fn current_dir(job: &ExternalCommandJob) -> Option>; + fn job_kind_name() -> Interned; + fn args_group_id() -> clap::Id { + Interned::into_inner(Self::job_kind_name()).into() + } + fn run_even_if_cached_arg_name() -> Interned { + Intern::intern_owned(format!("{}-run-even-if-cached", Self::job_kind_name())) + } + fn subcommand_hidden() -> bool { + false + } +} + +impl JobKind for ExternalCommandJobKind { + type Args = ExternalCommandArgs; + type Job = ExternalCommandJob; + type Dependencies = T::Dependencies; + + fn dependencies(self) -> Self::Dependencies { + T::dependencies() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + let JobKindAndArgs { + kind, + args: + ExternalCommandArgs { + program_path: + ExternalProgramPath { + program_path, + _phantom: _, + }, + run_even_if_cached, + additional_args: _, + 
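// A small self-contained restatement of the naming convention encoded by the
// default methods of `ExternalProgramTrait` above (the trait and program name
// here are hypothetical): the long option defaults to the program name, the
// value name is the upper-cased option name, and the environment-variable
// override additionally replaces `-` with `_`.
trait ProgramNames {
    fn program_name() -> &'static str;
    fn arg_name() -> String {
        Self::program_name().to_owned()
    }
    fn value_name() -> String {
        Self::arg_name().to_uppercase()
    }
    fn env_var_name() -> String {
        Self::arg_name().to_uppercase().replace('-', "_")
    }
}

struct MyTool;

impl ProgramNames for MyTool {
    fn program_name() -> &'static str {
        "my-tool" // hypothetical program
    }
}

// MyTool::arg_name()     == "my-tool"
// MyTool::value_name()   == "MY-TOOL"
// MyTool::env_var_name() == "MY_TOOL"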
}, + } = args.args; + let (additional_job_data, dependencies) = T::args_to_jobs(args, params, global_params)?; + let base_job = T::Dependencies::base_job(&dependencies); + let job = ExternalCommandJob { + additional_job_data, + program_path, + output_dir: base_job.output_dir(), + run_even_if_cached: base_job.run_even_if_cached() | run_even_if_cached, + params_cache: OnceLock::new(), + }; + job.params(); // fill cache + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.inputs() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.outputs() + } + + fn name(self) -> Interned { + T::job_kind_name() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(job.command_params()) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + global_params: &GlobalParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!( + inputs.iter().map(JobItem::name).eq(job.inputs()), + "{}\ninputs:\n{inputs:?}\njob.inputs():\n{:?}", + std::any::type_name::(), + job.inputs(), + ); + let CommandParams { + command_line, + current_dir, + } = job.command_params(); + ExternalJobCaching::new( + &job.output_dir, + &global_params.application_name(), + &T::job_kind_name(), + job.run_even_if_cached, + )? + .run( + command_line, + inputs + .iter() + .flat_map(|item| match item { + JobItem::Path { path } => std::slice::from_ref(path), + JobItem::DynamicPaths { + paths, + source_job_name: _, + } => paths, + }) + .copied(), + job.output_paths(), + |mut cmd| { + if let Some(current_dir) = current_dir { + cmd.current_dir(current_dir); + } + let status = acquired_job.run_command(cmd, |cmd| cmd.status())?; + if !status.success() { + Ok(Err(status)) + } else { + Ok(Ok(())) + } + }, + |status| eyre!("running {command_line:?} failed: {status}"), + )?; + Ok(job + .output_paths() + .iter() + .map(|&path| JobItem::Path { path }) + .collect()) + } + + fn subcommand_hidden(self) -> bool { + T::subcommand_hidden() + } + + fn external_program(self) -> Option> { + Some(ExternalProgram::new::().intern_sized()) + } +} diff --git a/crates/fayalite/src/build/firrtl.rs b/crates/fayalite/src/build/firrtl.rs new file mode 100644 index 0000000..b5574a9 --- /dev/null +++ b/crates/fayalite/src/build/firrtl.rs @@ -0,0 +1,128 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies, + JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, + ToArgs, WriteArgs, + }, + firrtl::{ExportOptions, FileBackend}, + intern::{Intern, InternSlice, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] +pub struct FirrtlJobKind; + +#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "Firrtl")] +#[non_exhaustive] +pub struct FirrtlArgs { + #[command(flatten)] + pub export_options: ExportOptions, +} + +impl ToArgs for FirrtlArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { export_options } = self; + export_options.to_args(args); + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct Firrtl { + base: BaseJob, + export_options: ExportOptions, +} + +impl Firrtl { 
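// The external-command `run` implementation earlier boils down to: run the
// program in the job's output directory and treat a non-zero exit status as an
// error. A std-only sketch of that core step (the real code additionally goes
// through `ExternalJobCaching`, the job server's `AcquiredJob`, and `eyre`):
use std::{io, path::Path, process::Command};

fn run_external(program: &Path, args: &[&str], current_dir: &Path) -> io::Result<()> {
    let status = Command::new(program)
        .args(args)
        .current_dir(current_dir)
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(io::Error::new(
            io::ErrorKind::Other,
            format!("running {program:?} failed: {status}"),
        ))
    }
}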
+ fn make_firrtl_file_backend(&self) -> FileBackend { + FileBackend { + dir_path: PathBuf::from(&*self.base.output_dir()), + top_fir_file_stem: Some(self.base.file_stem().into()), + circuit_name: None, + } + } + pub fn firrtl_file(&self) -> Interned { + self.base.file_with_ext("fir") + } +} + +impl JobKind for FirrtlJobKind { + type Args = FirrtlArgs; + type Job = Firrtl; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + JobKindAndDependencies::new(BaseJobKind) + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.args_to_jobs_simple( + params, + global_params, + |_kind, FirrtlArgs { export_options }, dependencies| { + Ok(Firrtl { + base: dependencies.get_job::().clone(), + export_options, + }) + }, + ) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.base.output_dir(), + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.firrtl_file(), + }] + .intern_slice() + } + + fn name(self) -> Interned { + "firrtl".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + let [JobItem::Path { path: input_path }] = *inputs else { + panic!("wrong inputs, expected a single `Path`"); + }; + assert_eq!(input_path, job.base.output_dir()); + crate::firrtl::export( + job.make_firrtl_file_backend(), + params.main_module(), + job.export_options, + )?; + Ok(vec![JobItem::Path { + path: job.firrtl_file(), + }]) + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [DynJobKind::new(FirrtlJobKind)] +} diff --git a/crates/fayalite/src/build/formal.rs b/crates/fayalite/src/build/formal.rs new file mode 100644 index 0000000..69c0f2c --- /dev/null +++ b/crates/fayalite/src/build/formal.rs @@ -0,0 +1,388 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams, + JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind, + JobKindAndDependencies, JobParams, ToArgs, WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind}, + }, + intern::{Intern, InternSlice, Interned}, + module::NameId, + testing::FormalMode, + util::job_server::AcquiredJob, +}; +use clap::Args; +use eyre::Context; +use serde::{Deserialize, Serialize}; +use std::{ + ffi::{OsStr, OsString}, + fmt::{self, Write}, + path::Path, +}; + +#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct FormalArgs { + #[arg(long = "sby-extra-arg", value_name = "ARG")] + pub sby_extra_args: Vec, + #[arg(long, default_value_t)] + pub formal_mode: FormalMode, + #[arg(long, default_value_t = Self::DEFAULT_DEPTH)] + pub formal_depth: u64, + #[arg(long, default_value = Self::DEFAULT_SOLVER)] + pub formal_solver: String, + #[arg(long = "smtbmc-extra-arg", value_name = "ARG")] + pub smtbmc_extra_args: Vec, +} + +impl FormalArgs { + pub const DEFAULT_DEPTH: u64 = 20; + pub const DEFAULT_SOLVER: &'static str = "z3"; +} + +impl ToArgs for FormalArgs { + fn 
to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + } = self; + for arg in sby_extra_args { + args.write_long_option_eq("sby-extra-arg", arg); + } + args.write_display_args([ + format_args!("--formal-mode={formal_mode}"), + format_args!("--formal-depth={formal_depth}"), + format_args!("--formal-solver={formal_solver}"), + ]); + for arg in smtbmc_extra_args { + args.write_long_option_eq("smtbmc-extra-arg", arg); + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +pub struct WriteSbyFileJobKind; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] +pub struct WriteSbyFileJob { + sby_extra_args: Interned<[Interned]>, + formal_mode: FormalMode, + formal_depth: u64, + formal_solver: Interned, + smtbmc_extra_args: Interned<[Interned]>, + sby_file: Interned, + output_dir: Interned, + main_verilog_file: Interned, +} + +impl WriteSbyFileJob { + pub fn sby_extra_args(&self) -> Interned<[Interned]> { + self.sby_extra_args + } + pub fn formal_mode(&self) -> FormalMode { + self.formal_mode + } + pub fn formal_depth(&self) -> u64 { + self.formal_depth + } + pub fn formal_solver(&self) -> Interned { + self.formal_solver + } + pub fn smtbmc_extra_args(&self) -> Interned<[Interned]> { + self.smtbmc_extra_args + } + pub fn sby_file(&self) -> Interned { + self.sby_file + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + fn write_sby( + &self, + output: &mut OsString, + additional_files: &[Interned], + main_module_name_id: NameId, + ) -> eyre::Result<()> { + let Self { + sby_extra_args: _, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file: _, + output_dir: _, + main_verilog_file, + } = self; + write!( + output, + "[options]\n\ + mode {formal_mode}\n\ + depth {formal_depth}\n\ + wait on\n\ + \n\ + [engines]\n\ + smtbmc {formal_solver} -- --" + ) + .expect("writing to OsString can't fail"); + for i in smtbmc_extra_args { + output.push(" "); + output.push(i); + } + output.push( + "\n\ + \n\ + [script]\n", + ); + for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? 
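// A simplified, String-based sketch of the `.sby` layout produced by
// `write_sby` above (the real code writes an `OsString` and also appends the
// `smtbmc_extra_args`); values here are placeholders.
use std::fmt::Write;

fn sby_file_sketch(
    mode: &str,
    depth: u64,
    solver: &str,
    verilog_files: &[&str],
    top: &str,
) -> String {
    let mut out = String::new();
    write!(
        out,
        "[options]\nmode {mode}\ndepth {depth}\nwait on\n\n[engines]\nsmtbmc {solver} -- --\n\n[script]\n"
    )
    .expect("writing to String can't fail");
    for file in verilog_files {
        writeln!(out, "read_verilog -sv -formal \"{file}\"").expect("writing to String can't fail");
    }
    // `setattr -set keep 1 w:\*` works around wires disappearing
    writeln!(out, "hierarchy -top {top}\nproc\nsetattr -set keep 1 w:\\*\nprep")
        .expect("writing to String can't fail");
    out
}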
{ + output.push("read_verilog -sv -formal \""); + output.push(verilog_file); + output.push("\"\n"); + } + let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id); + // workaround for wires disappearing -- set `keep` on all wires + writeln!( + output, + "hierarchy -top {circuit_name}\n\ + proc\n\ + setattr -set keep 1 w:\\*\n\ + prep", + ) + .expect("writing to OsString can't fail"); + Ok(()) + } +} + +impl JobKind for WriteSbyFileJobKind { + type Args = FormalArgs; + type Job = WriteSbyFileJob; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + mut args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.dependencies + .dependencies + .args + .args + .additional_args + .verilog_dialect + .get_or_insert(VerilogDialect::Yosys); + args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| { + let FormalArgs { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + } = args; + let base_job = dependencies.get_job::(); + Ok(WriteSbyFileJob { + sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(), + formal_mode, + formal_depth, + formal_solver: formal_solver.intern_deref(), + smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(), + sby_file: base_job.file_with_ext("sby"), + output_dir: base_job.output_dir(), + main_verilog_file: dependencies.get_job::().main_verilog_file(), + }) + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { path: job.sby_file }].intern_slice() + } + + fn name(self) -> Interned { + "write-sby-file".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let [additional_files] = inputs else { + unreachable!(); + }; + let additional_files = VerilogJob::unwrap_additional_files(additional_files); + let mut contents = OsString::new(); + job.write_sby( + &mut contents, + additional_files, + params.main_module().name_id(), + )?; + let path = job.sby_file; + std::fs::write(path, contents.as_encoded_bytes()) + .wrap_err_with(|| format!("writing {path:?} failed"))?; + Ok(vec![JobItem::Path { path }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +pub struct Formal { + #[serde(flatten)] + write_sby_file: WriteSbyFileJob, + sby_file_name: Interned, +} + +impl fmt::Debug for Formal { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + write_sby_file: + WriteSbyFileJob { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file, + output_dir: _, + main_verilog_file, + }, + sby_file_name, + } = self; + f.debug_struct("Formal") + .field("sby_extra_args", sby_extra_args) + .field("formal_mode", formal_mode) + .field("formal_depth", formal_depth) + .field("formal_solver", formal_solver) + .field("smtbmc_extra_args", smtbmc_extra_args) + .field("sby_file", sby_file) + .field("sby_file_name", sby_file_name) + 
.field("main_verilog_file", main_verilog_file) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Symbiyosys; + +impl ExternalProgramTrait for Symbiyosys { + fn default_program_name() -> Interned { + "sby".intern() + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)] +pub struct FormalAdditionalArgs {} + +impl ToArgs for FormalAdditionalArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +impl ExternalCommand for Formal { + type AdditionalArgs = FormalAdditionalArgs; + type AdditionalJobData = Formal; + type BaseJobPosition = GetJobPositionDependencies< + GetJobPositionDependencies< + GetJobPositionDependencies<::BaseJobPosition>, + >, + >; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Symbiyosys; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, global_params, |args, dependencies| { + let FormalAdditionalArgs {} = args.additional_args; + let write_sby_file = dependencies.get_job::().clone(); + Ok(Formal { + sby_file_name: write_sby_file + .sby_file() + .interned_file_name() + .expect("known to have file name"), + write_sby_file, + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.additional_job_data().write_sby_file.sby_file(), + }, + JobItemName::Path { + path: job.additional_job_data().write_sby_file.main_verilog_file(), + }, + JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }, + ] + .intern_slice() + } + + fn output_paths(_job: &ExternalCommandJob) -> Interned<[Interned]> { + Interned::default() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + // args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode + args.write_arg("-f"); + args.write_interned_arg(job.additional_job_data().sby_file_name); + args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args()); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "formal".intern() + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(WriteSbyFileJobKind), + DynJobKind::new(ExternalCommandJobKind::::new()), + ] +} diff --git a/crates/fayalite/src/build/graph.rs b/crates/fayalite/src/build/graph.rs new file mode 100644 index 0000000..d81b282 --- /dev/null +++ b/crates/fayalite/src/build/graph.rs @@ -0,0 +1,847 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs, + }, + intern::Interned, + platform::DynPlatform, + util::{HashMap, HashSet, job_server::AcquiredJob}, +}; +use eyre::{ContextCompat, eyre}; +use petgraph::{ + algo::{DfsSpace, kosaraju_scc, toposort}, + graph::DiGraph, + visit::{GraphBase, Visitable}, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq}; +use std::{ + cell::OnceCell, + collections::{BTreeMap, BTreeSet, VecDeque}, + convert::Infallible, + ffi::OsStr, + fmt::{self, Write}, + panic, + rc::Rc, + str::Utf8Error, + sync::mpsc, + thread::{self, 
ScopedJoinHandle}, +}; + +macro_rules! write_str { + ($s:expr, $($rest:tt)*) => { + write!($s, $($rest)*).expect("String::write_fmt can't fail") + }; +} + +#[derive(Clone, Debug)] +enum JobGraphNode { + Job(DynJob), + Item { + #[allow(dead_code, reason = "name used for debugging")] + name: JobItemName, + source_job: Option, + }, +} + +type JobGraphInner = DiGraph; + +#[derive(Clone, Default)] +pub struct JobGraph { + jobs: HashMap::NodeId>, + items: HashMap::NodeId>, + graph: JobGraphInner, + topological_order: Vec<::NodeId>, + space: DfsSpace<::NodeId, ::Map>, +} + +impl fmt::Debug for JobGraph { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + jobs: _, + items: _, + graph, + topological_order, + space: _, + } = self; + f.debug_struct("JobGraph") + .field("graph", graph) + .field("topological_order", topological_order) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Debug)] +pub enum JobGraphError { + CycleError { + job: DynJob, + output: JobItemName, + }, + MultipleJobsCreateSameOutput { + output_item: JobItemName, + existing_job: DynJob, + new_job: DynJob, + }, +} + +impl std::error::Error for JobGraphError {} + +impl fmt::Display for JobGraphError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CycleError { job, output } => write!( + f, + "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\ + {output:?}\n\ + job:\n{job:?}", + ), + JobGraphError::MultipleJobsCreateSameOutput { + output_item, + existing_job, + new_job, + } => write!( + f, + "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\ + conflicting output:\n\ + {output_item:?}\n\ + existing job:\n\ + {existing_job:?}\n\ + new job:\n\ + {new_job:?}", + ), + } + } +} + +#[derive(Copy, Clone, Debug)] +enum EscapeForUnixShellState { + DollarSingleQuote, + SingleQuote, + Unquoted, +} + +#[derive(Clone)] +pub struct EscapeForUnixShell<'a> { + state: EscapeForUnixShellState, + prefix: [u8; 3], + bytes: &'a [u8], +} + +impl<'a> fmt::Debug for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for c in self.clone() { + f.write_char(c)?; + } + Ok(()) + } +} + +impl<'a> EscapeForUnixShell<'a> { + pub fn new(s: &'a (impl ?Sized + AsRef)) -> Self { + Self::from_bytes(s.as_ref().as_encoded_bytes()) + } + fn make_prefix(bytes: &[u8]) -> [u8; 3] { + let mut prefix = [0; 3]; + prefix[..bytes.len()].copy_from_slice(bytes); + prefix + } + pub fn from_bytes(bytes: &'a [u8]) -> Self { + let mut needs_single_quote = bytes.is_empty(); + for &b in bytes { + match b { + b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true, + 0..0x20 | 0x7F.. 
=> { + return Self { + state: EscapeForUnixShellState::DollarSingleQuote, + prefix: Self::make_prefix(b"$'"), + bytes, + }; + } + _ => {} + } + } + if needs_single_quote { + Self { + state: EscapeForUnixShellState::SingleQuote, + prefix: Self::make_prefix(b"'"), + bytes, + } + } else { + Self { + state: EscapeForUnixShellState::Unquoted, + prefix: Self::make_prefix(b""), + bytes, + } + } + } +} + +impl Iterator for EscapeForUnixShell<'_> { + type Item = char; + + fn next(&mut self) -> Option { + match &mut self.prefix { + [0, 0, 0] => {} + [0, 0, v] | // find first + [0, v, _] | // non-zero byte + [v, _, _] => { + let retval = *v as char; + *v = 0; + return Some(retval); + } + } + let Some(&next_byte) = self.bytes.split_off_first() else { + return match self.state { + EscapeForUnixShellState::DollarSingleQuote + | EscapeForUnixShellState::SingleQuote => { + self.state = EscapeForUnixShellState::Unquoted; + Some('\'') + } + EscapeForUnixShellState::Unquoted => None, + }; + }; + match self.state { + EscapeForUnixShellState::DollarSingleQuote => match next_byte { + b'\'' | b'\\' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + b'\t' => { + self.prefix = Self::make_prefix(b"t"); + Some('\\') + } + b'\n' => { + self.prefix = Self::make_prefix(b"n"); + Some('\\') + } + b'\r' => { + self.prefix = Self::make_prefix(b"r"); + Some('\\') + } + 0x20..=0x7E => Some(next_byte as char), + _ => { + self.prefix = [ + b'x', + char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range") + as u8, + char::from_digit(next_byte as u32 & 0xF, 0x10) + .expect("known to be in range") as u8, + ]; + Some('\\') + } + }, + EscapeForUnixShellState::SingleQuote => { + if next_byte == b'\'' { + self.prefix = Self::make_prefix(b"\\''"); + Some('\'') + } else { + Some(next_byte as char) + } + } + EscapeForUnixShellState::Unquoted => match next_byte { + b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b',' + | b';' | b'<' | b'>' | b'?' 
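// `EscapeForUnixShell` above picks one of three strategies per argument:
// `$'...'` when control or non-ASCII bytes are present, plain single quotes
// (with `'\''` for embedded quotes) for printable-but-special strings, and
// otherwise backslashes before shell metacharacters. An eager, String-building
// sketch of the same decision (the real type is a lazy `char` iterator):
fn escape_for_unix_shell(bytes: &[u8]) -> String {
    if bytes.iter().any(|&b| b < 0x20 || b >= 0x7F) {
        let mut out = String::from("$'");
        for &b in bytes {
            match b {
                b'\'' | b'\\' => {
                    out.push('\\');
                    out.push(b as char);
                }
                b'\t' => out.push_str("\\t"),
                b'\n' => out.push_str("\\n"),
                b'\r' => out.push_str("\\r"),
                0x20..=0x7E => out.push(b as char),
                _ => out.push_str(&format!("\\x{b:02x}")),
            }
        }
        out.push('\'');
        out
    } else if bytes.is_empty() || bytes.iter().any(|&b| matches!(b, b'!' | b'\'' | b'"' | b' ')) {
        let s: String = bytes.iter().map(|&b| b as char).collect();
        format!("'{}'", s.replace('\'', "'\\''"))
    } else {
        let mut out = String::new();
        for &b in bytes {
            if b" !\"#$&'()*,;<>?[\\]^`{|}~".contains(&b) {
                out.push('\\');
            }
            out.push(b as char);
        }
        out
    }
}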
| b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|' + | b'}' | b'~' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + _ => Some(next_byte as char), + }, + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub enum UnixMakefileEscapeKind { + NonRecipe, + RecipeWithoutShellEscaping, + RecipeWithShellEscaping, +} + +#[derive(Copy, Clone)] +pub struct EscapeForUnixMakefile<'a> { + s: &'a OsStr, + kind: UnixMakefileEscapeKind, +} + +impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.do_write( + f, + fmt::Write::write_str, + fmt::Write::write_char, + |_, _| Ok(()), + |_| unreachable!("already checked that the input causes no UTF-8 errors"), + ) + } +} + +impl<'a> EscapeForUnixMakefile<'a> { + fn do_write( + &self, + state: &mut S, + write_str: impl Fn(&mut S, &str) -> Result<(), E>, + write_char: impl Fn(&mut S, char) -> Result<(), E>, + add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>, + utf8_error: impl Fn(Utf8Error) -> E, + ) -> Result<(), E> { + let escape_recipe_char = |c| match c { + '$' => write_str(state, "$$"), + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }; + match self.kind { + UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes()) + .map_err(&utf8_error)? + .chars() + .try_for_each(|c| match c { + '=' => { + add_variable(state, "EQUALS = =")?; + write_str(state, "$(EQUALS)") + } + ';' => panic!("can't escape a semicolon (;) for Unix Makefile"), + '$' => write_str(state, "$$"), + '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => { + write_char(state, '\\')?; + write_char(state, c) + } + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }), + UnixMakefileEscapeKind::RecipeWithoutShellEscaping => { + str::from_utf8(self.s.as_encoded_bytes()) + .map_err(&utf8_error)? 
+ .chars() + .try_for_each(escape_recipe_char) + } + UnixMakefileEscapeKind::RecipeWithShellEscaping => { + EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char) + } + } + } + pub fn new( + s: &'a (impl ?Sized + AsRef), + kind: UnixMakefileEscapeKind, + needed_variables: &mut BTreeSet<&'static str>, + ) -> Result { + let s = s.as_ref(); + let retval = Self { s, kind }; + retval.do_write( + needed_variables, + |_, _| Ok(()), + |_, _| Ok(()), + |needed_variables, variable| { + needed_variables.insert(variable); + Ok(()) + }, + |e| e, + )?; + Ok(retval) + } +} + +impl JobGraph { + pub fn new() -> Self { + Self::default() + } + fn try_add_item_node( + &mut self, + name: JobItemName, + new_source_job: Option, + new_nodes: &mut HashSet<::NodeId>, + ) -> Result<::NodeId, JobGraphError> { + use hashbrown::hash_map::Entry; + match self.items.entry(name) { + Entry::Occupied(item_entry) => { + let node_id = *item_entry.get(); + let JobGraphNode::Item { + name: _, + source_job, + } = &mut self.graph[node_id] + else { + unreachable!("known to be an item"); + }; + if let Some(new_source_job) = new_source_job { + if let Some(source_job) = source_job { + return Err(JobGraphError::MultipleJobsCreateSameOutput { + output_item: item_entry.key().clone(), + existing_job: source_job.clone(), + new_job: new_source_job, + }); + } else { + *source_job = Some(new_source_job); + } + } + Ok(node_id) + } + Entry::Vacant(item_entry) => { + let node_id = self.graph.add_node(JobGraphNode::Item { + name, + source_job: new_source_job, + }); + new_nodes.insert(node_id); + item_entry.insert(node_id); + Ok(node_id) + } + } + } + pub fn try_add_jobs>( + &mut self, + jobs: I, + ) -> Result<(), JobGraphError> { + use hashbrown::hash_map::Entry; + let jobs = jobs.into_iter(); + struct RemoveNewNodesOnError<'a> { + this: &'a mut JobGraph, + new_nodes: HashSet<::NodeId>, + } + impl Drop for RemoveNewNodesOnError<'_> { + fn drop(&mut self) { + for node in self.new_nodes.drain() { + self.this.graph.remove_node(node); + } + } + } + let mut remove_new_nodes_on_error = RemoveNewNodesOnError { + this: self, + new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()), + }; + let new_nodes = &mut remove_new_nodes_on_error.new_nodes; + let this = &mut *remove_new_nodes_on_error.this; + for job in jobs { + let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else { + continue; + }; + let job_node_id = this + .graph + .add_node(JobGraphNode::Job(job_entry.key().clone())); + new_nodes.insert(job_node_id); + job_entry.insert(job_node_id); + for name in job.outputs() { + let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?; + this.graph.add_edge(job_node_id, item_node_id, ()); + } + for name in job.inputs() { + let item_node_id = this.try_add_item_node(name, None, new_nodes)?; + this.graph.add_edge(item_node_id, job_node_id, ()); + } + } + match toposort(&this.graph, Some(&mut this.space)) { + Ok(v) => { + this.topological_order = v; + // no need to remove any of the new nodes on drop since we didn't encounter any errors + remove_new_nodes_on_error.new_nodes.clear(); + Ok(()) + } + Err(_) => { + // there's at least one cycle, find one! 
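// A standalone sketch of the non-recipe (target/prerequisite) escaping in
// `do_write` above: `$` is doubled, a handful of metacharacters take a
// backslash, and `=` goes through a helper variable (`EQUALS = =`) because it
// can't be backslash-escaped in that position; `;` and control characters are
// rejected. In recipe position only `$` needs doubling (optionally after shell
// escaping).
use std::collections::BTreeSet;

fn escape_makefile_target(s: &str, needed_variables: &mut BTreeSet<&'static str>) -> String {
    let mut out = String::new();
    for c in s.chars() {
        match c {
            '=' => {
                needed_variables.insert("EQUALS = =");
                out.push_str("$(EQUALS)");
            }
            '$' => out.push_str("$$"),
            '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
                out.push('\\');
                out.push(c);
            }
            ';' | '\0'..='\x1F' | '\x7F' => {
                panic!("can't escape {c:?} for a Unix Makefile");
            }
            _ => out.push(c),
        }
    }
    out
}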
+ let cycle = kosaraju_scc(&this.graph) + .into_iter() + .find_map(|scc| { + if scc.len() <= 1 { + // can't be a cycle since our graph is bipartite -- + // jobs only connect to items, never jobs to jobs or items to items + None + } else { + Some(scc) + } + }) + .expect("we know there's a cycle"); + let cycle_set = HashSet::from_iter(cycle.iter().copied()); + let job = cycle + .into_iter() + .find_map(|node_id| { + if let JobGraphNode::Job(job) = &this.graph[node_id] { + Some(job.clone()) + } else { + None + } + }) + .expect("a job must be part of the cycle"); + let output = job + .outputs() + .into_iter() + .find(|output| cycle_set.contains(&this.items[output])) + .expect("an output must be part of the cycle"); + Err(JobGraphError::CycleError { job, output }) + } + } + } + #[track_caller] + pub fn add_jobs>(&mut self, jobs: I) { + match self.try_add_jobs(jobs) { + Ok(()) => {} + Err(e) => panic!("error: {e}"), + } + } + pub fn to_unix_makefile( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> Result { + self.to_unix_makefile_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn to_unix_makefile_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> Result { + let mut retval = String::new(); + let mut needed_variables = BTreeSet::new(); + let mut phony_targets = BTreeSet::new(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let outputs = job.outputs(); + if outputs.is_empty() { + retval.push_str(":"); + } else { + for output in job.outputs() { + match output { + JobItemName::Path { path } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + &str::from_utf8(path.as_os_str().as_encoded_bytes())?, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + &source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + if outputs.len() == 1 { + retval.push_str(":"); + } else { + retval.push_str("&:"); + } + } + for input in job.inputs() { + match input { + JobItemName::Path { path } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &str::from_utf8(path.as_os_str().as_encoded_bytes())?, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + retval.push_str("\n\t"); + job.command_params_with_internal_program_prefix( + internal_program_prefix, + platform, + extra_args, + ) + .to_unix_shell_line(&mut retval, |arg, output| { + write_str!( + output, + "{}", + EscapeForUnixMakefile::new( + arg, + UnixMakefileEscapeKind::RecipeWithShellEscaping, + &mut needed_variables + )? + ); + Ok(()) + })?; + retval.push_str("\n\n"); + } + if !phony_targets.is_empty() { + retval.push_str("\n.PHONY:"); + for phony_target in phony_targets { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + phony_target, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + )? 
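// The cycle diagnosis above relies on the job graph being bipartite (jobs only
// connect to items), so after a failed toposort any strongly connected
// component with more than one node must contain a cycle. A minimal sketch
// using the same petgraph calls, with plain string labels as node weights:
use petgraph::{
    algo::{kosaraju_scc, toposort},
    graph::DiGraph,
};

fn find_cycle_labels(graph: &DiGraph<&'static str, ()>) -> Option<Vec<&'static str>> {
    if toposort(graph, None).is_ok() {
        return None; // acyclic: nothing to report
    }
    kosaraju_scc(graph)
        .into_iter()
        .find(|scc| scc.len() > 1)
        .map(|scc| scc.into_iter().map(|node_id| graph[node_id]).collect())
}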
+ ); + } + retval.push_str("\n"); + } + if !needed_variables.is_empty() { + retval.insert_str( + 0, + &String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))), + ); + } + Ok(retval) + } + pub fn to_unix_shell_script( + &self, + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> String { + self.to_unix_shell_script_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + platform, + extra_args, + ) + } + pub fn to_unix_shell_script_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + platform: Option<&DynPlatform>, + extra_args: &[Interned], + ) -> String { + let mut retval = String::from( + "#!/bin/sh\n\ + set -ex\n", + ); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let Ok(()) = job + .command_params_with_internal_program_prefix( + internal_program_prefix, + platform, + extra_args, + ) + .to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> { + write_str!(output, "{}", EscapeForUnixShell::new(&arg)); + Ok(()) + }); + retval.push_str("\n"); + } + retval + } + pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> { + // use scope to auto-join threads on errors + thread::scope(|scope| { + struct WaitingJobState { + job_node_id: ::NodeId, + job: DynJob, + inputs: BTreeMap>, + } + let mut ready_jobs = VecDeque::new(); + let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let waiting_job = WaitingJobState { + job_node_id: node_id, + job: job.clone(), + inputs: job + .inputs() + .iter() + .map(|&name| (name, OnceCell::new())) + .collect(), + }; + if waiting_job.inputs.is_empty() { + ready_jobs.push_back(waiting_job); + } else { + let waiting_job = Rc::new(waiting_job); + for &input_item in waiting_job.inputs.keys() { + item_name_to_waiting_jobs_map + .entry(input_item) + .or_default() + .push(waiting_job.clone()); + } + } + } + struct RunningJob<'scope> { + job: DynJob, + thread: ScopedJoinHandle<'scope, eyre::Result>>, + } + let mut running_jobs = HashMap::default(); + let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel(); + loop { + while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() { + let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job) + else { + unreachable!(); + }; + let output_items = thread.join().map_err(panic::resume_unwind)??; + assert!( + output_items.iter().map(JobItem::name).eq(job.outputs()), + "job's run() method returned the wrong output items:\n\ + output items:\n\ + {output_items:?}\n\ + expected outputs:\n\ + {:?}\n\ + job:\n\ + {job:?}", + job.outputs(), + ); + for output_item in output_items { + for waiting_job in item_name_to_waiting_jobs_map + .remove(&output_item.name()) + .unwrap_or_default() + { + let Ok(()) = + waiting_job.inputs[&output_item.name()].set(output_item.clone()) + else { + unreachable!(); + }; + if let Some(waiting_job) = Rc::into_inner(waiting_job) { + ready_jobs.push_back(waiting_job); + } + } + } + } + if let Some(WaitingJobState { + job_node_id, + job, + inputs, + }) = ready_jobs.pop_front() + { + struct RunningJobInThread<'a> { + job_node_id: ::NodeId, + job: DynJob, + inputs: Vec, + params: &'a JobParams, + global_params: &'a GlobalParams, + acquired_job: AcquiredJob, + finished_jobs_sender: mpsc::Sender<::NodeId>, + } + impl RunningJobInThread<'_> { + 
fn run(mut self) -> eyre::Result> { + self.job.run( + &self.inputs, + self.params, + self.global_params, + &mut self.acquired_job, + ) + } + } + impl Drop for RunningJobInThread<'_> { + fn drop(&mut self) { + let _ = self.finished_jobs_sender.send(self.job_node_id); + } + } + let name = job.kind().name(); + let running_job_in_thread = RunningJobInThread { + job_node_id, + job: job.clone(), + inputs: Result::from_iter(job.inputs().iter().map(|input_name| { + inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| { + eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}") + }) + }))?, + params, + global_params, + acquired_job: AcquiredJob::acquire()?, + finished_jobs_sender: finished_jobs_sender.clone(), + }; + running_jobs.insert( + job_node_id, + RunningJob { + job, + thread: thread::Builder::new() + .name(format!("job:{name}")) + .spawn_scoped(scope, move || running_job_in_thread.run()) + .expect("failed to spawn thread for job"), + }, + ); + } + if running_jobs.is_empty() { + assert!(item_name_to_waiting_jobs_map.is_empty()); + assert!(ready_jobs.is_empty()); + return Ok(()); + } + } + }) + } +} + +impl Extend for JobGraph { + #[track_caller] + fn extend>(&mut self, iter: T) { + self.add_jobs(iter); + } +} + +impl FromIterator for JobGraph { + #[track_caller] + fn from_iter>(iter: T) -> Self { + let mut retval = Self::new(); + retval.add_jobs(iter); + retval + } +} + +impl Serialize for JobGraph { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?; + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + serializer.serialize_element(job)?; + } + serializer.end() + } +} + +impl<'de> Deserialize<'de> for JobGraph { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let jobs = Vec::::deserialize(deserializer)?; + let mut retval = JobGraph::new(); + retval.try_add_jobs(jobs).map_err(D::Error::custom)?; + Ok(retval) + } +} diff --git a/crates/fayalite/src/build/registry.rs b/crates/fayalite/src/build/registry.rs new file mode 100644 index 0000000..bbd9f2c --- /dev/null +++ b/crates/fayalite/src/build/registry.rs @@ -0,0 +1,313 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{DynJobKind, JobKind, built_in_job_kinds}, + intern::Interned, + util::InternedStrCompareAsStr, +}; +use std::{ + collections::BTreeMap, + fmt, + sync::{Arc, OnceLock, RwLock, RwLockWriteGuard}, +}; + +impl DynJobKind { + pub fn registry() -> JobKindRegistrySnapshot { + JobKindRegistrySnapshot(JobKindRegistry::get()) + } + #[track_caller] + pub fn register(self) { + JobKindRegistry::register(JobKindRegistry::lock(), self); + } +} + +#[derive(Clone, Debug)] +struct JobKindRegistry { + job_kinds: BTreeMap, +} + +enum JobKindRegisterError { + SameName { + name: InternedStrCompareAsStr, + old_job_kind: DynJobKind, + new_job_kind: DynJobKind, + }, +} + +impl fmt::Display for JobKindRegisterError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::SameName { + name, + old_job_kind, + new_job_kind, + } => write!( + f, + "two different `JobKind` can't share the same name:\n\ + {name:?}\n\ + old job kind:\n\ + {old_job_kind:?}\n\ + new job kind:\n\ + {new_job_kind:?}", + ), + } + } +} + +trait JobKindRegistryRegisterLock { + type Locked; + fn lock(self) -> Self::Locked; + fn 
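// The scheduler above runs each ready job on its own scoped thread and learns
// about completed jobs over an mpsc channel (sent from a `Drop` impl so it
// also fires if a job panics). This std-only sketch keeps that shape but drops
// the dependency bookkeeping: `jobs` are independent closures.
use std::{sync::mpsc, thread};

fn run_jobs(jobs: Vec<Box<dyn FnOnce() -> Result<(), String> + Send>>) -> Result<(), String> {
    thread::scope(|scope| {
        let (finished_sender, finished_receiver) = mpsc::channel();
        let mut running = Vec::new();
        for (index, job) in jobs.into_iter().enumerate() {
            let finished_sender = finished_sender.clone();
            running.push(scope.spawn(move || {
                let result = job();
                // tell the scheduler this job is done
                let _ = finished_sender.send(index);
                result
            }));
        }
        drop(finished_sender);
        // the real scheduler uses these notifications to mark dependent jobs ready
        while let Ok(_finished_index) = finished_receiver.recv() {}
        // join everything and propagate the first error
        running
            .into_iter()
            .try_for_each(|handle| handle.join().expect("job thread panicked"))
    })
}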
make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry; +} + +impl JobKindRegistryRegisterLock for &'static RwLock> { + type Locked = RwLockWriteGuard<'static, Arc>; + fn lock(self) -> Self::Locked { + self.write().expect("shouldn't be poisoned") + } + fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry { + Arc::make_mut(locked) + } +} + +impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry { + type Locked = Self; + + fn lock(self) -> Self::Locked { + self + } + + fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry { + locked + } +} + +impl JobKindRegistry { + fn lock() -> &'static RwLock> { + static REGISTRY: OnceLock>> = OnceLock::new(); + REGISTRY.get_or_init(Default::default) + } + fn try_register( + lock: L, + job_kind: DynJobKind, + ) -> Result<(), JobKindRegisterError> { + use std::collections::btree_map::Entry; + let name = InternedStrCompareAsStr(job_kind.name()); + // run user code only outside of lock + let mut locked = lock.lock(); + let this = L::make_mut(&mut locked); + let result = match this.job_kinds.entry(name) { + Entry::Occupied(entry) => Err(JobKindRegisterError::SameName { + name, + old_job_kind: entry.get().clone(), + new_job_kind: job_kind, + }), + Entry::Vacant(entry) => { + entry.insert(job_kind); + Ok(()) + } + }; + drop(locked); + // outside of lock now, so we can test if it's the same DynJobKind + match result { + Err(JobKindRegisterError::SameName { + name: _, + old_job_kind, + new_job_kind, + }) if old_job_kind == new_job_kind => Ok(()), + result => result, + } + } + #[track_caller] + fn register(lock: L, job_kind: DynJobKind) { + match Self::try_register(lock, job_kind) { + Err(e) => panic!("{e}"), + Ok(()) => {} + } + } + fn get() -> Arc { + Self::lock().read().expect("shouldn't be poisoned").clone() + } +} + +impl Default for JobKindRegistry { + fn default() -> Self { + let mut retval = Self { + job_kinds: BTreeMap::new(), + }; + for job_kind in built_in_job_kinds() { + Self::register(&mut retval, job_kind); + } + retval + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistrySnapshot(Arc); + +impl JobKindRegistrySnapshot { + pub fn get() -> Self { + JobKindRegistrySnapshot(JobKindRegistry::get()) + } + pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> { + self.0.job_kinds.get(name) + } + pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> { + JobKindRegistryIterWithNames(self.0.job_kinds.iter()) + } + pub fn iter(&self) -> JobKindRegistryIter<'_> { + JobKindRegistryIter(self.0.job_kinds.values()) + } +} + +impl<'a> IntoIterator for &'a JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIter<'a>( + std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIter<'a> { + type Item = &'a DynJobKind; + + fn next(&mut self) -> Option { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last() + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } + + fn fold(self, init: B, f: F) -> B + where + F: 
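// The registry above is a process-global `RwLock<Arc<...>>`: readers grab a
// cheap `Arc` snapshot, and registration copies-on-write via `Arc::make_mut`,
// so snapshots already handed out never change. A minimal sketch with a plain
// name -> description map standing in for the `DynJobKind` table:
use std::{
    collections::BTreeMap,
    sync::{Arc, OnceLock, RwLock},
};

type Registry = BTreeMap<&'static str, &'static str>;

fn registry_lock() -> &'static RwLock<Arc<Registry>> {
    static REGISTRY: OnceLock<RwLock<Arc<Registry>>> = OnceLock::new();
    REGISTRY.get_or_init(Default::default)
}

fn register(name: &'static str, description: &'static str) {
    let mut locked = registry_lock().write().expect("shouldn't be poisoned");
    // copy-on-write: existing snapshots are unaffected
    Arc::make_mut(&mut locked).insert(name, description);
}

fn snapshot() -> Arc<Registry> {
    registry_lock().read().expect("shouldn't be poisoned").clone()
}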
FnMut(B, Self::Item) -> B, + { + self.0.fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0.nth_back(n) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.rfold(init, f) + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIterWithNames<'a>( + std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIterWithNames<'a> { + type Item = (Interned, &'a DynJobKind); + + fn next(&mut self) -> Option { + self.0.next().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind)) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> { + fn next_back(&mut self) -> Option { + self.0 + .next_back() + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0 + .nth_back(n) + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .rfold(init, f) + } +} + +#[track_caller] +pub fn register_job_kind(kind: K) { + DynJobKind::new(kind).register(); +} diff --git a/crates/fayalite/src/build/verilog.rs b/crates/fayalite/src/build/verilog.rs new file mode 100644 index 0000000..7ce77ec --- /dev/null +++ b/crates/fayalite/src/build/verilog.rs @@ -0,0 +1,418 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob, + GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, + JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + firrtl::{Firrtl, FirrtlJobKind}, + }, + intern::{Intern, InternSlice, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use eyre::{Context, bail}; +use serde::{Deserialize, Serialize}; +use std::{ + ffi::{OsStr, OsString}, + fmt, mem, + path::Path, +}; + +/// based on [LLVM Circt's recommended lowering options][lowering-options] +/// +/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target +#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[non_exhaustive] +pub enum VerilogDialect { + Questa, + Spyglass, + Verilator, + Vivado, + Yosys, +} + +impl fmt::Display for VerilogDialect { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } 
+} + +impl VerilogDialect { + pub fn as_str(self) -> &'static str { + match self { + VerilogDialect::Questa => "questa", + VerilogDialect::Spyglass => "spyglass", + VerilogDialect::Verilator => "verilator", + VerilogDialect::Vivado => "vivado", + VerilogDialect::Yosys => "yosys", + } + } + pub fn firtool_extra_args(self) -> &'static [&'static str] { + match self { + VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], + VerilogDialect::Spyglass => { + &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] + } + VerilogDialect::Verilator => &[ + "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", + ], + VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], + VerilogDialect::Yosys => { + &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] + } + } + } +} + +#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct UnadjustedVerilogArgs { + #[arg(long = "firtool-extra-arg", value_name = "ARG")] + pub firtool_extra_args: Vec, + /// adapt the generated Verilog for a particular toolchain + #[arg(long)] + pub verilog_dialect: Option, + #[arg(long)] + pub verilog_debug: bool, +} + +impl ToArgs for UnadjustedVerilogArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + ref firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *self; + for arg in firtool_extra_args { + args.write_long_option_eq("firtool-extra-arg", arg); + } + if let Some(verilog_dialect) = verilog_dialect { + args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str()); + } + if verilog_debug { + args.write_arg("--verilog-debug"); + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Firtool; + +impl ExternalProgramTrait for Firtool { + fn default_program_name() -> Interned { + "firtool".intern() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)] +pub struct UnadjustedVerilog { + firrtl_file: Interned, + firrtl_file_name: Interned, + unadjusted_verilog_file: Interned, + unadjusted_verilog_file_name: Interned, + firtool_extra_args: Interned<[Interned]>, + verilog_dialect: Option, + verilog_debug: bool, +} + +impl UnadjustedVerilog { + pub fn firrtl_file(&self) -> Interned { + self.firrtl_file + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn firtool_extra_args(&self) -> Interned<[Interned]> { + self.firtool_extra_args + } + pub fn verilog_dialect(&self) -> Option { + self.verilog_dialect + } + pub fn verilog_debug(&self) -> bool { + self.verilog_debug + } +} + +impl ExternalCommand for UnadjustedVerilog { + type AdditionalArgs = UnadjustedVerilogArgs; + type AdditionalJobData = UnadjustedVerilog; + type BaseJobPosition = GetJobPositionDependencies; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Firtool; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, global_params, |args, dependencies| { + let UnadjustedVerilogArgs { + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = args.additional_args; + let unadjusted_verilog_file = dependencies + .dependencies + .job + .job + .file_with_ext("unadjusted.v"); + let firrtl_job = dependencies.get_job::(); + 
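// Putting the `firtool_extra_args` dialect table above together with the
// external job defined below, the lowering step is roughly
// `firtool <stem>.fir -o <stem>.unadjusted.v` plus the dialect's
// `--lowering-options=...` and any user-supplied extras, run from the output
// directory. A std-only sketch with placeholder file names:
use std::{path::Path, process::Command};

fn firtool_command(
    output_dir: &Path,
    debug: bool,
    dialect_args: &[&str],
    extra_args: &[&str],
) -> Command {
    let mut cmd = Command::new("firtool");
    cmd.arg("top.fir").arg("-o").arg("top.unadjusted.v");
    if debug {
        cmd.args(["-g", "--preserve-values=all"]);
    }
    cmd.args(dialect_args).args(extra_args);
    cmd.current_dir(output_dir);
    cmd
}

// e.g. for the Yosys dialect:
// firtool_command(dir, false,
//     &["--lowering-options=disallowLocalVariables,disallowPackedArrays"], &[]);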
Ok(UnadjustedVerilog { + firrtl_file: firrtl_job.firrtl_file(), + firrtl_file_name: firrtl_job + .firrtl_file() + .interned_file_name() + .expect("known to have file name"), + unadjusted_verilog_file, + unadjusted_verilog_file_name: unadjusted_verilog_file + .interned_file_name() + .expect("known to have file name"), + firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(), + verilog_dialect, + verilog_debug, + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.additional_job_data().firrtl_file, + }] + .intern_slice() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [job.additional_job_data().unadjusted_verilog_file].intern_slice() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + let UnadjustedVerilog { + firrtl_file: _, + firrtl_file_name, + unadjusted_verilog_file: _, + unadjusted_verilog_file_name, + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *job.additional_job_data(); + args.write_interned_arg(firrtl_file_name); + args.write_arg("-o"); + args.write_interned_arg(unadjusted_verilog_file_name); + if verilog_debug { + args.write_args(["-g", "--preserve-values=all"]); + } + if let Some(dialect) = verilog_dialect { + args.write_args(dialect.firtool_extra_args().iter().copied()); + } + args.write_interned_args(firtool_extra_args); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "unadjusted-verilog".intern() + } + + fn subcommand_hidden() -> bool { + true + } + + fn run_even_if_cached_arg_name() -> Interned { + "firtool-run-even-if-cached".intern() + } +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct VerilogJobKind; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)] +#[non_exhaustive] +pub struct VerilogJobArgs {} + +impl ToArgs for VerilogJobArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct VerilogJob { + output_dir: Interned, + unadjusted_verilog_file: Interned, + main_verilog_file: Interned, +} + +impl VerilogJob { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + #[track_caller] + pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned] { + match additional_files { + JobItem::DynamicPaths { + paths, + source_job_name, + } if *source_job_name == VerilogJobKind.name() => paths, + v => panic!("expected VerilogJob's additional files JobItem: {v:?}"), + } + } + pub fn all_verilog_files( + main_verilog_file: Interned, + additional_files: &[Interned], + ) -> eyre::Result]>> { + let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1)); + for verilog_file in [main_verilog_file].iter().chain(additional_files) { + if !["v", "sv"] + .iter() + .any(|extension| verilog_file.extension() == Some(extension.as_ref())) + { + continue; + } + let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| { + format!("converting {verilog_file:?} to an absolute path failed") + })?; + if verilog_file + .as_os_str() + .as_encoded_bytes() + .iter() + .any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"') + { + bail!("verilog file path contains 
characters that aren't permitted"); + } + retval.push(verilog_file.intern_deref()); + } + Ok(retval.intern_slice()) + } +} + +impl JobKind for VerilogJobKind { + type Args = VerilogJobArgs; + type Job = VerilogJob; + type Dependencies = JobKindAndDependencies>; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + global_params: &GlobalParams, + ) -> eyre::Result> { + args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| { + let VerilogJobArgs {} = args; + let base_job = dependencies.get_job::(); + Ok(VerilogJob { + output_dir: base_job.output_dir(), + unadjusted_verilog_file: dependencies + .job + .job + .additional_job_data() + .unadjusted_verilog_file(), + main_verilog_file: base_job.file_with_ext("v"), + }) + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.unadjusted_verilog_file, + }] + .intern_slice() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.main_verilog_file, + }, + JobItemName::DynamicPaths { + source_job_name: self.name(), + }, + ] + .intern_slice() + } + + fn name(self) -> Interned { + "verilog".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _global_params: &GlobalParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let input = std::fs::read_to_string(job.unadjusted_verilog_file())?; + let file_separator_prefix = "\n// ----- 8< ----- FILE \""; + let file_separator_suffix = "\" ----- 8< -----\n\n"; + let mut input = &*input; + let main_verilog_file = job.main_verilog_file(); + let mut file_name = Some(main_verilog_file); + let mut additional_outputs = Vec::new(); + loop { + let (chunk, next_file_name) = if let Some((chunk, rest)) = + input.split_once(file_separator_prefix) + { + let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else { + bail!( + "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}" + ); + }; + input = rest; + let next_file_name = job.output_dir.join(next_file_name).intern_deref(); + additional_outputs.push(next_file_name); + (chunk, Some(next_file_name)) + } else { + (mem::take(&mut input), None) + }; + let Some(file_name) = mem::replace(&mut file_name, next_file_name) else { + break; + }; + std::fs::write(&file_name, chunk)?; + } + Ok(vec![ + JobItem::Path { + path: main_verilog_file, + }, + JobItem::DynamicPaths { + paths: additional_outputs, + source_job_name: self.name(), + }, + ]) + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(ExternalCommandJobKind::::new()), + DynJobKind::new(VerilogJobKind), + ] +} diff --git a/crates/fayalite/src/bundle.rs b/crates/fayalite/src/bundle.rs index 843eb6c..a0de189 100644 --- a/crates/fayalite/src/bundle.rs +++ b/crates/fayalite/src/bundle.rs @@ -2,18 +2,25 @@ // See Notices.txt for copyright information use crate::{ - expr::{ops::BundleLiteral, Expr, ToExpr}, - intern::{Intern, Interned}, + expr::{ + CastToBits, Expr, ReduceBits, ToExpr, + ops::{ArrayLiteral, BundleLiteral, ExprPartialEq}, + }, + int::{Bool, DynSize}, + intern::{Intern, InternSlice, Interned}, + sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType}, 
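// `VerilogJobKind::run` above splits firtool's single output stream at its
// `FILE` separator comments; the first chunk belongs to the main Verilog file
// and each later chunk is an additional output. The same logic as a sketch
// that returns (name, contents) pairs instead of writing files:
fn split_firtool_output<'a>(
    main_file_name: &'a str,
    mut input: &'a str,
) -> Vec<(&'a str, &'a str)> {
    const PREFIX: &str = "\n// ----- 8< ----- FILE \"";
    const SUFFIX: &str = "\" ----- 8< -----\n\n";
    let mut out = Vec::new();
    let mut file_name = main_file_name;
    while let Some((chunk, rest)) = input.split_once(PREFIX) {
        let (next_file_name, rest) = rest
            .split_once(SUFFIX)
            .expect("separator prefix without matching suffix");
        out.push((file_name, chunk));
        file_name = next_file_name;
        input = rest;
    }
    out.push((file_name, input));
    out
}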
source_location::SourceLocation, ty::{ - impl_match_variant_as_self, CanonicalType, MatchVariantWithoutScope, StaticType, Type, - TypeProperties, TypeWithDeref, + CanonicalType, MatchVariantWithoutScope, OpaqueSimValue, OpaqueSimValueSize, + OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type, + TypeProperties, TypeWithDeref, impl_match_variant_as_self, }, + util::HashMap, }; -use hashbrown::HashMap; +use serde::{Deserialize, Serialize}; use std::{fmt, marker::PhantomData}; -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] pub struct BundleField { pub name: Interned, pub flipped: bool, @@ -62,7 +69,7 @@ impl fmt::Display for FmtDebugInStruct { struct BundleImpl { fields: Interned<[BundleField]>, name_indexes: HashMap, usize>, - field_offsets: Interned<[usize]>, + field_offsets: Interned<[OpaqueSimValueSize]>, type_properties: TypeProperties, } @@ -82,12 +89,9 @@ impl std::fmt::Debug for BundleImpl { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("Bundle ")?; f.debug_set() - .entries( - self.fields - .iter() - .enumerate() - .map(|(index, field)| field.fmt_debug_in_struct(self.field_offsets[index])), - ) + .entries(self.fields.iter().enumerate().map(|(index, field)| { + field.fmt_debug_in_struct(self.field_offsets[index].bit_width) + })) .finish() } } @@ -112,6 +116,7 @@ impl BundleTypePropertiesBuilder { is_storable: true, is_castable_from_bits: true, bit_width: 0, + sim_only_values_len: 0, }) } pub const fn clone(&self) -> Self { @@ -119,8 +124,12 @@ impl BundleTypePropertiesBuilder { } #[must_use] pub const fn field(self, flipped: bool, field_props: TypeProperties) -> Self { - let Some(bit_width) = self.0.bit_width.checked_add(field_props.bit_width) else { - panic!("bundle is too big: bit-width overflowed"); + let Some(OpaqueSimValueSize { + bit_width, + sim_only_values_len, + }) = self.0.size().checked_add(field_props.size()) + else { + panic!("bundle is too big: size overflowed"); }; if flipped { Self(TypeProperties { @@ -128,6 +137,7 @@ impl BundleTypePropertiesBuilder { is_storable: false, is_castable_from_bits: false, bit_width, + sim_only_values_len, }) } else { Self(TypeProperties { @@ -136,6 +146,7 @@ impl BundleTypePropertiesBuilder { is_castable_from_bits: self.0.is_castable_from_bits & field_props.is_castable_from_bits, bit_width, + sim_only_values_len, }) } } @@ -153,14 +164,14 @@ impl Default for BundleTypePropertiesBuilder { impl Bundle { #[track_caller] pub fn new(fields: Interned<[BundleField]>) -> Self { - let mut name_indexes = HashMap::with_capacity(fields.len()); + let mut name_indexes = HashMap::with_capacity_and_hasher(fields.len(), Default::default()); let mut field_offsets = Vec::with_capacity(fields.len()); let mut type_props_builder = BundleTypePropertiesBuilder::new(); for (index, &BundleField { name, flipped, ty }) in fields.iter().enumerate() { if let Some(old_index) = name_indexes.insert(name, index) { panic!("duplicate field name {name:?}: at both index {old_index} and {index}"); } - field_offsets.push(type_props_builder.0.bit_width); + field_offsets.push(type_props_builder.0.size()); type_props_builder = type_props_builder.field(flipped, ty.type_properties()); } Self(Intern::intern_sized(BundleImpl { @@ -176,7 +187,7 @@ impl Bundle { pub fn field_by_name(&self, name: Interned) -> Option { Some(self.0.fields[*self.0.name_indexes.get(&name)?]) } - pub fn field_offsets(self) -> Interned<[usize]> { + pub fn 
field_offsets(self) -> Interned<[OpaqueSimValueSize]> { self.0.field_offsets } pub fn type_properties(self) -> TypeProperties { @@ -210,6 +221,7 @@ impl Bundle { impl Type for Bundle { type BaseType = Bundle; type MaskType = Bundle; + type SimValue = OpaqueSimValue; impl_match_variant_as_self!(); fn mask_type(&self) -> Self::MaskType { Self::new(Interned::from_iter(self.0.fields.into_iter().map( @@ -233,6 +245,28 @@ impl Type for Bundle { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(self.type_properties().size(), opaque.size()); + opaque.to_owned() + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(self.type_properties().size(), opaque.size()); + assert_eq!(value.size(), opaque.size()); + value.clone_from_slice(opaque); + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(self.type_properties().size(), writer.size()); + assert_eq!(value.size(), writer.size()); + writer.fill_cloned_from_slice(value.as_slice()) + } } pub trait BundleType: Type { @@ -241,6 +275,102 @@ pub trait BundleType: Type { fn fields(&self) -> Interned<[BundleField]>; } +pub struct BundleSimValueFromOpaque<'a> { + fields: std::slice::Iter<'static, BundleField>, + opaque: OpaqueSimValueSlice<'a>, +} + +impl<'a> BundleSimValueFromOpaque<'a> { + #[track_caller] + pub fn new(bundle_ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self { + let fields = bundle_ty.fields(); + assert_eq!( + opaque.size(), + fields + .iter() + .map(|BundleField { ty, .. }| ty.size()) + .sum::() + ); + Self { + fields: Interned::into_inner(fields).iter(), + opaque, + } + } + #[track_caller] + fn field_ty_and_opaque(&mut self) -> (T, OpaqueSimValueSlice<'a>) { + let Some(&BundleField { + name: _, + flipped: _, + ty, + }) = self.fields.next() + else { + panic!("tried to read too many fields from BundleSimValueFromBits"); + }; + let (field_opaque, rest) = self.opaque.split_at(ty.size()); + self.opaque = rest; + (T::from_canonical(ty), field_opaque) + } + #[track_caller] + pub fn field_from_opaque(&mut self) -> SimValue { + let (field_ty, field_opaque) = self.field_ty_and_opaque::(); + SimValue::from_opaque(field_ty, field_opaque.to_owned()) + } + #[track_caller] + pub fn field_clone_from_opaque(&mut self, field_value: &mut SimValue) { + let (field_ty, field_opaque) = self.field_ty_and_opaque::(); + assert_eq!(field_ty, SimValue::ty(field_value)); + SimValue::opaque_mut(field_value).clone_from_slice(field_opaque); + } +} + +pub struct BundleSimValueToOpaque<'a> { + fields: std::slice::Iter<'static, BundleField>, + writer: OpaqueSimValueWriter<'a>, +} + +impl<'a> BundleSimValueToOpaque<'a> { + #[track_caller] + pub fn new(bundle_ty: T, writer: OpaqueSimValueWriter<'a>) -> Self { + let fields = bundle_ty.fields(); + assert_eq!( + writer.size(), + fields + .iter() + .map(|BundleField { ty, .. 
}| ty.size()) + .sum::() + ); + Self { + fields: Interned::into_inner(fields).iter(), + writer, + } + } + #[track_caller] + pub fn field(&mut self, field_value: &SimValue) { + let Some(&BundleField { + name: _, + flipped: _, + ty, + }) = self.fields.next() + else { + panic!("tried to write too many fields with BundleSimValueToOpaque"); + }; + assert_eq!(T::from_canonical(ty), SimValue::ty(field_value)); + self.writer.fill_prefix_with(ty.size(), |writer| { + writer.fill_cloned_from_slice(SimValue::opaque(field_value).as_slice()) + }); + } + #[track_caller] + pub fn finish(mut self) -> OpaqueSimValueWritten<'a> { + assert_eq!( + self.fields.next(), + None, + "wrote too few fields with BundleSimValueToOpaque" + ); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } +} + #[derive(Default)] pub struct NoBuilder; @@ -323,7 +453,19 @@ macro_rules! impl_tuple_builder_fields { } macro_rules! impl_tuples { - ([$({#[num = $num:literal, field = $field:ident] $var:ident: $T:ident})*] []) => { + ( + [$({ + #[ + num = $num:tt, + field = $field:ident, + ty = $ty_var:ident: $Ty:ident, + lhs = $lhs_var:ident: $Lhs:ident, + rhs = $rhs_var:ident: $Rhs:ident + ] + $var:ident: $T:ident + })*] + [] + ) => { impl_tuple_builder_fields! { {} [$({ @@ -335,6 +477,7 @@ macro_rules! impl_tuples { impl<$($T: Type,)*> Type for ($($T,)*) { type BaseType = Bundle; type MaskType = ($($T::MaskType,)*); + type SimValue = ($(SimValue<$T>,)*); type MatchVariant = ($(Expr<$T>,)*); type MatchActiveScope = (); type MatchVariantAndInactiveScope = MatchVariantWithoutScope; @@ -373,13 +516,40 @@ macro_rules! impl_tuples { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueFromOpaque::new(*self, opaque); + $(let $var = v.field_from_opaque();)* + ($($var,)*) + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueFromOpaque::new(*self, opaque); + let ($($var,)*) = value; + $(v.field_clone_from_opaque($var);)* + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + #![allow(unused_mut, unused_variables)] + let mut v = BundleSimValueToOpaque::new(*self, writer); + let ($($var,)*) = value; + $(v.field($var);)* + v.finish() + } } impl<$($T: Type,)*> BundleType for ($($T,)*) { type Builder = TupleBuilder<($(Unfilled<$T>,)*)>; type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>; fn fields(&self) -> Interned<[BundleField]> { let ($($var,)*) = self; - [$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern() + [$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice() } } impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) { @@ -410,7 +580,7 @@ macro_rules! impl_tuples { $(let $var = $var.to_expr();)* let ty = ($(Expr::ty($var),)*); let field_values = [$(Expr::canonical($var)),*]; - BundleLiteral::new(ty, field_values[..].intern()).to_expr() + BundleLiteral::new(ty, field_values.intern_slice()).to_expr() } } impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> { @@ -420,7 +590,107 @@ macro_rules! 
impl_tuples { let ($($var,)*) = self.0; let ty = ($(Expr::ty($var),)*); let field_values = [$(Expr::canonical($var)),*]; - BundleLiteral::new(ty, field_values[..].intern()).to_expr() + BundleLiteral::new(ty, field_values.intern_slice()).to_expr() + } + } + impl<$($T: ToSimValueWithType,)*> ToSimValueWithType for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue { + SimValue::into_canonical(ToSimValueWithType::::to_sim_value_with_type(self, Bundle::from_canonical(ty))) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue + { + SimValue::into_canonical(ToSimValueWithType::::into_sim_value_with_type(self, Bundle::from_canonical(ty))) + } + } + impl<$($T: ToSimValueWithType,)*> ToSimValueWithType for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue { + let ($($var,)*) = self; + let [$($ty_var,)*] = *ty.fields() else { + panic!("bundle has wrong number of fields"); + }; + $(let $var = $var.to_sim_value_with_type($ty_var.ty);)* + ToSimValueWithType::into_sim_value_with_type(($($var,)*), ty) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: Bundle) -> SimValue { + #![allow(unused_mut)] + #![allow(clippy::unused_unit)] + let ($($var,)*) = self; + let [$($ty_var,)*] = *ty.fields() else { + panic!("bundle has wrong number of fields"); + }; + let mut opaque = OpaqueSimValue::empty(); + $(let $var = $var.into_sim_value_with_type($ty_var.ty); + assert_eq!(SimValue::ty(&$var), $ty_var.ty); + opaque.extend_from_slice(SimValue::opaque(&$var).as_slice()); + )* + SimValue::from_opaque(ty, opaque) + } + } + impl<$($T: ToSimValueWithType<$Ty>, $Ty: Type,)*> ToSimValueWithType<($($Ty,)*)> for ($($T,)*) { + #[track_caller] + fn to_sim_value_with_type(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> { + let ($($var,)*) = self; + let ($($ty_var,)*) = ty; + $(let $var = $var.to_sim_value_with_type($ty_var);)* + SimValue::from_value(ty, ($($var,)*)) + } + #[track_caller] + fn into_sim_value_with_type(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> { + let ($($var,)*) = self; + let ($($ty_var,)*) = ty; + $(let $var = $var.into_sim_value_with_type($ty_var);)* + SimValue::from_value(ty, ($($var,)*)) + } + } + impl<$($T: ToSimValue,)*> ToSimValue for ($($T,)*) { + type Type = ($($T::Type,)*); + #[track_caller] + fn to_sim_value(&self) -> SimValue { + let ($($var,)*) = self; + $(let $var = $var.to_sim_value();)* + SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*)) + } + #[track_caller] + fn into_sim_value(self) -> SimValue { + let ($($var,)*) = self; + $(let $var = $var.to_sim_value();)* + SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*)) + } + } + impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) { + fn cmp_eq(lhs: Expr, rhs: Expr<($($Rhs,)*)>) -> Expr { + let ($($lhs_var,)*) = *lhs; + let ($($rhs_var,)*) = *rhs; + ArrayLiteral::::new( + Bool, + FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]), + ) + .cast_to_bits() + .all_one_bits() + } + + fn cmp_ne(lhs: Expr, rhs: Expr<($($Rhs,)*)>) -> Expr { + let ($($lhs_var,)*) = *lhs; + let ($($rhs_var,)*) = *rhs; + ArrayLiteral::::new( + Bool, + FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]), + ) + .cast_to_bits() + .any_one_bits() + } + } + impl<$($Lhs: SimValuePartialEq<$Rhs>, $Rhs: Type,)*> SimValuePartialEq<($($Rhs,)*)> for ($($Lhs,)*) { + fn sim_value_eq(lhs: &SimValue, rhs: 
&SimValue<($($Rhs,)*)>) -> bool { + let ($($lhs_var,)*) = &**lhs; + let ($($rhs_var,)*) = &**rhs; + let retval = true; + $(let retval = retval && $lhs_var == $rhs_var;)* + retval } } }; @@ -432,24 +702,25 @@ macro_rules! impl_tuples { impl_tuples! { [] [ - {#[num = 0, field = field_0] v0: T0} - {#[num = 1, field = field_1] v1: T1} - {#[num = 2, field = field_2] v2: T2} - {#[num = 3, field = field_3] v3: T3} - {#[num = 4, field = field_4] v4: T4} - {#[num = 5, field = field_5] v5: T5} - {#[num = 6, field = field_6] v6: T6} - {#[num = 7, field = field_7] v7: T7} - {#[num = 8, field = field_8] v8: T8} - {#[num = 9, field = field_9] v9: T9} - {#[num = 10, field = field_10] v10: T10} - {#[num = 11, field = field_11] v11: T11} + {#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0} + {#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1} + {#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2} + {#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3} + {#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4} + {#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5} + {#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6} + {#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7} + {#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8} + {#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9} + {#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10} + {#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11} ] } impl Type for PhantomData { type BaseType = Bundle; type MaskType = (); + type SimValue = PhantomData; type MatchVariant = PhantomData; type MatchActiveScope = (); type MatchVariantAndInactiveScope = MatchVariantWithoutScope; @@ -482,6 +753,24 @@ impl Type for PhantomData { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert!(opaque.is_empty()); + *self + } + fn sim_value_clone_from_opaque( + &self, + _value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert!(opaque.is_empty()); + } + fn sim_value_to_opaque<'w>( + &self, + _value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } } pub struct PhantomDataBuilder(PhantomData); @@ -528,3 +817,36 @@ impl ToExpr for PhantomData { BundleLiteral::new(PhantomData, Interned::default()).to_expr() } } + +impl ToSimValue for PhantomData { + type Type = PhantomData; + + #[track_caller] + fn to_sim_value(&self) -> SimValue { + SimValue::from_value(*self, *self) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Self) -> SimValue { + SimValue::from_value(ty, *self) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue { + assert!(ty.fields().is_empty()); + SimValue::from_opaque(ty, OpaqueSimValue::empty()) + } +} + +impl ToSimValueWithType for PhantomData { + #[track_caller] + fn to_sim_value_with_type(&self, canonical_ty: CanonicalType) -> SimValue { + let ty = 
Bundle::from_canonical(canonical_ty); + assert!(ty.fields().is_empty()); + SimValue::from_opaque(canonical_ty, OpaqueSimValue::empty()) + } +} diff --git a/crates/fayalite/src/cli.rs b/crates/fayalite/src/cli.rs deleted file mode 100644 index 1030474..0000000 --- a/crates/fayalite/src/cli.rs +++ /dev/null @@ -1,799 +0,0 @@ -// SPDX-License-Identifier: LGPL-3.0-or-later -// See Notices.txt for copyright information -use crate::{ - bundle::{Bundle, BundleType}, - firrtl::{self, ExportOptions}, - intern::Interned, - module::Module, - util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8}, -}; -use clap::{ - builder::{OsStringValueParser, TypedValueParser}, - Parser, Subcommand, ValueEnum, ValueHint, -}; -use eyre::{eyre, Report}; -use serde::{Deserialize, Serialize}; -use std::{ - error, - ffi::OsString, - fmt::{self, Write}, - fs, io, mem, - path::{Path, PathBuf}, - process, -}; -use tempfile::TempDir; - -pub type Result = std::result::Result; - -pub struct CliError(Report); - -impl fmt::Debug for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Display for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl error::Error for CliError {} - -impl From for CliError { - fn from(value: io::Error) -> Self { - CliError(Report::new(value)) - } -} - -pub trait RunPhase { - type Output; - fn run(&self, arg: Arg) -> Result { - self.run_with_job(arg, &mut AcquiredJob::acquire()) - } - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result; -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct BaseArgs { - /// the directory to put the generated main output file and associated files in - #[arg(short, long, value_hint = ValueHint::DirPath, required = true)] - pub output: Option, - /// the stem of the generated main output file, e.g. 
to get foo.v, pass --file-stem=foo - #[arg(long)] - pub file_stem: Option, - #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")] - pub keep_temp_dir: bool, - #[arg(skip = false)] - pub redirect_output_for_rust_test: bool, -} - -impl BaseArgs { - fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option)> { - let (dir_path, temp_dir) = match &self.output { - Some(output) => (output.clone(), None), - None => { - let temp_dir = TempDir::new()?; - if self.keep_temp_dir { - let temp_dir = temp_dir.into_path(); - println!("created temporary directory: {}", temp_dir.display()); - (temp_dir, None) - } else { - (temp_dir.path().to_path_buf(), Some(temp_dir)) - } - } - }; - Ok(( - firrtl::FileBackend { - dir_path, - top_fir_file_stem: self.file_stem.clone(), - circuit_name: None, - }, - temp_dir, - )) - } - /// handles possibly redirecting the command's output for Rust tests - pub fn run_external_command( - &self, - _acquired_job: &mut AcquiredJob, - mut command: process::Command, - mut captured_output: Option<&mut String>, - ) -> io::Result { - if self.redirect_output_for_rust_test || captured_output.is_some() { - let (reader, writer) = os_pipe::pipe()?; - let mut reader = io::BufReader::new(reader); - command.stderr(writer.try_clone()?); - command.stdout(writer); // must not leave writer around after spawning child - command.stdin(process::Stdio::null()); - let mut child = command.spawn()?; - drop(command); // close writers - Ok(loop { - let status = child.try_wait()?; - streaming_read_utf8(&mut reader, |s| { - if let Some(captured_output) = captured_output.as_deref_mut() { - captured_output.push_str(s); - } - // use print! so output goes to Rust test output capture - print!("{s}"); - io::Result::Ok(()) - })?; - if let Some(status) = status { - break status; - } - }) - } else { - command.status() - } - } -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct FirrtlArgs { - #[command(flatten)] - pub base: BaseArgs, - #[command(flatten)] - pub export_options: ExportOptions, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct FirrtlOutput { - pub file_stem: String, - pub top_module: String, - pub output_dir: PathBuf, - pub temp_dir: Option, -} - -impl FirrtlOutput { - pub fn file_with_ext(&self, ext: &str) -> PathBuf { - let mut retval = self.output_dir.join(&self.file_stem); - retval.set_extension(ext); - retval - } - pub fn firrtl_file(&self) -> PathBuf { - self.file_with_ext("fir") - } -} - -impl FirrtlArgs { - fn run_impl( - &self, - top_module: Module, - _acquired_job: &mut AcquiredJob, - ) -> Result { - let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?; - let firrtl::FileBackend { - top_fir_file_stem, - circuit_name, - dir_path, - } = firrtl::export(file_backend, &top_module, self.export_options)?; - Ok(FirrtlOutput { - file_stem: top_fir_file_stem.expect( - "export is known to set the file stem from the circuit name if not provided", - ), - top_module: circuit_name.expect("export is known to set the circuit name"), - output_dir: dir_path, - temp_dir, - }) - } -} - -impl RunPhase> for FirrtlArgs { - type Output = FirrtlOutput; - fn run_with_job( - &self, - top_module: Module, - acquired_job: &mut AcquiredJob, - ) -> Result { - self.run_impl(top_module.canonical(), acquired_job) - } -} - -impl RunPhase>> for FirrtlArgs { - type Output = FirrtlOutput; - fn run_with_job( - &self, - top_module: Interned>, - acquired_job: &mut AcquiredJob, - ) -> Result { - self.run_with_job(*top_module, acquired_job) - } -} - -/// based on [LLVM Circt's recommended 
lowering options -/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target) -#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)] -#[non_exhaustive] -pub enum VerilogDialect { - Questa, - Spyglass, - Verilator, - Vivado, - Yosys, -} - -impl fmt::Display for VerilogDialect { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -impl VerilogDialect { - pub fn as_str(self) -> &'static str { - match self { - VerilogDialect::Questa => "questa", - VerilogDialect::Spyglass => "spyglass", - VerilogDialect::Verilator => "verilator", - VerilogDialect::Vivado => "vivado", - VerilogDialect::Yosys => "yosys", - } - } - pub fn firtool_extra_args(self) -> &'static [&'static str] { - match self { - VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], - VerilogDialect::Spyglass => { - &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] - } - VerilogDialect::Verilator => &[ - "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", - ], - VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], - VerilogDialect::Yosys => { - &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] - } - } - } -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct VerilogArgs { - #[command(flatten)] - pub firrtl: FirrtlArgs, - #[arg( - long, - default_value = "firtool", - env = "FIRTOOL", - value_hint = ValueHint::CommandName, - value_parser = OsStringValueParser::new().try_map(which::which) - )] - pub firtool: PathBuf, - #[arg(long)] - pub firtool_extra_args: Vec, - /// adapt the generated Verilog for a particular toolchain - #[arg(long)] - pub verilog_dialect: Option, - #[arg(long, short = 'g')] - pub debug: bool, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct VerilogOutput { - pub firrtl: FirrtlOutput, - pub verilog_files: Vec, - pub contents_hash: Option, -} - -impl VerilogOutput { - pub fn main_verilog_file(&self) -> PathBuf { - self.firrtl.file_with_ext("v") - } - fn unadjusted_verilog_file(&self) -> PathBuf { - self.firrtl.file_with_ext("unadjusted.v") - } -} - -impl VerilogArgs { - fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result { - let input = fs::read_to_string(output.unadjusted_verilog_file())?; - let file_separator_prefix = "\n// ----- 8< ----- FILE \""; - let file_separator_suffix = "\" ----- 8< -----\n\n"; - let mut input = &*input; - output.contents_hash = Some(blake3::hash(input.as_bytes())); - let main_verilog_file = output.main_verilog_file(); - let mut file_name: Option<&Path> = Some(&main_verilog_file); - loop { - let (chunk, next_file_name) = if let Some((chunk, rest)) = - input.split_once(file_separator_prefix) - { - let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else { - return Err(CliError(eyre!("parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"))); - }; - input = rest; - (chunk, Some(next_file_name.as_ref())) - } else { - (mem::take(&mut input), None) - }; - let Some(file_name) = mem::replace(&mut file_name, next_file_name) else { - break; - }; - let file_name = output.firrtl.output_dir.join(file_name); - fs::write(&file_name, chunk)?; - if let Some(extension) = file_name.extension() { - if extension == "v" || extension == "sv" { - output.verilog_files.push(file_name); - } - } - } - Ok(output) - } - fn run_impl( - &self, - firrtl_output: FirrtlOutput, - acquired_job: &mut 
AcquiredJob, - ) -> Result { - let Self { - firrtl, - firtool, - firtool_extra_args, - verilog_dialect, - debug, - } = self; - let output = VerilogOutput { - firrtl: firrtl_output, - verilog_files: vec![], - contents_hash: None, - }; - let mut cmd = process::Command::new(firtool); - cmd.arg(output.firrtl.firrtl_file()); - cmd.arg("-o"); - cmd.arg(output.unadjusted_verilog_file()); - if *debug { - cmd.arg("-g"); - cmd.arg("--preserve-values=all"); - } - if let Some(dialect) = verilog_dialect { - cmd.args(dialect.firtool_extra_args()); - } - cmd.args(firtool_extra_args); - cmd.current_dir(&output.firrtl.output_dir); - let status = firrtl.base.run_external_command(acquired_job, cmd, None)?; - if status.success() { - self.process_unadjusted_verilog_file(output) - } else { - Err(CliError(eyre!( - "running {} failed: {status}", - self.firtool.display() - ))) - } - } -} - -impl RunPhase for VerilogArgs -where - FirrtlArgs: RunPhase, -{ - type Output = VerilogOutput; - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result { - let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?; - self.run_impl(firrtl_output, acquired_job) - } -} - -#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)] -#[non_exhaustive] -pub enum FormalMode { - #[default] - BMC, - Prove, - Live, - Cover, -} - -impl FormalMode { - pub fn as_str(self) -> &'static str { - match self { - FormalMode::BMC => "bmc", - FormalMode::Prove => "prove", - FormalMode::Live => "live", - FormalMode::Cover => "cover", - } - } -} - -impl fmt::Display for FormalMode { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -#[derive(Clone)] -struct FormalAdjustArgs; - -impl clap::FromArgMatches for FormalAdjustArgs { - fn from_arg_matches(_matches: &clap::ArgMatches) -> Result { - Ok(Self) - } - - fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> { - Ok(()) - } -} - -impl clap::Args for FormalAdjustArgs { - fn augment_args(cmd: clap::Command) -> clap::Command { - cmd.mut_arg("output", |arg| arg.required(false)) - .mut_arg("verilog_dialect", |arg| { - arg.default_value(VerilogDialect::Yosys.to_string()) - .hide(true) - }) - } - - fn augment_args_for_update(cmd: clap::Command) -> clap::Command { - Self::augment_args(cmd) - } -} - -#[derive(Parser, Clone)] -#[non_exhaustive] -pub struct FormalArgs { - #[command(flatten)] - pub verilog: VerilogArgs, - #[arg( - long, - default_value = "sby", - env = "SBY", - value_hint = ValueHint::CommandName, - value_parser = OsStringValueParser::new().try_map(which::which) - )] - pub sby: PathBuf, - #[arg(long)] - pub sby_extra_args: Vec, - #[arg(long, default_value_t)] - pub mode: FormalMode, - #[arg(long, default_value_t = Self::DEFAULT_DEPTH)] - pub depth: u64, - #[arg(long, default_value = "z3")] - pub solver: String, - #[arg(long)] - pub smtbmc_extra_args: Vec, - #[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")] - pub cache_results: bool, - #[command(flatten)] - _formal_adjust_args: FormalAdjustArgs, -} - -impl fmt::Debug for FormalArgs { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - verilog, - sby, - sby_extra_args, - mode, - depth, - solver, - smtbmc_extra_args, - cache_results, - _formal_adjust_args: _, - } = self; - f.debug_struct("FormalArgs") - .field("verilog", verilog) - .field("sby", sby) - .field("sby_extra_args", sby_extra_args) - .field("mode", mode) - .field("depth", depth) - .field("solver", solver) - 
.field("smtbmc_extra_args", smtbmc_extra_args) - .field("cache_results", cache_results) - .finish_non_exhaustive() - } -} - -impl FormalArgs { - pub const DEFAULT_DEPTH: u64 = 20; -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct FormalOutput { - pub verilog: VerilogOutput, -} - -impl FormalOutput { - pub fn sby_file(&self) -> PathBuf { - self.verilog.firrtl.file_with_ext("sby") - } - pub fn cache_file(&self) -> PathBuf { - self.verilog.firrtl.file_with_ext("cache.json") - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub struct FormalCacheOutput {} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub enum FormalCacheVersion { - V1, -} - -impl FormalCacheVersion { - pub const CURRENT: Self = Self::V1; -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub struct FormalCache { - pub version: FormalCacheVersion, - pub contents_hash: blake3::Hash, - pub stdout_stderr: String, - pub result: Result, -} - -impl FormalCache { - pub fn new( - version: FormalCacheVersion, - contents_hash: blake3::Hash, - stdout_stderr: String, - result: Result, - ) -> Self { - Self { - version, - contents_hash, - stdout_stderr, - result, - } - } -} - -impl FormalArgs { - fn sby_contents(&self, output: &FormalOutput) -> Result { - let Self { - verilog: _, - sby: _, - sby_extra_args: _, - mode, - depth, - smtbmc_extra_args, - solver, - cache_results: _, - _formal_adjust_args: _, - } = self; - let smtbmc_options = smtbmc_extra_args.join(" "); - let top_module = &output.verilog.firrtl.top_module; - let mut retval = format!( - "[options]\n\ - mode {mode}\n\ - depth {depth}\n\ - wait on\n\ - \n\ - [engines]\n\ - smtbmc {solver} -- -- {smtbmc_options}\n\ - \n\ - [script]\n" - ); - for verilog_file in &output.verilog.verilog_files { - let verilog_file = verilog_file - .to_str() - .ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?; - if verilog_file.contains(|ch: char| { - (ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"' - }) { - return Err(CliError(eyre!( - "verilog file path contains characters that aren't permitted" - ))); - } - writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap(); - } - // workaround for wires disappearing -- set `keep` on all wires - writeln!(retval, "hierarchy -top {top_module}").unwrap(); - writeln!(retval, "proc").unwrap(); - writeln!(retval, "setattr -set keep 1 w:\\*").unwrap(); - writeln!(retval, "prep").unwrap(); - Ok(retval) - } - fn run_impl( - &self, - verilog_output: VerilogOutput, - acquired_job: &mut AcquiredJob, - ) -> Result { - let output = FormalOutput { - verilog: verilog_output, - }; - let sby_file = output.sby_file(); - let sby_contents = self.sby_contents(&output)?; - let contents_hash = output.verilog.contents_hash.map(|verilog_hash| { - let mut hasher = blake3::Hasher::new(); - hasher.update(verilog_hash.as_bytes()); - hasher.update(sby_contents.as_bytes()); - hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes()); - for sby_extra_arg in self.sby_extra_args.iter() { - hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes()); - hasher.update(sby_extra_arg.as_bytes()); - } - hasher.finalize() - }); - std::fs::write(&sby_file, sby_contents)?; - let mut cmd = process::Command::new(&self.sby); - cmd.arg("-j1"); // sby seems not to respect job count in parallel mode - cmd.arg("-f"); - cmd.arg(sby_file.file_name().unwrap()); - cmd.args(&self.sby_extra_args); - 
cmd.current_dir(&output.verilog.firrtl.output_dir); - let mut captured_output = String::new(); - let cache_file = output.cache_file(); - let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) { - if let Some(FormalCache { - version: FormalCacheVersion::CURRENT, - contents_hash: cache_contents_hash, - stdout_stderr, - result, - }) = fs::read(&cache_file) - .ok() - .and_then(|v| serde_json::from_slice(&v).ok()) - { - if cache_contents_hash == contents_hash { - println!("Using cached formal result:\n{stdout_stderr}"); - return match result { - Ok(FormalCacheOutput {}) => Ok(output), - Err(error) => Err(CliError(eyre::Report::msg(error))), - }; - } - } - true - } else { - false - }; - let _ = fs::remove_file(&cache_file); - let status = self.verilog.firrtl.base.run_external_command( - acquired_job, - cmd, - do_cache.then_some(&mut captured_output), - )?; - let result = if status.success() { - Ok(output) - } else { - Err(CliError(eyre!( - "running {} failed: {status}", - self.sby.display() - ))) - }; - if do_cache { - fs::write( - cache_file, - serde_json::to_string_pretty(&FormalCache { - version: FormalCacheVersion::CURRENT, - contents_hash: contents_hash.unwrap(), - stdout_stderr: captured_output, - result: match &result { - Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}), - Err(error) => Err(error.to_string()), - }, - }) - .expect("serialization shouldn't ever fail"), - )?; - } - result - } -} - -impl RunPhase for FormalArgs -where - VerilogArgs: RunPhase, -{ - type Output = FormalOutput; - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result { - let verilog_output = self.verilog.run_with_job(arg, acquired_job)?; - self.run_impl(verilog_output, acquired_job) - } -} - -#[derive(Subcommand, Debug)] -enum CliCommand { - /// Generate FIRRTL - Firrtl(FirrtlArgs), - /// Generate Verilog - Verilog(VerilogArgs), - /// Run a formal proof - Formal(FormalArgs), -} - -/// a simple CLI -/// -/// Use like: -/// -/// ```no_run -/// # use fayalite::prelude::*; -/// # #[hdl_module] -/// # fn my_module() {} -/// use fayalite::cli; -/// -/// fn main() -> cli::Result { -/// cli::Cli::parse().run(my_module()) -/// } -/// ``` -/// -/// You can also use it with a larger [`clap`]-based CLI like so: -/// -/// ```no_run -/// # use fayalite::prelude::*; -/// # #[hdl_module] -/// # fn my_module() {} -/// use clap::{Subcommand, Parser}; -/// use fayalite::cli; -/// -/// #[derive(Subcommand)] -/// pub enum Cmd { -/// #[command(flatten)] -/// Fayalite(cli::Cli), -/// MySpecialCommand { -/// #[arg(long)] -/// foo: bool, -/// }, -/// } -/// -/// #[derive(Parser)] -/// pub struct Cli { -/// #[command(subcommand)] -/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands -/// } -/// -/// fn main() -> cli::Result { -/// match Cli::parse().cmd { -/// Cmd::Fayalite(v) => v.run(my_module())?, -/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"), -/// } -/// Ok(()) -/// } -/// ``` -#[derive(Parser, Debug)] -// clear things that would be crate-specific -#[command(name = "Fayalite Simple CLI", about = None, long_about = None)] -pub struct Cli { - #[command(subcommand)] - subcommand: CliCommand, -} - -impl clap::Subcommand for Cli { - fn augment_subcommands(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands(cmd) - } - - fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands_for_update(cmd) - } - - fn has_subcommand(name: &str) -> bool { - 
CliCommand::has_subcommand(name) - } -} - -impl RunPhase for Cli -where - FirrtlArgs: RunPhase, -{ - type Output = (); - fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result { - match &self.subcommand { - CliCommand::Firrtl(c) => { - c.run_with_job(arg, acquired_job)?; - } - CliCommand::Verilog(c) => { - c.run_with_job(arg, acquired_job)?; - } - CliCommand::Formal(c) => { - c.run_with_job(arg, acquired_job)?; - } - } - Ok(()) - } -} - -impl Cli { - /// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`] - pub fn parse() -> Self { - clap::Parser::parse() - } - /// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`] - pub fn run(&self, top_module: T) -> Result<()> - where - Self: RunPhase, - { - RunPhase::run(self, top_module) - } -} diff --git a/crates/fayalite/src/clock.rs b/crates/fayalite/src/clock.rs index fe99653..909edbd 100644 --- a/crates/fayalite/src/clock.rs +++ b/crates/fayalite/src/clock.rs @@ -4,10 +4,14 @@ use crate::{ expr::{Expr, ToExpr}, hdl, int::Bool, - reset::Reset, + reset::{Reset, ResetType}, source_location::SourceLocation, - ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties}, + ty::{ + CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self, + }, }; +use bitvec::{bits, order::Lsb0}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct Clock; @@ -15,6 +19,7 @@ pub struct Clock; impl Type for Clock { type BaseType = Clock; type MaskType = Bool; + type SimValue = bool; impl_match_variant_as_self!(); @@ -36,6 +41,31 @@ impl Type for Clock { }; retval } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + opaque.bits()[0] + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + *value = opaque.bits()[0]; + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1)); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice( + [bits![0], bits![1]][*value as usize], + )) + } } impl Clock { @@ -55,6 +85,7 @@ impl StaticType for Clock { is_storable: false, is_castable_from_bits: true, bit_width: 1, + sim_only_values_len: 0, }; const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES; } @@ -88,9 +119,9 @@ impl ToClock for Expr { } #[hdl] -pub struct ClockDomain { +pub struct ClockDomain { pub clk: Clock, - pub rst: Reset, + pub rst: R, } impl ToClock for bool { diff --git a/crates/fayalite/src/enum_.rs b/crates/fayalite/src/enum_.rs index 2ed0b8e..083072b 100644 --- a/crates/fayalite/src/enum_.rs +++ b/crates/fayalite/src/enum_.rs @@ -2,21 +2,31 @@ // See Notices.txt for copyright information use crate::{ - expr::{ops::VariantAccess, Expr, ToExpr}, + expr::{ + Expr, ToExpr, + ops::{ExprPartialEq, VariantAccess}, + }, hdl, - int::Bool, + int::{Bool, UIntValue}, intern::{Intern, Interned}, module::{ - connect, enum_match_variants_helper, incomplete_wire, wire, - EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, + EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, connect, + enum_match_variants_helper, 
incomplete_wire, wire, }, + sim::value::{SimValue, SimValuePartialEq}, source_location::SourceLocation, - ty::{CanonicalType, MatchVariantAndInactiveScope, StaticType, Type, TypeProperties}, + ty::{ + CanonicalType, MatchVariantAndInactiveScope, OpaqueSimValue, OpaqueSimValueSize, + OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type, + TypeProperties, + }, + util::HashMap, }; -use hashbrown::HashMap; -use std::{convert::Infallible, fmt, iter::FusedIterator}; +use bitvec::{order::Lsb0, slice::BitSlice, view::BitView}; +use serde::{Deserialize, Serialize}; +use std::{convert::Infallible, fmt, iter::FusedIterator, sync::Arc}; -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] pub struct EnumVariant { pub name: Interned, pub ty: Option, @@ -111,6 +121,7 @@ impl EnumTypePropertiesBuilder { is_storable: true, is_castable_from_bits: true, bit_width: 0, + sim_only_values_len: 0, }, variant_count: 0, } @@ -129,9 +140,14 @@ impl EnumTypePropertiesBuilder { is_storable, is_castable_from_bits, bit_width, + sim_only_values_len, }) = field_props { assert!(is_passive, "variant type must be a passive type"); + assert!( + sim_only_values_len == 0, + "can't have `SimOnlyValue`s in an Enum" + ); type_properties = TypeProperties { is_passive: true, is_storable: type_properties.is_storable & is_storable, @@ -142,6 +158,7 @@ impl EnumTypePropertiesBuilder { } else { type_properties.bit_width }, + sim_only_values_len: 0, }; } Self { @@ -149,6 +166,12 @@ impl EnumTypePropertiesBuilder { variant_count: variant_count + 1, } } + #[must_use] + pub fn variants(self, variants: impl IntoIterator) -> Self { + variants.into_iter().fold(self, |this, variant| { + this.variant(variant.ty.map(CanonicalType::type_properties)) + }) + } pub const fn finish(self) -> TypeProperties { assert!( self.variant_count != 0, @@ -178,7 +201,8 @@ impl Default for EnumTypePropertiesBuilder { impl Enum { #[track_caller] pub fn new(variants: Interned<[EnumVariant]>) -> Self { - let mut name_indexes = HashMap::with_capacity(variants.len()); + let mut name_indexes = + HashMap::with_capacity_and_hasher(variants.len(), Default::default()); let mut type_props_builder = EnumTypePropertiesBuilder::new(); for (index, EnumVariant { name, ty }) in variants.iter().enumerate() { if let Some(old_index) = name_indexes.insert(*name, index) { @@ -238,13 +262,14 @@ impl Enum { pub trait EnumType: Type< - BaseType = Enum, - MaskType = Bool, - MatchActiveScope = Scope, - MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope, - MatchVariantsIter = EnumMatchVariantsIter, -> + BaseType = Enum, + MaskType = Bool, + MatchActiveScope = Scope, + MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope, + MatchVariantsIter = EnumMatchVariantsIter, + > { + type SimBuilder: From; fn variants(&self) -> Interned<[EnumVariant]>; fn match_activate_scope( v: Self::MatchVariantAndInactiveScope, @@ -307,7 +332,18 @@ impl DoubleEndedIterator for EnumMatchVariantsIter { } } +pub struct NoBuilder { + _ty: Enum, +} + +impl From for NoBuilder { + fn from(_ty: Enum) -> Self { + Self { _ty } + } +} + impl EnumType for Enum { + type SimBuilder = NoBuilder; fn match_activate_scope( v: Self::MatchVariantAndInactiveScope, ) -> (Self::MatchVariant, Self::MatchActiveScope) { @@ -322,6 +358,7 @@ impl EnumType for Enum { impl Type for Enum { type BaseType = Enum; type MaskType = Bool; + type SimValue = OpaqueSimValue; type MatchVariant = Option>; type MatchActiveScope = 
Scope; type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope; @@ -352,6 +389,341 @@ impl Type for Enum { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(self.type_properties().size(), opaque.size()); + opaque.to_owned() + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(self.type_properties().size(), opaque.size()); + assert_eq!(value.size(), opaque.size()); + value.clone_from_slice(opaque); + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(self.type_properties().size(), writer.size()); + assert_eq!(value.size(), writer.size()); + writer.fill_cloned_from_slice(value.as_slice()) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)] +pub struct EnumPaddingSimValue { + bits: Option, +} + +impl EnumPaddingSimValue { + pub const fn new() -> Self { + Self { bits: None } + } + pub fn bit_width(&self) -> Option { + self.bits.as_ref().map(UIntValue::width) + } + pub fn bits(&self) -> &Option { + &self.bits + } + pub fn bits_mut(&mut self) -> &mut Option { + &mut self.bits + } + pub fn into_bits(self) -> Option { + self.bits + } + pub fn from_bits(bits: Option) -> Self { + Self { bits } + } + pub fn from_bitslice(v: &BitSlice) -> Self { + Self { + bits: Some(UIntValue::new(Arc::new(v.to_bitvec()))), + } + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct UnknownVariantSimValue { + discriminant: usize, + body_bits: UIntValue, +} + +impl UnknownVariantSimValue { + pub fn discriminant(&self) -> usize { + self.discriminant + } + pub fn body_bits(&self) -> &UIntValue { + &self.body_bits + } + pub fn body_bits_mut(&mut self) -> &mut UIntValue { + &mut self.body_bits + } + pub fn into_body_bits(self) -> UIntValue { + self.body_bits + } + pub fn into_parts(self) -> (usize, UIntValue) { + (self.discriminant, self.body_bits) + } + pub fn new(discriminant: usize, body_bits: UIntValue) -> Self { + Self { + discriminant, + body_bits, + } + } +} + +pub struct EnumSimValueFromOpaque<'a> { + variants: Interned<[EnumVariant]>, + discriminant: usize, + body_bits: &'a BitSlice, +} + +impl<'a> EnumSimValueFromOpaque<'a> { + #[track_caller] + pub fn new(ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self { + let variants = ty.variants(); + let size = EnumTypePropertiesBuilder::new() + .variants(variants) + .finish() + .size(); + assert!(size.only_bit_width().is_some()); + assert_eq!(size, opaque.size()); + let (discriminant_bits, body_bits) = opaque + .bits() + .split_at(discriminant_bit_width_impl(variants.len())); + let mut discriminant = 0usize; + discriminant.view_bits_mut::()[..discriminant_bits.len()] + .copy_from_bitslice(discriminant_bits); + Self { + variants, + discriminant, + body_bits, + } + } + pub fn discriminant(&self) -> usize { + self.discriminant + } + #[track_caller] + #[cold] + fn usage_error(&self, clone: bool) -> ! { + let clone = if clone { "clone_" } else { "" }; + match self.variants.get(self.discriminant) { + None => { + panic!("should have called EnumSimValueFromBits::unknown_variant_{clone}from_bits"); + } + Some(EnumVariant { ty: None, .. }) => { + panic!( + "should have called EnumSimValueFromBits::variant_no_field_{clone}from_bits" + ); + } + Some(EnumVariant { ty: Some(_), .. 
}) => { + panic!( + "should have called EnumSimValueFromBits::variant_with_field_{clone}from_bits" + ); + } + } + } + #[track_caller] + fn known_variant(&self, clone: bool) -> (Option, &'a BitSlice, &'a BitSlice) { + let Some(EnumVariant { ty, .. }) = self.variants.get(self.discriminant) else { + self.usage_error(clone); + }; + let variant_bit_width = ty.map_or(0, CanonicalType::bit_width); + let (variant_bits, padding_bits) = self.body_bits.split_at(variant_bit_width); + (*ty, variant_bits, padding_bits) + } + #[track_caller] + pub fn unknown_variant_from_opaque(self) -> UnknownVariantSimValue { + let None = self.variants.get(self.discriminant) else { + self.usage_error(false); + }; + UnknownVariantSimValue::new( + self.discriminant, + UIntValue::new(Arc::new(self.body_bits.to_bitvec())), + ) + } + #[track_caller] + pub fn unknown_variant_clone_from_opaque(self, value: &mut UnknownVariantSimValue) { + let None = self.variants.get(self.discriminant) else { + self.usage_error(true); + }; + value.discriminant = self.discriminant; + assert_eq!(value.body_bits.width(), self.body_bits.len()); + value + .body_bits + .bits_mut() + .copy_from_bitslice(self.body_bits); + } + #[track_caller] + pub fn variant_no_field_from_opaque(self) -> EnumPaddingSimValue { + let (None, _variant_bits, padding_bits) = self.known_variant(false) else { + self.usage_error(false); + }; + EnumPaddingSimValue::from_bitslice(padding_bits) + } + #[track_caller] + pub fn variant_with_field_from_opaque(self) -> (SimValue, EnumPaddingSimValue) { + let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(false) else { + self.usage_error(false); + }; + ( + SimValue::from_bitslice(T::from_canonical(variant_ty), variant_bits), + EnumPaddingSimValue::from_bitslice(padding_bits), + ) + } + #[track_caller] + fn clone_padding_from_bits(padding: &mut EnumPaddingSimValue, padding_bits: &BitSlice) { + match padding.bits_mut() { + None => *padding = EnumPaddingSimValue::from_bitslice(padding_bits), + Some(padding) => { + assert_eq!(padding.width(), padding_bits.len()); + padding.bits_mut().copy_from_bitslice(padding_bits); + } + } + } + #[track_caller] + pub fn variant_no_field_clone_from_opaque(self, padding: &mut EnumPaddingSimValue) { + let (None, _variant_bits, padding_bits) = self.known_variant(true) else { + self.usage_error(true); + }; + Self::clone_padding_from_bits(padding, padding_bits); + } + #[track_caller] + pub fn variant_with_field_clone_from_opaque( + self, + value: &mut SimValue, + padding: &mut EnumPaddingSimValue, + ) { + let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(true) else { + self.usage_error(true); + }; + assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty)); + SimValue::bits_mut(value) + .bits_mut() + .copy_from_bitslice(variant_bits); + Self::clone_padding_from_bits(padding, padding_bits); + } +} + +pub struct EnumSimValueToOpaque<'a> { + variants: Interned<[EnumVariant]>, + bit_width: usize, + discriminant_bit_width: usize, + writer: OpaqueSimValueWriter<'a>, +} + +impl<'a> EnumSimValueToOpaque<'a> { + #[track_caller] + pub fn new(ty: T, writer: OpaqueSimValueWriter<'a>) -> Self { + let variants = ty.variants(); + let size = EnumTypePropertiesBuilder::new() + .variants(variants) + .finish() + .size(); + assert_eq!(size, writer.size()); + Self { + variants, + bit_width: size + .only_bit_width() + .expect("enums should only contain bits"), + discriminant_bit_width: discriminant_bit_width_impl(variants.len()), + writer, + } + } + #[track_caller] + fn 
write_discriminant(&mut self, mut discriminant: usize) { + let orig_discriminant = discriminant; + let discriminant_bits = + &mut discriminant.view_bits_mut::()[..self.discriminant_bit_width]; + self.writer.fill_prefix_with( + OpaqueSimValueSize::from_bit_width(self.discriminant_bit_width), + |writer| { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(discriminant_bits)) + }, + ); + discriminant_bits.fill(false); + assert!( + discriminant == 0, + "{orig_discriminant:#x} is too big to fit in enum discriminant bits", + ); + } + #[track_caller] + pub fn unknown_variant_to_opaque( + mut self, + value: &UnknownVariantSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.write_discriminant(value.discriminant); + let None = self.variants.get(value.discriminant) else { + panic!("can't use UnknownVariantSimValue to set known discriminant"); + }; + assert_eq!( + self.bit_width - self.discriminant_bit_width, + value.body_bits.width() + ); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.body_bits.bits())) + } + #[track_caller] + fn known_variant( + mut self, + discriminant: usize, + value: Option<&OpaqueSimValue>, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.write_discriminant(discriminant); + let variant_ty = self.variants[discriminant].ty; + let variant_size = variant_ty.map_or(OpaqueSimValueSize::empty(), CanonicalType::size); + if let Some(value) = value { + if variant_ty.is_none() { + panic!("expected variant to have no field"); + } + self.writer.fill_prefix_with(variant_size, |writer| { + writer.fill_cloned_from_slice(value.as_slice()) + }); + } else if variant_ty.is_some() { + panic!("expected variant to have a field"); + } + if let Some(padding) = padding.bits() { + assert_eq!(padding.ty().type_properties().size(), self.writer.size()); + self.writer + .fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(padding.bits())) + } else { + self.writer.fill_with_zeros() + } + } + #[track_caller] + pub fn variant_no_field_to_opaque( + self, + discriminant: usize, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + self.known_variant(discriminant, None, padding) + } + #[track_caller] + pub fn variant_with_field_to_opaque( + self, + discriminant: usize, + value: &SimValue, + padding: &EnumPaddingSimValue, + ) -> OpaqueSimValueWritten<'a> { + let Some(variant_ty) = self.variants[discriminant].ty else { + panic!("expected variant to have no field"); + }; + assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty)); + self.known_variant(discriminant, Some(SimValue::opaque(value)), padding) + } +} + +#[doc(hidden)] +pub fn assert_is_enum_type(v: T) -> T { + v +} + +#[doc(hidden)] +pub fn enum_type_to_sim_builder(v: T) -> T::SimBuilder { + v.into() } #[hdl] @@ -360,6 +732,79 @@ pub enum HdlOption { HdlSome(T), } +impl, Rhs: Type> ExprPartialEq> for HdlOption { + #[hdl] + fn cmp_eq(lhs: Expr, rhs: Expr>) -> Expr { + #[hdl] + let cmp_eq = wire(); + #[hdl] + match lhs { + HdlSome(lhs) => + { + #[hdl] + match rhs { + HdlSome(rhs) => connect(cmp_eq, ExprPartialEq::cmp_eq(lhs, rhs)), + HdlNone => connect(cmp_eq, false), + } + } + HdlNone => + { + #[hdl] + match rhs { + HdlSome(_) => connect(cmp_eq, false), + HdlNone => connect(cmp_eq, true), + } + } + } + cmp_eq + } + + #[hdl] + fn cmp_ne(lhs: Expr, rhs: Expr>) -> Expr { + #[hdl] + let cmp_ne = wire(); + #[hdl] + match lhs { + HdlSome(lhs) => + { + #[hdl] + match rhs { + HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)), + HdlNone => 
connect(cmp_ne, true), + } + } + HdlNone => + { + #[hdl] + match rhs { + HdlSome(_) => connect(cmp_ne, true), + HdlNone => connect(cmp_ne, false), + } + } + } + cmp_ne + } +} + +impl, Rhs: Type> SimValuePartialEq> for HdlOption { + fn sim_value_eq(this: &SimValue, other: &SimValue>) -> bool { + type SimValueMatch = ::SimValue; + match (&**this, &**other) { + (SimValueMatch::::HdlNone(_), SimValueMatch::>::HdlNone(_)) => { + true + } + (SimValueMatch::::HdlSome(..), SimValueMatch::>::HdlNone(_)) + | (SimValueMatch::::HdlNone(_), SimValueMatch::>::HdlSome(..)) => { + false + } + ( + SimValueMatch::::HdlSome(l, _), + SimValueMatch::>::HdlSome(r, _), + ) => l == r, + } + } +} + #[allow(non_snake_case)] pub fn HdlNone() -> Expr> { HdlOption[T::TYPE].HdlNone() diff --git a/crates/fayalite/src/expr.rs b/crates/fayalite/src/expr.rs index fa50852..89e60cd 100644 --- a/crates/fayalite/src/expr.rs +++ b/crates/fayalite/src/expr.rs @@ -13,10 +13,12 @@ use crate::{ intern::{Intern, Interned}, memory::{DynPortType, MemPort, PortType}, module::{ - transform::visit::{Fold, Folder, Visit, Visitor}, Instance, ModuleIO, + transform::visit::{Fold, Folder, Visit, Visitor}, }, + phantom_const::PhantomConst, reg::Reg, + reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset}, ty::{CanonicalType, StaticType, Type, TypeWithDeref}, wire::Wire, }; @@ -108,6 +110,7 @@ expr_enum! { UIntLiteral(Interned), SIntLiteral(Interned), BoolLiteral(bool), + PhantomConst(PhantomConst), BundleLiteral(ops::BundleLiteral), ArrayLiteral(ops::ArrayLiteral), EnumLiteral(ops::EnumLiteral), @@ -209,7 +212,9 @@ expr_enum! { ModuleIO(ModuleIO), Instance(Instance), Wire(Wire), - Reg(Reg), + Reg(Reg), + RegSync(Reg), + RegAsync(Reg), MemPort(MemPort), } } @@ -269,6 +274,20 @@ pub struct Expr { impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + #[cfg(debug_assertions)] + { + let Self { + __enum, + __ty, + __flow, + } = self; + let expr_ty = __ty.canonical(); + let enum_ty = __enum.to_expr().__ty; + assert_eq!( + expr_ty, enum_ty, + "expr ty mismatch:\nExpr {{\n__enum: {__enum:?},\n__ty: {__ty:?},\n__flow: {__flow:?}\n}}" + ); + } self.__enum.fmt(f) } } @@ -513,11 +532,7 @@ impl Flow { } } pub const fn flip_if(self, flipped: bool) -> Flow { - if flipped { - self.flip() - } else { - self - } + if flipped { self.flip() } else { self } } } @@ -593,25 +608,42 @@ impl GetTarget for Wire { } } -impl ToExpr for Reg { +impl ToExpr for Reg { type Type = T; fn to_expr(&self) -> Expr { + struct Dispatch; + impl ResetTypeDispatch for Dispatch { + type Input = Reg; + type Output = ExprEnum; + + fn reset(self, input: Self::Input) -> Self::Output { + ExprEnum::Reg(input) + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + ExprEnum::RegSync(input) + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + ExprEnum::RegAsync(input) + } + } Expr { - __enum: ExprEnum::Reg(self.canonical()).intern_sized(), + __enum: R::dispatch(self.canonical(), Dispatch).intern_sized(), __ty: self.ty(), __flow: self.flow(), } } } -impl ToLiteralBits for Reg { +impl ToLiteralBits for Reg { fn to_literal_bits(&self) -> Result, NotALiteralExpr> { Err(NotALiteralExpr) } } -impl GetTarget for Reg { +impl GetTarget for Reg { fn target(&self) -> Option> { Some(Intern::intern_sized(self.canonical().into())) } @@ -678,6 +710,7 @@ impl CastToBits for T { } pub trait CastBitsTo { + #[track_caller] fn cast_bits_to(&self, ty: T) -> Expr; } @@ -735,3 +768,27 @@ pub fn repeat( ) .to_expr() } + +impl ToExpr for 
PhantomConst { + type Type = Self; + + fn to_expr(&self) -> Expr { + Expr { + __enum: ExprEnum::PhantomConst(self.canonical_phantom_const()).intern_sized(), + __ty: *self, + __flow: Flow::Source, + } + } +} + +impl GetTarget for PhantomConst { + fn target(&self) -> Option> { + None + } +} + +impl ToLiteralBits for PhantomConst { + fn to_literal_bits(&self) -> Result, NotALiteralExpr> { + Ok(Interned::default()) + } +} diff --git a/crates/fayalite/src/expr/ops.rs b/crates/fayalite/src/expr/ops.rs index 3579641..b10e3ae 100644 --- a/crates/fayalite/src/expr/ops.rs +++ b/crates/fayalite/src/expr/ops.rs @@ -7,19 +7,23 @@ use crate::{ clock::{Clock, ToClock}, enum_::{Enum, EnumType, EnumVariant}, expr::{ + CastBitsTo as _, CastTo, CastToBits as _, Expr, ExprEnum, Flow, HdlPartialEq, + HdlPartialOrd, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits, target::{ GetTarget, Target, TargetPathArrayElement, TargetPathBundleField, TargetPathDynArrayElement, TargetPathElement, }, - CastTo, Expr, ExprEnum, Flow, HdlPartialEq, HdlPartialOrd, NotALiteralExpr, ReduceBits, - ToExpr, ToLiteralBits, }, int::{ Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue, }, intern::{Intern, Interned}, - reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset}, + phantom_const::{PhantomConst, PhantomConstValue}, + reset::{ + AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset, + ToSyncReset, + }, ty::{CanonicalType, StaticType, Type}, util::ConstUsize, }; @@ -262,7 +266,7 @@ impl Neg { }; let result_ty = retval.ty(); retval.literal_bits = arg.to_literal_bits().map(|bits| { - Intern::intern_owned(result_ty.bits_from_bigint_wrapping(-SInt::bits_to_bigint(&bits))) + Intern::intern_owned(result_ty.bits_from_bigint_wrapping(&-SInt::bits_to_bigint(&bits))) }); retval } @@ -369,7 +373,7 @@ fn binary_op_literal_bits, #[dyn] SInt, AsyncReset, #[trai impl_cast_bit_op!(CastSyncResetToBool, SyncReset, Bool); impl_cast_bit_op!(CastSyncResetToUInt, SyncReset, UInt<1>, #[dyn] UInt); impl_cast_bit_op!(CastSyncResetToSInt, SyncReset, SInt<1>, #[dyn] SInt); -impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset, #[trait] ToReset::to_reset); +impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset); impl_cast_bit_op!(CastAsyncResetToBool, AsyncReset, Bool); impl_cast_bit_op!(CastAsyncResetToUInt, AsyncReset, UInt<1>, #[dyn] UInt); impl_cast_bit_op!(CastAsyncResetToSInt, AsyncReset, SInt<1>, #[dyn] SInt); -impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset, #[trait] ToReset::to_reset); +impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset); impl_cast_bit_op!(CastResetToBool, Reset, Bool); impl_cast_bit_op!(CastResetToUInt, Reset, UInt<1>, #[dyn] UInt); impl_cast_bit_op!(CastResetToSInt, Reset, SInt<1>, #[dyn] SInt); @@ -1788,6 +1792,127 @@ impl_cast_bit_op!(CastClockToBool, Clock, Bool); impl_cast_bit_op!(CastClockToUInt, Clock, UInt<1>, #[dyn] UInt); impl_cast_bit_op!(CastClockToSInt, Clock, SInt<1>, #[dyn] SInt); +impl ToReset for Expr { + fn to_reset(&self) -> Expr { + struct Dispatch; + impl ResetTypeDispatch for Dispatch { + type Input = Expr; + type Output = Expr; + + fn reset(self, input: Self::Input) -> Self::Output { + input + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + input.cast_to_static() + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + input.cast_to_static() + } + } + T::dispatch(*self, Dispatch) + } +} + +impl ExprCastTo for AsyncReset { + fn cast_to(src: Expr, 
_to_type: AsyncReset) -> Expr { + src + } +} + +impl ExprCastTo for AsyncReset { + fn cast_to(src: Expr, to_type: SyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for AsyncReset { + fn cast_to(src: Expr, to_type: Clock) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for SyncReset { + fn cast_to(src: Expr, to_type: AsyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for SyncReset { + fn cast_to(src: Expr, _to_type: SyncReset) -> Expr { + src + } +} + +impl ExprCastTo for SyncReset { + fn cast_to(src: Expr, to_type: Clock) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Reset { + fn cast_to(src: Expr, to_type: AsyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Reset { + fn cast_to(src: Expr, to_type: SyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Reset { + fn cast_to(src: Expr, _to_type: Reset) -> Expr { + src + } +} + +impl ExprCastTo for Reset { + fn cast_to(src: Expr, to_type: Clock) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Clock { + fn cast_to(src: Expr, to_type: AsyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Clock { + fn cast_to(src: Expr, to_type: SyncReset) -> Expr { + src.cast_to(Bool).cast_to(to_type) + } +} + +impl ExprCastTo for Clock { + fn cast_to(src: Expr, _to_type: Clock) -> Expr { + src + } +} + +impl ExprCastTo<()> for PhantomConst { + fn cast_to(src: Expr, to_type: ()) -> Expr<()> { + src.cast_to_bits().cast_bits_to(to_type) + } +} + +impl ExprCastTo> for () { + fn cast_to(src: Expr, to_type: PhantomConst) -> Expr> { + src.cast_to_bits().cast_bits_to(to_type) + } +} + +impl ExprCastTo> + for PhantomConst +{ + fn cast_to(src: Expr, to_type: PhantomConst) -> Expr> { + src.cast_to_bits().cast_bits_to(to_type) + } +} + #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct FieldAccess { base: Expr, @@ -1812,7 +1937,8 @@ impl FieldAccess { let field = Expr::ty(base).fields()[field_index]; let field_type = FieldType::from_canonical(field.ty); let literal_bits = base.to_literal_bits().map(|bits| { - bits[Expr::ty(base).field_offsets()[field_index]..][..field.ty.bit_width()].intern() + bits[Expr::ty(base).field_offsets()[field_index].bit_width..][..field.ty.bit_width()] + .intern() }); let target = base.target().map(|base| { Intern::intern_sized(base.join(TargetPathElement::intern_sized( @@ -2604,3 +2730,47 @@ impl ToExpr for Uninit { } } } + +pub trait ExprIntoIterator: Type { + type Item: Type; + type ExprIntoIter: Iterator>; + + fn expr_into_iter(e: Expr) -> Self::ExprIntoIter; +} + +impl IntoIterator for Expr { + type Item = Expr; + type IntoIter = T::ExprIntoIter; + + fn into_iter(self) -> Self::IntoIter { + T::expr_into_iter(self) + } +} + +impl IntoIterator for &'_ Expr { + type Item = Expr; + type IntoIter = T::ExprIntoIter; + + fn into_iter(self) -> Self::IntoIter { + T::expr_into_iter(*self) + } +} + +impl IntoIterator for &'_ mut Expr { + type Item = Expr; + type IntoIter = T::ExprIntoIter; + + fn into_iter(self) -> Self::IntoIter { + T::expr_into_iter(*self) + } +} + +pub trait ExprFromIterator: Type { + fn expr_from_iter>(iter: T) -> Expr; +} + +impl, A> FromIterator for Expr { + fn from_iter>(iter: T) -> Self { + This::expr_from_iter(iter) + } +} diff --git a/crates/fayalite/src/expr/target.rs b/crates/fayalite/src/expr/target.rs index 0f85f62..c8c55e9 100644 --- 
a/crates/fayalite/src/expr/target.rs +++ b/crates/fayalite/src/expr/target.rs @@ -3,18 +3,19 @@ use crate::{ array::Array, bundle::{Bundle, BundleField}, - expr::Flow, + expr::{Expr, Flow, ToExpr}, intern::{Intern, Interned}, memory::{DynPortType, MemPort}, module::{Instance, ModuleIO, TargetName}, reg::Reg, + reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset}, source_location::SourceLocation, ty::{CanonicalType, Type}, wire::Wire, }; use std::fmt; -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct TargetPathBundleField { pub name: Interned, } @@ -25,7 +26,7 @@ impl fmt::Display for TargetPathBundleField { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct TargetPathArrayElement { pub index: usize, } @@ -36,7 +37,7 @@ impl fmt::Display for TargetPathArrayElement { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct TargetPathDynArrayElement {} impl fmt::Display for TargetPathDynArrayElement { @@ -45,7 +46,7 @@ impl fmt::Display for TargetPathDynArrayElement { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum TargetPathElement { BundleField(TargetPathBundleField), ArrayElement(TargetPathArrayElement), @@ -127,6 +128,7 @@ macro_rules! impl_target_base { $(#[$enum_meta:meta])* $enum_vis:vis enum $TargetBase:ident { $( + $(#[from = $from:ident])? #[is = $is_fn:ident] #[to = $to_fn:ident] $(#[$variant_meta:meta])* @@ -150,19 +152,19 @@ macro_rules! impl_target_base { } } - $( + $($( impl From<$VariantTy> for $TargetBase { - fn from(value: $VariantTy) -> Self { + fn $from(value: $VariantTy) -> Self { Self::$Variant(value) } } impl From<$VariantTy> for Target { - fn from(value: $VariantTy) -> Self { + fn $from(value: $VariantTy) -> Self { $TargetBase::$Variant(value).into() } } - )* + )*)? impl $TargetBase { $( @@ -193,30 +195,79 @@ macro_rules! impl_target_base { } } } + + impl ToExpr for $TargetBase { + type Type = CanonicalType; + + fn to_expr(&self) -> Expr { + match self { + $(Self::$Variant(v) => Expr::canonical(v.to_expr()),)* + } + } + } }; } impl_target_base! 
{ - #[derive(Clone, PartialEq, Eq, Hash)] + #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub enum TargetBase { + #[from = from] #[is = is_module_io] #[to = module_io] ModuleIO(ModuleIO), + #[from = from] #[is = is_mem_port] #[to = mem_port] MemPort(MemPort), #[is = is_reg] #[to = reg] - Reg(Reg), + Reg(Reg), + #[is = is_reg_sync] + #[to = reg_sync] + RegSync(Reg), + #[is = is_reg_async] + #[to = reg_async] + RegAsync(Reg), + #[from = from] #[is = is_wire] #[to = wire] Wire(Wire), + #[from = from] #[is = is_instance] #[to = instance] Instance(Instance), } } +impl From> for TargetBase { + fn from(value: Reg) -> Self { + struct Dispatch; + impl ResetTypeDispatch for Dispatch { + type Input = Reg; + type Output = TargetBase; + + fn reset(self, input: Self::Input) -> Self::Output { + TargetBase::Reg(input) + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + TargetBase::RegSync(input) + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + TargetBase::RegAsync(input) + } + } + R::dispatch(value, Dispatch) + } +} + +impl From> for Target { + fn from(value: Reg) -> Self { + TargetBase::from(value).into() + } +} + impl fmt::Display for TargetBase { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.target_name()) @@ -229,6 +280,8 @@ impl TargetBase { TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None), TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())), TargetBase::Reg(v) => TargetName(v.scoped_name(), None), + TargetBase::RegSync(v) => TargetName(v.scoped_name(), None), + TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None), TargetBase::Wire(v) => TargetName(v.scoped_name(), None), TargetBase::Instance(v) => TargetName(v.scoped_name(), None), } @@ -238,6 +291,8 @@ impl TargetBase { TargetBase::ModuleIO(v) => v.ty(), TargetBase::MemPort(v) => v.ty().canonical(), TargetBase::Reg(v) => v.ty(), + TargetBase::RegSync(v) => v.ty(), + TargetBase::RegAsync(v) => v.ty(), TargetBase::Wire(v) => v.ty(), TargetBase::Instance(v) => v.ty().canonical(), } @@ -313,7 +368,7 @@ impl TargetChild { } } -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash)] pub enum Target { Base(Interned), Child(TargetChild), diff --git a/crates/fayalite/src/firrtl.rs b/crates/fayalite/src/firrtl.rs index bc75ccc..cca0d82 100644 --- a/crates/fayalite/src/firrtl.rs +++ b/crates/fayalite/src/firrtl.rs @@ -4,60 +4,99 @@ use crate::{ annotations::{ Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation, - DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, + DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation, }, array::Array, + build::{ToArgs, WriteArgs}, bundle::{Bundle, BundleField, BundleType}, clock::Clock, enum_::{Enum, EnumType, EnumVariant}, expr::{ + CastBitsTo, Expr, ExprEnum, ops::{self, VariantAccess}, target::{ Target, TargetBase, TargetPathArrayElement, TargetPathBundleField, TargetPathElement, }, - Expr, ExprEnum, }, formal::FormalKind, int::{Bool, DynSize, IntType, SIntValue, UInt, UIntValue}, intern::{Intern, Interned}, memory::{Mem, PortKind, PortName, ReadUnderWrite}, module::{ + AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter, + ExternModuleParameterValue, Module, ModuleBody, ModuleIO, NameId, NameOptId, + NormalModuleBody, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, + StmtMatch, StmtReg, StmtWire, transform::{ - simplify_enums::{simplify_enums, SimplifyEnumsError, 
SimplifyEnumsKind}, + simplify_enums::{SimplifyEnumsError, SimplifyEnumsKind, simplify_enums}, simplify_memories::simplify_memories, }, - AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter, - ExternModuleParameterValue, Module, ModuleBody, NameOptId, NormalModuleBody, Stmt, - StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg, - StmtWire, }, - reset::{AsyncReset, Reset, SyncReset}, + reset::{AsyncReset, Reset, ResetType, SyncReset}, source_location::SourceLocation, - ty::{CanonicalType, Type}, + ty::{CanonicalType, OpaqueSimValueSize, Type}, util::{ - const_str_array_is_strictly_ascending, BitSliceWriteWithBase, DebugAsRawString, - GenericConstBool, + BitSliceWriteWithBase, DebugAsRawString, GenericConstBool, HashMap, HashSet, + const_str_array_is_strictly_ascending, }, + vendor::xilinx::XilinxAnnotation, }; use bitvec::slice::BitSlice; use clap::value_parser; -use hashbrown::{HashMap, HashSet}; use num_traits::Signed; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use std::{ cell::{Cell, RefCell}, cmp::Ordering, collections::{BTreeMap, VecDeque}, error::Error, + ffi::OsString, fmt::{self, Write}, fs, hash::Hash, io, - ops::Range, + ops::{ControlFlow, Range}, path::{Path, PathBuf}, rc::Rc, }; +#[derive(Clone, Debug)] +#[non_exhaustive] +enum FirrtlError { + SimOnlyValuesAreNotPermitted, +} + +impl fmt::Display for FirrtlError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + FirrtlError::SimOnlyValuesAreNotPermitted => { + f.write_str("`SimOnlyValue`s are not permitted") + } + } + } +} + +impl std::error::Error for FirrtlError {} + +enum FirrtlOrWrappedError { + FirrtlError(FirrtlError), + WrappedError(WrappedError), +} + +impl From for FirrtlOrWrappedError { + fn from(value: FirrtlError) -> Self { + Self::FirrtlError(value) + } +} + +impl From for FirrtlOrWrappedError { + fn from(value: WrappedError) -> Self { + Self::WrappedError(value) + } +} + +type Result = std::result::Result; + struct EscapedString<'a> { value: &'a str, raw: bool, @@ -321,20 +360,20 @@ impl DefinitionsMap { map: Default::default(), } } - fn get_or_make<'a>( + fn get_or_make<'a, E>( &'a self, key: K, - make: impl FnOnce(&K, &'a RcDefinitions) -> (Ident, V), - ) -> (Ident, V) + make: impl FnOnce(&K, &'a RcDefinitions) -> Result<(Ident, V), E>, + ) -> Result<(Ident, V), E> where K: Hash + Eq, V: Clone, { if let Some(retval) = self.map.borrow().get(&key) { - return retval.clone(); + return Ok(retval.clone()); } - let value = make(&key, &self.definitions); - self.map.borrow_mut().entry(key).or_insert(value).clone() + let value = make(&key, &self.definitions)?; + Ok(self.map.borrow_mut().entry(key).or_insert(value).clone()) } } @@ -368,10 +407,10 @@ impl TypeState { self.next_type_name.set(id + 1); Ident(Intern::intern_owned(format!("Ty{id}"))) } - fn get_bundle_field(&mut self, ty: Bundle, name: Interned) -> Ident { - self.bundle_ns(ty).borrow_mut().get(name) + fn get_bundle_field(&mut self, ty: Bundle, name: Interned) -> Result { + Ok(self.bundle_ns(ty)?.borrow_mut().get(name)) } - fn bundle_def(&self, ty: Bundle) -> (Ident, Rc>) { + fn bundle_def(&self, ty: Bundle) -> Result<(Ident, Rc>), FirrtlError> { self.bundle_defs.get_or_make(ty, |&ty, definitions| { let mut ns = Namespace::default(); let mut body = String::new(); @@ -384,21 +423,21 @@ impl TypeState { body.push_str("flip "); } write!(body, "{}: ", ns.get(name)).unwrap(); - body.push_str(&self.ty(ty)); + body.push_str(&self.ty(ty)?); } body.push('}'); let name = 
self.make_type_name(); definitions.add_definition_line(format_args!("type {name} = {body}")); - (name, Rc::new(RefCell::new(ns))) + Ok((name, Rc::new(RefCell::new(ns)))) }) } - fn bundle_ty(&self, ty: Bundle) -> Ident { - self.bundle_def(ty).0 + fn bundle_ty(&self, ty: Bundle) -> Result { + Ok(self.bundle_def(ty)?.0) } - fn bundle_ns(&self, ty: Bundle) -> Rc> { - self.bundle_def(ty).1 + fn bundle_ns(&self, ty: Bundle) -> Result>, FirrtlError> { + Ok(self.bundle_def(ty)?.1) } - fn enum_def(&self, ty: Enum) -> (Ident, Rc) { + fn enum_def(&self, ty: Enum) -> Result<(Ident, Rc), FirrtlError> { self.enum_defs.get_or_make(ty, |&ty, definitions| { let mut variants = Namespace::default(); let mut body = String::new(); @@ -410,33 +449,33 @@ impl TypeState { write!(body, "{}", variants.get(name)).unwrap(); if let Some(ty) = ty { body.push_str(": "); - body.push_str(&self.ty(ty)); + body.push_str(&self.ty(ty)?); } } body.push_str("|}"); let name = self.make_type_name(); definitions.add_definition_line(format_args!("type {name} = {body}")); - ( + Ok(( name, Rc::new(EnumDef { variants: RefCell::new(variants), body, }), - ) + )) }) } - fn enum_ty(&self, ty: Enum) -> Ident { - self.enum_def(ty).0 + fn enum_ty(&self, ty: Enum) -> Result { + Ok(self.enum_def(ty)?.0) } - fn get_enum_variant(&mut self, ty: Enum, name: Interned) -> Ident { - self.enum_def(ty).1.variants.borrow_mut().get(name) + fn get_enum_variant(&mut self, ty: Enum, name: Interned) -> Result { + Ok(self.enum_def(ty)?.1.variants.borrow_mut().get(name)) } - fn ty(&self, ty: T) -> String { - match ty.canonical() { - CanonicalType::Bundle(ty) => self.bundle_ty(ty).to_string(), - CanonicalType::Enum(ty) => self.enum_ty(ty).to_string(), + fn ty(&self, ty: T) -> Result { + Ok(match ty.canonical() { + CanonicalType::Bundle(ty) => self.bundle_ty(ty)?.to_string(), + CanonicalType::Enum(ty) => self.enum_ty(ty)?.to_string(), CanonicalType::Array(ty) => { - let mut retval = self.ty(ty.element()); + let mut retval = self.ty(ty.element())?; write!(retval, "[{}]", ty.len()).unwrap(); retval } @@ -447,7 +486,11 @@ impl TypeState { CanonicalType::AsyncReset(AsyncReset {}) => "AsyncReset".into(), CanonicalType::SyncReset(SyncReset {}) => "UInt<1>".into(), CanonicalType::Reset(Reset {}) => "Reset".into(), - } + CanonicalType::PhantomConst(_) => "{}".into(), + CanonicalType::DynSimOnly(_) => { + return Err(FirrtlError::SimOnlyValuesAreNotPermitted); + } + }) } } @@ -483,6 +526,7 @@ trait WrappedFileBackendTrait { contents: String, ) -> Result<(), WrappedError>; fn simplify_enums_error(&mut self, error: SimplifyEnumsError) -> WrappedError; + fn firrtl_error(&mut self, error: FirrtlError) -> WrappedError; } struct WrappedFileBackend { @@ -545,6 +589,11 @@ impl WrappedFileBackendTrait for WrappedFileBackend { self.error = Err(error.into()); WrappedError } + + fn firrtl_error(&mut self, error: FirrtlError) -> WrappedError { + self.error = Err(self.file_backend.custom_error(Box::new(error))); + WrappedError + } } #[derive(Clone)] @@ -747,7 +796,10 @@ impl<'a> Exporter<'a> { } fn run(&mut self, top_module: Interned>) -> Result<(), WrappedError> { let mut contents = self.version(); - let circuit = self.circuit(top_module)?; + let circuit = self.circuit(top_module).map_err(|e| match e { + FirrtlOrWrappedError::FirrtlError(e) => self.file_backend.firrtl_error(e), + FirrtlOrWrappedError::WrappedError(e) => e, + })?; contents.push_str(&circuit); self.file_backend .write_top_fir_file(self.circuit_name.to_string(), contents) @@ -755,7 +807,7 @@ impl<'a> Exporter<'a> { fn 
version(&mut self) -> String { "FIRRTL version 3.2.0\n".to_string() } - fn circuit(&mut self, top_module: Interned>) -> Result { + fn circuit(&mut self, top_module: Interned>) -> Result { let indent = self.indent; self.add_module(top_module); let circuit_indent = indent.push(); @@ -785,9 +837,9 @@ impl<'a> Exporter<'a> { enum_ty: Enum, variant_name: Interned, variant_expr: Option, - ) -> String { - let (_, enum_def) = self.type_state.enum_def(enum_ty); - let variant_ident = self.type_state.get_enum_variant(enum_ty, variant_name); + ) -> Result { + let (_, enum_def) = self.type_state.enum_def(enum_ty)?; + let variant_ident = self.type_state.get_enum_variant(enum_ty, variant_name)?; let mut retval = enum_def.body.clone(); write!(retval, "({variant_ident}").unwrap(); if let Some(variant_expr) = variant_expr { @@ -795,7 +847,7 @@ impl<'a> Exporter<'a> { retval.push_str(&variant_expr); } retval.push(')'); - retval + Ok(retval) } fn uint_literal(&mut self, value: &UIntValue) -> String { format!( @@ -824,32 +876,32 @@ impl<'a> Exporter<'a> { to_ty: ToTy, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let from_ty = Expr::ty(value); - let mut value = self.expr(Expr::canonical(value), definitions, const_ty); + let mut value = self.expr(Expr::canonical(value), definitions, const_ty)?; if from_ty.width().checked_add(1) == Some(to_ty.width()) && !FromTy::Signed::VALUE && ToTy::Signed::VALUE { - format!("cvt({value})") + Ok(format!("cvt({value})")) } else if from_ty.width() <= to_ty.width() { // must pad before changing type to preserve value modulo 2^to_ty.width if from_ty.width() < to_ty.width() { value = format!("pad({value}, {})", to_ty.width()); } if FromTy::Signed::VALUE == ToTy::Signed::VALUE { - value + Ok(value) } else if ToTy::Signed::VALUE { - format!("asSInt({value})") + Ok(format!("asSInt({value})")) } else { - format!("asUInt({value})") + Ok(format!("asUInt({value})")) } } else { value = format!("tail({value}, {})", from_ty.width() - to_ty.width()); if ToTy::Signed::VALUE { - format!("asSInt({value})") + Ok(format!("asSInt({value})")) } else { - value + Ok(value) } } } @@ -859,12 +911,12 @@ impl<'a> Exporter<'a> { value: Expr, definitions: &RcDefinitions, const_ty: bool, - ) -> String { - let value = self.expr(Expr::canonical(value), definitions, const_ty); + ) -> Result { + let value = self.expr(Expr::canonical(value), definitions, const_ty)?; if let Some(firrtl_cast_fn) = firrtl_cast_fn { - format!("{firrtl_cast_fn}({value})") + Ok(format!("{firrtl_cast_fn}({value})")) } else { - value + Ok(value) } } fn slice( @@ -873,17 +925,17 @@ impl<'a> Exporter<'a> { range: Range, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let base_width = Expr::ty(base).width(); - let base = self.expr(Expr::canonical(base), definitions, const_ty); + let base = self.expr(Expr::canonical(base), definitions, const_ty)?; if range.is_empty() { - format!("tail({base}, {base_width})") + Ok(format!("tail({base}, {base_width})")) } else { - format!( + Ok(format!( "bits({base}, {hi}, {lo})", hi = range.end - 1, lo = range.start, - ) + )) } } fn array_literal_expr( @@ -891,29 +943,29 @@ impl<'a> Exporter<'a> { expr: ops::ArrayLiteral, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let ident = self.module.ns.make_new("_array_literal_expr"); - let ty_str = self.type_state.ty(expr.ty()); + let ty_str = self.type_state.ty(expr.ty())?; let const_ = if const_ty { "const " } else { "" }; definitions.add_definition_line(format_args!("wire 
{ident}: {const_}{ty_str}")); for (index, element) in expr.element_values().into_iter().enumerate() { - let element = self.expr(Expr::canonical(element), definitions, const_ty); + let element = self.expr(Expr::canonical(element), definitions, const_ty)?; definitions.add_definition_line(format_args!("connect {ident}[{index}], {element}")); } if expr.element_values().is_empty() { definitions.add_definition_line(format_args!("invalidate {ident}")); } - ident.to_string() + Ok(ident.to_string()) } fn bundle_literal_expr( &mut self, expr: ops::BundleLiteral, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let ident = self.module.ns.make_new("_bundle_literal_expr"); let ty = expr.ty(); - let (ty_ident, bundle_ns) = self.type_state.bundle_def(ty); + let (ty_ident, bundle_ns) = self.type_state.bundle_def(ty)?; let const_ = if const_ty { "const " } else { "" }; definitions.add_definition_line(format_args!("wire {ident}: {const_}{ty_ident}")); for ( @@ -925,39 +977,43 @@ impl<'a> Exporter<'a> { }, ) in expr.field_values().into_iter().zip(ty.fields()) { - debug_assert!(!flipped, "can't have bundle literal with flipped field -- this should have been caught in BundleLiteral::new_unchecked"); + debug_assert!( + !flipped, + "can't have bundle literal with flipped field -- this should have been caught in BundleLiteral::new_unchecked" + ); let name = bundle_ns.borrow_mut().get(name); - let field_value = self.expr(Expr::canonical(field_value), definitions, const_ty); + let field_value = self.expr(Expr::canonical(field_value), definitions, const_ty)?; definitions.add_definition_line(format_args!("connect {ident}.{name}, {field_value}")); } if ty.fields().is_empty() { definitions.add_definition_line(format_args!("invalidate {ident}")); } - ident.to_string() + Ok(ident.to_string()) } fn uninit_expr( &mut self, expr: ops::Uninit, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let ident = self.module.ns.make_new("_uninit_expr"); let ty = expr.ty(); - let ty_ident = self.type_state.ty(ty); + let ty_ident = self.type_state.ty(ty)?; let const_ = if const_ty { "const " } else { "" }; definitions.add_definition_line(format_args!("wire {ident}: {const_}{ty_ident}")); definitions.add_definition_line(format_args!("invalidate {ident}")); - ident.to_string() + Ok(ident.to_string()) } fn enum_literal_expr( &mut self, expr: ops::EnumLiteral, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { let variant_expr = expr .variant_value() - .map(|variant_value| self.expr(variant_value, definitions, const_ty)); + .map(|variant_value| self.expr(variant_value, definitions, const_ty)) + .transpose()?; self.enum_expr_impl(expr.ty(), expr.variant_name(), variant_expr) } fn expr_cast_bundle_to_bits( @@ -966,12 +1022,12 @@ impl<'a> Exporter<'a> { ty: Bundle, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { if ty.fields().is_empty() { - return "UInt<0>(0)".into(); + return Ok("UInt<0>(0)".into()); } if let [field] = *ty.fields() { - let field_ident = self.type_state.get_bundle_field(ty, field.name); + let field_ident = self.type_state.get_bundle_field(ty, field.name)?; return self.expr_cast_to_bits( format!("{value_str}.{field_ident}"), field.ty, @@ -990,23 +1046,23 @@ impl<'a> Exporter<'a> { ty: UInt[field_ty.bit_width()].canonical(), }, ))); - let (flattened_ty_ident, _) = self.type_state.bundle_def(flattened_bundle_ty); + let (flattened_ty_ident, _) = self.type_state.bundle_def(flattened_bundle_ty)?; let ident = 
self.module.ns.make_new("_cast_bundle_to_bits_expr"); definitions.add_definition_line(format_args!( "{extra_indent}wire {ident}: {flattened_ty_ident}" )); let mut cat_expr = None; for field in ty.fields() { - let field_ident = self.type_state.get_bundle_field(ty, field.name); + let field_ident = self.type_state.get_bundle_field(ty, field.name)?; let flattened_field_ident = self .type_state - .get_bundle_field(flattened_bundle_ty, field.name); + .get_bundle_field(flattened_bundle_ty, field.name)?; let field_bits = self.expr_cast_to_bits( format!("{value_str}.{field_ident}"), field.ty, definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {ident}.{flattened_field_ident}, {field_bits}" )); @@ -1023,7 +1079,7 @@ impl<'a> Exporter<'a> { )); let cat_expr = cat_expr.expect("bundle already checked to have fields"); definitions.add_definition_line(format_args!("{extra_indent}connect {retval}, {cat_expr}")); - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_enum_to_bits( &mut self, @@ -1031,9 +1087,9 @@ impl<'a> Exporter<'a> { ty: Enum, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { if ty.variants().is_empty() { - return "UInt<0>(0)".into(); + return Ok("UInt<0>(0)".into()); } let retval = self.module.ns.make_new("_cast_enum_to_bits_expr"); definitions.add_definition_line(format_args!( @@ -1050,7 +1106,7 @@ impl<'a> Exporter<'a> { .make_new(&format!("_cast_enum_to_bits_expr_{}", variant.name)); definitions.add_definition_line(format_args!( "{extra_indent}{}({variant_value}):", - self.type_state.get_enum_variant(ty, variant.name), + self.type_state.get_enum_variant(ty, variant.name)?, )); let _match_arm_indent = extra_indent.push(); let variant_bits = self.expr_cast_to_bits( @@ -1058,7 +1114,7 @@ impl<'a> Exporter<'a> { variant_ty, definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {retval}, pad(cat({variant_bits}, UInt<{}>({variant_index})), {})", ty.discriminant_bit_width(), @@ -1067,7 +1123,7 @@ impl<'a> Exporter<'a> { } else { definitions.add_definition_line(format_args!( "{extra_indent}{}:", - self.type_state.get_enum_variant(ty, variant.name), + self.type_state.get_enum_variant(ty, variant.name)?, )); let _match_arm_indent = extra_indent.push(); definitions.add_definition_line(format_args!( @@ -1076,7 +1132,7 @@ impl<'a> Exporter<'a> { )); } } - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_array_to_bits( &mut self, @@ -1084,9 +1140,9 @@ impl<'a> Exporter<'a> { ty: Array, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { if ty.is_empty() { - return "UInt<0>(0)".into(); + return Ok("UInt<0>(0)".into()); } if ty.len() == 1 { return self.expr_cast_to_bits( @@ -1109,7 +1165,7 @@ impl<'a> Exporter<'a> { ty.element(), definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {ident}[{index}], {element_bits}" )); @@ -1126,7 +1182,7 @@ impl<'a> Exporter<'a> { )); let cat_expr = cat_expr.expect("array already checked to have elements"); definitions.add_definition_line(format_args!("{extra_indent}connect {retval}, {cat_expr}")); - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_to_bits( &mut self, @@ -1134,7 +1190,7 @@ impl<'a> Exporter<'a> { ty: CanonicalType, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { match ty { CanonicalType::Bundle(ty) => { self.expr_cast_bundle_to_bits(value_str, ty, 
definitions, extra_indent) @@ -1146,12 +1202,14 @@ impl<'a> Exporter<'a> { self.expr_cast_array_to_bits(value_str, ty, definitions, extra_indent) } CanonicalType::UInt(_) | CanonicalType::SyncReset(_) | CanonicalType::Bool(_) => { - value_str + Ok(value_str) } CanonicalType::SInt(_) | CanonicalType::Clock(_) | CanonicalType::AsyncReset(_) - | CanonicalType::Reset(_) => format!("asUInt({value_str})"), + | CanonicalType::Reset(_) => Ok(format!("asUInt({value_str})")), + CanonicalType::PhantomConst(_) => Ok("UInt<0>(0)".into()), + CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()), } } fn expr_cast_bits_to_bundle( @@ -1160,13 +1218,13 @@ impl<'a> Exporter<'a> { ty: Bundle, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { - let (ty_ident, _) = self.type_state.bundle_def(ty); + ) -> Result { + let (ty_ident, _) = self.type_state.bundle_def(ty)?; let retval = self.module.ns.make_new("_cast_bits_to_bundle_expr"); definitions.add_definition_line(format_args!("{extra_indent}wire {retval}: {ty_ident}")); if ty.fields().is_empty() { definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}")); - return retval.to_string(); + return Ok(retval.to_string()); } let flattened_bundle_ty = Bundle::new(Interned::from_iter(ty.fields().iter().map( |&BundleField { @@ -1179,7 +1237,7 @@ impl<'a> Exporter<'a> { ty: UInt[field_ty.bit_width()].canonical(), }, ))); - let (flattened_ty_ident, _) = self.type_state.bundle_def(flattened_bundle_ty); + let (flattened_ty_ident, _) = self.type_state.bundle_def(flattened_bundle_ty)?; let flattened_ident = self .module .ns @@ -1187,11 +1245,18 @@ impl<'a> Exporter<'a> { definitions.add_definition_line(format_args!( "{extra_indent}wire {flattened_ident}: {flattened_ty_ident}" )); - for (field, field_offset) in ty.fields().into_iter().zip(ty.field_offsets()) { + for ( + field, + OpaqueSimValueSize { + bit_width: field_offset, + sim_only_values_len: _, + }, + ) in ty.fields().into_iter().zip(ty.field_offsets()) + { let flattened_field_ident = self .type_state - .get_bundle_field(flattened_bundle_ty, field.name); - let field_ident = self.type_state.get_bundle_field(ty, field.name); + .get_bundle_field(flattened_bundle_ty, field.name)?; + let field_ident = self.type_state.get_bundle_field(ty, field.name)?; if let Some(field_bit_width_minus_one) = field.ty.bit_width().checked_sub(1usize) { definitions.add_definition_line(format_args!( "{extra_indent}connect {flattened_ident}.{flattened_field_ident}, bits({value_str}, {}, {field_offset})", @@ -1207,12 +1272,12 @@ impl<'a> Exporter<'a> { field.ty, definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {retval}.{field_ident}, {field_value}" )); } - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_bits_to_enum( &mut self, @@ -1220,19 +1285,19 @@ impl<'a> Exporter<'a> { ty: Enum, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { - let (ty_ident, enum_def) = self.type_state.enum_def(ty); + ) -> Result { + let (ty_ident, enum_def) = self.type_state.enum_def(ty)?; let retval = self.module.ns.make_new("_cast_bits_to_enum_expr"); definitions.add_definition_line(format_args!("{extra_indent}wire {retval}: {ty_ident}")); if ty.variants().is_empty() { definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}")); - return retval.to_string(); + return Ok(retval.to_string()); } if let [variant] = *ty.variants() { - let enum_variant = 
self.type_state.get_enum_variant(ty, variant.name); + let enum_variant = self.type_state.get_enum_variant(ty, variant.name)?; if let Some(variant_ty) = variant.ty { let variant_value = - self.expr_cast_bits_to(value_str, variant_ty, definitions, extra_indent); + self.expr_cast_bits_to(value_str, variant_ty, definitions, extra_indent)?; definitions.add_definition_line(format_args!( "{extra_indent}connect {retval}, {}({enum_variant}, {variant_value})", enum_def.body @@ -1243,7 +1308,7 @@ impl<'a> Exporter<'a> { enum_def.body )); } - return retval.to_string(); + return Ok(retval.to_string()); } let discriminant_bit_width = ty.discriminant_bit_width(); let body_bit_width = ty.type_properties().bit_width - discriminant_bit_width; @@ -1260,7 +1325,9 @@ impl<'a> Exporter<'a> { "UInt<0>(0)".into() }; for (variant_index, variant) in ty.variants().into_iter().enumerate() { - let when_cond = format!("eq(UInt<{discriminant_bit_width}>({variant_index}), tail({value_str}, {body_bit_width}))"); + let when_cond = format!( + "eq(UInt<{discriminant_bit_width}>({variant_index}), tail({value_str}, {body_bit_width}))" + ); if variant_index == ty.variants().len() - 1 { definitions.add_definition_line(format_args!("{extra_indent}else:")); } else if variant_index == 0 { @@ -1270,14 +1337,14 @@ impl<'a> Exporter<'a> { .add_definition_line(format_args!("{extra_indent}else when {when_cond}:")); } let when_pushed_indent = extra_indent.push(); - let enum_variant = self.type_state.get_enum_variant(ty, variant.name); + let enum_variant = self.type_state.get_enum_variant(ty, variant.name)?; if let Some(variant_ty) = variant.ty { let variant_value = self.expr_cast_bits_to( body_value.clone(), variant_ty, definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {retval}, {}({enum_variant}, {variant_value})", enum_def.body @@ -1290,7 +1357,7 @@ impl<'a> Exporter<'a> { } drop(when_pushed_indent); } - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_bits_to_array( &mut self, @@ -1298,14 +1365,14 @@ impl<'a> Exporter<'a> { ty: Array, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { let retval = self.module.ns.make_new("_cast_bits_to_array_expr"); - let array_ty = self.type_state.ty(ty); + let array_ty = self.type_state.ty(ty)?; definitions.add_definition_line(format_args!("{extra_indent}wire {retval}: {array_ty}")); let element_bit_width = ty.element().bit_width(); if ty.is_empty() || element_bit_width == 0 { definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}")); - return retval.to_string(); + return Ok(retval.to_string()); } let flattened_ident = self .module @@ -1326,12 +1393,12 @@ impl<'a> Exporter<'a> { ty.element(), definitions, extra_indent, - ); + )?; definitions.add_definition_line(format_args!( "{extra_indent}connect {retval}[{index}], {element_value}" )); } - retval.to_string() + Ok(retval.to_string()) } fn expr_cast_bits_to( &mut self, @@ -1339,7 +1406,7 @@ impl<'a> Exporter<'a> { ty: CanonicalType, definitions: &RcDefinitions, extra_indent: Indent<'_>, - ) -> String { + ) -> Result { match ty { CanonicalType::Bundle(ty) => { self.expr_cast_bits_to_bundle(value_str, ty, definitions, extra_indent) @@ -1350,13 +1417,20 @@ impl<'a> Exporter<'a> { CanonicalType::Array(ty) => { self.expr_cast_bits_to_array(value_str, ty, definitions, extra_indent) } - CanonicalType::UInt(_) => value_str, - CanonicalType::SInt(_) => format!("asSInt({value_str})"), - CanonicalType::Bool(_) => value_str, - 
CanonicalType::Clock(_) => format!("asClock({value_str})"), - CanonicalType::AsyncReset(_) => format!("asAsyncReset({value_str})"), - CanonicalType::SyncReset(_) => value_str, + CanonicalType::UInt(_) => Ok(value_str), + CanonicalType::SInt(_) => Ok(format!("asSInt({value_str})")), + CanonicalType::Bool(_) => Ok(value_str), + CanonicalType::Clock(_) => Ok(format!("asClock({value_str})")), + CanonicalType::AsyncReset(_) => Ok(format!("asAsyncReset({value_str})")), + CanonicalType::SyncReset(_) => Ok(value_str), CanonicalType::Reset(_) => unreachable!("Reset is not bit castable to"), + CanonicalType::PhantomConst(_) => { + let retval = self.module.ns.make_new("_cast_bits_to_phantom_const_expr"); + definitions.add_definition_line(format_args!("{extra_indent}wire {retval}: {{}}")); + definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}")); + return Ok(retval.to_string()); + } + CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()), } } fn expr_unary( @@ -1365,11 +1439,11 @@ impl<'a> Exporter<'a> { arg: Expr, definitions: &RcDefinitions, const_ty: bool, - ) -> String { - format!( + ) -> Result { + Ok(format!( "{func}({arg})", - arg = self.expr(Expr::canonical(arg), definitions, const_ty) - ) + arg = self.expr(Expr::canonical(arg), definitions, const_ty)?, + )) } fn expr_binary( &mut self, @@ -1378,23 +1452,28 @@ impl<'a> Exporter<'a> { rhs: Expr, definitions: &RcDefinitions, const_ty: bool, - ) -> String { - format!( + ) -> Result { + Ok(format!( "{func}({lhs}, {rhs})", - lhs = self.expr(Expr::canonical(lhs), definitions, const_ty), - rhs = self.expr(Expr::canonical(rhs), definitions, const_ty) - ) + lhs = self.expr(Expr::canonical(lhs), definitions, const_ty)?, + rhs = self.expr(Expr::canonical(rhs), definitions, const_ty)?, + )) } fn expr( &mut self, expr: Expr, definitions: &RcDefinitions, const_ty: bool, - ) -> String { + ) -> Result { match *Expr::expr_enum(expr) { - ExprEnum::UIntLiteral(literal) => self.uint_literal(&literal), - ExprEnum::SIntLiteral(literal) => self.sint_literal(&literal), - ExprEnum::BoolLiteral(literal) => self.bool_literal(literal), + ExprEnum::UIntLiteral(literal) => Ok(self.uint_literal(&literal)), + ExprEnum::SIntLiteral(literal) => Ok(self.sint_literal(&literal)), + ExprEnum::BoolLiteral(literal) => Ok(self.bool_literal(literal)), + ExprEnum::PhantomConst(ty) => self.expr( + UInt[0].zero().cast_bits_to(ty.canonical()), + definitions, + const_ty, + ), ExprEnum::ArrayLiteral(array_literal) => { self.array_literal_expr(array_literal, definitions, const_ty) } @@ -1478,34 +1557,26 @@ impl<'a> Exporter<'a> { ExprEnum::DynShrS(expr) => { self.expr_binary("dshr", expr.lhs(), expr.rhs(), definitions, const_ty) } - ExprEnum::FixedShlU(expr) => { - format!( - "shl({lhs}, {rhs})", - lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty), - rhs = expr.rhs(), - ) - } - ExprEnum::FixedShlS(expr) => { - format!( - "shl({lhs}, {rhs})", - lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty), - rhs = expr.rhs(), - ) - } - ExprEnum::FixedShrU(expr) => { - format!( - "shr({lhs}, {rhs})", - lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty), - rhs = expr.rhs(), - ) - } - ExprEnum::FixedShrS(expr) => { - format!( - "shr({lhs}, {rhs})", - lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty), - rhs = expr.rhs(), - ) - } + ExprEnum::FixedShlU(expr) => Ok(format!( + "shl({lhs}, {rhs})", + lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty)?, + rhs = 
expr.rhs(), + )), + ExprEnum::FixedShlS(expr) => Ok(format!( + "shl({lhs}, {rhs})", + lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty)?, + rhs = expr.rhs(), + )), + ExprEnum::FixedShrU(expr) => Ok(format!( + "shr({lhs}, {rhs})", + lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty)?, + rhs = expr.rhs(), + )), + ExprEnum::FixedShrS(expr) => Ok(format!( + "shr({lhs}, {rhs})", + lhs = self.expr(Expr::canonical(expr.lhs()), definitions, const_ty)?, + rhs = expr.rhs(), + )), ExprEnum::CmpLtU(expr) => { self.expr_binary("lt", expr.lhs(), expr.rhs(), definitions, const_ty) } @@ -1579,7 +1650,7 @@ impl<'a> Exporter<'a> { self.slice(expr.base(), expr.range(), definitions, const_ty) } ExprEnum::CastToBits(expr) => { - let value_str = self.expr(expr.arg(), definitions, const_ty); + let value_str = self.expr(expr.arg(), definitions, const_ty)?; self.expr_cast_to_bits( value_str, Expr::ty(expr.arg()), @@ -1591,7 +1662,7 @@ impl<'a> Exporter<'a> { ) } ExprEnum::CastBitsTo(expr) => { - let value_str = self.expr(Expr::canonical(expr.arg()), definitions, const_ty); + let value_str = self.expr(Expr::canonical(expr.arg()), definitions, const_ty)?; self.expr_cast_bits_to( value_str, expr.ty(), @@ -1702,48 +1773,57 @@ impl<'a> Exporter<'a> { self.expr_unary("xorr", expr.arg(), definitions, const_ty) } ExprEnum::FieldAccess(expr) => { - let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty); + let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty)?; let name = self .type_state - .get_bundle_field(Expr::ty(expr.base()), expr.field_name()); + .get_bundle_field(Expr::ty(expr.base()), expr.field_name())?; write!(out, ".{name}").unwrap(); - out + Ok(out) } - ExprEnum::VariantAccess(variant_access) => self + ExprEnum::VariantAccess(variant_access) => Ok(self .module .match_arm_values .get(&variant_access) .expect("VariantAccess must be in its corresponding match arm") - .to_string(), + .to_string()), ExprEnum::ArrayIndex(expr) => { - let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty); + let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty)?; write!(out, "[{}]", expr.element_index()).unwrap(); - out + Ok(out) } ExprEnum::DynArrayIndex(expr) => { - let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty); - let index = self.expr(Expr::canonical(expr.element_index()), definitions, const_ty); + let mut out = self.expr(Expr::canonical(expr.base()), definitions, const_ty)?; + let index = + self.expr(Expr::canonical(expr.element_index()), definitions, const_ty)?; write!(out, "[{index}]").unwrap(); - out + Ok(out) } - ExprEnum::ModuleIO(expr) => self.module.ns.get(expr.name_id()).to_string(), + ExprEnum::ModuleIO(expr) => Ok(self.module.ns.get(expr.name_id()).to_string()), ExprEnum::Instance(expr) => { assert!(!const_ty, "not a constant"); - self.module.ns.get(expr.scoped_name().1).to_string() + Ok(self.module.ns.get(expr.scoped_name().1).to_string()) } ExprEnum::Wire(expr) => { assert!(!const_ty, "not a constant"); - self.module.ns.get(expr.scoped_name().1).to_string() + Ok(self.module.ns.get(expr.scoped_name().1).to_string()) } ExprEnum::Reg(expr) => { assert!(!const_ty, "not a constant"); - self.module.ns.get(expr.scoped_name().1).to_string() + Ok(self.module.ns.get(expr.scoped_name().1).to_string()) + } + ExprEnum::RegSync(expr) => { + assert!(!const_ty, "not a constant"); + Ok(self.module.ns.get(expr.scoped_name().1).to_string()) + } + ExprEnum::RegAsync(expr) => { + 
assert!(!const_ty, "not a constant"); + Ok(self.module.ns.get(expr.scoped_name().1).to_string()) } ExprEnum::MemPort(expr) => { assert!(!const_ty, "not a constant"); let mem_name = self.module.ns.get(expr.mem_name().1); let port_name = Ident::from(expr.port_name()); - format!("{mem_name}.{port_name}") + Ok(format!("{mem_name}.{port_name}")) } } } @@ -1753,7 +1833,7 @@ impl<'a> Exporter<'a> { memory_name: Ident, array_type: Array, initial_value: Interned, - ) -> Result<(), WrappedError> { + ) -> Result<()> { assert_eq!( initial_value.len(), array_type.type_properties().bit_width, @@ -1803,7 +1883,11 @@ impl<'a> Exporter<'a> { } fn annotation(&mut self, path: AnnotationTargetPath, annotation: &Annotation) { let data = match annotation { - Annotation::DontTouch(DontTouchAnnotation {}) => AnnotationData::DontTouch, + Annotation::DontTouch(DontTouchAnnotation {}) => { + // TODO: error if the annotated thing was renamed because of a naming conflict, + // unless Target::base() is one of the ports of the top-level module since that's handled by ScalarizedModuleABI + AnnotationData::DontTouch + } Annotation::SVAttribute(SVAttributeAnnotation { text }) => { AnnotationData::AttributeAnnotation { description: *text } } @@ -1826,6 +1910,9 @@ impl<'a> Exporter<'a> { class: str::to_string(class), additional_fields: (*additional_fields).into(), }, + Annotation::Xilinx(XilinxAnnotation::XdcLocation(_)) + | Annotation::Xilinx(XilinxAnnotation::XdcIOStandard(_)) + | Annotation::Xilinx(XilinxAnnotation::XdcCreateClock(_)) => return, }; self.annotations.push(FirrtlAnnotation { data, @@ -1835,7 +1922,7 @@ impl<'a> Exporter<'a> { }, }) } - fn annotation_target_ref(&mut self, target: Interned) -> AnnotationTargetRef { + fn annotation_target_ref(&mut self, target: Interned) -> Result { match *target { Target::Base(base) => { let mut segments = vec![]; @@ -1848,20 +1935,22 @@ impl<'a> Exporter<'a> { self.module.ns.get(v.mem_name().1) } TargetBase::Reg(v) => self.module.ns.get(v.name_id()), + TargetBase::RegSync(v) => self.module.ns.get(v.name_id()), + TargetBase::RegAsync(v) => self.module.ns.get(v.name_id()), TargetBase::Wire(v) => self.module.ns.get(v.name_id()), TargetBase::Instance(v) => self.module.ns.get(v.name_id()), }; - AnnotationTargetRef { base, segments } + Ok(AnnotationTargetRef { base, segments }) } Target::Child(child) => { - let mut retval = self.annotation_target_ref(child.parent()); + let mut retval = self.annotation_target_ref(child.parent())?; match *child.path_element() { TargetPathElement::BundleField(TargetPathBundleField { name }) => { retval.segments.push(AnnotationTargetRefSegment::Field { name: self.type_state.get_bundle_field( Bundle::from_canonical(child.parent().canonical_ty()), name, - ), + )?, }) } TargetPathElement::ArrayElement(TargetPathArrayElement { index, .. 
}) => retval @@ -1869,7 +1958,7 @@ impl<'a> Exporter<'a> { .push(AnnotationTargetRefSegment::Index { index }), TargetPathElement::DynArrayElement(_) => unreachable!(), } - retval + Ok(retval) } } } @@ -1878,9 +1967,9 @@ impl<'a> Exporter<'a> { base_module: Ident, submodules: Vec, annotations: &[crate::annotations::TargetedAnnotation], - ) { + ) -> Result<()> { for annotation in annotations { - let target_ref = Some(self.annotation_target_ref(annotation.target())); + let target_ref = Some(self.annotation_target_ref(annotation.target())?); self.annotation( AnnotationTargetPath { base_module, @@ -1890,8 +1979,9 @@ impl<'a> Exporter<'a> { annotation.annotation(), ); } + Ok(()) } - fn write_mem(&mut self, module_name: Ident, memory: Mem) -> Result { + fn write_mem(&mut self, module_name: Ident, memory: Mem) -> Result { let indent = self.indent; let name_id = memory.scoped_name().1; let source_location = memory.source_location(); @@ -1915,11 +2005,11 @@ impl<'a> Exporter<'a> { annotation, ); } - self.targeted_annotations(module_name, vec![], &memory.port_annotations()); + self.targeted_annotations(module_name, vec![], &memory.port_annotations())?; if let Some(initial_value) = initial_value { self.write_mem_init(module_name, name, array_type, initial_value)?; } - let data_type = self.type_state.ty(array_type.element()); + let data_type = self.type_state.ty(array_type.element())?; let mut body = String::new(); writeln!( body, @@ -1956,13 +2046,45 @@ impl<'a> Exporter<'a> { drop(memory_indent); Ok(body) } + fn stmt_reg( + &mut self, + stmt_reg: StmtReg, + module_name: Ident, + definitions: &RcDefinitions, + body: &mut String, + ) -> Result<()> { + let StmtReg { annotations, reg } = stmt_reg; + let indent = self.indent; + self.targeted_annotations(module_name, vec![], &annotations)?; + let name = self.module.ns.get(reg.name_id()); + let ty = self.type_state.ty(reg.ty())?; + let clk = self.expr(Expr::canonical(reg.clock_domain().clk), definitions, false)?; + if let Some(init) = reg.init() { + let rst = self.expr(Expr::canonical(reg.clock_domain().rst), definitions, false)?; + let init = self.expr(init, definitions, false)?; + writeln!( + body, + "{indent}regreset {name}: {ty}, {clk}, {rst}, {init}{}", + FileInfo::new(reg.source_location()), + ) + .unwrap(); + } else { + writeln!( + body, + "{indent}reg {name}: {ty}, {clk}{}", + FileInfo::new(reg.source_location()), + ) + .unwrap(); + } + Ok(()) + } fn block( &mut self, module: Interned>, block: Block, _block_indent: &PushIndent<'_>, definitions: Option, - ) -> Result { + ) -> Result { let indent = self.indent; let definitions = definitions.unwrap_or_default(); let mut body = String::new(); @@ -1988,8 +2110,8 @@ impl<'a> Exporter<'a> { ) .unwrap(); } - let lhs = self.expr(lhs, &definitions, false); - let rhs = self.expr(rhs, &definitions, false); + let lhs = self.expr(lhs, &definitions, false)?; + let rhs = self.expr(rhs, &definitions, false)?; writeln!( body, "{indent}connect {lhs}, {rhs}{}", @@ -2005,9 +2127,9 @@ impl<'a> Exporter<'a> { text, source_location, }) => { - let clk = self.expr(Expr::canonical(clk), &definitions, false); - let pred = self.expr(Expr::canonical(pred), &definitions, false); - let en = self.expr(Expr::canonical(en), &definitions, false); + let clk = self.expr(Expr::canonical(clk), &definitions, false)?; + let pred = self.expr(Expr::canonical(pred), &definitions, false)?; + let en = self.expr(Expr::canonical(en), &definitions, false)?; let kind = match kind { FormalKind::Assert => "assert", FormalKind::Assume => "assume", @@ 
-2032,7 +2154,7 @@ impl<'a> Exporter<'a> { let mut when = "when"; let mut pushed_indent; loop { - let cond_str = self.expr(Expr::canonical(cond), &definitions, false); + let cond_str = self.expr(Expr::canonical(cond), &definitions, false)?; writeln!( body, "{indent}{when} {cond_str}:{}", @@ -2074,7 +2196,7 @@ impl<'a> Exporter<'a> { writeln!( body, "{indent}match {}:{}", - self.expr(Expr::canonical(expr), &definitions, false), + self.expr(Expr::canonical(expr), &definitions, false)?, FileInfo::new(source_location), ) .unwrap(); @@ -2086,7 +2208,7 @@ impl<'a> Exporter<'a> { write!( body, "{indent}{}", - self.type_state.get_enum_variant(enum_ty, variant.name), + self.type_state.get_enum_variant(enum_ty, variant.name)?, ) .unwrap(); let variant_access = if variant.ty.is_some() { @@ -2116,9 +2238,9 @@ impl<'a> Exporter<'a> { drop(match_arms_indent); } Stmt::Declaration(StmtDeclaration::Wire(StmtWire { annotations, wire })) => { - self.targeted_annotations(module_name, vec![], &annotations); + self.targeted_annotations(module_name, vec![], &annotations)?; let name = self.module.ns.get(wire.name_id()); - let ty = self.type_state.ty(wire.ty()); + let ty = self.type_state.ty(wire.ty())?; writeln!( body, "{indent}wire {name}: {ty}{}", @@ -2126,36 +2248,20 @@ impl<'a> Exporter<'a> { ) .unwrap(); } - Stmt::Declaration(StmtDeclaration::Reg(StmtReg { annotations, reg })) => { - self.targeted_annotations(module_name, vec![], &annotations); - let name = self.module.ns.get(reg.name_id()); - let ty = self.type_state.ty(reg.ty()); - let clk = - self.expr(Expr::canonical(reg.clock_domain().clk), &definitions, false); - if let Some(init) = reg.init() { - let rst = - self.expr(Expr::canonical(reg.clock_domain().rst), &definitions, false); - let init = self.expr(init, &definitions, false); - writeln!( - body, - "{indent}regreset {name}: {ty}, {clk}, {rst}, {init}{}", - FileInfo::new(reg.source_location()), - ) - .unwrap(); - } else { - writeln!( - body, - "{indent}reg {name}: {ty}, {clk}{}", - FileInfo::new(reg.source_location()), - ) - .unwrap(); - } + Stmt::Declaration(StmtDeclaration::Reg(stmt_reg)) => { + self.stmt_reg(stmt_reg, module_name, &definitions, &mut body)?; + } + Stmt::Declaration(StmtDeclaration::RegSync(stmt_reg)) => { + self.stmt_reg(stmt_reg, module_name, &definitions, &mut body)?; + } + Stmt::Declaration(StmtDeclaration::RegAsync(stmt_reg)) => { + self.stmt_reg(stmt_reg, module_name, &definitions, &mut body)?; } Stmt::Declaration(StmtDeclaration::Instance(StmtInstance { annotations, instance, })) => { - self.targeted_annotations(module_name, vec![], &annotations); + self.targeted_annotations(module_name, vec![], &annotations)?; let name = self.module.ns.get(instance.name_id()); let instantiated = instance.instantiated(); self.add_module(instantiated); @@ -2174,7 +2280,7 @@ impl<'a> Exporter<'a> { } Ok(out) } - fn module(&mut self, module: Interned>) -> Result { + fn module(&mut self, module: Interned>) -> Result { self.module = ModuleState::default(); let indent = self.indent; let module_name = self.global_ns.get(module.name_id()); @@ -2195,9 +2301,9 @@ impl<'a> Exporter<'a> { module_io, } in module.module_io().iter() { - self.targeted_annotations(module_name, vec![], annotations); + self.targeted_annotations(module_name, vec![], annotations)?; let name = self.module.ns.get(module_io.name_id()); - let ty = self.type_state.ty(module_io.ty()); + let ty = self.type_state.ty(module_io.ty())?; if module_io.is_input() { writeln!( body, @@ -2220,6 +2326,7 @@ impl<'a> Exporter<'a> { 
ModuleBody::Extern(ExternModuleBody { verilog_name, parameters, + simulation: _, }) => { let verilog_name = Ident(verilog_name); writeln!(body, "{indent}defname = {verilog_name}").unwrap(); @@ -2271,6 +2378,7 @@ pub trait FileBackendTrait { type Error: From; type Path: AsRef + fmt::Debug + ?Sized; type PathBuf: AsRef + fmt::Debug; + fn custom_error(&self, error: Box) -> Self::Error; fn path_to_string(&mut self, path: &Self::Path) -> Result; fn write_mem_init_file( &mut self, @@ -2290,6 +2398,10 @@ impl FileBackendTrait for Box { type Path = T::Path; type PathBuf = T::PathBuf; + fn custom_error(&self, error: Box) -> Self::Error { + (**self).custom_error(error) + } + fn path_to_string(&mut self, path: &Self::Path) -> Result { (**self).path_to_string(path) } @@ -2317,6 +2429,10 @@ impl FileBackendTrait for &'_ mut T { type Path = T::Path; type PathBuf = T::PathBuf; + fn custom_error(&self, error: Box) -> Self::Error { + (**self).custom_error(error) + } + fn path_to_string(&mut self, path: &Self::Path) -> Result { (**self).path_to_string(path) } @@ -2344,7 +2460,7 @@ impl FileBackendTrait for &'_ mut T { pub struct FileBackend { pub dir_path: PathBuf, pub circuit_name: Option, - pub top_fir_file_stem: Option, + pub top_fir_file_stem: Option, } impl FileBackend { @@ -2362,6 +2478,10 @@ impl FileBackendTrait for FileBackend { type Path = Path; type PathBuf = PathBuf; + fn custom_error(&self, error: Box) -> Self::Error { + io::Error::new(io::ErrorKind::Other, error) + } + fn path_to_string(&mut self, path: &Self::Path) -> Result { path.to_str() .map(String::from) @@ -2389,7 +2509,7 @@ impl FileBackendTrait for FileBackend { ) -> Result<(), Self::Error> { let top_fir_file_stem = self .top_fir_file_stem - .get_or_insert_with(|| circuit_name.clone()); + .get_or_insert_with(|| circuit_name.clone().into()); self.circuit_name = Some(circuit_name); let mut path = self.dir_path.join(top_fir_file_stem); if let Some(parent) = path.parent().filter(|v| !v.as_os_str().is_empty()) { @@ -2528,6 +2648,10 @@ impl FileBackendTrait for TestBackend { type Path = str; type PathBuf = String; + fn custom_error(&self, error: Box) -> Self::Error { + TestBackendError(error.to_string()) + } + fn path_to_string(&mut self, path: &Self::Path) -> Result { self.step_error_after(&path)?; Ok(path.to_owned()) @@ -2559,21 +2683,12 @@ impl FileBackendTrait for TestBackend { fn export_impl( file_backend: &mut dyn WrappedFileBackendTrait, - mut top_module: Interned>, + top_module: Interned>, options: ExportOptions, ) -> Result<(), WrappedError> { - let ExportOptions { - simplify_memories: do_simplify_memories, - simplify_enums: do_simplify_enums, - __private: _, - } = options; - if let Some(kind) = do_simplify_enums { - top_module = - simplify_enums(top_module, kind).map_err(|e| file_backend.simplify_enums_error(e))?; - } - if do_simplify_memories { - top_module = simplify_memories(top_module); - } + let top_module = options + .prepare_top_module(top_module) + .map_err(|e| file_backend.simplify_enums_error(e))?; let indent_depth = Cell::new(0); let mut global_ns = Namespace::default(); let circuit_name = global_ns.get(top_module.name_id()); @@ -2583,7 +2698,7 @@ fn export_impl( indent_depth: &indent_depth, indent: " ", }, - seen_modules: HashSet::new(), + seen_modules: HashSet::default(), unwritten_modules: VecDeque::new(), global_ns, module: ModuleState::default(), @@ -2635,14 +2750,23 @@ impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser { #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct 
ExportOptionsPrivate(()); -#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)] +impl ExportOptionsPrivate { + fn private_new() -> Self { + Self(()) + } +} + +#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct ExportOptions { #[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)] + #[serde(default = "ExportOptions::default_simplify_memories")] pub simplify_memories: bool, #[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")] - pub simplify_enums: std::option::Option, + #[serde(default = "ExportOptions::default_simplify_enums")] + pub simplify_enums: std::option::Option, // use std::option::Option instead of Option to avoid clap mis-parsing #[doc(hidden)] #[clap(skip = ExportOptionsPrivate(()))] + #[serde(skip, default = "ExportOptionsPrivate::private_new")] /// `#[non_exhaustive]` except allowing struct update syntax pub __private: ExportOptionsPrivate, } @@ -2653,7 +2777,34 @@ impl fmt::Debug for ExportOptions { } } +impl ToArgs for ExportOptions { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + simplify_memories, + simplify_enums, + __private: ExportOptionsPrivate(()), + } = *self; + if !simplify_memories { + args.write_arg("--no-simplify-memories"); + } + let simplify_enums = simplify_enums.map(|v| { + clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants") + }); + let simplify_enums = match &simplify_enums { + None => OptionSimplifyEnumsKindValueParser::NONE_NAME, + Some(v) => v.get_name(), + }; + args.write_long_option_eq("simplify-enums", simplify_enums); + } +} + impl ExportOptions { + fn default_simplify_memories() -> bool { + true + } + fn default_simplify_enums() -> Option { + Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts) + } fn debug_fmt( &self, f: &mut fmt::Formatter<'_>, @@ -2705,18 +2856,47 @@ impl ExportOptions { if f.alternate() { "\n}" } else { " }" } ) } + fn prepare_top_module_helper( + self, + mut top_module: Interned>, + ) -> Result>, SimplifyEnumsError> { + let Self { + simplify_memories: do_simplify_memories, + simplify_enums: do_simplify_enums, + __private: _, + } = self; + if let Some(kind) = do_simplify_enums { + top_module = simplify_enums(top_module, kind)?; + } + if do_simplify_memories { + top_module = simplify_memories(top_module); + } + Ok(top_module) + } + pub fn prepare_top_module( + self, + top_module: impl AsRef>, + ) -> Result>, SimplifyEnumsError> { + self.prepare_top_module_helper(top_module.as_ref().canonical().intern()) + } } impl Default for ExportOptions { fn default() -> Self { Self { - simplify_memories: true, - simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts), + simplify_memories: Self::default_simplify_memories(), + simplify_enums: Self::default_simplify_enums(), __private: ExportOptionsPrivate(()), } } } +pub fn get_circuit_name(top_module_name_id: NameId) -> Interned { + let mut global_ns = Namespace::default(); + let circuit_name = global_ns.get(top_module_name_id); + circuit_name.0 +} + pub fn export( file_backend: B, top_module: &Module, @@ -2728,6 +2908,497 @@ pub fn export( }) } +#[derive(Debug)] +#[non_exhaustive] +pub enum ScalarizedModuleABIError { + SimOnlyValuesAreNotPermitted, + SimplifyEnumsError(SimplifyEnumsError), +} + +impl fmt::Display for ScalarizedModuleABIError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted => { + 
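+                // reuse FirrtlError's Display text so both error types print the same message for this case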
FirrtlError::SimOnlyValuesAreNotPermitted.fmt(f) + } + ScalarizedModuleABIError::SimplifyEnumsError(e) => e.fmt(f), + } + } +} + +impl std::error::Error for ScalarizedModuleABIError {} + +impl From for ScalarizedModuleABIError { + fn from(value: SimplifyEnumsError) -> Self { + Self::SimplifyEnumsError(value) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum ScalarizedModuleABIPortItem { + Group(ScalarizedModuleABIPortGroup), + Port(ScalarizedModuleABIPort), +} + +impl ScalarizedModuleABIPortItem { + pub fn module_io(self) -> ModuleIO { + *self + .target() + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + match self { + Self::Group(v) => v.target(), + Self::Port(v) => v.target(), + } + } + fn for_each_port_and_annotations_helper< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + &self, + parent: Option<&ScalarizedModuleABIAnnotations<'_>>, + f: &mut F, + ) -> ControlFlow { + match self { + Self::Group(v) => v.for_each_port_and_annotations_helper(parent, f), + Self::Port(port) => f( + port, + ScalarizedModuleABIAnnotations::new(parent, port.annotations.iter()), + ), + } + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + self.for_each_port_and_annotations_helper(None, &mut f) + } +} + +impl fmt::Debug for ScalarizedModuleABIPortItem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Group(v) => v.fmt(f), + Self::Port(v) => v.fmt(f), + } + } +} + +#[derive(Debug, Clone)] +pub struct ScalarizedModuleABIAnnotations<'a> { + parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>, + parent_len: usize, + annotations: std::slice::Iter<'a, TargetedAnnotation>, +} + +impl<'a> ScalarizedModuleABIAnnotations<'a> { + fn new( + parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>, + annotations: std::slice::Iter<'a, TargetedAnnotation>, + ) -> Self { + Self { + parent, + parent_len: parent.map_or(0, |parent| parent.len()), + annotations, + } + } +} + +impl<'a> Default for ScalarizedModuleABIAnnotations<'a> { + fn default() -> Self { + Self { + parent: None, + parent_len: 0, + annotations: Default::default(), + } + } +} + +impl<'a> Iterator for ScalarizedModuleABIAnnotations<'a> { + type Item = &'a TargetedAnnotation; + + fn next(&mut self) -> Option { + loop { + if let retval @ Some(_) = self.annotations.next() { + break retval; + } + *self = self.parent?.clone(); + } + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn fold(mut self, mut init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + loop { + let Self { + parent, + parent_len: _, + annotations, + } = self; + init = annotations.fold(init, &mut f); + let Some(next) = parent else { + break; + }; + self = next.clone(); + } + init + } +} + +impl std::iter::FusedIterator for ScalarizedModuleABIAnnotations<'_> {} + +impl ExactSizeIterator for ScalarizedModuleABIAnnotations<'_> { + fn len(&self) -> usize { + self.parent_len + self.annotations.len() + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABIPortGroup { + target: Interned, + common_annotations: Interned<[TargetedAnnotation]>, + children: Interned<[ScalarizedModuleABIPortItem]>, +} + +impl ScalarizedModuleABIPortGroup { + pub fn module_io(self) -> ModuleIO { + *self + 
.target + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + self.target + } + pub fn common_annotations(self) -> Interned<[TargetedAnnotation]> { + self.common_annotations + } + pub fn children(self) -> Interned<[ScalarizedModuleABIPortItem]> { + self.children + } + fn for_each_port_and_annotations_helper< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + &self, + parent: Option<&ScalarizedModuleABIAnnotations<'_>>, + f: &mut F, + ) -> ControlFlow { + let parent = ScalarizedModuleABIAnnotations::new(parent, self.common_annotations.iter()); + for item in &self.children { + item.for_each_port_and_annotations_helper(Some(&parent), f)?; + } + ControlFlow::Continue(()) + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + self.for_each_port_and_annotations_helper(None, &mut f) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABIPort { + target: Interned, + annotations: Interned<[TargetedAnnotation]>, + scalarized_name: Interned, +} + +impl ScalarizedModuleABIPort { + pub fn module_io(self) -> ModuleIO { + *self + .target + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + self.target + } + pub fn annotations(self) -> Interned<[TargetedAnnotation]> { + self.annotations + } + pub fn scalarized_name(self) -> Interned { + self.scalarized_name + } +} + +enum ScalarizeTreeNodeBody { + Leaf { + scalarized_name: Interned, + }, + Bundle { + ty: Bundle, + fields: Vec, + }, + Array { + elements: Vec, + }, +} + +struct ScalarizeTreeNode { + target: Interned, + annotations: Vec, + body: ScalarizeTreeNodeBody, +} + +impl ScalarizeTreeNode { + #[track_caller] + fn find_target(&mut self, annotation_target: Interned) -> &mut Self { + match *annotation_target { + Target::Base(_) => { + assert_eq!( + annotation_target, self.target, + "annotation not on correct ModuleIO", + ); + self + } + Target::Child(target_child) => { + let parent = self.find_target(target_child.parent()); + match *target_child.path_element() { + TargetPathElement::BundleField(TargetPathBundleField { name }) => { + match parent.body { + ScalarizeTreeNodeBody::Leaf { .. } => parent, + ScalarizeTreeNodeBody::Bundle { ty, ref mut fields } => { + &mut fields[ty.name_indexes()[&name]] + } + ScalarizeTreeNodeBody::Array { .. } => { + unreachable!("types are known to match") + } + } + } + TargetPathElement::ArrayElement(TargetPathArrayElement { index }) => { + match parent.body { + ScalarizeTreeNodeBody::Leaf { .. } => parent, + ScalarizeTreeNodeBody::Bundle { .. } => { + unreachable!("types are known to match") + } + ScalarizeTreeNodeBody::Array { ref mut elements } => { + &mut elements[index] + } + } + } + TargetPathElement::DynArrayElement(_) => { + unreachable!("annotations are only on static targets"); + } + } + } + } + } + fn into_scalarized_item(self) -> ScalarizedModuleABIPortItem { + let Self { + target, + annotations, + body, + } = self; + match body { + ScalarizeTreeNodeBody::Leaf { scalarized_name } => { + ScalarizedModuleABIPortItem::Port(ScalarizedModuleABIPort { + target, + annotations: Intern::intern_owned(annotations), + scalarized_name, + }) + } + ScalarizeTreeNodeBody::Bundle { fields: items, .. 
} + | ScalarizeTreeNodeBody::Array { elements: items } => { + ScalarizedModuleABIPortItem::Group(ScalarizedModuleABIPortGroup { + target, + common_annotations: Intern::intern_owned(annotations), + children: Interned::from_iter( + items.into_iter().map(Self::into_scalarized_item), + ), + }) + } + } + } +} + +#[derive(Default)] +struct ScalarizeTreeBuilder { + scalarized_ns: Namespace, + type_state: TypeState, + name: String, +} + +impl ScalarizeTreeBuilder { + #[track_caller] + fn build_bundle( + &mut self, + target: Interned, + ty: Bundle, + ) -> Result { + let mut fields = Vec::with_capacity(ty.fields().len()); + let original_len = self.name.len(); + for BundleField { name, .. } in ty.fields() { + let firrtl_name = self + .type_state + .get_bundle_field(ty, name) + .map_err(|e| match e { + FirrtlError::SimOnlyValuesAreNotPermitted => { + ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted + } + })? + .0; + write!(self.name, "_{firrtl_name}").expect("writing to String is infallible"); + fields.push( + self.build( + target + .join(TargetPathElement::intern_sized( + TargetPathBundleField { name }.into(), + )) + .intern_sized(), + )?, + ); + self.name.truncate(original_len); + } + Ok(ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Bundle { ty, fields }, + }) + } + #[track_caller] + fn build( + &mut self, + target: Interned, + ) -> Result { + Ok(match target.canonical_ty() { + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::Enum(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) => { + let scalarized_name = self.scalarized_ns.get(str::intern(&self.name)).0; + ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Leaf { scalarized_name }, + } + } + CanonicalType::Array(ty) => { + let mut elements = Vec::with_capacity(ty.len()); + let original_len = self.name.len(); + for index in 0..ty.len() { + write!(self.name, "_{index}").expect("writing to String is infallible"); + elements.push( + self.build( + target + .join(TargetPathElement::intern_sized( + TargetPathArrayElement { index }.into(), + )) + .intern_sized(), + )?, + ); + self.name.truncate(original_len); + } + ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Array { elements }, + } + } + CanonicalType::Bundle(ty) => self.build_bundle(target, ty)?, + CanonicalType::PhantomConst(_) => { + self.build_bundle(target, Bundle::new(Interned::default()))? 
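+                // treated as an empty bundle (no fields), so a PhantomConst contributes no scalarized ports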
+ } + CanonicalType::DynSimOnly(_) => { + return Err(ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted); + } + }) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABI { + module_io: Interned<[AnnotatedModuleIO]>, + items: Interned<[ScalarizedModuleABIPortItem]>, +} + +impl ScalarizedModuleABI { + #[track_caller] + fn from_io(module_io: Interned<[AnnotatedModuleIO]>) -> Result { + let mut firrtl_ns = Namespace::default(); + let mut tree_builder = ScalarizeTreeBuilder::default(); + let mut items = Vec::new(); + for module_io in module_io { + let firrtl_name = firrtl_ns.get(module_io.module_io.name_id()); + tree_builder.name.clear(); + tree_builder.name.push_str(&firrtl_name.0); + let mut tree = tree_builder.build(Target::from(module_io.module_io).intern_sized())?; + for annotation in module_io.annotations { + tree.find_target(annotation.target()) + .annotations + .push(annotation); + } + items.push(tree.into_scalarized_item()); + } + Ok(Self { + module_io, + items: Intern::intern_owned(items), + }) + } + #[track_caller] + pub fn new( + module: impl AsRef>, + options: ExportOptions, + ) -> Result { + Self::from_io(options.prepare_top_module(module)?.module_io()) + } + pub fn module_io(&self) -> Interned<[AnnotatedModuleIO]> { + self.module_io + } + pub fn items(&self) -> Interned<[ScalarizedModuleABIPortItem]> { + self.items + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + for item in &self.items { + item.for_each_port_and_annotations_helper(None, &mut f)?; + } + ControlFlow::Continue(()) + } +} + #[doc(hidden)] #[track_caller] pub fn assert_export_firrtl_impl(top_module: &Module, expected: TestBackend) { diff --git a/crates/fayalite/src/int.rs b/crates/fayalite/src/int.rs index 03b2c88..7fa77ce 100644 --- a/crates/fayalite/src/int.rs +++ b/crates/fayalite/src/int.rs @@ -2,27 +2,55 @@ // See Notices.txt for copyright information use crate::{ + array::ArrayType, expr::{ - target::{GetTarget, Target}, Expr, NotALiteralExpr, ToExpr, ToLiteralBits, + target::{GetTarget, Target}, }, + hdl, intern::{Intern, Interned, Memoize}, + sim::value::{SimValue, ToSimValueWithType}, source_location::SourceLocation, - ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties}, - util::{interned_bit, ConstBool, ConstUsize, GenericConstBool, GenericConstUsize}, + ty::{ + CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self, + }, + util::{ConstBool, ConstUsize, GenericConstBool, GenericConstUsize, interned_bit, slice_range}, }; -use bitvec::{order::Lsb0, slice::BitSlice, vec::BitVec}; +use bitvec::{bits, order::Lsb0, slice::BitSlice, vec::BitVec, view::BitView}; use num_bigint::{BigInt, BigUint, Sign}; -use num_traits::{Signed, Zero}; +use num_traits::{One, Signed, Zero}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error, Visitor}, +}; use std::{ borrow::{BorrowMut, Cow}, fmt, marker::PhantomData, num::NonZero, - ops::{Bound, Index, Not, Range, RangeBounds, RangeInclusive}, + ops::{Index, Not, Range, RangeBounds, RangeInclusive}, + str::FromStr, sync::Arc, }; +mod uint_in_range; + +#[hdl] +pub type UIntInRangeType = uint_in_range::UIntInRangeType; + +#[hdl] +pub type UIntInRange = + UIntInRangeType, ConstUsize>; + +#[hdl] +pub type 
UIntInRangeInclusiveType = + uint_in_range::UIntInRangeInclusiveType; + +#[hdl] +pub type UIntInRangeInclusive = + UIntInRangeInclusiveType, ConstUsize>; + mod sealed { pub trait BoolOrIntTypeSealed {} pub trait SizeSealed {} @@ -49,6 +77,16 @@ pub trait KnownSize: + IntoIterator> + TryFrom>> + Into>>; + type ArraySimValue: AsRef<[SimValue]> + + AsMut<[SimValue]> + + BorrowMut<[SimValue]> + + 'static + + Clone + + std::fmt::Debug + + IntoIterator> + + TryFrom>> + + Into>> + + ToSimValueWithType>; } macro_rules! known_widths { @@ -60,6 +98,7 @@ macro_rules! known_widths { }> { const SIZE: Self = Self; type ArrayMatch = [Expr; Self::VALUE]; + type ArraySimValue = [SimValue; Self::VALUE]; } }; ([2 $($rest:tt)*] $($bits:literal)+) => { @@ -72,6 +111,7 @@ macro_rules! known_widths { impl KnownSize for ConstUsize<{2 $(* $rest)*}> { const SIZE: Self = Self; type ArrayMatch = [Expr; Self::VALUE]; + type ArraySimValue = [SimValue; Self::VALUE]; } }; } @@ -79,13 +119,31 @@ macro_rules! known_widths { known_widths!([2 2 2 2 2 2 2 2 2]); pub trait SizeType: - sealed::SizeTypeSealed + Copy + Ord + std::hash::Hash + std::fmt::Debug + Send + Sync + 'static + sealed::SizeTypeSealed + + Copy + + Ord + + std::hash::Hash + + std::fmt::Debug + + Send + + Sync + + 'static + + Serialize + + DeserializeOwned { type Size: Size; } pub trait Size: - sealed::SizeSealed + Copy + Ord + std::hash::Hash + std::fmt::Debug + Send + Sync + 'static + sealed::SizeSealed + + Copy + + Ord + + std::hash::Hash + + std::fmt::Debug + + Send + + Sync + + 'static + + Serialize + + DeserializeOwned { type ArrayMatch: AsRef<[Expr]> + AsMut<[Expr]> @@ -100,6 +158,16 @@ pub trait Size: + IntoIterator> + TryFrom>> + Into>>; + type ArraySimValue: AsRef<[SimValue]> + + AsMut<[SimValue]> + + BorrowMut<[SimValue]> + + 'static + + Clone + + std::fmt::Debug + + IntoIterator> + + TryFrom>> + + Into>> + + ToSimValueWithType>; const KNOWN_VALUE: Option; type SizeType: SizeType + Copy @@ -125,6 +193,7 @@ impl SizeType for usize { impl Size for DynSize { type ArrayMatch = Box<[Expr]>; + type ArraySimValue = Box<[SimValue]>; const KNOWN_VALUE: Option = None; type SizeType = usize; @@ -147,6 +216,7 @@ impl SizeType for T { impl Size for T { type ArrayMatch = ::ArrayMatch; + type ArraySimValue = ::ArraySimValue; const KNOWN_VALUE: Option = Some(T::VALUE); @@ -157,14 +227,309 @@ impl Size for T { } fn try_from_usize(v: usize) -> Option { - if v == T::VALUE { - Some(T::SIZE) + if v == T::VALUE { Some(T::SIZE) } else { None } + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum ParseIntValueError { + Empty, + InvalidDigit, + MissingDigits, + InvalidRadix, + MissingType, + InvalidType, + TypeMismatch { + parsed_signed: bool, + parsed_width: usize, + expected_signed: bool, + expected_width: usize, + }, + PosOverflow, + NegOverflow, + WidthOverflow, + MissingWidth, +} + +impl std::error::Error for ParseIntValueError {} + +impl fmt::Display for ParseIntValueError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Self::Empty => "can't parse integer from empty string", + Self::InvalidDigit => "invalid digit", + Self::MissingDigits => "missing digits", + Self::InvalidRadix => "invalid radix", + Self::MissingType => "missing type", + Self::InvalidType => "invalid type", + Self::TypeMismatch { + parsed_signed, + parsed_width, + expected_signed, + expected_width, + } => { + return write!( + f, + "type mismatch: parsed type {parsed_signed_str}{parsed_width}, \ + expected type {expected_signed_str}{expected_width}", + 
parsed_signed_str = if *parsed_signed { "i" } else { "u" }, + expected_signed_str = if *expected_signed { "i" } else { "u" }, + ); + } + Self::PosOverflow => "value too large to fit in type", + Self::NegOverflow => "value too small to fit in type", + Self::WidthOverflow => "width is too large", + Self::MissingWidth => "missing width", + }) + } +} + +fn parse_int_value( + s: &str, + type_is_signed: bool, + type_width: Option, + parse_type: bool, +) -> Result, ParseIntValueError> { + if !parse_type && type_width.is_none() { + return Err(ParseIntValueError::MissingWidth); + } + let mut s = s.trim(); + if s.is_empty() { + return Err(ParseIntValueError::Empty); + } + let negative = match s.bytes().next() { + Some(ch @ (b'+' | b'-')) => { + s = s[1..].trim_start(); + ch == b'-' + } + _ => false, + }; + let radix = match s.bytes().next() { + Some(b'0') => match s.bytes().nth(1) { + Some(b'x' | b'X') => { + s = &s[2..]; + 16 + } + Some(b'b' | b'B') => { + s = &s[2..]; + 2 + } + Some(b'o' | b'O') => { + s = &s[2..]; + 8 + } + _ => 10, + }, + Some(b'1'..=b'9') => 10, + _ => return Err(ParseIntValueError::InvalidDigit), + }; + let mut any_digits = false; + let digits_end = s + .as_bytes() + .iter() + .position(|&ch| { + if ch == b'_' { + false + } else if (ch as char).to_digit(radix).is_some() { + any_digits = true; + false + } else { + true + } + }) + .unwrap_or(s.len()); + let digits = &s[..digits_end]; + s = &s[digits_end..]; + if !any_digits { + return Err(ParseIntValueError::MissingDigits); + } + let width = if parse_type { + const HDL_PREFIX: &[u8] = b"hdl_"; + let mut missing_type = ParseIntValueError::MissingType; + if s.as_bytes() + .get(..HDL_PREFIX.len()) + .is_some_and(|bytes| bytes.eq_ignore_ascii_case(HDL_PREFIX)) + { + s = &s[HDL_PREFIX.len()..]; + missing_type = ParseIntValueError::InvalidType; + } + let signed = match s.bytes().next() { + Some(b'u' | b'U') => false, + Some(b'i' | b'I') => true, + Some(_) => return Err(ParseIntValueError::InvalidType), + None => return Err(missing_type), + }; + s = &s[1..]; + let mut width = 0usize; + let mut any_digits = false; + for ch in s.bytes() { + let digit = (ch as char) + .to_digit(10) + .ok_or(ParseIntValueError::InvalidDigit)?; + any_digits = true; + width = width + .checked_mul(10) + .and_then(|v| v.checked_add(digit as usize)) + .ok_or(ParseIntValueError::WidthOverflow)?; + } + if !any_digits { + return Err(ParseIntValueError::MissingDigits); + } + if width > ::MAX_BITS { + return Err(ParseIntValueError::WidthOverflow); + } + let expected_width = type_width.unwrap_or(width); + if type_is_signed != signed || expected_width != width { + let expected_width = type_width.unwrap_or(width); + return Err(ParseIntValueError::TypeMismatch { + parsed_signed: signed, + parsed_width: width, + expected_signed: type_is_signed, + expected_width, + }); + } + width + } else { + if !s.is_empty() { + return Err(ParseIntValueError::InvalidDigit); + } + type_width.expect("checked earlier") + }; + if !type_is_signed && negative { + return Err(ParseIntValueError::InvalidDigit); + } + if radix == 10 { + let mut value: BigInt = digits + .replace("_", "") + .parse() + .expect("checked that the digits are valid already"); + if negative { + value = -value; + } + let uint_value: UIntValue = UInt::new(width).from_bigint_wrapping(&value); + if value.is_zero() { + Ok(uint_value.into_bits()) } else { - None + for i in 0..width { + value.set_bit(i as u64, type_is_signed && negative); + } + if value.is_zero() { + Ok(uint_value.into_bits()) + } else if type_is_signed && 
negative { + if value.sign() == Sign::Minus && value.magnitude().is_one() { + Ok(uint_value.into_bits()) + } else { + Err(ParseIntValueError::NegOverflow) + } + } else { + Err(ParseIntValueError::PosOverflow) + } + } + } else { + let mut value = BitVec::repeat(false, width); + let bits_per_digit = match radix { + 2 => 1, + 8 => 3, + 16 => 4, + _ => unreachable!(), + }; + let mut digits = digits + .bytes() + .rev() + .filter_map(|ch| (ch as char).to_digit(radix)); + let overflow_error = if negative { + ParseIntValueError::NegOverflow + } else { + ParseIntValueError::PosOverflow + }; + for chunk in value.chunks_mut(bits_per_digit) { + if let Some(mut digit) = digits.next() { + let digit_bits = &mut digit.view_bits_mut::()[..chunk.len()]; + chunk.clone_from_bitslice(digit_bits); + digit_bits.fill(false); + if digit != 0 { + return Err(overflow_error); + } + } else { + break; + } + } + for digit in digits { + if digit != 0 { + return Err(overflow_error); + } + } + let negative_zero = if negative { + // negating a value happens in three regions: + // * the least-significant zeros, which are left as zeros + // * the least-significant one bit, which is left as a one bit + // * all the most-significant bits, which are inverted + // e.g.: + const { + let inp = 0b1010_1_000_u8; + let out = 0b0101_1_000_u8; + assert!(inp.wrapping_neg() == out); + }; + if let Some(first_one) = value.first_one() { + let most_significant_bits = &mut value[first_one + 1..]; + // modifies in-place despite using `Not::not` + let _ = Not::not(most_significant_bits); + false + } else { + true + } + } else { + false + }; + if !negative_zero && type_is_signed && negative != value[value.len() - 1] { + Err(overflow_error) + } else { + Ok(Arc::new(value)) } } } +fn deserialize_int_value<'de, D: Deserializer<'de>>( + deserializer: D, + type_is_signed: bool, + type_width: Option, +) -> Result, D::Error> { + struct IntValueVisitor { + type_is_signed: bool, + type_width: Option, + } + impl<'de> Visitor<'de> for IntValueVisitor { + type Value = Arc; + + fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(if self.type_is_signed { + "SIntValue" + } else { + "UIntValue" + })?; + if let Some(type_width) = self.type_width { + write!(f, "<{type_width}>")?; + } + Ok(()) + } + + fn visit_str(self, v: &str) -> Result { + parse_int_value(v, self.type_is_signed, self.type_width, true).map_err(E::custom) + } + + fn visit_bytes(self, v: &[u8]) -> Result { + match std::str::from_utf8(v) { + Ok(v) => self.visit_str(v), + Err(_) => Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self)), + } + } + } + deserializer.deserialize_str(IntValueVisitor { + type_is_signed, + type_width, + }) +} + macro_rules! impl_int { ($pretty_name:ident, $name:ident, $generic_name:ident, $value:ident, $SIGNED:literal) => { #[derive(Copy, Clone, PartialEq, Eq, Hash)] @@ -188,31 +553,26 @@ macro_rules! 
impl_int { pub const $name: $generic_name = $generic_name; impl $name { - pub fn new(width: Width::SizeType) -> Self { + pub const fn new(width: Width::SizeType) -> Self { Self { width } } pub fn width(self) -> usize { Width::as_usize(self.width) } pub fn type_properties(self) -> TypeProperties { - TypeProperties { - is_passive: true, - is_storable: true, - is_castable_from_bits: true, - bit_width: self.width(), - } + self.as_dyn_int().type_properties_dyn() } - pub fn bits_from_bigint_wrapping(self, v: BigInt) -> BitVec { + pub fn bits_from_bigint_wrapping(self, v: &BigInt) -> BitVec { BoolOrIntType::bits_from_bigint_wrapping(self, v) } - pub fn from_bigint_wrapping(self, v: BigInt) -> $value { + pub fn from_bigint_wrapping(self, v: &BigInt) -> $value { $value { bits: Arc::new(self.bits_from_bigint_wrapping(v)), _phantom: PhantomData, } } pub fn from_int_wrapping(self, v: impl Into) -> $value { - self.from_bigint_wrapping(v.into()) + self.from_bigint_wrapping(&v.into()) } pub fn zero(self) -> $value { self.from_int_wrapping(0u8) @@ -227,12 +587,29 @@ macro_rules! impl_int { impl BoolOrIntType for $name { type Width = Width; type Signed = ConstBool<$SIGNED>; + type Value = $value; fn width(self) -> usize { $name::width(self) } fn new(width: Width::SizeType) -> Self { $name { width } } + fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value { + $value::::from_bigint_wrapping(self, v) + } + fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value { + #[derive(Copy, Clone, Eq, PartialEq, Hash)] + struct MemoizeBitsToValue; + impl Memoize for MemoizeBitsToValue { + type Input = BitSlice; + type InputOwned = BitVec; + type Output = Arc; + fn inner(self, input: &Self::Input) -> Self::Output { + Arc::new(input.to_bitvec()) + } + } + $value::new(MemoizeBitsToValue.get_cow(bits)) + } fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr { #[derive(Copy, Clone, Eq, PartialEq, Hash)] struct MemoizeBitsToExpr; @@ -246,6 +623,12 @@ macro_rules! impl_int { } Expr::from_dyn_int(MemoizeBitsToExpr.get_cow(bits)) } + fn from_str_without_ty( + self, + s: &str, + ) -> Result::Err> { + parse_int_value(s, $SIGNED, Some(self.width()), false).map(Self::Value::new) + } } impl IntType for $name { @@ -253,12 +636,21 @@ macro_rules! impl_int { } impl $name { - pub fn new_dyn(width: usize) -> Self { + pub const fn new_dyn(width: usize) -> Self { Self { width } } pub fn bits_to_bigint(bits: &BitSlice) -> BigInt { ::bits_to_bigint(bits) } + pub const fn type_properties_dyn(self) -> TypeProperties { + TypeProperties { + is_passive: true, + is_storable: true, + is_castable_from_bits: true, + bit_width: self.width, + sim_only_values_len: 0, + } + } } impl $name { @@ -270,6 +662,7 @@ macro_rules! impl_int { impl Type for $name { type BaseType = $pretty_name; type MaskType = Bool; + type SimValue = $value; impl_match_variant_as_self!(); fn mask_type(&self) -> Self::MaskType { Bool @@ -280,7 +673,7 @@ macro_rules! impl_int { #[track_caller] fn from_canonical(canonical_type: CanonicalType) -> Self { let CanonicalType::$pretty_name(retval) = canonical_type else { - panic!("expected {}", stringify!($name)); + panic!("expected {}", stringify!($pretty_name)); }; $name { width: Width::from_usize(retval.width), @@ -289,20 +682,95 @@ macro_rules! 
impl_int { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!( + opaque.size(), + OpaqueSimValueSize::from_bit_width(self.width()) + ); + $value::new(Arc::new(opaque.bits().to_bitvec())) + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!( + opaque.size(), + OpaqueSimValueSize::from_bit_width(self.width()) + ); + assert_eq!(value.width(), self.width()); + value.bits_mut().copy_from_bitslice(opaque.bits()); + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!( + writer.size(), + OpaqueSimValueSize::from_bit_width(self.width()) + ); + assert_eq!(value.width(), self.width()); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.bits())) + } + } + + impl Default for $name { + fn default() -> Self { + Self::TYPE + } } impl StaticType for $name { const TYPE: Self = Self { width: Width::SIZE }; const MASK_TYPE: Self::MaskType = Bool; - const TYPE_PROPERTIES: TypeProperties = TypeProperties { - is_passive: true, - is_storable: true, - is_castable_from_bits: true, - bit_width: Width::VALUE, - }; + const TYPE_PROPERTIES: TypeProperties = $name { + width: Width::VALUE, + } + .type_properties_dyn(); const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES; } + impl Serialize for $name { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.canonical().serialize(serializer) + } + } + + impl<'de, Width: Size> Deserialize<'de> for $name { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = |width| -> String { + if let Some(width) = width { + format!("a {}<{width}>", stringify!($pretty_name)) + } else { + format!("a {}", stringify!($pretty_name)) + } + }; + match CanonicalType::deserialize(deserializer)? { + CanonicalType::$pretty_name(retval) => { + if let Some(width) = Width::try_from_usize(retval.width()) { + Ok($name { width }) + } else { + Err(Error::invalid_value( + serde::de::Unexpected::Other(&name(Some(retval.width()))), + &&*name(Width::KNOWN_VALUE), + )) + } + } + ty => Err(Error::invalid_value( + serde::de::Unexpected::Other(ty.as_serde_unexpected_str()), + &&*name(Width::KNOWN_VALUE), + )), + } + } + } + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] pub struct $generic_name; @@ -320,7 +788,7 @@ macro_rules! impl_int { _phantom: PhantomData, } - impl fmt::Debug for $value { + impl fmt::Display for $value { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let value = self.to_bigint(); let (sign, magnitude) = value.into_parts(); @@ -334,6 +802,47 @@ macro_rules! 
impl_int { } } + impl fmt::Debug for $value { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } + } + + impl std::str::FromStr for $value { + type Err = ParseIntValueError; + + fn from_str(s: &str) -> Result { + parse_int_value(s, $SIGNED, Width::KNOWN_VALUE, true).map(Self::new) + } + } + + impl Serialize for $value { + fn serialize(&self, serializer: S) -> Result { + self.to_string().serialize(serializer) + } + } + + impl<'de, Width: Size> Deserialize<'de> for $value { + fn deserialize>(deserializer: D) -> Result { + deserialize_int_value(deserializer, $SIGNED, Width::KNOWN_VALUE).map(Self::new) + } + } + + impl PartialOrd for $value { + fn partial_cmp(&self, other: &Self) -> Option { + if self.width() != other.width() { + return None; + } + Some(self.to_bigint().cmp(&other.to_bigint())) + } + } + + impl From<$value> for BigInt { + fn from(v: $value) -> BigInt { + v.to_bigint() + } + } + impl $value { pub fn width(&self) -> usize { if let Some(retval) = Width::KNOWN_VALUE { @@ -343,7 +852,7 @@ macro_rules! impl_int { self.bits.len() } } - pub fn from_bigint_wrapping(ty: $name, v: BigInt) -> $value { + pub fn from_bigint_wrapping(ty: $name, v: &BigInt) -> $value { ty.from_bigint_wrapping(v) } pub fn to_bigint(&self) -> BigInt { @@ -366,6 +875,9 @@ macro_rules! impl_int { pub fn bits(&self) -> &Arc { &self.bits } + pub fn bits_mut(&mut self) -> &mut BitSlice { + Arc::::make_mut(&mut self.bits) + } } impl ToLiteralBits for $value { @@ -407,6 +919,9 @@ macro_rules! impl_int { _phantom: PhantomData, } } + pub fn bitvec_mut(&mut self) -> &mut BitVec { + Arc::make_mut(&mut self.bits) + } } }; } @@ -420,6 +935,10 @@ impl UInt { let v: BigUint = v.into(); Self::new(v.bits().try_into().expect("too big")) } + /// gets the smallest `UInt` that fits `v` losslessly + pub const fn for_value_usize(v: usize) -> Self { + Self::new((usize::BITS - v.leading_zeros()) as usize) + } /// gets the smallest `UInt` that fits `r` losslessly, panics if `r` is empty #[track_caller] pub fn range(r: Range>) -> Self { @@ -430,6 +949,12 @@ impl UInt { } /// gets the smallest `UInt` that fits `r` losslessly, panics if `r` is empty #[track_caller] + pub const fn range_usize(r: Range) -> Self { + assert!(r.end != 0, "empty range"); + Self::range_inclusive_usize(r.start..=(r.end - 1)) + } + /// gets the smallest `UInt` that fits `r` losslessly, panics if `r` is empty + #[track_caller] pub fn range_inclusive(r: RangeInclusive>) -> Self { let (start, end) = r.into_inner(); let start: BigUint = start.into(); @@ -439,6 +964,16 @@ impl UInt { // so must not take more bits than `end` Self::for_value(end) } + /// gets the smallest `UInt` that fits `r` losslessly, panics if `r` is empty + #[track_caller] + pub const fn range_inclusive_usize(r: RangeInclusive) -> Self { + let start = *r.start(); + let end = *r.end(); + assert!(start <= end, "empty range"); + // no need to check `start`` since it's no larger than `end` + // so must not take more bits than `end` + Self::for_value_usize(end) + } } impl SInt { @@ -453,7 +988,10 @@ impl SInt { v.not().bits().checked_add(1).expect("too big") } Sign::NoSign => 0, - Sign::Plus => v.bits(), + Sign::Plus => { + // account for sign bit + v.bits().checked_add(1).expect("too big") + } } .try_into() .expect("too big"), @@ -482,6 +1020,19 @@ macro_rules! 
impl_prim_int { $(#[$meta:meta])* $prim_int:ident, $ty:ty ) => { + impl From<$prim_int> for <$ty as BoolOrIntType>::Value { + fn from(v: $prim_int) -> Self { + <$ty>::le_bytes_to_value_wrapping( + &v.to_le_bytes(), + <$ty as BoolOrIntType>::Width::VALUE, + ) + } + } + impl From> for <$ty as BoolOrIntType>::Value { + fn from(v: NonZero<$prim_int>) -> Self { + v.get().into() + } + } $(#[$meta])* impl ToExpr for $prim_int { type Type = $ty; @@ -498,10 +1049,7 @@ macro_rules! impl_prim_int { type Type = $ty; fn to_expr(&self) -> Expr { - <$ty>::le_bytes_to_expr_wrapping( - &self.get().to_le_bytes(), - <$ty as BoolOrIntType>::Width::VALUE, - ) + self.get().to_expr() } } }; @@ -519,18 +1067,30 @@ impl_prim_int!(i64, SInt<64>); impl_prim_int!(i128, SInt<128>); impl_prim_int!( - /// for portability reasons, [`usize`] always translates to [`UInt<64>`] + /// for portability reasons, [`usize`] always translates to [`UInt<64>`][type@UInt] usize, UInt<64> ); impl_prim_int!( - /// for portability reasons, [`isize`] always translates to [`SInt<64>`] + /// for portability reasons, [`isize`] always translates to [`SInt<64>`][type@SInt] isize, SInt<64> ); pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed { type Width: Size; type Signed: GenericConstBool; + type Value: Clone + + PartialOrd + + Eq + + std::hash::Hash + + fmt::Debug + + fmt::Display + + Send + + Sync + + 'static + + ToExpr + + Into + + std::str::FromStr; fn width(self) -> usize; fn new(width: ::SizeType) -> Self; fn new_static() -> Self @@ -545,17 +1105,30 @@ pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed { fn as_same_width_uint(self) -> UIntType { UIntType::new(Self::Width::from_usize(self.width())) } - fn bits_from_bigint_wrapping(self, v: BigInt) -> BitVec { - let width = self.width(); + fn value_from_int_wrapping(self, v: impl Into) -> Self::Value { + self.value_from_bigint_wrapping(&v.into()) + } + fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value; + fn bits_from_bigint_wrapping(self, v: &BigInt) -> BitVec { + let mut bits = BitVec::repeat(false, self.width()); + Self::copy_bits_from_bigint_wrapping(v, &mut bits); + bits + } + fn copy_bits_from_bigint_wrapping(v: &BigInt, bits: &mut BitSlice) { + let width = bits.len(); let mut bytes = v.to_signed_bytes_le(); bytes.resize( width.div_ceil(u8::BITS as usize), if v.is_negative() { 0xFF } else { 0 }, ); let bitslice = &BitSlice::::from_slice(&bytes)[..width]; - let mut bits = BitVec::new(); - bits.extend_from_bitslice(bitslice); - bits + bits.clone_from_bitslice(bitslice); + } + fn bits_equal_bigint_wrapping(v: &BigInt, bits: &BitSlice) -> bool { + bits.iter() + .by_vals() + .enumerate() + .all(|(bit_index, bit): (usize, bool)| v.bit(bit_index as u64) == bit) } fn bits_to_bigint(bits: &BitSlice) -> BigInt { let sign_byte = if Self::Signed::VALUE && bits.last().as_deref().copied().unwrap_or(false) { @@ -567,8 +1140,9 @@ pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed { BitSlice::::from_slice_mut(&mut bytes)[..bits.len()].clone_from_bitslice(bits); BigInt::from_signed_bytes_le(&bytes) } + fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value; fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr; - fn le_bytes_to_expr_wrapping(bytes: &[u8], bit_width: usize) -> Expr { + fn le_bytes_to_bits_wrapping(bytes: &[u8], bit_width: usize) -> BitVec { let bitslice = BitSlice::::from_slice(bytes); let bitslice = &bitslice[..bit_width.min(bitslice.len())]; let mut bits = BitVec::new(); @@ -577,11 +1151,24 @@ pub trait BoolOrIntType: Type + 
sealed::BoolOrIntTypeSealed { bit_width, Self::Signed::VALUE && bits.last().as_deref().copied().unwrap_or(false), ); - Self::bits_to_expr(Cow::Owned(bits)) + bits } + fn le_bytes_to_expr_wrapping(bytes: &[u8], bit_width: usize) -> Expr { + Self::bits_to_expr(Cow::Owned(Self::le_bytes_to_bits_wrapping( + bytes, bit_width, + ))) + } + fn le_bytes_to_value_wrapping(bytes: &[u8], bit_width: usize) -> Self::Value { + Self::bits_to_value(Cow::Owned(Self::le_bytes_to_bits_wrapping( + bytes, bit_width, + ))) + } + fn from_str_without_ty(self, s: &str) -> Result::Err>; } -pub trait IntType: BoolOrIntType::Dyn> { +pub trait IntType: + BoolOrIntType::Dyn, Value: FromStr> +{ type Dyn: IntType; fn as_dyn_int(self) -> Self::Dyn { Self::new_dyn(self.width()) @@ -597,19 +1184,7 @@ pub trait IntType: BoolOrIntType::Dyn> { Self::Dyn::new(width) } fn slice_index_to_range>(self, index: I) -> Range { - let width = self.width(); - let start = match index.start_bound() { - Bound::Included(start) => *start, - Bound::Excluded(start) => *start + 1, - Bound::Unbounded => 0, - }; - let end = match index.end_bound() { - Bound::Included(end) => *end + 1, - Bound::Excluded(end) => *end, - Bound::Unbounded => width, - }; - assert!(start <= end && end <= width, "slice range out-of-range"); - start..end + slice_range(index, self.width()) } fn slice_and_shift>(self, index: I) -> (UInt, usize) { let range = self.slice_index_to_range(index); @@ -621,7 +1196,7 @@ pub trait IntType: BoolOrIntType::Dyn> { } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] pub struct Bool; impl sealed::BoolOrIntTypeSealed for Bool {} @@ -629,6 +1204,7 @@ impl sealed::BoolOrIntTypeSealed for Bool {} impl BoolOrIntType for Bool { type Width = ConstUsize<1>; type Signed = ConstBool; + type Value = bool; fn width(self) -> usize { 1 @@ -639,10 +1215,23 @@ impl BoolOrIntType for Bool { Bool } + fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value { + v.bit(0) + } + fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr { assert_eq!(bits.len(), 1); bits[0].to_expr() } + + fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value { + assert_eq!(bits.len(), 1); + bits[0] + } + + fn from_str_without_ty(self, s: &str) -> Result::Err> { + FromStr::from_str(s) + } } impl Bool { @@ -657,6 +1246,7 @@ impl Bool { impl Type for Bool { type BaseType = Bool; type MaskType = Bool; + type SimValue = bool; impl_match_variant_as_self!(); fn mask_type(&self) -> Self::MaskType { Bool @@ -674,6 +1264,28 @@ impl Type for Bool { fn source_location() -> SourceLocation { SourceLocation::builtin() } + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + opaque.bits()[0] + } + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + *value = opaque.bits()[0]; + } + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1)); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice( + [bits![0], bits![1]][*value as usize], + )) + } } impl StaticType for Bool { @@ -684,6 +1296,7 @@ impl StaticType for Bool { is_storable: true, is_castable_from_bits: true, bit_width: 1, + sim_only_values_len: 0, }; const MASK_TYPE_PROPERTIES: TypeProperties = 
Bool::TYPE_PROPERTIES; } @@ -693,3 +1306,138 @@ impl ToLiteralBits for bool { Ok(interned_bit(*self)) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_different_value_widths_compare_ne() { + // interning relies on [SU]IntValue with different `width` comparing not equal + assert_ne!(UInt[3].from_int_wrapping(0), UInt[4].from_int_wrapping(0)); + assert_ne!(SInt[3].from_int_wrapping(0), SInt[4].from_int_wrapping(0)); + } + + #[test] + fn test_uint_for_value() { + assert_eq!(UInt::for_value(0u8).width, 0); + assert_eq!(UInt::for_value(1u8).width, 1); + assert_eq!(UInt::for_value(2u8).width, 2); + assert_eq!(UInt::for_value(3u8).width, 2); + assert_eq!(UInt::for_value(4u8).width, 3); + } + + #[test] + fn test_sint_for_value() { + assert_eq!(SInt::for_value(-5).width, 4); + assert_eq!(SInt::for_value(-4).width, 3); + assert_eq!(SInt::for_value(-3).width, 3); + assert_eq!(SInt::for_value(-2).width, 2); + assert_eq!(SInt::for_value(-1).width, 1); + assert_eq!(SInt::for_value(0).width, 0); + assert_eq!(SInt::for_value(1).width, 2); + assert_eq!(SInt::for_value(2).width, 3); + assert_eq!(SInt::for_value(3).width, 3); + assert_eq!(SInt::for_value(4).width, 4); + } + + #[test] + fn test_serde_round_trip() { + use serde_json::json; + #[track_caller] + fn check( + value: T, + expected: serde_json::Value, + ) { + assert_eq!(serde_json::to_value(&value).unwrap(), expected); + assert_eq!(value, T::deserialize(expected).unwrap()); + } + check(UInt[0], json! { { "UInt": { "width": 0 } } }); + check(UInt::<0>::TYPE, json! { { "UInt": { "width": 0 } } }); + check(UInt::<35>::TYPE, json! { { "UInt": { "width": 35 } } }); + check(SInt[0], json! { { "SInt": { "width": 0 } } }); + check(SInt::<0>::TYPE, json! { { "SInt": { "width": 0 } } }); + check(SInt::<35>::TYPE, json! { { "SInt": { "width": 35 } } }); + check(Bool, json! { "Bool" }); + check(UIntValue::from(0u8), json! { "0x0_u8" }); + check(SIntValue::from(-128i8), json! { "-0x80_i8" }); + check(UInt[3].from_int_wrapping(5), json! { "0x5_u3" }); + check(UInt[12].from_int_wrapping(0x1123), json! { "0x123_u12" }); + check(SInt[12].from_int_wrapping(0xFEE), json! { "-0x12_i12" }); + check(SInt[12].from_int_wrapping(0x7EE), json! { "0x7EE_i12" }); + } + + #[test] + fn test_deserialize() { + use serde_json::json; + #[track_caller] + fn check( + expected: Result, + input: serde_json::Value, + ) { + let mut error = String::new(); + let value = T::deserialize(input).map_err(|e| -> &str { + error = e.to_string(); + &error + }); + assert_eq!(value, expected); + } + check::>( + Err("invalid value: a UInt<2>, expected a UInt<0>"), + json! { { "UInt": { "width": 2 } } }, + ); + check::>( + Err("invalid value: a Bool, expected a UInt<0>"), + json! { "Bool" }, + ); + check::>( + Err("invalid value: a Bool, expected a SInt<0>"), + json! { "Bool" }, + ); + check::( + Err("invalid value: a Bool, expected a UInt"), + json! { "Bool" }, + ); + check::( + Err("invalid value: a Bool, expected a SInt"), + json! { "Bool" }, + ); + check::(Err("value too large to fit in type"), json! { "2_u1" }); + check::(Err("value too large to fit in type"), json! { "10_u1" }); + check::(Err("value too large to fit in type"), json! { "0x2_u1" }); + check::(Err("value too large to fit in type"), json! { "0b10_u1" }); + check::(Err("value too large to fit in type"), json! { "0o2_u1" }); + check::(Err("value too large to fit in type"), json! { "0o377_i8" }); + check::(Err("value too large to fit in type"), json! 
{ "0o200_i8" }); + check(Ok(SInt[8].from_int_wrapping(i8::MAX)), json! { "0o177_i8" }); + check::(Err("value too small to fit in type"), json! { "-0o201_i8" }); + check::(Err("value too small to fit in type"), json! { "-0o377_i8" }); + check::(Err("value too small to fit in type"), json! { "-0o400_i8" }); + check::( + Err("value too small to fit in type"), + json! { "-0o4000_i8" }, + ); + check(Ok(UIntValue::from(0u8)), json! { "0_u8" }); + check(Ok(UIntValue::from(0u8)), json! { "0b0_u8" }); + check(Ok(UIntValue::from(0u8)), json! { "00_u8" }); + check(Ok(UIntValue::from(0u8)), json! { "0x0_u8" }); + check(Ok(UIntValue::from(0u8)), json! { "0o0_u8" }); + check(Ok(SIntValue::from(-128i8)), json! { "-0x000_80_i8" }); + check(Ok(SIntValue::from(-128i8)), json! { "-0o002_00_hdl_i8" }); + check(Ok(SIntValue::from(-128i8)), json! { "-0b1__000_0000_i8" }); + check(Ok(UInt[3].from_int_wrapping(5)), json! { " + 0x5_u3 " }); + check( + Ok(UInt[12].from_int_wrapping(0x1123)), + json! { "0x1_2_3_hdl_u12" }, + ); + check(Ok(SInt[12].from_int_wrapping(0xFEE)), json! { "-0x12_i12" }); + check( + Ok(SInt[12].from_int_wrapping(0x7EE)), + json! { " + \t0x7__E_e_i012\n" }, + ); + check(Ok(SInt[0].from_int_wrapping(0)), json! { "-0i0" }); + check(Ok(SInt[1].from_int_wrapping(0)), json! { "-0i1" }); + check(Ok(SInt[0].from_int_wrapping(0)), json! { "-0x0i0" }); + check(Ok(SInt[1].from_int_wrapping(0)), json! { "-0x0i1" }); + } +} diff --git a/crates/fayalite/src/int/uint_in_range.rs b/crates/fayalite/src/int/uint_in_range.rs new file mode 100644 index 0000000..970a439 --- /dev/null +++ b/crates/fayalite/src/int/uint_in_range.rs @@ -0,0 +1,656 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder}, + expr::{ + CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, + ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd}, + }, + int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType}, + intern::{Intern, InternSlice, Interned}, + phantom_const::PhantomConst, + sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType}, + source_location::SourceLocation, + ty::{ + CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, + StaticType, Type, TypeProperties, impl_match_variant_as_self, + }, +}; +use bitvec::{order::Lsb0, view::BitView}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{Error, Visitor, value::UsizeDeserializer}, +}; +use std::{fmt, marker::PhantomData, ops::Index}; + +const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"]; + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] +pub struct UIntInRangeMaskType { + value: Bool, + range: PhantomConstRangeMaskType, +} + +impl Type for UIntInRangeMaskType { + type BaseType = Bundle; + type MaskType = Self; + type SimValue = bool; + impl_match_variant_as_self!(); + + fn mask_type(&self) -> Self::MaskType { + *self + } + + fn canonical(&self) -> CanonicalType { + CanonicalType::Bundle(Bundle::new(self.fields())) + } + + fn from_canonical(canonical_type: CanonicalType) -> Self { + let fields = Bundle::from_canonical(canonical_type).fields(); + let [ + BundleField { + name: value_name, + flipped: false, + ty: value, + }, + BundleField { + name: range_name, + flipped: false, + ty: range, + }, + ] = *fields + else { + panic!("expected UIntInRangeMaskType"); + }; + assert_eq!([&*value_name, &*range_name], 
UINT_IN_RANGE_TYPE_FIELD_NAMES); + let value = Bool::from_canonical(value); + let range = PhantomConstRangeMaskType::from_canonical(range); + Self { value, range } + } + + fn source_location() -> SourceLocation { + SourceLocation::builtin() + } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + Bool.sim_value_from_opaque(opaque) + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + Bool.sim_value_clone_from_opaque(value, opaque); + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + Bool.sim_value_to_opaque(value, writer) + } +} + +impl BundleType for UIntInRangeMaskType { + type Builder = NoBuilder; + type FilledBuilder = Expr; + + fn fields(&self) -> Interned<[BundleField]> { + let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES; + let Self { value, range } = self; + [ + BundleField { + name: value_name.intern(), + flipped: false, + ty: value.canonical(), + }, + BundleField { + name: range_name.intern(), + flipped: false, + ty: range.canonical(), + }, + ] + .intern_slice() + } +} + +impl StaticType for UIntInRangeMaskType { + const TYPE: Self = Self { + value: Bool, + range: PhantomConstRangeMaskType::TYPE, + }; + const MASK_TYPE: Self::MaskType = Self::TYPE; + const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new() + .field(false, Bool::TYPE_PROPERTIES) + .field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES) + .finish(); + const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES; +} + +impl ToSimValueWithType for bool { + fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue { + SimValue::from_value(ty, *self) + } +} + +impl ExprCastTo for UIntInRangeMaskType { + fn cast_to(src: Expr, to_type: Bool) -> Expr { + src.cast_to_bits().cast_to(to_type) + } +} + +impl ExprCastTo for Bool { + fn cast_to(src: Expr, to_type: UIntInRangeMaskType) -> Expr { + src.cast_to_static::>().cast_bits_to(to_type) + } +} + +impl ExprPartialEq for UIntInRangeMaskType { + fn cmp_eq(lhs: Expr, rhs: Expr) -> Expr { + lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits()) + } + fn cmp_ne(lhs: Expr, rhs: Expr) -> Expr { + lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits()) + } +} + +impl SimValuePartialEq for UIntInRangeMaskType { + fn sim_value_eq(this: &SimValue, other: &SimValue) -> bool { + **this == **other + } +} + +type PhantomConstRangeMaskType = > as Type>::MaskType; + +#[derive(Default, Copy, Clone, Debug)] +struct RangeParseError; + +macro_rules! 
define_uint_in_range_type { + ( + $UIntInRange:ident, + $UIntInRangeType:ident, + $UIntInRangeTypeWithoutGenerics:ident, + $UIntInRangeTypeWithStart:ident, + $SerdeRange:ident, + $range_operator_str:literal, + |$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr, + ) => { + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + struct $SerdeRange { + start: Start::SizeType, + end: End::SizeType, + } + + impl Default for $SerdeRange { + fn default() -> Self { + Self { + start: Start::SIZE, + end: End::SIZE, + } + } + } + + impl std::str::FromStr for $SerdeRange { + type Err = RangeParseError; + + fn from_str(s: &str) -> Result { + let Some((start, end)) = s.split_once($range_operator_str) else { + return Err(RangeParseError); + }; + if start.is_empty() + || start.bytes().any(|b| !b.is_ascii_digit()) + || end.is_empty() + || end.bytes().any(|b| !b.is_ascii_digit()) + { + return Err(RangeParseError); + } + let start = start.parse().map_err(|_| RangeParseError)?; + let end = end.parse().map_err(|_| RangeParseError)?; + let retval = Self { start, end }; + if retval.is_empty() { + Err(RangeParseError) + } else { + Ok(retval) + } + } + } + + impl fmt::Display for $SerdeRange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { start, end } = *self; + write!( + f, + "{}{}{}", + Start::as_usize(start), + $range_operator_str, + End::as_usize(end), + ) + } + } + + impl Serialize for $SerdeRange { + fn serialize(&self, serializer: S) -> Result { + serializer.collect_str(self) + } + } + + impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange { + fn deserialize>(deserializer: D) -> Result { + struct SerdeRangeVisitor(PhantomData<(Start, End)>); + impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor { + type Value = $SerdeRange; + + fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("a string with format \"")?; + if let Some(start) = Start::KNOWN_VALUE { + write!(f, "{start}")?; + } else { + f.write_str("")?; + }; + f.write_str($range_operator_str)?; + if let Some(end) = End::KNOWN_VALUE { + write!(f, "{end}")?; + } else { + f.write_str("")?; + }; + f.write_str("\" that is a non-empty range") + } + + fn visit_str(self, v: &str) -> Result { + let $SerdeRange:: { start, end } = + v.parse().map_err(|_| { + Error::invalid_value(serde::de::Unexpected::Str(v), &self) + })?; + let start = + Start::SizeType::deserialize(UsizeDeserializer::::new(start))?; + let end = End::SizeType::deserialize(UsizeDeserializer::::new(end))?; + Ok($SerdeRange { start, end }) + } + + fn visit_bytes(self, v: &[u8]) -> Result { + match std::str::from_utf8(v) { + Ok(v) => self.visit_str(v), + Err(_) => { + Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self)) + } + } + } + } + deserializer.deserialize_str(SerdeRangeVisitor(PhantomData)) + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash)] + pub struct $UIntInRangeType { + value: UInt, + range: PhantomConst<$SerdeRange>, + } + + impl $UIntInRangeType { + fn from_phantom_const_range(range: PhantomConst<$SerdeRange>) -> Self { + let $SerdeRange { start, end } = *range.get(); + let $uint_range_usize_start = Start::as_usize(start); + let $uint_range_usize_end = End::as_usize(end); + Self { + value: $uint_range_usize, + range, + } + } + pub fn new(start: Start::SizeType, end: End::SizeType) -> Self { + Self::from_phantom_const_range(PhantomConst::new( + $SerdeRange { start, end }.intern_sized(), + )) + } + pub fn bit_width(self) -> usize { + self.value.width() + } + 
pub fn start(self) -> Start::SizeType { + self.range.get().start + } + pub fn end(self) -> End::SizeType { + self.range.get().end + } + } + + impl fmt::Debug for $UIntInRangeType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { value, range } = self; + let $SerdeRange { start, end } = *range.get(); + f.debug_struct(&format!( + "{}<{}, {}>", + stringify!($UIntInRange), + Start::as_usize(start), + End::as_usize(end), + )) + .field("value", value) + .finish_non_exhaustive() + } + } + + impl Type for $UIntInRangeType { + type BaseType = Bundle; + type MaskType = UIntInRangeMaskType; + type SimValue = usize; + impl_match_variant_as_self!(); + + fn mask_type(&self) -> Self::MaskType { + UIntInRangeMaskType::TYPE + } + + fn canonical(&self) -> CanonicalType { + CanonicalType::Bundle(Bundle::new(self.fields())) + } + + fn from_canonical(canonical_type: CanonicalType) -> Self { + let fields = Bundle::from_canonical(canonical_type).fields(); + let [ + BundleField { + name: value_name, + flipped: false, + ty: value, + }, + BundleField { + name: range_name, + flipped: false, + ty: range, + }, + ] = *fields + else { + panic!("expected {}", stringify!($UIntInRange)); + }; + assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES); + let value = UInt::from_canonical(value); + let range = PhantomConst::<$SerdeRange>::from_canonical(range); + let retval = Self::from_phantom_const_range(range); + assert_eq!(retval, Self { value, range }); + retval + } + + fn source_location() -> SourceLocation { + SourceLocation::builtin() + } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(opaque.size(), self.value.type_properties().size()); + let mut retval = 0usize; + retval.view_bits_mut::()[..opaque.bit_width()] + .clone_from_bitslice(opaque.bits()); + retval + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + *value = self.sim_value_from_opaque(opaque); + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice( + &value.view_bits::()[..self.value.width()], + )) + } + } + + impl BundleType for $UIntInRangeType { + type Builder = NoBuilder; + type FilledBuilder = Expr; + + fn fields(&self) -> Interned<[BundleField]> { + let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES; + let Self { value, range } = self; + [ + BundleField { + name: value_name.intern(), + flipped: false, + ty: value.canonical(), + }, + BundleField { + name: range_name.intern(), + flipped: false, + ty: range.canonical(), + }, + ] + .intern_slice() + } + } + + impl Default for $UIntInRangeType { + fn default() -> Self { + Self::TYPE + } + } + + impl StaticType for $UIntInRangeType { + const TYPE: Self = { + let $uint_range_usize_start = Start::VALUE; + let $uint_range_usize_end = End::VALUE; + Self { + value: $uint_range_usize, + range: PhantomConst::<$SerdeRange>::TYPE, + } + }; + const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE; + const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new() + .field(false, Self::TYPE.value.type_properties_dyn()) + .field( + false, + PhantomConst::<$SerdeRange>::TYPE_PROPERTIES, + ) + .finish(); + const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES; + } + + impl ToSimValueWithType<$UIntInRangeType> for usize { + fn to_sim_value_with_type( + 
&self, + ty: $UIntInRangeType, + ) -> SimValue<$UIntInRangeType> { + SimValue::from_value(ty, *self) + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] + pub struct $UIntInRangeTypeWithoutGenerics; + + #[allow(non_upper_case_globals)] + pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics = + $UIntInRangeTypeWithoutGenerics; + + impl Index for $UIntInRangeTypeWithoutGenerics { + type Output = $UIntInRangeTypeWithStart; + + fn index(&self, start: StartSize) -> &Self::Output { + Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized()) + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + pub struct $UIntInRangeTypeWithStart(Start::SizeType); + + impl, End: Size> + Index for $UIntInRangeTypeWithStart + { + type Output = $UIntInRangeType; + + fn index(&self, end: EndSize) -> &Self::Output { + Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized()) + } + } + + impl ExprCastTo> + for $UIntInRangeType + { + fn cast_to(src: Expr, to_type: UIntType) -> Expr> { + src.cast_to_bits().cast_to(to_type) + } + } + + impl ExprCastTo<$UIntInRangeType> + for UIntType + { + fn cast_to( + src: Expr, + to_type: $UIntInRangeType, + ) -> Expr<$UIntInRangeType> { + src.cast_to(to_type.value).cast_bits_to(to_type) + } + } + + impl + ExprPartialEq<$UIntInRangeType> + for $UIntInRangeType + { + fn cmp_eq( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits()) + } + fn cmp_ne( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits()) + } + } + + impl + ExprPartialOrd<$UIntInRangeType> + for $UIntInRangeType + { + fn cmp_lt( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits()) + } + fn cmp_le( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_le(rhs.cast_to_bits()) + } + fn cmp_gt( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits()) + } + fn cmp_ge( + lhs: Expr, + rhs: Expr<$UIntInRangeType>, + ) -> Expr { + lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits()) + } + } + + impl + SimValuePartialEq<$UIntInRangeType> + for $UIntInRangeType + { + fn sim_value_eq( + this: &SimValue, + other: &SimValue<$UIntInRangeType>, + ) -> bool { + **this == **other + } + } + + impl ExprPartialEq> + for $UIntInRangeType + { + fn cmp_eq(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_eq(rhs) + } + fn cmp_ne(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_ne(rhs) + } + } + + impl ExprPartialEq<$UIntInRangeType> + for UIntType + { + fn cmp_eq(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + lhs.cmp_eq(rhs.cast_to_bits()) + } + fn cmp_ne(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + lhs.cmp_ne(rhs.cast_to_bits()) + } + } + + impl ExprPartialOrd> + for $UIntInRangeType + { + fn cmp_lt(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_lt(rhs) + } + fn cmp_le(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_le(rhs) + } + fn cmp_gt(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_gt(rhs) + } + fn cmp_ge(lhs: Expr, rhs: Expr>) -> Expr { + lhs.cast_to_bits().cmp_ge(rhs) + } + } + + impl ExprPartialOrd<$UIntInRangeType> + for UIntType + { + fn cmp_lt(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + lhs.cmp_lt(rhs.cast_to_bits()) + } + fn cmp_le(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + lhs.cmp_le(rhs.cast_to_bits()) + } + fn cmp_gt(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + 
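Note: the `Index` impls above return `&Self::Output` by interning the freshly built type and handing out the resulting `'static` reference, which is what makes the `UIntInRange[start][end]` indexing syntax possible. A minimal sketch of that trick using a plain `std` cache in place of fayalite's interner; `UIntTy`, `UIntTypeFamily`, and `intern` are illustrative names:

```rust
use std::{
    collections::HashMap,
    ops::Index,
    sync::{Mutex, OnceLock},
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct UIntTy {
    width: usize,
}

// leak-and-cache so `index` can hand out a `&'static UIntTy`;
// fayalite's `Interned` plays this role in the patch
fn intern(ty: UIntTy) -> &'static UIntTy {
    static CACHE: OnceLock<Mutex<HashMap<usize, &'static UIntTy>>> = OnceLock::new();
    let mut cache = CACHE.get_or_init(Default::default).lock().unwrap();
    *cache
        .entry(ty.width)
        .or_insert_with(|| Box::leak(Box::new(ty)))
}

struct UIntTypeFamily;

impl Index<usize> for UIntTypeFamily {
    type Output = UIntTy;

    // `Index::index` must return a reference, so the value is interned to
    // 'static first -- the same reason the patch goes through
    // `Interned::into_inner(... .intern_sized())`
    fn index(&self, width: usize) -> &UIntTy {
        intern(UIntTy { width })
    }
}

fn main() {
    let u = UIntTypeFamily;
    assert_eq!(u[8], UIntTy { width: 8 });
    assert!(std::ptr::eq(&u[8], &u[8])); // same interned value both times
}
```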
lhs.cmp_gt(rhs.cast_to_bits()) + } + fn cmp_ge(lhs: Expr, rhs: Expr<$UIntInRangeType>) -> Expr { + lhs.cmp_ge(rhs.cast_to_bits()) + } + } + }; +} + +define_uint_in_range_type! { + UIntInRange, + UIntInRangeType, + UIntInRangeTypeWithoutGenerics, + UIntInRangeTypeWithStart, + SerdeRange, + "..", + |start, end| UInt::range_usize(start..end), +} + +define_uint_in_range_type! { + UIntInRangeInclusive, + UIntInRangeInclusiveType, + UIntInRangeInclusiveTypeWithoutGenerics, + UIntInRangeInclusiveTypeWithStart, + SerdeRangeInclusive, + "..=", + |start, end| UInt::range_inclusive_usize(start..=end), +} + +impl SerdeRange { + fn is_empty(self) -> bool { + self.start >= self.end + } +} + +impl SerdeRangeInclusive { + fn is_empty(self) -> bool { + self.start > self.end + } +} diff --git a/crates/fayalite/src/intern.rs b/crates/fayalite/src/intern.rs index 3780ad3..b68140b 100644 --- a/crates/fayalite/src/intern.rs +++ b/crates/fayalite/src/intern.rs @@ -1,23 +1,25 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information #![allow(clippy::type_complexity)] -use crate::intern::type_map::TypeIdMap; +use crate::{intern::type_map::TypeIdMap, util::DefaultBuildHasher}; use bitvec::{ptr::BitPtr, slice::BitSlice, vec::BitVec}; -use hashbrown::{hash_map::RawEntryMut, HashMap, HashTable}; +use hashbrown::HashTable; use serde::{Deserialize, Serialize}; use std::{ any::{Any, TypeId}, borrow::{Borrow, Cow}, cmp::Ordering, + ffi::{OsStr, OsString}, fmt, hash::{BuildHasher, Hash, Hasher}, iter::FusedIterator, marker::PhantomData, ops::Deref, + path::{Path, PathBuf}, sync::{Mutex, RwLock}, }; -pub mod type_map; +mod type_map; pub trait LazyInternedTrait: Send + Sync + Any { fn get(&self) -> Interned; @@ -287,15 +289,266 @@ impl InternedCompare for BitSlice { } } -impl InternedCompare for str { - type InternedCompareKey = PtrEqWithMetadata; - fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey { - PtrEqWithMetadata(this) +/// Safety: `as_bytes` and `from_bytes_unchecked` must return the same pointer as the input. +/// all values returned by `as_bytes` must be valid to pass to `from_bytes_unchecked`. +/// `into_bytes` must return the exact same thing as `as_bytes`. +/// `Interned` must contain the exact same references as `Interned<[u8]>`, +/// so they can be safely interconverted without needing re-interning. +unsafe trait InternStrLike: ToOwned { + fn as_bytes(this: &Self) -> &[u8]; + fn into_bytes(this: Self::Owned) -> Vec; + /// Safety: `bytes` must be a valid sequence of bytes for this type. All UTF-8 sequences are valid. + unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self; +} + +macro_rules! 
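Note: the two `is_empty` impls just above mirror the standard library's range semantics, which is why the exclusive type uses `start >= end` while the inclusive one uses `start > end`:

```rust
fn main() {
    assert!((5..5).is_empty());   // exclusive: empty when start >= end
    assert!(!(5..=5).is_empty()); // inclusive: still holds one value
    assert!((6..=5).is_empty());  // inclusive: empty only when start > end
}
```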
impl_intern_str_like { + ($ty:ty, owned = $Owned:ty) => { + impl InternedCompare for $ty { + type InternedCompareKey = PtrEqWithMetadata<[u8]>; + fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey { + PtrEqWithMetadata(InternStrLike::as_bytes(this)) + } + } + impl Intern for $ty { + fn intern(&self) -> Interned { + Self::intern_cow(Cow::Borrowed(self)) + } + fn intern_cow(this: Cow<'_, Self>) -> Interned { + Interned::cast_unchecked( + <[u8]>::intern_cow(match this { + Cow::Borrowed(v) => Cow::Borrowed(::as_bytes(v)), + Cow::Owned(v) => { + // verify $Owned is correct + let v: $Owned = v; + Cow::Owned(::into_bytes(v)) + } + }), + // Safety: guaranteed safe because we got the bytes from `as_bytes`/`into_bytes` + |v| unsafe { ::from_bytes_unchecked(v) }, + ) + } + } + impl Default for Interned<$ty> { + fn default() -> Self { + // Safety: safe because the empty sequence is valid UTF-8 + unsafe { <$ty as InternStrLike>::from_bytes_unchecked(&[]) }.intern() + } + } + impl<'de> Deserialize<'de> for Interned<$ty> { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Cow::<'de, $ty>::deserialize(deserializer).map(Intern::intern_cow) + } + } + impl From<$Owned> for Interned<$ty> { + fn from(v: $Owned) -> Self { + v.intern_deref() + } + } + impl From> for $Owned { + fn from(v: Interned<$ty>) -> Self { + Interned::into_inner(v).into() + } + } + impl From> for Box<$ty> { + fn from(v: Interned<$ty>) -> Self { + Interned::into_inner(v).into() + } + } + }; +} + +// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `str` +unsafe impl InternStrLike for str { + fn as_bytes(this: &Self) -> &[u8] { + this.as_bytes() + } + fn into_bytes(this: Self::Owned) -> Vec { + this.into_bytes() + } + unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self { + // Safety: `bytes` is guaranteed UTF-8 by the caller + unsafe { str::from_utf8_unchecked(bytes) } + } +} + +impl_intern_str_like!(str, owned = String); + +// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr` +unsafe impl InternStrLike for OsStr { + fn as_bytes(this: &Self) -> &[u8] { + this.as_encoded_bytes() + } + fn into_bytes(this: Self::Owned) -> Vec { + this.into_encoded_bytes() + } + unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self { + // Safety: `bytes` is guaranteed valid for `OsStr` by the caller + unsafe { OsStr::from_encoded_bytes_unchecked(bytes) } + } +} + +impl_intern_str_like!(OsStr, owned = OsString); + +// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr` +unsafe impl InternStrLike for Path { + fn as_bytes(this: &Self) -> &[u8] { + this.as_os_str().as_encoded_bytes() + } + fn into_bytes(this: Self::Owned) -> Vec { + this.into_os_string().into_encoded_bytes() + } + unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self { + // Safety: `bytes` is guaranteed valid for `OsStr` by the caller + unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(bytes)) } + } +} + +impl_intern_str_like!(Path, owned = PathBuf); + +impl Interned { + pub fn from_utf8(v: Interned<[u8]>) -> Result { + Interned::try_cast_unchecked(v, str::from_utf8) + } + pub fn as_interned_bytes(self) -> Interned<[u8]> { + Interned::cast_unchecked(self, str::as_bytes) + } + pub fn as_interned_os_str(self) -> Interned { + Interned::cast_unchecked(self, AsRef::as_ref) + } + pub fn as_interned_path(self) -> Interned { + 
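Note: the three `InternStrLike` impls work because `str`, `OsStr`, and `Path` all expose (and can be rebuilt from) the same byte view, so a single `[u8]` interner can back all of them. The safe halves of those conversions, shown without the `unsafe` reconstruction step:

```rust
use std::{ffi::OsStr, path::Path};

fn main() {
    let s = "deps/firtool";

    // `str` round-trips through its UTF-8 bytes (the checked version of
    // what `from_bytes_unchecked` does without the check)
    assert_eq!(std::str::from_utf8(s.as_bytes()).unwrap(), s);

    // `OsStr` and `Path` expose the same encoded-bytes view, so one
    // byte-slice interner can serve all three types
    assert_eq!(OsStr::new(s).as_encoded_bytes(), s.as_bytes());
    assert_eq!(Path::new(s).as_os_str().as_encoded_bytes(), s.as_bytes());
}
```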
Interned::cast_unchecked(self, AsRef::as_ref) + } +} + +impl From> for Interned { + fn from(value: Interned) -> Self { + value.as_interned_os_str() + } +} + +impl From> for Interned { + fn from(value: Interned) -> Self { + value.as_interned_path() + } +} + +impl Interned { + pub fn as_interned_encoded_bytes(self) -> Interned<[u8]> { + Interned::cast_unchecked(self, OsStr::as_encoded_bytes) + } + pub fn to_interned_str(self) -> Option> { + Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok() + } + pub fn display(self) -> std::ffi::os_str::Display<'static> { + Self::into_inner(self).display() + } + pub fn as_interned_path(self) -> Interned { + Interned::cast_unchecked(self, AsRef::as_ref) + } +} + +impl From> for Interned { + fn from(value: Interned) -> Self { + value.as_interned_path() + } +} + +impl Interned { + pub fn as_interned_os_str(self) -> Interned { + Interned::cast_unchecked(self, AsRef::as_ref) + } + pub fn to_interned_str(self) -> Option> { + Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok() + } + pub fn display(self) -> std::path::Display<'static> { + Self::into_inner(self).display() + } + pub fn interned_file_name(self) -> Option> { + Some(self.file_name()?.intern()) + } +} + +impl From> for Interned { + fn from(value: Interned) -> Self { + value.as_interned_os_str() + } +} + +pub trait InternSlice: Sized { + type Element: 'static + Send + Sync + Clone + Hash + Eq; + fn intern_slice(self) -> Interned<[Self::Element]>; +} + +impl InternSlice for Box<[T]> { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + self.into_vec().intern_slice() + } +} + +impl InternSlice for Vec { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + self.intern_deref() + } +} + +impl InternSlice for &'_ [T] { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + self.intern() + } +} + +impl InternSlice for &'_ mut [T] { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + self.intern() + } +} + +impl InternSlice for [T; N] { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + (&self).intern_slice() + } +} + +impl InternSlice for Box<[T; N]> { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + let this: Box<[T]> = self; + this.intern_slice() + } +} + +impl InternSlice for &'_ [T; N] { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + let this: &[T] = self; + this.intern() + } +} + +impl InternSlice for &'_ mut [T; N] { + type Element = T; + fn intern_slice(self) -> Interned<[Self::Element]> { + let this: &[T] = self; + this.intern() } } pub trait Intern: Any + Send + Sync { fn intern(&self) -> Interned; + fn intern_deref(self) -> Interned + where + Self: Sized + Deref>, + { + Self::Target::intern_owned(self) + } fn intern_sized(self) -> Interned where Self: Clone, @@ -316,8 +569,37 @@ pub trait Intern: Any + Send + Sync { } } +impl From> for Interned { + fn from(value: Cow<'_, T>) -> Self { + Intern::intern_cow(value) + } +} + +impl From<&'_ T> for Interned { + fn from(value: &'_ T) -> Self { + Intern::intern(value) + } +} + +impl From for Interned { + fn from(value: T) -> Self { + Intern::intern_sized(value) + } +} + +impl From> for Cow<'_, T> { + fn from(value: Interned) -> Self { + Cow::Borrowed(Interned::into_inner(value)) + } +} + +struct InternerState { + table: HashTable<&'static T>, + hasher: DefaultBuildHasher, +} + pub struct Interner { - map: Mutex>, + state: Mutex>, } impl Interner { @@ 
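Note: `InternSlice` exists so that arrays, `Vec`s, boxed slices, and borrowed slices can all be turned into the same interned-slice handle. A rough sketch of that trait shape with `Arc<[T]>` standing in for `Interned<[T]>`; `ToSharedSlice` is a hypothetical name, not fayalite API:

```rust
use std::sync::Arc;

trait ToSharedSlice {
    type Element: Clone;
    fn to_shared_slice(self) -> Arc<[Self::Element]>;
}

impl<T: Clone> ToSharedSlice for Vec<T> {
    type Element = T;
    fn to_shared_slice(self) -> Arc<[T]> {
        self.into()
    }
}

impl<T: Clone> ToSharedSlice for &'_ [T] {
    type Element = T;
    fn to_shared_slice(self) -> Arc<[T]> {
        self.into()
    }
}

impl<T: Clone, const N: usize> ToSharedSlice for [T; N] {
    type Element = T;
    fn to_shared_slice(self) -> Arc<[T]> {
        // go through `Vec` the way the patch's array impls go through `&[T]`
        self.to_vec().into()
    }
}

fn main() {
    let a: Arc<[u32]> = vec![1, 2, 3].to_shared_slice();
    let b: Arc<[u32]> = [1, 2, 3].to_shared_slice();
    let c: Arc<[u32]> = (&[1, 2, 3][..]).to_shared_slice();
    assert_eq!(a, b);
    assert_eq!(b, c);
}
```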
-330,7 +612,10 @@ impl Interner { impl Default for Interner { fn default() -> Self { Self { - map: Default::default(), + state: Mutex::new(InternerState { + table: HashTable::new(), + hasher: Default::default(), + }), } } } @@ -341,17 +626,16 @@ impl Interner { alloc: F, value: Cow<'_, T>, ) -> Interned { - let mut map = self.map.lock().unwrap(); - let hasher = map.hasher().clone(); - let hash = hasher.hash_one(&*value); - let inner = match map.raw_entry_mut().from_hash(hash, |k| **k == *value) { - RawEntryMut::Occupied(entry) => *entry.key(), - RawEntryMut::Vacant(entry) => { - *entry - .insert_with_hasher(hash, alloc(value), (), |k| hasher.hash_one(&**k)) - .0 - } - }; + let mut state = self.state.lock().unwrap(); + let InternerState { table, hasher } = &mut *state; + let inner = *table + .entry( + hasher.hash_one(&*value), + |k| **k == *value, + |k| hasher.hash_one(&**k), + ) + .or_insert_with(|| alloc(value)) + .get(); Interned { inner } } } @@ -374,12 +658,6 @@ impl Interner { } } -impl Interner { - fn intern_str(&self, value: Cow<'_, str>) -> Interned { - self.intern(|value| value.into_owned().leak(), value) - } -} - pub struct Interned { inner: &'static T, } @@ -409,6 +687,12 @@ forward_fmt_trait!(Pointer); forward_fmt_trait!(UpperExp); forward_fmt_trait!(UpperHex); +impl, U: ?Sized> AsRef for Interned { + fn as_ref(&self) -> &U { + T::as_ref(self) + } +} + #[derive(Clone, Debug)] pub struct InternedSliceIter { slice: Interned<[T]>, @@ -478,6 +762,57 @@ where } } +impl FromIterator for Interned +where + String: FromIterator, +{ + fn from_iter>(iter: T) -> Self { + String::from_iter(iter).intern_deref() + } +} + +impl FromIterator for Interned +where + PathBuf: FromIterator, +{ + fn from_iter>(iter: T) -> Self { + PathBuf::from_iter(iter).intern_deref() + } +} + +impl FromIterator for Interned +where + OsString: FromIterator, +{ + fn from_iter>(iter: T) -> Self { + OsString::from_iter(iter).intern_deref() + } +} + +impl From> for clap::builder::Str { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::builder::OsStr { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::builder::StyledStr { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::Id { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + impl From> for Vec { fn from(value: Interned<[T]>) -> Self { Vec::from(&*value) @@ -490,24 +825,12 @@ impl From> for Box<[T]> { } } -impl From> for String { - fn from(value: Interned) -> Self { - String::from(&*value) - } -} - impl Default for Interned<[I]> where [I]: Intern, { fn default() -> Self { - [][..].intern() - } -} - -impl Default for Interned { - fn default() -> Self { - "".intern() + Intern::intern(&[]) } } @@ -638,15 +961,6 @@ impl<'de> Deserialize<'de> for Interned { } } -impl<'de> Deserialize<'de> for Interned { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - String::deserialize(deserializer).map(Intern::intern_owned) - } -} - impl Intern for T { fn intern(&self) -> Interned { Self::intern_cow(Cow::Borrowed(self)) @@ -707,26 +1021,6 @@ impl Intern for BitSlice { } } -impl Intern for str { - fn intern(&self) -> Interned { - Self::intern_cow(Cow::Borrowed(self)) - } - - fn intern_owned(this: ::Owned) -> Interned - where - Self: ToOwned, - { - Self::intern_cow(Cow::Owned(this)) - } - - fn intern_cow(this: Cow<'_, Self>) -> Interned - where - Self: 
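Note: the rewritten `Interner::intern` switches from hashbrown's removed `raw_entry_mut` API to `HashTable::entry`, which takes the precomputed hash, an equality check, and a re-hash closure for displaced entries. A self-contained, leak-based string interner using the same pattern (requires the `hashbrown` crate; this `Interner` is illustrative, not fayalite's type):

```rust
use hashbrown::HashTable;
use std::{collections::hash_map::RandomState, hash::BuildHasher};

struct Interner {
    table: HashTable<&'static str>,
    hasher: RandomState,
}

impl Interner {
    fn new() -> Self {
        Self {
            table: HashTable::new(),
            hasher: RandomState::new(),
        }
    }

    fn intern(&mut self, value: &str) -> &'static str {
        // destructure so the table borrow and the hasher borrow stay disjoint,
        // as the patched `InternerState` does
        let Self { table, hasher } = self;
        *table
            .entry(
                hasher.hash_one(value),
                |k| *k == value,
                |k| hasher.hash_one(*k),
            )
            .or_insert_with(|| {
                let leaked: &'static str = Box::leak(value.to_owned().into_boxed_str());
                leaked
            })
            .get()
    }
}

fn main() {
    let mut interner = Interner::new();
    let a = interner.intern("fayalite");
    let b = interner.intern(&String::from("fayalite"));
    assert!(std::ptr::eq(a, b)); // deduplicated to a single allocation
}
```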
ToOwned, - { - Interner::get().intern_str(this) - } -} - pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy { type InputRef<'a>: 'a + Send + Sync + Hash + Copy; type InputOwned: 'static + Send + Sync; @@ -742,7 +1036,7 @@ pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy { fn get_cow(self, input: Self::InputCow<'_>) -> Self::Output { static TYPE_ID_MAP: TypeIdMap = TypeIdMap::new(); let map: &RwLock<( - hashbrown::hash_map::DefaultHashBuilder, + DefaultBuildHasher, HashTable<(Self, Self::InputOwned, Self::Output)>, )> = TYPE_ID_MAP.get_or_insert_default(); fn hash_eq_key<'a, 'b, T: MemoizeGeneric>( diff --git a/crates/fayalite/src/intern/type_map.rs b/crates/fayalite/src/intern/type_map.rs index 48433af..945116b 100644 --- a/crates/fayalite/src/intern/type_map.rs +++ b/crates/fayalite/src/intern/type_map.rs @@ -1,10 +1,8 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use hashbrown::HashMap; use std::{ any::{Any, TypeId}, hash::{BuildHasher, Hasher}, - ptr::NonNull, sync::RwLock, }; @@ -75,59 +73,36 @@ impl BuildHasher for TypeIdBuildHasher { } } -struct Value(NonNull); - -impl Value { - unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) { - unsafe { &*self.0.as_ptr() } - } - fn new(v: Box) -> Self { - unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) } - } -} - -unsafe impl Send for Value {} -unsafe impl Sync for Value {} - -impl Drop for Value { - fn drop(&mut self) { - unsafe { std::ptr::drop_in_place(self.0.as_ptr()) } - } -} - -pub struct TypeIdMap(RwLock>); +pub(crate) struct TypeIdMap( + RwLock>, +); impl TypeIdMap { - pub const fn new() -> Self { - Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher))) + pub(crate) const fn new() -> Self { + Self(RwLock::new(hashbrown::HashMap::with_hasher( + TypeIdBuildHasher, + ))) } #[cold] - unsafe fn insert_slow( + fn insert_slow( &self, type_id: TypeId, make: fn() -> Box, - ) -> &(dyn Any + Sync + Send) { - let value = Value::new(make()); + ) -> &'static (dyn Any + Sync + Send) { + let value = Box::leak(make()); let mut write_guard = self.0.write().unwrap(); - unsafe { - write_guard - .entry(type_id) - .or_insert(value) - .get_transmute_lifetime() - } + *write_guard.entry(type_id).or_insert(value) } - pub fn get_or_insert_default(&self) -> &T { + pub(crate) fn get_or_insert_default(&self) -> &T { let type_id = TypeId::of::(); let read_guard = self.0.read().unwrap(); - let retval = read_guard - .get(&type_id) - .map(|v| unsafe { Value::get_transmute_lifetime(v) }); + let retval = read_guard.get(&type_id).map(|v| *v); drop(read_guard); let retval = match retval { Some(retval) => retval, - None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) }, + None => self.insert_slow(type_id, move || Box::new(T::default())), }; - unsafe { &*(retval as *const dyn Any as *const T) } + retval.downcast_ref().expect("known to have correct TypeId") } } diff --git a/crates/fayalite/src/lib.rs b/crates/fayalite/src/lib.rs index eedb1bb..98849a6 100644 --- a/crates/fayalite/src/lib.rs +++ b/crates/fayalite/src/lib.rs @@ -4,6 +4,18 @@ // TODO: enable: // #![warn(missing_docs)] +#![deny( + rustdoc::bare_urls, + rustdoc::broken_intra_doc_links, + rustdoc::invalid_codeblock_attributes, + rustdoc::invalid_html_tags, + rustdoc::invalid_rust_codeblocks, + rustdoc::private_doc_tests, + rustdoc::private_intra_doc_links, + rustdoc::redundant_explicit_links, + rustdoc::unescaped_backticks +)] + //! 
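Note: the simplified `TypeIdMap` replaces the old `NonNull`-based `Value` wrapper with `Box::leak`, so the stored `&'static dyn Any` references are honest and `downcast_ref` replaces raw pointer casts. A sketch of the same idea on top of `std` collections (it assumes, like the original, that entries are program-lifetime singletons, so leaking is intended):

```rust
use std::{
    any::{Any, TypeId},
    collections::HashMap,
    sync::RwLock,
};

#[derive(Default)]
struct TypeIdMap(RwLock<HashMap<TypeId, &'static (dyn Any + Send + Sync)>>);

impl TypeIdMap {
    fn get_or_insert_default<T: Any + Send + Sync + Default>(&self) -> &'static T {
        let type_id = TypeId::of::<T>();
        let existing = self.0.read().unwrap().get(&type_id).copied();
        let value = match existing {
            Some(value) => value,
            None => {
                // leak a boxed default so the reference is truly 'static
                let value: &'static (dyn Any + Send + Sync) =
                    Box::leak(Box::new(T::default()));
                // another thread may have inserted concurrently; keep whichever won
                *self.0.write().unwrap().entry(type_id).or_insert(value)
            }
        };
        value.downcast_ref().expect("known to have correct TypeId")
    }
}

fn main() {
    let map = TypeIdMap::default();
    let a: &'static Vec<u32> = map.get_or_insert_default();
    let b: &'static Vec<u32> = map.get_or_insert_default();
    assert!(std::ptr::eq(a, b));
}
```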
[Main Documentation][_docs] extern crate self as fayalite; @@ -11,6 +23,59 @@ extern crate self as fayalite; #[doc(hidden)] pub use std as __std; +#[doc(hidden)] +#[macro_export] +macro_rules! __cfg_expansion_helper { + ( + [ + $($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)* + ] + [ + $cfg:ident($($expr:tt)*), + $($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)* + ] + // pass as tt so we get right span for attribute + $after_evaluation_attr:tt $after_evaluation_body:tt + ) => { + #[$cfg($($expr)*)] + $crate::__cfg_expansion_helper! { + [ + $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)* + $cfg($($expr)*) = true, + ] + [ + $($unevaluated_cfgs($($unevaluated_exprs)*),)* + ] + $after_evaluation_attr $after_evaluation_body + } + #[$cfg(not($($expr)*))] + $crate::__cfg_expansion_helper! { + [ + $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)* + $cfg($($expr)*) = false, + ] + [ + $($unevaluated_cfgs($($unevaluated_exprs)*),)* + ] + $after_evaluation_attr $after_evaluation_body + } + }; + ( + [ + $($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)* + ] + [] + // don't use #[...] so we get right span for `#` and `[]` of attribute + {$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*} + ) => { + $($after_evaluation_attr)* + #[__evaluated_cfgs([ + $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)* + ])] + $($after_evaluation_body)* + }; +} + #[doc(inline)] /// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates /// a [`Module`][`::fayalite::module::Module`] when called. @@ -21,8 +86,139 @@ pub use std as __std; pub use fayalite_proc_macros::hdl_module; #[doc(inline)] +/// The `#[hdl]` attribute is supported on several different kinds of [Rust Items](https://doc.rust-lang.org/reference/items.html): +/// +/// # Functions and Methods +/// Enable's the stuff that you can use inside a [module's body](crate::_docs::modules::module_bodies), +/// but without being a module or changing the function's signature. +/// The only exception is that you can't use stuff that requires the automatically-provided `m` variable. +/// +/// # Structs +// TODO: expand on struct docs +/// e.g.: +/// ``` +/// # use fayalite::prelude::*; +/// # #[hdl] +/// # pub struct OtherStruct {} +/// #[hdl] +/// pub struct MyStruct { +/// #[hdl(flip)] +/// pub a: UInt<5>, +/// pub b: Bool, +/// #[hdl(flip)] +/// pub c: OtherStruct, +/// } +/// ``` +/// +/// # Enums +// TODO: expand on enum docs +/// e.g.: +/// ``` +/// # use fayalite::prelude::*; +/// # #[hdl] +/// # pub struct MyStruct {} +/// #[hdl] +/// pub enum MyEnum { +/// A(UInt<3>), +/// B, +/// C(MyStruct), +/// } +/// ``` +/// +/// # Type Aliases +/// +/// There's three different ways you can create a type alias: +/// +/// # Normal Type Alias +/// +/// This works exactly how you'd expect: +/// ``` +/// # use fayalite::prelude::*; +/// # #[hdl] +/// # pub struct MyStruct { +/// # v: T, +/// # } +/// #[hdl] +/// pub type MyType = MyStruct; +/// +/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime: +/// +/// let ty = MyType[UInt[3]]; +/// assert_eq!(ty, MyStruct[UInt[3]]); +/// ``` +/// +/// # Type Alias that gets a [`Type`] from a [`PhantomConst`] +/// +/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Type`] that you can use in other #[hdl] types. 
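Note: `__cfg_expansion_helper` evaluates a list of `cfg(...)` predicates one at a time by emitting the remaining work twice, once under `#[cfg(pred)]` and once under `#[cfg(not(pred))]`, recording each result before recursing. A much-simplified sketch of that recursion whose final step merely stringifies the collected results (the real helper re-attaches them to an item instead):

```rust
// evaluate each cfg predicate and record it as `= true` / `= false`
macro_rules! eval_cfgs {
    (
        [$($done:tt)*]
        [cfg($($pred:tt)*), $($rest:tt)*]
        $name:ident
    ) => {
        #[cfg($($pred)*)]
        eval_cfgs! { [$($done)* ($($pred)*) = true,] [$($rest)*] $name }
        #[cfg(not($($pred)*))]
        eval_cfgs! { [$($done)* ($($pred)*) = false,] [$($rest)*] $name }
    };
    (
        [$($done:tt)*]
        []
        $name:ident
    ) => {
        // simplified final step: expose the evaluated list as a string
        pub const $name: &str = stringify!($($done)*);
    };
}

eval_cfgs! { [] [cfg(unix), cfg(test),] EVALUATED_CFGS }

fn main() {
    // e.g. `(unix) = true, (test) = false,` on a non-test unix build
    println!("{EVALUATED_CFGS}");
}
```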
+/// +/// ``` +/// # use fayalite::{intern::Intern, prelude::*}; +/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)] +/// pub struct Config { +/// pub foo: usize, +/// pub bar: Bundle, +/// } +/// +/// // the expression inside `get` is called with `Interned` and returns `Array` +/// #[hdl(get(|config| Array[config.bar][config.foo]))] +/// pub type GetMyArray> = Array; +/// +/// // you can then use it in other types: +/// +/// #[hdl(no_static)] +/// pub struct WrapMyArray> { +/// pub my_array: GetMyArray
, +/// } +/// +/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime: +/// let bar = Bundle::new(Default::default()); +/// let config = PhantomConst::new(Config { foo: 12, bar }.intern_sized()); +/// let ty = WrapMyArray[config]; +/// assert_eq!(ty.my_array, Array[bar][12]); +/// ``` +/// +/// # Type Alias that gets a [`Size`] from a [`PhantomConst`] +/// +/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Size`] that you can use in other #[hdl] types. +/// +/// ``` +/// # use fayalite::{intern::Intern, prelude::*}; +/// # #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)] +/// # pub struct ConfigItem {} +/// # impl ConfigItem { +/// # pub fn new() -> Self { +/// # Self {} +/// # } +/// # } +/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)] +/// pub struct Config { +/// pub items: Vec, +/// } +/// +/// // the expression inside `get` is called with `Interned` and returns `usize` (not DynSize) +/// #[hdl(get(|config| config.items.len()))] +/// pub type GetItemsLen> = DynSize; // must be DynSize +/// +/// // you can then use it in other types: +/// +/// #[hdl(no_static)] +/// pub struct FlagPerItem> { +/// pub flags: ArrayType>, +/// } +/// +/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime: +/// let config = PhantomConst::new(Config { items: vec![ConfigItem::new(); 5] }.intern_sized()); +/// let ty = FlagPerItem[config]; +/// assert_eq!(ty.flags, Array[Bool][5]); +/// ``` +/// +/// [`PhantomConst`]: crate::phantom_const::PhantomConst +/// [`Size`]: crate::int::Size +/// [`Type`]: crate::ty::Type pub use fayalite_proc_macros::hdl; +pub use bitvec; + /// struct used as a placeholder when applying defaults #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] pub struct __; @@ -32,8 +228,8 @@ pub mod _docs; pub mod annotations; pub mod array; +pub mod build; pub mod bundle; -pub mod cli; pub mod clock; pub mod enum_; pub mod expr; @@ -43,11 +239,15 @@ pub mod int; pub mod intern; pub mod memory; pub mod module; +pub mod phantom_const; +pub mod platform; pub mod prelude; pub mod reg; pub mod reset; +pub mod sim; pub mod source_location; pub mod testing; pub mod ty; pub mod util; +pub mod vendor; pub mod wire; diff --git a/crates/fayalite/src/memory.rs b/crates/fayalite/src/memory.rs index f583a8c..46eb59b 100644 --- a/crates/fayalite/src/memory.rs +++ b/crates/fayalite/src/memory.rs @@ -7,7 +7,7 @@ use crate::{ array::{Array, ArrayType}, bundle::{Bundle, BundleType}, clock::Clock, - expr::{ops::BundleLiteral, repeat, Expr, Flow, ToExpr, ToLiteralBits}, + expr::{Expr, Flow, ToExpr, ToLiteralBits, ops::BundleLiteral, repeat}, hdl, int::{Bool, DynSize, Size, UInt, UIntType}, intern::{Intern, Interned}, @@ -22,7 +22,7 @@ use std::{ fmt, hash::{Hash, Hasher}, marker::PhantomData, - num::NonZeroU32, + num::NonZeroUsize, rc::Rc, }; @@ -470,7 +470,7 @@ pub enum ReadUnderWrite { Undefined, } -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] struct MemImpl { scoped_name: ScopedNameId, source_location: SourceLocation, @@ -478,7 +478,7 @@ struct MemImpl { initial_value: Option>, ports: P, read_latency: usize, - write_latency: NonZeroU32, + write_latency: NonZeroUsize, read_under_write: ReadUnderWrite, port_annotations: Interned<[TargetedAnnotation]>, mem_annotations: Interned<[Annotation]>, @@ -519,7 +519,12 @@ impl fmt::Debug for Mem { f.debug_struct("Mem") 
.field("name", scoped_name) .field("array_type", array_type) - .field("initial_value", initial_value) + .field( + "initial_value", + &initial_value.as_ref().map(|initial_value| { + DebugMemoryData::from_bit_slice(*array_type, initial_value) + }), + ) .field("read_latency", read_latency) .field("write_latency", write_latency) .field("read_under_write", read_under_write) @@ -562,7 +567,7 @@ impl Mem { initial_value: Option>, ports: Interned<[MemPort]>, read_latency: usize, - write_latency: NonZeroU32, + write_latency: NonZeroUsize, read_under_write: ReadUnderWrite, port_annotations: Interned<[TargetedAnnotation]>, mem_annotations: Interned<[Annotation]>, @@ -645,7 +650,7 @@ impl Mem { pub fn read_latency(self) -> usize { self.0.read_latency } - pub fn write_latency(self) -> NonZeroU32 { + pub fn write_latency(self) -> NonZeroUsize { self.0.write_latency } pub fn read_under_write(self) -> ReadUnderWrite { @@ -707,7 +712,7 @@ pub(crate) struct MemBuilderTarget { pub(crate) initial_value: Option>, pub(crate) ports: Vec>, pub(crate) read_latency: usize, - pub(crate) write_latency: NonZeroU32, + pub(crate) write_latency: NonZeroUsize, pub(crate) read_under_write: ReadUnderWrite, pub(crate) port_annotations: Vec, pub(crate) mem_annotations: Vec, @@ -867,7 +872,7 @@ impl MemBuilder { initial_value: None, ports: vec![], read_latency: 0, - write_latency: NonZeroU32::new(1).unwrap(), + write_latency: NonZeroUsize::new(1).unwrap(), read_under_write: ReadUnderWrite::Old, port_annotations: vec![], mem_annotations: vec![], @@ -1030,10 +1035,10 @@ impl MemBuilder { pub fn read_latency(&mut self, read_latency: usize) { self.target.borrow_mut().read_latency = read_latency; } - pub fn get_write_latency(&self) -> NonZeroU32 { + pub fn get_write_latency(&self) -> NonZeroUsize { self.target.borrow().write_latency } - pub fn write_latency(&mut self, write_latency: NonZeroU32) { + pub fn write_latency(&mut self, write_latency: NonZeroUsize) { self.target.borrow_mut().write_latency = write_latency; } pub fn get_read_under_write(&self) -> ReadUnderWrite { @@ -1061,7 +1066,8 @@ pub fn splat_mask(ty: T, value: Expr) -> Expr> { | CanonicalType::SyncReset(_) | CanonicalType::Reset(_) | CanonicalType::Clock(_) - | CanonicalType::Enum(_) => Expr::from_canonical(Expr::canonical(value)), + | CanonicalType::Enum(_) + | CanonicalType::DynSimOnly(_) => Expr::from_canonical(Expr::canonical(value)), CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat( splat_mask(array.element(), value), array.len(), @@ -1077,5 +1083,64 @@ pub fn splat_mask(ty: T, value: Expr) -> Expr> { ) .to_expr(), )), + CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())), + } +} + +pub trait DebugMemoryDataGetElement { + fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice; +} + +impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F { + fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice { + self(element_index, array_type) + } +} + +#[derive(Clone)] +pub struct DebugMemoryData { + pub array_type: Array, + pub get_element: GetElement, +} + +impl DebugMemoryDataGetElement for &'_ BitSlice { + fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice { + assert!(element_index < array_type.len()); + let stride = array_type.element().bit_width(); + let start = element_index + .checked_mul(stride) + .expect("memory is too big"); + let end = start.checked_add(stride).expect("memory is too big"); + 
&self[start..end] + } +} + +impl<'a> DebugMemoryData<&'a BitSlice> { + pub fn from_bit_slice( + array_type: ArrayType, + bit_slice: &'a BitSlice, + ) -> Self { + let array_type = array_type.as_dyn_array(); + assert_eq!(bit_slice.len(), array_type.type_properties().bit_width); + Self { + array_type, + get_element: bit_slice, + } + } +} + +impl fmt::Debug for DebugMemoryData { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.array_type.len() == 0 { + return f.write_str("[]"); + } + writeln!(f, "[\n // len = {:#x}", self.array_type.len())?; + for element_index in 0..self.array_type.len() { + let element = crate::util::BitSliceWriteWithBase( + self.get_element.get_element(element_index, self.array_type), + ); + writeln!(f, " [{element_index:#x}]: {element:#x},")?; + } + f.write_str("]") } } diff --git a/crates/fayalite/src/module.rs b/crates/fayalite/src/module.rs index 7387832..6527043 100644 --- a/crates/fayalite/src/module.rs +++ b/crates/fayalite/src/module.rs @@ -8,30 +8,34 @@ use crate::{ clock::{Clock, ClockDomain}, enum_::{Enum, EnumMatchVariantsIter, EnumType}, expr::{ + Expr, Flow, ToExpr, ops::VariantAccess, target::{ GetTarget, Target, TargetBase, TargetPathArrayElement, TargetPathBundleField, TargetPathElement, }, - Expr, Flow, ToExpr, }, formal::FormalKind, int::{Bool, DynSize, Size}, intern::{Intern, Interned}, memory::{Mem, MemBuilder, MemBuilderTarget, PortName}, + platform::PlatformIOBuilder, reg::Reg, + reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset}, + sim::{ExternModuleSimGenerator, ExternModuleSimulation}, source_location::SourceLocation, ty::{CanonicalType, Type}, - util::ScopedRef, + util::{HashMap, HashSet, ScopedRef}, wire::{IncompleteWire, Wire}, }; -use hashbrown::{hash_map::Entry, HashMap, HashSet}; +use hashbrown::hash_map::Entry; use num_bigint::BigInt; use std::{ cell::RefCell, - collections::VecDeque, + collections::{BTreeMap, VecDeque}, convert::Infallible, fmt, + future::IntoFuture, hash::{Hash, Hasher}, iter::FusedIterator, marker::PhantomData, @@ -180,7 +184,7 @@ impl Block { } } -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct StmtConnect { pub lhs: Expr, pub rhs: Expr, @@ -235,7 +239,7 @@ impl fmt::Debug for StmtConnect { } } -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct StmtFormal { pub kind: FormalKind, pub clk: Expr, @@ -284,6 +288,8 @@ pub struct StmtIf { pub blocks: [S::Block; 2], } +impl Copy for StmtIf {} + impl StmtIf { pub fn then_block(&self) -> S::Block { self.blocks[0] @@ -315,6 +321,8 @@ pub struct StmtMatch { pub blocks: Interned<[S::Block]>, } +impl Copy for StmtMatch {} + impl StmtMatch { #[track_caller] fn assert_validity(&self) { @@ -346,7 +354,7 @@ macro_rules! wrapper_enum { $(#[$enum_meta:meta])* $vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> { $( - #[is = $is_fn:ident, as_ref = $as_ref_fn:ident] + #[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?] $(#[$variant_meta:meta])* $Variant:ident($VariantTy:ty), )* @@ -358,7 +366,7 @@ macro_rules! wrapper_enum { $(#[$enum_meta])* $vis enum $enum_name<$T_enum: $T_bound = $T_enum_default> { $( - #[is = $is_fn, as_ref = $as_ref_fn] + #[is = $is_fn, as_ref = $as_ref_fn $(, from = $from)?] $(#[$variant_meta])* $Variant($VariantTy), )* @@ -385,7 +393,7 @@ macro_rules! 
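Note: `DebugMemoryData` treats the initial value as one flat bit slice and carves out element `i` at `[i * stride .. (i + 1) * stride]`, where the stride is the element's bit width. A small demonstration of that layout using `bitvec` directly (the `element` helper is illustrative, not fayalite API):

```rust
use bitvec::prelude::*;

// element `index` of a memory stored as a flat bit slice with `width`-bit elements
fn element(bits: &BitSlice, width: usize, index: usize) -> &BitSlice {
    let start = index.checked_mul(width).expect("memory is too big");
    let end = start.checked_add(width).expect("memory is too big");
    &bits[start..end]
}

fn main() {
    // 4 elements x 8 bits each; element i holds the value i + 1
    let mut storage = bitvec![0; 32];
    for i in 0..4 {
        storage[i * 8..(i + 1) * 8].store::<u8>(i as u8 + 1);
    }
    for i in 0..4 {
        assert_eq!(element(&storage, 8, i).load::<u8>(), i as u8 + 1);
    }
}
```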
wrapper_enum { $(#[$enum_meta:meta])* $vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> { $( - #[is = $is_fn:ident, as_ref = $as_ref_fn:ident] + #[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?] $(#[$variant_meta:meta])* $Variant:ident($VariantTy:ty), )* @@ -397,22 +405,22 @@ macro_rules! wrapper_enum { $(#[$enum_meta])* $vis enum $enum_name<$T_enum: $T_bound = $T_enum_default> { $( - #[is = $is_fn, as_ref = $as_ref_fn] + #[is = $is_fn, as_ref = $as_ref_fn $(, from = $from)?] $(#[$variant_meta])* $Variant($VariantTy), )* } } - $( + $($( wrapper_enum! { impl $T_to From<$VariantTy> for $to_type { - fn from(value: $VariantTy) -> Self { + fn $from(value: $VariantTy) -> Self { $enum_name::$Variant(value).into() } } } - )* + )?)* }; ( #[impl()] @@ -420,7 +428,7 @@ macro_rules! wrapper_enum { $(#[$enum_meta:meta])* $vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> { $( - #[is = $is_fn:ident, as_ref = $as_ref_fn:ident] + #[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?] $(#[$variant_meta:meta])* $Variant:ident($VariantTy:ty), )* @@ -459,13 +467,15 @@ pub struct StmtWire { pub wire: Wire, } +impl Copy for StmtWire {} + #[derive(Hash, Clone, PartialEq, Eq, Debug)] -pub struct StmtReg { +pub struct StmtReg { pub annotations: S::StmtAnnotations, - pub reg: Reg, + pub reg: Reg, } -impl Copy for StmtReg {} +impl Copy for StmtReg {} #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct StmtInstance { @@ -473,6 +483,8 @@ pub struct StmtInstance { pub instance: Instance, } +impl Copy for StmtInstance {} + wrapper_enum! { #[impl( () self: StmtDeclaration = self, @@ -481,20 +493,57 @@ wrapper_enum! { #[to(() StmtDeclaration, () Stmt)] #[derive(Clone, PartialEq, Eq, Hash)] pub enum StmtDeclaration { - #[is = is_wire, as_ref = wire] + #[is = is_wire, as_ref = wire, from = from] Wire(StmtWire), #[is = is_reg, as_ref = reg] - Reg(StmtReg), - #[is = is_instance, as_ref = instance] + Reg(StmtReg), + #[is = is_reg_sync, as_ref = reg_sync] + RegSync(StmtReg), + #[is = is_reg_async, as_ref = reg_async] + RegAsync(StmtReg), + #[is = is_instance, as_ref = instance, from = from] Instance(StmtInstance), } } +impl Copy for StmtDeclaration {} + +impl From> for Stmt { + fn from(value: StmtReg) -> Self { + StmtDeclaration::from(value).into() + } +} + +impl From> for StmtDeclaration { + fn from(value: StmtReg) -> Self { + struct Dispatch(PhantomData); + impl ResetTypeDispatch for Dispatch { + type Input = StmtReg; + type Output = StmtDeclaration; + + fn reset(self, input: Self::Input) -> Self::Output { + StmtDeclaration::Reg(input) + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + StmtDeclaration::RegSync(input) + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + StmtDeclaration::RegAsync(input) + } + } + R::dispatch(value, Dispatch(PhantomData)) + } +} + impl StmtDeclaration { pub fn annotations(&self) -> S::StmtAnnotations { match self { StmtDeclaration::Wire(v) => v.annotations, StmtDeclaration::Reg(v) => v.annotations, + StmtDeclaration::RegSync(v) => v.annotations, + StmtDeclaration::RegAsync(v) => v.annotations, StmtDeclaration::Instance(v) => v.annotations, } } @@ -502,6 +551,8 @@ impl StmtDeclaration { match self { StmtDeclaration::Wire(v) => v.wire.source_location(), StmtDeclaration::Reg(v) => v.reg.source_location(), + StmtDeclaration::RegSync(v) => v.reg.source_location(), + StmtDeclaration::RegAsync(v) => v.reg.source_location(), StmtDeclaration::Instance(v) => 
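Note: the `From<StmtReg<R>>` impl above uses the `ResetTypeDispatch` pattern: a visitor-style trait with one method per reset kind, plus a `dispatch` entry point on each kind, so the right `StmtDeclaration` variant is chosen statically. A condensed sketch of that pattern; the trait and type names here are simplified stand-ins for fayalite's versions:

```rust
struct Reset;
struct SyncReset;
struct AsyncReset;

trait ResetType {
    fn dispatch<D: ResetTypeDispatch>(input: D::Input, dispatch: D) -> D::Output;
}

trait ResetTypeDispatch {
    type Input;
    type Output;
    fn reset(self, input: Self::Input) -> Self::Output;
    fn sync_reset(self, input: Self::Input) -> Self::Output;
    fn async_reset(self, input: Self::Input) -> Self::Output;
}

// each reset kind forwards to "its" handler, so the choice is made at compile time
impl ResetType for Reset {
    fn dispatch<D: ResetTypeDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.reset(input)
    }
}
impl ResetType for SyncReset {
    fn dispatch<D: ResetTypeDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.sync_reset(input)
    }
}
impl ResetType for AsyncReset {
    fn dispatch<D: ResetTypeDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.async_reset(input)
    }
}

#[derive(Debug, PartialEq)]
enum Declaration {
    Reg,
    RegSync,
    RegAsync,
}

struct IntoDeclaration;

impl ResetTypeDispatch for IntoDeclaration {
    type Input = ();
    type Output = Declaration;
    fn reset(self, _: ()) -> Declaration {
        Declaration::Reg
    }
    fn sync_reset(self, _: ()) -> Declaration {
        Declaration::RegSync
    }
    fn async_reset(self, _: ()) -> Declaration {
        Declaration::RegAsync
    }
}

fn main() {
    assert_eq!(Reset::dispatch((), IntoDeclaration), Declaration::Reg);
    assert_eq!(SyncReset::dispatch((), IntoDeclaration), Declaration::RegSync);
    assert_eq!(AsyncReset::dispatch((), IntoDeclaration), Declaration::RegAsync);
}
```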
v.instance.source_location(), } } @@ -509,20 +560,26 @@ impl StmtDeclaration { match self { StmtDeclaration::Wire(v) => v.wire.scoped_name(), StmtDeclaration::Reg(v) => v.reg.scoped_name(), + StmtDeclaration::RegSync(v) => v.reg.scoped_name(), + StmtDeclaration::RegAsync(v) => v.reg.scoped_name(), StmtDeclaration::Instance(v) => v.instance.scoped_name(), } } pub fn sub_stmt_blocks(&self) -> &[S::Block] { match self { - StmtDeclaration::Wire(_) | StmtDeclaration::Reg(_) | StmtDeclaration::Instance(_) => { - &[] - } + StmtDeclaration::Wire(_) + | StmtDeclaration::Reg(_) + | StmtDeclaration::RegSync(_) + | StmtDeclaration::RegAsync(_) + | StmtDeclaration::Instance(_) => &[], } } pub fn canonical_ty(&self) -> CanonicalType { match self { StmtDeclaration::Wire(v) => v.wire.ty(), StmtDeclaration::Reg(v) => v.reg.ty(), + StmtDeclaration::RegSync(v) => v.reg.ty(), + StmtDeclaration::RegAsync(v) => v.reg.ty(), StmtDeclaration::Instance(v) => CanonicalType::Bundle(v.instance.ty()), } } @@ -533,19 +590,21 @@ wrapper_enum! { #[to(() Stmt)] #[derive(Clone, PartialEq, Eq, Hash)] pub enum Stmt { - #[is = is_connect, as_ref = connect] + #[is = is_connect, as_ref = connect, from = from] Connect(StmtConnect), - #[is = is_formal, as_ref = formal] + #[is = is_formal, as_ref = formal, from = from] Formal(StmtFormal), - #[is = is_if, as_ref = if_] + #[is = is_if, as_ref = if_, from = from] If(StmtIf), - #[is = is_match, as_ref = match_] + #[is = is_match, as_ref = match_, from = from] Match(StmtMatch), - #[is = is_declaration, as_ref = declaration] + #[is = is_declaration, as_ref = declaration, from = from] Declaration(StmtDeclaration), } } +impl Copy for Stmt {} + impl Stmt { pub fn sub_stmt_blocks(&self) -> &[S::Block] { match self { @@ -714,6 +773,18 @@ impl Instance { source_location, } } + pub fn from_canonical(v: Instance) -> Self { + let Instance { + scoped_name, + instantiated, + source_location, + } = v; + Self { + scoped_name, + instantiated: Module::from_canonical(*instantiated).intern_sized(), + source_location, + } + } pub fn containing_module_name(self) -> Interned { self.containing_module_name_id().0 } @@ -763,6 +834,8 @@ pub struct AnnotatedModuleIO { pub module_io: ModuleIO, } +impl Copy for AnnotatedModuleIO {} + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub enum ModuleKind { Extern, @@ -958,6 +1031,14 @@ impl From> for NormalModuleBody { annotations: (), reg, }) => StmtReg { annotations, reg }.into(), + StmtDeclaration::RegSync(StmtReg { + annotations: (), + reg, + }) => StmtReg { annotations, reg }.into(), + StmtDeclaration::RegAsync(StmtReg { + annotations: (), + reg, + }) => StmtReg { annotations, reg }.into(), StmtDeclaration::Instance(StmtInstance { annotations: (), instance, @@ -1005,6 +1086,7 @@ pub struct ExternModuleBody< > { pub verilog_name: Interned, pub parameters: P, + pub simulation: Option, } impl From>> for ExternModuleBody { @@ -1012,11 +1094,13 @@ impl From>> for ExternModuleBody { let ExternModuleBody { verilog_name, parameters, + simulation, } = value; let parameters = Intern::intern_owned(parameters); Self { verilog_name, parameters, + simulation, } } } @@ -1131,6 +1215,12 @@ pub struct Module { module_annotations: Interned<[Annotation]>, } +impl AsRef for Module { + fn as_ref(&self) -> &Self { + self + } +} + #[derive(Default)] struct DebugFmtModulesState { seen: HashSet, @@ -1207,10 +1297,12 @@ impl fmt::Debug for DebugModuleBody { ModuleBody::Extern(ExternModuleBody { verilog_name, parameters, + simulation, }) => { debug_struct .field("verilog_name", 
verilog_name) - .field("parameters", parameters); + .field("parameters", parameters) + .field("simulation", simulation); } } debug_struct.finish_non_exhaustive() @@ -1376,7 +1468,9 @@ impl TargetState { }) .reduce(TargetWritten::conditional_merge_written) else { - unreachable!("merge_conditional_sub_blocks_into_block must be called with at least one sub-block"); + unreachable!( + "merge_conditional_sub_blocks_into_block must be called with at least one sub-block" + ); }; let mut written_in_blocks = written_in_blocks.borrow_mut(); if target_block >= written_in_blocks.len() { @@ -1414,6 +1508,9 @@ impl TargetState { }) .collect(), }, + CanonicalType::PhantomConst(_) => TargetStateInner::Decomposed { + subtargets: HashMap::default(), + }, CanonicalType::Array(ty) => TargetStateInner::Decomposed { subtargets: (0..ty.len()) .map(|index| { @@ -1436,7 +1533,8 @@ impl TargetState { | CanonicalType::Clock(_) | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) - | CanonicalType::Reset(_) => TargetStateInner::Single { + | CanonicalType::Reset(_) + | CanonicalType::DynSimOnly(_) => TargetStateInner::Single { declared_in_block, written_in_blocks: RefCell::default(), }, @@ -1661,6 +1759,14 @@ impl AssertValidityState { annotations: _, reg, })) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block), + Stmt::Declaration(StmtDeclaration::RegSync(StmtReg { + annotations: _, + reg, + })) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block), + Stmt::Declaration(StmtDeclaration::RegAsync(StmtReg { + annotations: _, + reg, + })) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block), Stmt::Declaration(StmtDeclaration::Instance(StmtInstance { annotations: _, instance, @@ -1674,6 +1780,7 @@ impl AssertValidityState { ModuleBody::Extern(ExternModuleBody { verilog_name: _, parameters: _, + simulation: _, }) => {} ModuleBody::Normal(NormalModuleBody { body }) => { let body = self.make_block_index(body); @@ -1695,12 +1802,49 @@ impl Module { pub fn new_unchecked( name_id: NameId, source_location: SourceLocation, - body: ModuleBody, + mut body: ModuleBody, module_io: impl IntoIterator, module_annotations: impl IntoAnnotations, ) -> Module { let module_io: Interned<[_]> = module_io.into_iter().collect(); let module_annotations = module_annotations.into_annotations().into_iter().collect(); + match &mut body { + ModuleBody::Normal(_) => {} + ModuleBody::Extern(ExternModuleBody { + simulation: Some(simulation), + .. 
+ }) => { + if module_io.iter().any(|io| { + !simulation + .sim_io_to_generator_map + .contains_key(&io.module_io.intern()) + }) { + let mut sim_io_to_generator_map = + BTreeMap::clone(&simulation.sim_io_to_generator_map); + for io in module_io.iter() { + let io = io.module_io.intern(); + sim_io_to_generator_map.entry(io).or_insert(io); + } + simulation.sim_io_to_generator_map = sim_io_to_generator_map.intern_sized(); + } + if simulation.sim_io_to_generator_map.len() > module_io.len() { + // if sim_io_to_generator_map is bigger, then there must be a key that's not in module_io + let module_io_set = HashSet::from_iter(module_io.iter().map(|v| v.module_io)); + for (sim_io, generator_io) in simulation.sim_io_to_generator_map.iter() { + if !module_io_set.contains(&**sim_io) { + panic!( + "extern module has invalid `sim_io_to_generator_map`: key is not in containing module's `module_io`:\n\ + key={sim_io:?}\nvalue={generator_io:?}\nmodule location: {source_location}" + ); + } + } + unreachable!(); + } + } + ModuleBody::Extern(ExternModuleBody { + simulation: None, .. + }) => {} + } let retval = Module { name: name_id, source_location, @@ -1769,7 +1913,7 @@ impl Module { AssertValidityState { module: self.canonical(), blocks: vec![], - target_states: HashMap::with_capacity(64), + target_states: HashMap::with_capacity_and_hasher(64, Default::default()), } .assert_validity(); } @@ -1842,10 +1986,10 @@ impl RegBuilder { } impl RegBuilder<(), I, T> { - pub fn clock_domain( + pub fn clock_domain( self, - clock_domain: impl ToExpr, - ) -> RegBuilder, I, T> { + clock_domain: impl ToExpr>, + ) -> RegBuilder>, I, T> { let Self { name, source_location, @@ -1863,7 +2007,7 @@ impl RegBuilder<(), I, T> { } } -impl RegBuilder, Option>, T> { +impl RegBuilder>, Option>, T> { #[track_caller] pub fn build(self) -> Expr { let Self { @@ -1976,6 +2120,27 @@ impl ModuleBuilder { self.output_with_loc(implicit_name.0, SourceLocation::caller(), ty) } #[track_caller] + pub fn add_platform_io_with_loc( + &self, + name: &str, + source_location: SourceLocation, + platform_io_builder: PlatformIOBuilder<'_>, + ) -> Expr { + platform_io_builder.add_platform_io(name, source_location, self) + } + #[track_caller] + pub fn add_platform_io( + &self, + implicit_name: ImplicitName<'_>, + platform_io_builder: PlatformIOBuilder<'_>, + ) -> Expr { + self.add_platform_io_with_loc( + implicit_name.0, + SourceLocation::caller(), + platform_io_builder, + ) + } + #[track_caller] pub fn run( name: &str, module_kind: ModuleKind, @@ -2021,6 +2186,7 @@ impl ModuleBuilder { ModuleKind::Extern => ModuleBody::Extern(ExternModuleBody { verilog_name: name.0, parameters: vec![], + simulation: None, }), ModuleKind::Normal => ModuleBody::Normal(NormalModuleBody { body: BuilderModuleBody { @@ -2029,8 +2195,8 @@ impl ModuleBuilder { incomplete_declarations: vec![], stmts: vec![], }], - annotations_map: HashMap::new(), - memory_map: HashMap::new(), + annotations_map: HashMap::default(), + memory_map: HashMap::default(), }, }), }; @@ -2040,7 +2206,7 @@ impl ModuleBuilder { impl_: RefCell::new(ModuleBuilderImpl { body, io: vec![], - io_indexes: HashMap::new(), + io_indexes: HashMap::default(), module_annotations: vec![], }), }; @@ -2087,6 +2253,7 @@ impl ModuleBuilder { .builder_extern_body() .verilog_name = name.intern(); } + #[track_caller] pub fn parameter(&self, name: impl AsRef, value: ExternModuleParameterValue) { let name = name.as_ref(); self.impl_ @@ -2099,6 +2266,7 @@ impl ModuleBuilder { value, }); } + #[track_caller] pub fn parameter_int(&self, 
name: impl AsRef, value: impl Into) { let name = name.as_ref(); let value = value.into(); @@ -2112,6 +2280,7 @@ impl ModuleBuilder { value: ExternModuleParameterValue::Integer(value), }); } + #[track_caller] pub fn parameter_str(&self, name: impl AsRef, value: impl AsRef) { let name = name.as_ref(); let value = value.as_ref(); @@ -2125,6 +2294,7 @@ impl ModuleBuilder { value: ExternModuleParameterValue::String(value.intern()), }); } + #[track_caller] pub fn parameter_raw_verilog(&self, name: impl AsRef, raw_verilog: impl AsRef) { let name = name.as_ref(); let raw_verilog = raw_verilog.as_ref(); @@ -2138,6 +2308,26 @@ impl ModuleBuilder { value: ExternModuleParameterValue::RawVerilog(raw_verilog.intern()), }); } + #[track_caller] + pub fn extern_module_simulation(&self, generator: G) { + let mut impl_ = self.impl_.borrow_mut(); + let simulation = &mut impl_.body.builder_extern_body().simulation; + if simulation.is_some() { + panic!("already added an extern module simulation"); + } + *simulation = Some(ExternModuleSimulation::new(generator)); + } + #[track_caller] + pub fn extern_module_simulation_fn< + Args: fmt::Debug + Clone + Hash + Eq + Send + Sync + 'static, + Fut: IntoFuture + 'static, + >( + &self, + args: Args, + f: fn(Args, crate::sim::ExternModuleSimulationState) -> Fut, + ) { + self.extern_module_simulation(crate::sim::SimGeneratorFn { args, f }); + } } #[track_caller] @@ -2170,14 +2360,12 @@ pub fn annotate(target: Expr, annotations: impl IntoAnnotations) { } TargetBase::MemPort(v) => { ModuleBuilder::with(|m| { - RefCell::borrow_mut(unwrap!(unwrap!(m - .impl_ - .borrow_mut() - .body - .builder_normal_body_opt()) - .body - .memory_map - .get_mut(&v.mem_name()))) + RefCell::borrow_mut(unwrap!( + unwrap!(m.impl_.borrow_mut().body.builder_normal_body_opt()) + .body + .memory_map + .get_mut(&v.mem_name()) + )) .port_annotations .extend(annotations) }); @@ -2188,6 +2376,16 @@ pub fn annotate(target: Expr, annotations: impl IntoAnnotations) { reg, } .into(), + TargetBase::RegSync(reg) => StmtReg { + annotations: (), + reg, + } + .into(), + TargetBase::RegAsync(reg) => StmtReg { + annotations: (), + reg, + } + .into(), TargetBase::Wire(wire) => StmtWire { annotations: (), wire, @@ -2567,6 +2765,22 @@ impl ModuleIO { source_location, } } + pub fn from_canonical(canonical_module_io: ModuleIO) -> Self { + let ModuleIO { + containing_module_name, + bundle_field, + id, + ty, + source_location, + } = canonical_module_io; + Self { + containing_module_name, + bundle_field, + id, + ty: T::from_canonical(ty), + source_location, + } + } pub fn bundle_field(&self) -> BundleField { self.bundle_field } @@ -2629,3 +2843,50 @@ impl ModuleIO { self.ty } } + +#[derive(PartialEq, Eq, Hash, Clone, Copy)] +pub enum InstantiatedModule { + Base(Interned>), + Child { + parent: Interned, + instance: Interned>, + }, +} + +impl InstantiatedModule { + pub fn leaf_module(self) -> Interned> { + match self { + InstantiatedModule::Base(base) => base, + InstantiatedModule::Child { instance, .. 
} => instance.instantiated(), + } + } + fn write_path(self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + InstantiatedModule::Base(base) => fmt::Debug::fmt(&base.name_id(), f), + InstantiatedModule::Child { parent, instance } => { + parent.write_path(f)?; + write!(f, ".{}", instance.name_id()) + } + } + } +} + +impl fmt::Debug for InstantiatedModule { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "InstantiatedModule(")?; + self.write_path(f)?; + write!(f, ": {})", self.leaf_module().name_id()) + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub struct TargetInInstantiatedModule { + pub instantiated_module: InstantiatedModule, + pub target: Target, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub struct ExprInInstantiatedModule { + pub instantiated_module: InstantiatedModule, + pub expr: Expr, +} diff --git a/crates/fayalite/src/module/transform.rs b/crates/fayalite/src/module/transform.rs index 4117087..063a1a3 100644 --- a/crates/fayalite/src/module/transform.rs +++ b/crates/fayalite/src/module/transform.rs @@ -1,5 +1,6 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information +pub mod deduce_resets; pub mod simplify_enums; pub mod simplify_memories; pub mod visit; diff --git a/crates/fayalite/src/module/transform/deduce_resets.rs b/crates/fayalite/src/module/transform/deduce_resets.rs new file mode 100644 index 0000000..e84d835 --- /dev/null +++ b/crates/fayalite/src/module/transform/deduce_resets.rs @@ -0,0 +1,2331 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + annotations::{Annotation, TargetedAnnotation}, + bundle::{BundleField, BundleType}, + enum_::{EnumType, EnumVariant}, + expr::{ + ExprEnum, + ops::{self, ArrayLiteral}, + target::{ + Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField, + TargetPathDynArrayElement, TargetPathElement, + }, + }, + formal::FormalKind, + int::{SIntValue, UIntValue}, + intern::{Intern, Interned, Memoize}, + memory::{DynPortType, MemPort}, + module::{ + AnnotatedModuleIO, Block, ExprInInstantiatedModule, ExternModuleBody, + ExternModuleParameter, InstantiatedModule, ModuleBody, ModuleIO, NameId, NormalModuleBody, + Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg, + StmtWire, + }, + prelude::*, + reset::{ResetType, ResetTypeDispatch}, + sim::ExternModuleSimulation, + util::{HashMap, HashSet}, +}; +use hashbrown::hash_map::Entry; +use num_bigint::BigInt; +use petgraph::unionfind::UnionFind; +use std::{collections::BTreeMap, fmt, marker::PhantomData}; + +#[derive(Debug)] +pub enum DeduceResetsError { + ResetIsNotDrivenByAsyncOrSync { source_location: SourceLocation }, + ResetIsDrivenByBothAsyncAndSync { source_location: SourceLocation }, +} + +impl fmt::Display for DeduceResetsError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + DeduceResetsError::ResetIsNotDrivenByAsyncOrSync { source_location } => { + write!( + f, + "deduce_reset failed: Reset signal is not driven by any AsyncReset or SyncReset signals: {source_location}" + ) + } + DeduceResetsError::ResetIsDrivenByBothAsyncAndSync { source_location } => { + write!( + f, + "deduce_reset failed: Reset signal is driven by both AsyncReset and SyncReset signals: {source_location}" + ) + } + } + } +} + +impl std::error::Error for DeduceResetsError {} + +impl From for std::io::Error { + fn from(value: DeduceResetsError) -> Self { + 
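Note: `InstantiatedModule::write_path` prints the instance hierarchy by recursing into the parent before appending its own name. The same recursion in miniature:

```rust
use std::fmt;

// a child link holds its parent, so printing walks up and joins names with '.'
enum Path {
    Base(&'static str),
    Child { parent: Box<Path>, name: &'static str },
}

impl fmt::Display for Path {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Path::Base(name) => f.write_str(name),
            Path::Child { parent, name } => write!(f, "{parent}.{name}"),
        }
    }
}

fn main() {
    let top = Path::Base("top");
    let alu = Path::Child { parent: Box::new(top), name: "alu" };
    let adder = Path::Child { parent: Box::new(alu), name: "adder" };
    assert_eq!(adder.to_string(), "top.alu.adder");
}
```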
std::io::Error::new(std::io::ErrorKind::Other, value) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +enum AnyReg { + Reg(Reg), + RegSync(Reg), + RegAsync(Reg), +} + +macro_rules! match_any_reg { + ( + $match_expr:expr, $fn:expr + ) => { + match $match_expr { + AnyReg::Reg(reg) => $fn(reg), + AnyReg::RegSync(reg) => $fn(reg), + AnyReg::RegAsync(reg) => $fn(reg), + } + }; +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +enum ResetsLayout { + NoResets, + Reset, + SyncReset, + AsyncReset, + Bundle { + fields: Interned<[ResetsLayout]>, + reset_count: usize, + }, + Enum { + variants: Interned<[ResetsLayout]>, + reset_count: usize, + }, + Array { + element: Interned, + reset_count: usize, + }, +} + +impl ResetsLayout { + fn reset_count(self) -> usize { + match self { + ResetsLayout::NoResets => 0, + ResetsLayout::Reset | ResetsLayout::SyncReset | ResetsLayout::AsyncReset => 1, + ResetsLayout::Bundle { reset_count, .. } + | ResetsLayout::Enum { reset_count, .. } + | ResetsLayout::Array { reset_count, .. } => reset_count, + } + } + fn new(ty: CanonicalType) -> Self { + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + struct MyMemoize; + impl Memoize for MyMemoize { + type Input = CanonicalType; + type InputOwned = CanonicalType; + type Output = ResetsLayout; + + fn inner(self, ty: &Self::Input) -> Self::Output { + match *ty { + CanonicalType::UInt(_) => ResetsLayout::NoResets, + CanonicalType::SInt(_) => ResetsLayout::NoResets, + CanonicalType::Bool(_) => ResetsLayout::NoResets, + CanonicalType::Array(ty) => { + let element = ResetsLayout::new(ty.element()).intern_sized(); + ResetsLayout::Array { + element, + reset_count: element.reset_count(), + } + } + CanonicalType::Enum(ty) => { + let mut reset_count = 0; + let variants = Interned::from_iter(ty.variants().iter().map(|variant| { + let resets_layout = + variant.ty.map_or(ResetsLayout::NoResets, ResetsLayout::new); + reset_count += resets_layout.reset_count(); + resets_layout + })); + ResetsLayout::Enum { + variants, + reset_count, + } + } + CanonicalType::Bundle(ty) => { + let mut reset_count = 0; + let fields = Interned::from_iter(ty.fields().iter().map(|field| { + let resets_layout = ResetsLayout::new(field.ty); + reset_count += resets_layout.reset_count(); + resets_layout + })); + ResetsLayout::Bundle { + fields, + reset_count, + } + } + CanonicalType::AsyncReset(_) => ResetsLayout::AsyncReset, + CanonicalType::SyncReset(_) => ResetsLayout::SyncReset, + CanonicalType::Reset(_) => ResetsLayout::Reset, + CanonicalType::Clock(_) => ResetsLayout::NoResets, + CanonicalType::PhantomConst(_) => ResetsLayout::NoResets, + CanonicalType::DynSimOnly(_) => ResetsLayout::NoResets, + } + } + } + MyMemoize.get_owned(ty) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +struct ResetNodeIndex(usize); + +#[derive(Copy, Clone, Debug)] +struct ResetNode { + is_async: Option, + source_location: Option, +} + +impl ResetNode { + fn union( + self, + other: Self, + fallback_error_source_location: SourceLocation, + ) -> Result { + match (self.is_async, other.is_async) { + (None, None) => Ok(Self { + is_async: None, + source_location: self.source_location.or(other.source_location), + }), + (None, is_async @ Some(_)) => Ok(Self { + is_async, + // prioritize `other` + source_location: other.source_location.or(self.source_location), + }), + (is_async @ Some(_), None) => Ok(Self { + is_async, + // prioritize `self` + source_location: self.source_location.or(other.source_location), + }), + (Some(self_is_async), Some(other_is_async)) => { + if 
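Note: `match_any_reg!` exists because the three `AnyReg` variants carry registers with different reset type parameters, so the same expression has to be re-expanded per arm rather than written once. A toy version of that macro shape with a hypothetical `AnyNum` enum:

```rust
enum AnyNum {
    U32(u32),
    U64(u64),
    I64(i64),
}

macro_rules! match_any_num {
    ($match_expr:expr, $fn:expr) => {
        match $match_expr {
            AnyNum::U32(x) => $fn(x),
            AnyNum::U64(x) => $fn(x),
            AnyNum::I64(x) => $fn(x),
        }
    };
}

fn main() {
    // `$fn` is re-expanded per arm, so each expansion sees its own payload type
    assert_eq!(match_any_num!(AnyNum::U32(7), |x| format!("{x}")), "7");
    assert_eq!(match_any_num!(AnyNum::U64(8), |x| format!("{x}")), "8");
    assert_eq!(match_any_num!(AnyNum::I64(-9), |x| format!("{x}")), "-9");
}
```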
self_is_async == other_is_async { + Ok(Self { + is_async: Some(self_is_async), + source_location: self.source_location.or(other.source_location), + }) + } else { + Err(DeduceResetsError::ResetIsDrivenByBothAsyncAndSync { + source_location: self + .source_location + .or(other.source_location) + .unwrap_or(fallback_error_source_location), + }) + } + } + } + } +} + +#[derive(Debug, Default)] +struct ResetGraph { + union_find: UnionFind, + nodes: Vec, +} + +impl ResetGraph { + fn new_node( + &mut self, + is_async: Option, + source_location: Option, + ) -> ResetNodeIndex { + let index = self.union_find.new_set(); + assert_eq!(index, self.nodes.len()); + self.nodes.push(ResetNode { + is_async, + source_location, + }); + ResetNodeIndex(index) + } + fn union( + &mut self, + a: ResetNodeIndex, + b: ResetNodeIndex, + fallback_error_source_location: SourceLocation, + ) -> Result<(), DeduceResetsError> { + let a = self.union_find.find_mut(a.0); + let b = self.union_find.find_mut(b.0); + if a != b { + self.union_find.union(a, b); + let merged = self.union_find.find_mut(a); + self.nodes[merged] = + self.nodes[a].union(self.nodes[b], fallback_error_source_location)?; + } + Ok(()) + } + fn is_async( + &mut self, + node: ResetNodeIndex, + fallback_to_sync_reset: bool, + fallback_error_source_location: SourceLocation, + ) -> Result { + let ResetNode { + is_async, + source_location, + } = self.nodes[self.union_find.find_mut(node.0)]; + if let Some(is_async) = is_async { + Ok(is_async) + } else if fallback_to_sync_reset { + Ok(false) + } else { + Err(DeduceResetsError::ResetIsNotDrivenByAsyncOrSync { + source_location: source_location.unwrap_or(fallback_error_source_location), + }) + } + } + fn append_new_nodes_for_layout( + &mut self, + layout: ResetsLayout, + node_indexes: &mut Vec, + source_location: Option, + ) { + match layout { + ResetsLayout::NoResets => {} + ResetsLayout::Reset => node_indexes.push(self.new_node(None, source_location)), + ResetsLayout::SyncReset => { + node_indexes.push(self.new_node(Some(false), source_location)) + } + ResetsLayout::AsyncReset => { + node_indexes.push(self.new_node(Some(true), source_location)) + } + ResetsLayout::Bundle { + fields, + reset_count: _, + } => { + for field in fields { + self.append_new_nodes_for_layout(field, node_indexes, source_location); + } + } + ResetsLayout::Enum { + variants, + reset_count: _, + } => { + for variant in variants { + self.append_new_nodes_for_layout(variant, node_indexes, source_location); + } + } + ResetsLayout::Array { + element, + reset_count: _, + } => { + self.append_new_nodes_for_layout(*element, node_indexes, source_location); + } + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +struct Resets { + ty: CanonicalType, + layout: ResetsLayout, + node_indexes: Interned<[ResetNodeIndex]>, +} + +impl Resets { + fn with_new_nodes( + reset_graph: &mut ResetGraph, + ty: CanonicalType, + source_location: Option, + ) -> Self { + let layout = ResetsLayout::new(ty); + let mut node_indexes = Vec::with_capacity(layout.reset_count()); + reset_graph.append_new_nodes_for_layout(layout, &mut node_indexes, source_location); + let node_indexes = Intern::intern_owned(node_indexes); + Self { + ty, + layout, + node_indexes, + } + } + fn array_elements(self) -> Self { + let array = ::from_canonical(self.ty); + let ResetsLayout::Array { + element, + reset_count: _, + } = self.layout + else { + unreachable!(); + }; + Self { + ty: array.element(), + layout: *element, + node_indexes: self.node_indexes, + } + } + fn bundle_fields(self) -> 
impl Iterator { + let bundle = Bundle::from_canonical(self.ty); + let ResetsLayout::Bundle { + fields, + reset_count: _, + } = self.layout + else { + unreachable!(); + }; + bundle.fields().into_iter().zip(fields).scan( + 0, + move |start_index, (BundleField { ty, .. }, layout)| { + let end_index = *start_index + layout.reset_count(); + let node_indexes = self.node_indexes[*start_index..end_index].intern(); + *start_index = end_index; + Some(Self { + ty, + layout, + node_indexes, + }) + }, + ) + } + fn enum_variants(self) -> impl Iterator> { + let enum_ = Enum::from_canonical(self.ty); + let ResetsLayout::Enum { + variants, + reset_count: _, + } = self.layout + else { + unreachable!(); + }; + enum_.variants().into_iter().zip(variants).scan( + 0, + move |start_index, (EnumVariant { ty, .. }, layout)| { + let end_index = *start_index + layout.reset_count(); + let node_indexes = self.node_indexes[*start_index..end_index].intern(); + *start_index = end_index; + Some(ty.map(|ty| Self { + ty, + layout, + node_indexes, + })) + }, + ) + } + fn substituted_type( + self, + reset_graph: &mut ResetGraph, + fallback_to_sync_reset: bool, + fallback_error_source_location: SourceLocation, + ) -> Result { + if self.layout.reset_count() == 0 { + return Ok(self.ty); + } + match self.ty { + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Clock(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => Ok(self.ty), + CanonicalType::Array(ty) => Ok(CanonicalType::Array(Array::new_dyn( + self.array_elements().substituted_type( + reset_graph, + fallback_to_sync_reset, + fallback_error_source_location, + )?, + ty.len(), + ))), + CanonicalType::Enum(ty) => Ok(CanonicalType::Enum(Enum::new(Result::from_iter( + self.enum_variants().zip(ty.variants()).map( + |(resets, EnumVariant { name, ty: _ })| { + Ok(EnumVariant { + name, + ty: resets + .map(|resets| { + resets.substituted_type( + reset_graph, + fallback_to_sync_reset, + fallback_error_source_location, + ) + }) + .transpose()?, + }) + }, + ), + )?))), + CanonicalType::Bundle(ty) => Ok(CanonicalType::Bundle(Bundle::new(Result::from_iter( + self.bundle_fields().zip(ty.fields()).map( + |( + resets, + BundleField { + name, + flipped, + ty: _, + }, + )| { + Ok(BundleField { + name, + flipped, + ty: resets.substituted_type( + reset_graph, + fallback_to_sync_reset, + fallback_error_source_location, + )?, + }) + }, + ), + )?))), + CanonicalType::Reset(_) => Ok( + if reset_graph.is_async( + self.node_indexes[0], + fallback_to_sync_reset, + fallback_error_source_location, + )? 
{ + CanonicalType::AsyncReset(AsyncReset) + } else { + CanonicalType::SyncReset(SyncReset) + }, + ), + } + } +} + +#[derive(Debug)] +struct State { + modules_added_to_graph: HashSet, + substituted_modules: HashMap>, + expr_resets: HashMap, Resets>, + reset_graph: ResetGraph, + fallback_to_sync_reset: bool, +} + +impl State { + fn get_resets( + &self, + instantiated_module: InstantiatedModule, + expr: impl ToExpr, + ) -> Option { + self.expr_resets + .get(&ExprInInstantiatedModule { + instantiated_module, + expr: Expr::canonical(expr.to_expr()), + }) + .copied() + } + fn get_or_make_resets( + &mut self, + instantiated_module: InstantiatedModule, + expr: impl ToExpr, + source_location: Option, + ) -> (Resets, bool) { + let expr = Expr::canonical(expr.to_expr()); + match self.expr_resets.entry(ExprInInstantiatedModule { + instantiated_module, + expr, + }) { + Entry::Occupied(entry) => (*entry.get(), false), + Entry::Vacant(entry) => ( + *entry.insert(Resets::with_new_nodes( + &mut self.reset_graph, + Expr::ty(expr), + source_location, + )), + true, + ), + } + } +} + +struct PassOutput(P::Output); + +impl PassOutput { + fn new(v: T) -> Self { + P::output_new(v) + } + fn from_fn(f: impl FnOnce() -> T) -> Self { + PassOutput::new(()).map(|()| f()) + } + fn map(self, f: impl FnOnce(T) -> U) -> PassOutput { + P::map(self, f) + } +} + +trait PassOutputZip: Sized { + type Zipped; + fn zip(self) -> PassOutput; + fn call(self, f: impl FnOnce(Self::Zipped) -> U) -> PassOutput { + self.zip().map(f) + } +} + +impl PassOutputZip
<P>
for () { + type Zipped = (); + fn zip(self) -> PassOutput { + PassOutput::new(()) + } +} + +impl PassOutputZip
<P>
for (PassOutput,) { + type Zipped = (T,); + fn zip(self) -> PassOutput { + self.0.map(|v| (v,)) + } +} + +macro_rules! impl_zip { + ($first_arg:ident: $first_T:ident, $($arg:ident: $T:ident),* $(,)?) => { + impl_zip!(@step [], [($first_arg: $first_T) $(($arg: $T))*], (),); + }; + ( + @impl($first_arg:tt,), + $tuple_pat:tt, + ) => {}; + ( + @impl(($first_arg:ident: $first_T:ident), + $(($arg:ident: $T:ident),)*), + $tuple_pat:tt, + ) => { + impl<$first_T, $($T,)* P: Pass> PassOutputZip
<P>
for (PassOutput<$first_T, P>, $(PassOutput<$T, P>),*) { + type Zipped = ($first_T, $($T),*); + fn zip(self) -> PassOutput<($first_T, $($T),*), P> { + let (tuples, $($arg),*) = self; + $(let tuples = P::zip(tuples, $arg);)* + tuples.map(|$tuple_pat| ($first_arg, $($arg),*)) + } + } + }; + ( + @step [$($cur:tt)*], + [], + $tuple_pat:tt, + ) => {}; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + (), + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), $next_arg,); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], $next_arg,); + }; + ( + @step [$($cur:tt)*], + [($next_arg:ident: $next_T:ident) $($rest:tt)*], + $tuple_pat:tt, + ) => { + impl_zip!(@impl($($cur,)* ($next_arg: $next_T),), ($tuple_pat, $next_arg),); + impl_zip!(@step [$($cur)* ($next_arg: $next_T)], [$($rest)*], ($tuple_pat, $next_arg),); + }; +} + +impl_zip!(t0: T0, t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11); + +impl, P: Pass, A> FromIterator> for PassOutput { + fn from_iter>>(iter: I) -> Self { + P::output_from_iter(iter) + } +} + +trait PassDispatch: Sized { + type Input; + type Output; + fn build_reset_graph( + self, + input: Self::Input, + ) -> Self::Output; + fn substitute_resets( + self, + input: Self::Input, + ) -> Self::Output; +} + +trait Pass: Sized { + type Output; + fn output_new(v: T) -> PassOutput; + fn output_from_iter, A>( + iter: impl IntoIterator>, + ) -> PassOutput; + fn try_array_from_fn( + f: impl FnMut(usize) -> Result, E>, + ) -> Result, E>; + fn map(v: PassOutput, f: impl FnOnce(T) -> U) -> PassOutput; + fn zip(t: PassOutput, u: PassOutput) -> PassOutput<(T, U), Self>; + fn dispatch(dispatch: D, input: D::Input) -> D::Output; +} + +struct BuildResetGraph; + +impl Pass for BuildResetGraph { + type Output = (); + + fn output_new(_v: T) -> PassOutput { + PassOutput(()) + } + + fn output_from_iter, A>( + iter: impl IntoIterator>, + ) -> PassOutput { + iter.into_iter().for_each(|_| {}); + PassOutput(()) + } + + fn try_array_from_fn( + mut f: impl FnMut(usize) -> Result, E>, + ) -> Result, E> { + for i in 0..N { + f(i)?; + } + Ok(PassOutput(())) + } + + fn map(_v: PassOutput, _f: impl FnOnce(T) -> U) -> PassOutput { + PassOutput(()) + } + + fn zip(_t: PassOutput, _u: PassOutput) -> PassOutput<(T, U), Self> { + PassOutput(()) + } + + fn dispatch(dispatch: D, input: D::Input) -> D::Output { + dispatch.build_reset_graph(input) + } +} + +struct SubstituteResets; + +impl Pass for SubstituteResets { + type Output = T; + + fn output_new(v: T) -> PassOutput { + PassOutput(v) + } + + fn output_from_iter, A>( + iter: impl IntoIterator>, + ) -> PassOutput { + PassOutput(T::from_iter(iter.into_iter().map(|PassOutput(v)| v))) + } + + fn try_array_from_fn( + mut f: impl FnMut(usize) -> Result, E>, + ) -> Result, E> { + let mut retval = [const { None }; N]; + for i in 0..N { + retval[i] = Some(f(i)?.0); + } + Ok(PassOutput( + retval.map(|v| v.expect("just wrote Some to all elements")), + )) + } + + fn map(v: PassOutput, f: impl FnOnce(T) -> U) -> PassOutput { + PassOutput(f(v.0)) + } + + fn zip(t: PassOutput, u: PassOutput) -> PassOutput<(T, U), Self> { + PassOutput((t.0, u.0)) + } + + fn dispatch(dispatch: D, input: D::Input) -> D::Output { + dispatch.substitute_resets(input) + } +} + +struct PassArgs<'a, P: Pass> { + state: &'a mut State, + instantiated_module: InstantiatedModule, + fallback_error_source_location: SourceLocation, + _phantom: PhantomData
<P>
, +} + +impl<'a, P: Pass> fmt::Debug for PassArgs<'a, P> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + state, + instantiated_module, + fallback_error_source_location, + _phantom: _, + } = self; + f.debug_struct("PassArgs") + .field("state", state) + .field("instantiated_module", instantiated_module) + .field( + "fallback_error_source_location", + fallback_error_source_location, + ) + .finish() + } +} + +impl<'a, P: Pass> PassArgs<'a, P> { + fn as_mut(&mut self) -> PassArgs<'_, P> { + let PassArgs { + ref mut state, + instantiated_module, + fallback_error_source_location, + _phantom: _, + } = *self; + PassArgs { + state: &mut **state, + instantiated_module, + fallback_error_source_location, + _phantom: PhantomData, + } + } + fn get_resets(&self, expr: impl ToExpr) -> Option { + self.state.get_resets(self.instantiated_module, expr) + } + fn get_or_make_resets( + &mut self, + expr: impl ToExpr, + source_location: Option, + ) -> (Resets, bool) { + self.state + .get_or_make_resets(self.instantiated_module, expr, source_location) + } + fn union( + &mut self, + a: Resets, + b: Resets, + fallback_error_source_location: Option, + ) -> Result<(), DeduceResetsError> { + assert_eq!(a.layout, b.layout); + assert!( + a.ty.can_connect(b.ty), + "can't connect types! a:\n{a:?}\nb:\n{b:?}" + ); + for (a_node_index, b_node_index) in a.node_indexes.into_iter().zip(b.node_indexes) { + self.state.reset_graph.union( + a_node_index, + b_node_index, + fallback_error_source_location.unwrap_or(self.fallback_error_source_location), + )?; + } + Ok(()) + } +} + +trait RunPass: Sized { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError>; +} + +trait RunPassDispatch: Sized { + fn build_reset_graph( + &self, + pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError>; + fn substitute_resets( + &self, + pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError>; + fn dispatch( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + struct Dispatch<'a, T>(T, PhantomData<&'a mut ()>); + impl<'a, T: RunPassDispatch> PassDispatch for Dispatch<'a, &'_ T> { + type Input = PassArgs<'a, P>; + type Output = Result, DeduceResetsError>; + + fn build_reset_graph( + self, + input: Self::Input, + ) -> Self::Output { + self.0.build_reset_graph(input) + } + fn substitute_resets( + self, + input: Self::Input, + ) -> Self::Output { + self.0.substitute_resets(input) + } + } + P::dispatch(Dispatch(self, PhantomData), pass_args) + } +} + +impl RunPass
<P>
for T { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + T::dispatch(self, pass_args) + } +} + +trait RunPassExpr: ToExpr + Sized { + type Args<'a>: IntoIterator> + 'a + where + Self: 'a; + fn args<'a>(&'a self) -> Self::Args<'a>; + fn source_location(&self) -> Option; + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError>; + fn new( + &self, + ty: CanonicalType, + new_args: Vec>, + ) -> Result; +} + +impl RunPassDispatch for T { + fn build_reset_graph( + &self, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError> { + let source_location = self.source_location(); + let (resets, _) = pass_args.get_or_make_resets(self, source_location); + let args_resets = Result::from_iter(self.args().into_iter().map(|arg| { + arg.run_pass(pass_args.as_mut())?; + let (resets, _) = pass_args.get_or_make_resets(arg, source_location); + Ok(resets) + }))?; + self.union_parts(resets, args_resets, pass_args)?; + Ok(PassOutput(())) + } + + fn substitute_resets( + &self, + mut pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError> { + let source_location = self.source_location(); + let (resets, _) = pass_args.get_or_make_resets(self, source_location); + let ty = resets.substituted_type( + &mut pass_args.state.reset_graph, + pass_args.state.fallback_to_sync_reset, + pass_args.fallback_error_source_location, + )?; + let new_args = Result::from_iter( + self.args() + .into_iter() + .map(|arg| Ok(arg.run_pass(pass_args.as_mut())?.0)), + )?; + Ok(PassOutput(self.new(ty, new_args)?)) + } +} + +impl + Intern + Clone, P: Pass> RunPass
<P>
for Interned { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(T::run_pass(self, pass_args)?.map(Intern::intern_sized)) + } +} + +impl + Clone, P: Pass> RunPass
<P>
for Interned<[T]> +where + [T]: Intern, +{ + fn run_pass( + &self, + mut pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Result::from_iter(self.iter().map(|v| v.run_pass(pass_args.as_mut()))) + } +} + +impl, P: Pass, const N: usize> RunPass
<P>
for [T; N] { + fn run_pass( + &self, + mut pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + P::try_array_from_fn(|i| self[i].run_pass(pass_args.as_mut())) + } +} + +impl, P: Pass> RunPass
<P>
for Option { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + match self { + Some(v) => Ok(v.run_pass(pass_args)?.map(Some)), + None => Ok(PassOutput::new(None)), + } + } +} + +fn reg_expr_run_pass( + reg: &Reg, + pass_args: PassArgs<'_, P>, +) -> Result, DeduceResetsError> { + Ok(AnyReg::from(*reg) + .run_pass(pass_args)? + .map(|reg| match_any_reg!(reg, ExprEnum::from))) +} + +fn cast_bit_op( + expr: impl ToExpr, + arg: Expr, + pass_args: PassArgs<'_, P>, +) -> Result, DeduceResetsError> { + struct Dispatch<'a, T: Type, A: Type> { + expr: Expr, + arg: Expr, + _phantom: PhantomData<&'a mut ()>, + } + impl<'a, T: Type, A: Type> PassDispatch for Dispatch<'a, T, A> { + type Input = PassArgs<'a, P>; + type Output = Result, DeduceResetsError>; + + fn build_reset_graph( + self, + mut pass_args: Self::Input, + ) -> Self::Output { + Expr::canonical(self.arg).run_pass(pass_args.as_mut())?; + let (expr_resets, _) = pass_args.get_or_make_resets(self.expr, None); + let (arg_resets, _) = pass_args.get_or_make_resets(self.arg, None); + // don't use PassArgs::union since types don't match and we want to just union resets if they exist + for (expr_node_index, arg_node_index) in expr_resets + .node_indexes + .into_iter() + .zip(arg_resets.node_indexes) + { + pass_args.state.reset_graph.union( + expr_node_index, + arg_node_index, + pass_args.fallback_error_source_location, + )?; + } + Ok(PassOutput(())) + } + + fn substitute_resets( + self, + mut pass_args: Self::Input, + ) -> Self::Output { + let resets = pass_args + .get_resets(self.expr) + .expect("added resets in build_reset_graph"); + let arg = Expr::canonical(self.arg).run_pass(pass_args.as_mut())?; + let ty = resets.substituted_type( + &mut pass_args.state.reset_graph, + pass_args.state.fallback_to_sync_reset, + pass_args.fallback_error_source_location, + )?; + Ok(arg.map(|arg| { + macro_rules! match_expr_ty { + ($arg:ident, $($Variant:ident),*) => { + match ty { + CanonicalType::Array(_) + | CanonicalType::Enum(_) + | CanonicalType::Bundle(_) + | CanonicalType::Reset(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => unreachable!(), + $(CanonicalType::$Variant(ty) => Expr::expr_enum($arg.cast_to(ty)),)* + } + }; + } + macro_rules! match_arg_ty { + ($($Variant:ident),*) => { + *match Expr::ty(arg) { + CanonicalType::Array(_) + | CanonicalType::Enum(_) + | CanonicalType::Bundle(_) + | CanonicalType::Reset(_) => unreachable!(), + CanonicalType::PhantomConst(_) | + CanonicalType::DynSimOnly(_) => Expr::expr_enum(arg), + $(CanonicalType::$Variant(_) => { + let arg = Expr::<$Variant>::from_canonical(arg); + match_expr_ty!(arg, UInt, SInt, Bool, AsyncReset, SyncReset, Clock) + })* + } + }; + } + match_arg_ty!(UInt, SInt, Bool, AsyncReset, SyncReset, Clock) + })) + } + } + P::dispatch( + Dispatch { + expr: expr.to_expr(), + arg, + _phantom: PhantomData, + }, + pass_args, + ) +} + +impl RunPass
<P>
for ExprEnum { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + match self { + ExprEnum::UIntLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::SIntLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BoolLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::PhantomConst(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BundleLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ArrayLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::EnumLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::Uninit(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::NotU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::NotS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::NotB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::Neg(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitAndU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitAndS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitAndB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitOrU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitOrS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitOrB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitXorU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitXorS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::BitXorB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::AddU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::AddS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::SubU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::SubS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::MulU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::MulS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DivU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DivS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::RemU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::RemS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DynShlU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DynShlS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DynShrU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DynShrS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::FixedShlU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::FixedShlS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::FixedShrU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::FixedShrS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLtB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLeB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGtB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGeB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + 
ExprEnum::CmpEqB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpNeB(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLtU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLeU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGtU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGeU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpEqU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpNeU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLtS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpLeS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGtS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpGeS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpEqS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CmpNeS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastUIntToUInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastUIntToSInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastSIntToUInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastSIntToSInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastBoolToUInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastBoolToSInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastUIntToBool(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastSIntToBool(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastBoolToSyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastUIntToSyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSIntToSyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastBoolToAsyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastUIntToAsyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSIntToAsyncReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSyncResetToBool(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSyncResetToUInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSyncResetToSInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastSyncResetToReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastAsyncResetToBool(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastAsyncResetToUInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastAsyncResetToSInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastAsyncResetToReset(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastResetToBool(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastResetToUInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastResetToSInt(expr) => cast_bit_op(expr, expr.arg(), pass_args), + ExprEnum::CastBoolToClock(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastUIntToClock(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastSIntToClock(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastClockToBool(expr) => 
Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastClockToUInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastClockToSInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::FieldAccess(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::VariantAccess(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ArrayIndex(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::DynArrayIndex(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitAndU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitAndS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitOrU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitOrS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitXorU(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ReduceBitXorS(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::SliceUInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::SliceSInt(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastToBits(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::CastBitsTo(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::ModuleIO(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::Instance(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::Wire(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + ExprEnum::Reg(expr) => reg_expr_run_pass(expr, pass_args), + ExprEnum::RegSync(expr) => reg_expr_run_pass(expr, pass_args), + ExprEnum::RegAsync(expr) => reg_expr_run_pass(expr, pass_args), + ExprEnum::MemPort(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)), + } + } +} + +impl RunPass
<P>
for Expr { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::expr_enum(*self) + .run_pass(pass_args)? + .map(|expr_enum| expr_enum.to_expr())) + } +} + +impl RunPass
<P>
for Expr> { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::canonical(*self) + .run_pass(pass_args)? + .map(Expr::from_canonical)) + } +} + +impl RunPass
<P>
for Expr> { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::canonical(*self) + .run_pass(pass_args)? + .map(Expr::from_canonical)) + } +} + +impl RunPass
<P>
for Expr { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::canonical(*self) + .run_pass(pass_args)? + .map(Expr::from_canonical)) + } +} + +impl RunPass
<P>
for Expr { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::canonical(*self) + .run_pass(pass_args)? + .map(Expr::from_canonical)) + } +} + +impl RunPass
<P>
for Expr { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(Expr::canonical(*self) + .run_pass(pass_args)? + .map(Expr::from_canonical)) + } +} + +impl RunPassExpr for ops::Uninit { + type Args<'a> = [Expr; 0]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + _resets: Resets, + _args_resets: Vec, + _pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + _new_args: Vec>, + ) -> Result { + Ok(ops::Uninit::new(ty)) + } +} + +impl RunPassExpr for ops::BundleLiteral { + type Args<'a> = Interned<[Expr]>; + + fn args<'a>(&'a self) -> Self::Args<'a> { + self.field_values() + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + for (resets_field, field_expr_resets) in resets.bundle_fields().zip(args_resets) { + pass_args.union(resets_field, field_expr_resets, None)?; + } + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(ops::BundleLiteral::new( + Bundle::from_canonical(ty), + Intern::intern_owned(new_args), + )) + } +} + +impl RunPassExpr for ArrayLiteral { + type Args<'a> = Interned<[Expr]>; + + fn args<'a>(&'a self) -> Self::Args<'a> { + self.element_values() + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + let resets_elements = resets.array_elements(); + for arg_resets in args_resets { + pass_args.union(resets_elements, arg_resets, None)?; + } + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new( + ::from_canonical(ty).element(), + Intern::intern_owned(new_args), + )) + } +} + +impl RunPassExpr for ops::EnumLiteral { + type Args<'a> = Option>; + + fn args<'a>(&'a self) -> Self::Args<'a> { + self.variant_value() + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + if let Some(Some(variant_resets)) = resets.enum_variants().nth(self.variant_index()) { + pass_args.union(variant_resets, args_resets[0], None)?; + } + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new_by_index( + Enum::from_canonical(ty), + self.variant_index(), + new_args.get(0).copied(), + )) + } +} + +impl RunPassExpr for ops::FieldAccess { + type Args<'a> = [Expr; 1]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [Expr::canonical(self.base())] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + let Some(field_resets) = args_resets[0].bundle_fields().nth(self.field_index()) else { + unreachable!(); + }; + pass_args.union(resets, field_resets, None) + } + + fn new( + &self, + _ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new_by_index( + Expr::from_canonical(new_args[0]), + self.field_index(), + )) + } +} + +impl RunPassExpr for ops::VariantAccess { + type Args<'a> = [Expr; 1]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + 
[Expr::canonical(self.base())] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + if let Some(Some(variant_resets)) = args_resets[0].enum_variants().nth(self.variant_index()) + { + pass_args.union(resets, variant_resets, None)?; + } + Ok(()) + } + + fn new( + &self, + _ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new_by_index( + Expr::from_canonical(new_args[0]), + self.variant_index(), + )) + } +} + +impl RunPassExpr for ops::ArrayIndex { + type Args<'a> = [Expr; 1]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [Expr::canonical(self.base())] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + pass_args.union(resets, args_resets[0].array_elements(), None) + } + + fn new( + &self, + _ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new( + Expr::from_canonical(new_args[0]), + self.element_index(), + )) + } +} + +impl RunPassExpr for ops::DynArrayIndex { + type Args<'a> = [Expr; 2]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [ + Expr::canonical(self.base()), + Expr::canonical(self.element_index()), + ] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + resets: Resets, + args_resets: Vec, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + pass_args.union(resets, args_resets[0].array_elements(), None) + } + + fn new( + &self, + _ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new( + Expr::from_canonical(new_args[0]), + Expr::from_canonical(new_args[1]), + )) + } +} + +impl RunPassExpr for ops::CastBitsTo { + type Args<'a> = [Expr; 1]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [Expr::canonical(self.arg())] + } + + fn source_location(&self) -> Option { + None + } + + fn union_parts( + &self, + _resets: Resets, + _args_resets: Vec, + _pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + new_args: Vec>, + ) -> Result { + Ok(Self::new(Expr::from_canonical(new_args[0]), ty)) + } +} + +impl RunPassExpr for ModuleIO { + type Args<'a> = [Expr; 0]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [] + } + + fn source_location(&self) -> Option { + Some(self.source_location()) + } + + fn union_parts( + &self, + _resets: Resets, + _args_resets: Vec, + _pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + _new_args: Vec>, + ) -> Result { + Ok(Self::new_unchecked( + self.containing_module_name_id(), + self.name_id(), + self.source_location(), + self.is_input(), + ty, + )) + } +} + +impl RunPassExpr for Wire { + type Args<'a> = [Expr; 0]; + + fn args<'a>(&'a self) -> Self::Args<'a> { + [] + } + + fn source_location(&self) -> Option { + Some(self.source_location()) + } + + fn union_parts( + &self, + _resets: Resets, + _args_resets: Vec, + _pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result<(), DeduceResetsError> { + Ok(()) + } + + fn new( + &self, + ty: CanonicalType, + _new_args: Vec>, + ) -> Result { + Ok(Self::new_unchecked( + self.scoped_name(), + self.source_location(), + ty, + )) + } +} + +impl From> for AnyReg { + fn from(value: Reg) -> Self { + struct Dispatch; + impl 
ResetTypeDispatch for Dispatch { + type Input = Reg; + type Output = AnyReg; + + fn reset(self, input: Self::Input) -> Self::Output { + AnyReg::Reg(input) + } + + fn sync_reset(self, input: Self::Input) -> Self::Output { + AnyReg::RegSync(input) + } + + fn async_reset(self, input: Self::Input) -> Self::Output { + AnyReg::RegAsync(input) + } + } + T::dispatch(value, Dispatch) + } +} + +impl RunPassDispatch for AnyReg { + fn build_reset_graph( + &self, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError> { + match_any_reg!(self, |reg: &Reg| { + pass_args + .get_or_make_resets(Expr::canonical(reg.to_expr()), Some(reg.source_location())); + reg.init().run_pass(pass_args.as_mut())?; + Expr::canonical(reg.clock_domain()).run_pass(pass_args)?; + Ok(PassOutput(())) + }) + } + + fn substitute_resets( + &self, + mut pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError> { + match_any_reg!(self, |reg: &Reg| { + let scoped_name = reg.scoped_name(); + let source_location = reg.source_location(); + let resets = pass_args + .get_resets(Expr::canonical(reg.to_expr())) + .expect("added resets in build_reset_graph"); + let ty = resets.substituted_type( + &mut pass_args.state.reset_graph, + pass_args.state.fallback_to_sync_reset, + source_location, + )?; + let init = reg.init().run_pass(pass_args.as_mut())?.0; + let clock_domain = Expr::::from_canonical( + Expr::canonical(reg.clock_domain()).run_pass(pass_args)?.0, + ); + match Expr::ty(clock_domain) + .field_by_name("rst".intern()) + .expect("ClockDomain has rst field") + .ty + { + CanonicalType::AsyncReset(_) => { + Ok(PassOutput(AnyReg::RegAsync(Reg::new_unchecked( + scoped_name, + source_location, + ty, + Expr::from_bundle(clock_domain), + init, + )))) + } + CanonicalType::SyncReset(_) => Ok(PassOutput(AnyReg::RegSync(Reg::new_unchecked( + scoped_name, + source_location, + ty, + Expr::from_bundle(clock_domain), + init, + )))), + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::Array(_) + | CanonicalType::Enum(_) + | CanonicalType::Bundle(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => unreachable!(), + } + }) + } +} + +impl RunPassDispatch for Instance { + fn build_reset_graph( + &self, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError> { + self.instantiated().run_pass(PassArgs:: { + state: pass_args.state, + instantiated_module: InstantiatedModule::Child { + parent: pass_args.instantiated_module.intern_sized(), + instance: self.intern(), + }, + fallback_error_source_location: self.instantiated().source_location(), + _phantom: PhantomData, + })?; + let (resets, _) = pass_args.get_or_make_resets(self, Some(self.source_location())); + for (resets_field, module_io) in resets.bundle_fields().zip(self.instantiated().module_io()) + { + let (module_io_resets, _) = pass_args.get_or_make_resets( + module_io.module_io, + Some(self.instantiated().source_location()), + ); + pass_args.union(resets_field, module_io_resets, Some(self.source_location()))?; + } + Ok(PassOutput(())) + } + + fn substitute_resets( + &self, + pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError> { + let PassOutput(instantiated) = + self.instantiated().run_pass(PassArgs:: { + state: pass_args.state, + instantiated_module: InstantiatedModule::Child { + parent: pass_args.instantiated_module.intern_sized(), + instance: self.intern(), + }, + 
fallback_error_source_location: self.instantiated().source_location(), + _phantom: PhantomData, + })?; + Ok(PassOutput(Self::new_unchecked( + self.scoped_name(), + instantiated, + self.source_location(), + ))) + } +} + +impl RunPass
<P>
for ExternModuleSimulation { + fn run_pass( + &self, + mut pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + let Self { + generator, + sim_io_to_generator_map, + source_location, + } = *self; + let sim_io_to_generator_map = Result::, P>, _>::from_iter( + sim_io_to_generator_map + .iter() + .map(|(sim_io, generator_io)| { + Ok(sim_io + .run_pass(pass_args.as_mut())? + .map(|v| (v, *generator_io))) + }), + )?; + Ok(sim_io_to_generator_map.map(|sim_io_to_generator_map| Self { + generator, + sim_io_to_generator_map: sim_io_to_generator_map.intern_sized(), + source_location, + })) + } +} + +macro_rules! impl_run_pass_copy { + ([$($generics:tt)*] $ty:ty) => { + impl RunPass
<P>
for $ty { + fn run_pass( + &self, + _pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(PassOutput::new(*self)) + } + } + }; +} + +macro_rules! impl_run_pass_clone { + ([$($generics:tt)*] $ty:ty) => { + impl RunPass
<P>
for $ty { + fn run_pass( + &self, + _pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + Ok(PassOutput::from_fn(|| self.clone())) + } + } + }; +} + +impl_run_pass_clone!([] BigInt); +impl_run_pass_clone!([] ExternModuleParameter); +impl_run_pass_clone!([] SIntValue); +impl_run_pass_clone!([] std::ops::Range); +impl_run_pass_clone!([] UIntValue); +impl_run_pass_clone!([] crate::vendor::xilinx::XilinxAnnotation); +impl_run_pass_copy!([] BlackBoxInlineAnnotation); +impl_run_pass_copy!([] BlackBoxPathAnnotation); +impl_run_pass_copy!([] bool); +impl_run_pass_copy!([] CustomFirrtlAnnotation); +impl_run_pass_copy!([] DocStringAnnotation); +impl_run_pass_copy!([] DontTouchAnnotation); +impl_run_pass_copy!([] Interned); +impl_run_pass_copy!([] NameId); +impl_run_pass_copy!([] SInt); +impl_run_pass_copy!([] SourceLocation); +impl_run_pass_copy!([] SVAttributeAnnotation); +impl_run_pass_copy!([] UInt); +impl_run_pass_copy!([] usize); +impl_run_pass_copy!([] FormalKind); +impl_run_pass_copy!([] PhantomConst); + +macro_rules! impl_run_pass_for_struct { + ( + $(#[adjust_pass_args = $adjust_pass_args:expr])? + #[constructor = $constructor:expr] + impl[$($generics:tt)*] $RunPass:ident for $ty:ty { + $($field:ident $(($($args:tt)*))?: _,)* + } + ) => { + impl $RunPass
<P>
for $ty { + #[allow(unused_mut, unused_variables)] + fn run_pass( + &self, + mut pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + $($adjust_pass_args(self, &mut pass_args);)? + Ok(($(self.$field$(($($args)*))?.run_pass(pass_args.as_mut())?,)*).call(|($($field,)*)| $constructor)) + } + } + }; + ( + $(#[adjust_pass_args = $adjust_pass_args:expr])? + impl[$($generics:tt)*] $RunPass:ident for $ty:ty { + $($field:ident: _,)* + } + ) => { + impl_run_pass_for_struct! { + $(#[adjust_pass_args = $adjust_pass_args])? + #[constructor = { + type Ty = T; + Ty::<$ty> { $($field,)* } + }] + impl[$($generics)*] $RunPass for $ty { + $($field: _,)* + } + } + }; +} + +macro_rules! impl_run_pass_for_enum { + (impl[$($generics:tt)*] $RunPass:ident for $ty:ty { + $($variant:ident($arg:ident),)* + }) => { + impl $RunPass
<P>
for $ty { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + type Ty = T; + match self { + $(Ty::<$ty>::$variant($arg) => Ok($arg.run_pass(pass_args)?.map(<$ty>::$variant)),)* + } + } + } + }; +} + +macro_rules! impl_run_pass_for_unary_op { + ($path:path) => { + impl_run_pass_for_struct! { + #[constructor = <$path>::new(arg)] + impl[] RunPass for $path { + arg(): _, + } + } + }; +} + +impl_run_pass_for_unary_op!(ops::NotU); +impl_run_pass_for_unary_op!(ops::NotS); +impl_run_pass_for_unary_op!(ops::NotB); +impl_run_pass_for_unary_op!(ops::Neg); +impl_run_pass_for_unary_op!(ops::CastBoolToUInt); +impl_run_pass_for_unary_op!(ops::CastBoolToSInt); +impl_run_pass_for_unary_op!(ops::CastUIntToBool); +impl_run_pass_for_unary_op!(ops::CastSIntToBool); +impl_run_pass_for_unary_op!(ops::CastBoolToClock); +impl_run_pass_for_unary_op!(ops::CastUIntToClock); +impl_run_pass_for_unary_op!(ops::CastSIntToClock); +impl_run_pass_for_unary_op!(ops::CastClockToBool); +impl_run_pass_for_unary_op!(ops::CastClockToUInt); +impl_run_pass_for_unary_op!(ops::CastClockToSInt); +impl_run_pass_for_unary_op!(ops::ReduceBitAndU); +impl_run_pass_for_unary_op!(ops::ReduceBitAndS); +impl_run_pass_for_unary_op!(ops::ReduceBitOrU); +impl_run_pass_for_unary_op!(ops::ReduceBitOrS); +impl_run_pass_for_unary_op!(ops::ReduceBitXorU); +impl_run_pass_for_unary_op!(ops::ReduceBitXorS); + +macro_rules! impl_run_pass_for_binary_op { + ($path:path) => { + impl_run_pass_for_struct! { + #[constructor = <$path>::new(lhs, rhs)] + impl[] RunPass for $path { + lhs(): _, + rhs(): _, + } + } + }; +} + +impl_run_pass_for_binary_op!(ops::BitAndU); +impl_run_pass_for_binary_op!(ops::BitAndS); +impl_run_pass_for_binary_op!(ops::BitAndB); +impl_run_pass_for_binary_op!(ops::BitOrU); +impl_run_pass_for_binary_op!(ops::BitOrS); +impl_run_pass_for_binary_op!(ops::BitOrB); +impl_run_pass_for_binary_op!(ops::BitXorU); +impl_run_pass_for_binary_op!(ops::BitXorS); +impl_run_pass_for_binary_op!(ops::BitXorB); +impl_run_pass_for_binary_op!(ops::AddU); +impl_run_pass_for_binary_op!(ops::AddS); +impl_run_pass_for_binary_op!(ops::SubU); +impl_run_pass_for_binary_op!(ops::SubS); +impl_run_pass_for_binary_op!(ops::MulU); +impl_run_pass_for_binary_op!(ops::MulS); +impl_run_pass_for_binary_op!(ops::DivU); +impl_run_pass_for_binary_op!(ops::DivS); +impl_run_pass_for_binary_op!(ops::RemU); +impl_run_pass_for_binary_op!(ops::RemS); +impl_run_pass_for_binary_op!(ops::DynShlU); +impl_run_pass_for_binary_op!(ops::DynShlS); +impl_run_pass_for_binary_op!(ops::DynShrU); +impl_run_pass_for_binary_op!(ops::DynShrS); +impl_run_pass_for_binary_op!(ops::FixedShlU); +impl_run_pass_for_binary_op!(ops::FixedShlS); +impl_run_pass_for_binary_op!(ops::FixedShrU); +impl_run_pass_for_binary_op!(ops::FixedShrS); +impl_run_pass_for_binary_op!(ops::CmpLtB); +impl_run_pass_for_binary_op!(ops::CmpLeB); +impl_run_pass_for_binary_op!(ops::CmpGtB); +impl_run_pass_for_binary_op!(ops::CmpGeB); +impl_run_pass_for_binary_op!(ops::CmpEqB); +impl_run_pass_for_binary_op!(ops::CmpNeB); +impl_run_pass_for_binary_op!(ops::CmpLtU); +impl_run_pass_for_binary_op!(ops::CmpLeU); +impl_run_pass_for_binary_op!(ops::CmpGtU); +impl_run_pass_for_binary_op!(ops::CmpGeU); +impl_run_pass_for_binary_op!(ops::CmpEqU); +impl_run_pass_for_binary_op!(ops::CmpNeU); +impl_run_pass_for_binary_op!(ops::CmpLtS); +impl_run_pass_for_binary_op!(ops::CmpLeS); +impl_run_pass_for_binary_op!(ops::CmpGtS); +impl_run_pass_for_binary_op!(ops::CmpGeS); +impl_run_pass_for_binary_op!(ops::CmpEqS); 
+impl_run_pass_for_binary_op!(ops::CmpNeS); + +macro_rules! impl_run_pass_for_int_cast_op { + ($path:path) => { + impl_run_pass_for_struct! { + #[constructor = <$path>::new(arg, ty)] + impl[] RunPass for $path { + arg(): _, + ty(): _, + } + } + }; +} + +impl_run_pass_for_int_cast_op!(ops::CastUIntToUInt); +impl_run_pass_for_int_cast_op!(ops::CastUIntToSInt); +impl_run_pass_for_int_cast_op!(ops::CastSIntToUInt); +impl_run_pass_for_int_cast_op!(ops::CastSIntToSInt); + +impl_run_pass_for_struct! { + #[constructor = ops::SliceUInt::new(base, range)] + impl[] RunPass for ops::SliceUInt { + base(): _, + range(): _, + } +} + +impl_run_pass_for_struct! { + #[constructor = ops::SliceSInt::new(base, range)] + impl[] RunPass for ops::SliceSInt { + base(): _, + range(): _, + } +} + +impl_run_pass_for_struct! { + #[constructor = ops::CastToBits::new(arg)] + impl[] RunPass for ops::CastToBits { + arg(): _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for StmtFormal { + kind: _, + clk: _, + pred: _, + en: _, + text: _, + source_location: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for StmtIf { + cond: _, + source_location: _, + blocks: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for StmtMatch { + expr: _, + source_location: _, + blocks: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for StmtWire { + annotations: _, + wire: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for StmtInstance { + annotations: _, + instance: _, + } +} + +impl_run_pass_for_enum! { + impl[] RunPass for Stmt { + Connect(v), + Formal(v), + If(v), + Match(v), + Declaration(v), + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for Block { + memories: _, + stmts: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for NormalModuleBody { + body: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for ExternModuleBody { + verilog_name: _, + parameters: _, + simulation: _, + } +} + +impl_run_pass_copy!([] MemPort); // Mem can't contain any `Reset` types +impl_run_pass_copy!([] Mem); // Mem can't contain any `Reset` types + +impl RunPassDispatch for StmtConnect { + fn build_reset_graph( + &self, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError> { + let Self { + lhs, + rhs, + source_location, + } = *self; + pass_args.fallback_error_source_location = source_location; + lhs.run_pass(pass_args.as_mut())?; + rhs.run_pass(pass_args.as_mut())?; + let (lhs_resets, _) = pass_args.get_or_make_resets(lhs, Some(source_location)); + let (rhs_resets, _) = pass_args.get_or_make_resets(rhs, Some(source_location)); + pass_args.union(lhs_resets, rhs_resets, Some(source_location))?; + Ok(PassOutput(())) + } + + fn substitute_resets( + &self, + mut pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError> { + let StmtConnect { + lhs, + rhs, + source_location, + } = *self; + pass_args.fallback_error_source_location = source_location; + let lhs = lhs.run_pass(pass_args.as_mut())?.0; + let rhs = rhs.run_pass(pass_args)?.0; + Ok(PassOutput(StmtConnect { + lhs, + rhs, + source_location, + })) + } +} + +impl RunPass
<P>
for TargetBase { + fn run_pass( + &self, + pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + let reg: AnyReg = match self { + TargetBase::ModuleIO(v) => return Ok(v.run_pass(pass_args)?.map(TargetBase::ModuleIO)), + TargetBase::MemPort(v) => return Ok(v.run_pass(pass_args)?.map(TargetBase::MemPort)), + &TargetBase::Reg(v) => v.into(), + &TargetBase::RegSync(v) => v.into(), + &TargetBase::RegAsync(v) => v.into(), + TargetBase::Wire(v) => return Ok(v.run_pass(pass_args)?.map(TargetBase::Wire)), + TargetBase::Instance(v) => return Ok(v.run_pass(pass_args)?.map(TargetBase::Instance)), + }; + Ok(reg.run_pass(pass_args)?.map(|reg| match reg { + AnyReg::Reg(reg) => TargetBase::Reg(reg), + AnyReg::RegSync(reg) => TargetBase::RegSync(reg), + AnyReg::RegAsync(reg) => TargetBase::RegAsync(reg), + })) + } +} + +impl RunPass
<P>
for StmtDeclaration { + fn run_pass( + &self, + mut pass_args: PassArgs<'_, P>, + ) -> Result, DeduceResetsError> { + let (annotations, reg) = match self { + StmtDeclaration::Wire(v) => { + return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Wire)); + } + &StmtDeclaration::Reg(StmtReg { annotations, reg }) => (annotations, AnyReg::from(reg)), + &StmtDeclaration::RegSync(StmtReg { annotations, reg }) => { + (annotations, AnyReg::from(reg)) + } + &StmtDeclaration::RegAsync(StmtReg { annotations, reg }) => { + (annotations, AnyReg::from(reg)) + } + StmtDeclaration::Instance(v) => { + return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Instance)); + } + }; + let annotations = annotations.run_pass(pass_args.as_mut())?; + let reg = reg.run_pass(pass_args)?; + Ok((annotations, reg).call(|(annotations, reg)| match reg { + AnyReg::Reg(reg) => StmtReg { annotations, reg }.into(), + AnyReg::RegSync(reg) => StmtReg { annotations, reg }.into(), + AnyReg::RegAsync(reg) => StmtReg { annotations, reg }.into(), + })) + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for TargetPathBundleField { + name: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for TargetPathArrayElement { + index: _, + } +} + +impl_run_pass_for_struct! { + impl[] RunPass for TargetPathDynArrayElement {} +} + +impl_run_pass_for_enum! { + impl[] RunPass for TargetPathElement { + BundleField(v), + ArrayElement(v), + DynArrayElement(v), + } +} + +impl_run_pass_for_enum! { + impl[] RunPass for Target { + Base(v), + Child(v), + } +} + +impl_run_pass_for_struct! { + #[constructor = TargetChild::new(parent, path_element)] + impl[] RunPass for TargetChild { + parent(): _, + path_element(): _, + } +} + +impl_run_pass_for_enum! { + impl[] RunPass for Annotation { + DontTouch(v), + SVAttribute(v), + BlackBoxInline(v), + BlackBoxPath(v), + DocString(v), + CustomFirrtl(v), + Xilinx(v), + } +} + +impl_run_pass_for_enum! { + impl[] RunPass for ModuleBody { + Normal(v), + Extern(v), + } +} + +impl_run_pass_for_struct! { + #[constructor = TargetedAnnotation::new(target, annotation)] + impl[] RunPass for TargetedAnnotation { + target(): _, + annotation(): _, + } +} + +impl_run_pass_for_struct! 
{ + impl[] RunPass for AnnotatedModuleIO { + annotations: _, + module_io: _, + } +} + +impl RunPassDispatch for Module { + fn build_reset_graph( + &self, + mut pass_args: PassArgs<'_, BuildResetGraph>, + ) -> Result, DeduceResetsError> { + pass_args.fallback_error_source_location = self.source_location(); + if pass_args + .state + .modules_added_to_graph + .insert(pass_args.instantiated_module) + { + self.name_id().run_pass(pass_args.as_mut())?; + self.source_location().run_pass(pass_args.as_mut())?; + self.module_io().run_pass(pass_args.as_mut())?; + self.body().run_pass(pass_args.as_mut())?; + self.module_annotations().run_pass(pass_args.as_mut())?; + } + Ok(PassOutput(())) + } + + fn substitute_resets( + &self, + mut pass_args: PassArgs<'_, SubstituteResets>, + ) -> Result, DeduceResetsError> { + pass_args.fallback_error_source_location = self.source_location(); + if let Some(&retval) = pass_args + .state + .substituted_modules + .get(&pass_args.instantiated_module) + { + return Ok(PassOutput(retval)); + } + let PassOutput(name_id) = self.name_id().run_pass(pass_args.as_mut())?; + let PassOutput(source_location) = self.source_location().run_pass(pass_args.as_mut())?; + let PassOutput(module_io) = self.module_io().run_pass(pass_args.as_mut())?; + let PassOutput(body) = self.body().run_pass(pass_args.as_mut())?; + let PassOutput(module_annotations) = + self.module_annotations().run_pass(pass_args.as_mut())?; + let retval = Module::new_unchecked( + name_id, + source_location, + body, + module_io, + module_annotations, + ); + pass_args + .state + .substituted_modules + .insert(pass_args.instantiated_module, retval); + Ok(PassOutput(retval)) + } +} + +pub fn deduce_resets( + module: Interned>, + fallback_to_sync_reset: bool, +) -> Result>, DeduceResetsError> { + let mut state = State { + modules_added_to_graph: HashSet::default(), + substituted_modules: HashMap::default(), + expr_resets: HashMap::default(), + reset_graph: ResetGraph::default(), + fallback_to_sync_reset, + }; + RunPass::::run_pass( + &*module, + PassArgs { + state: &mut state, + instantiated_module: InstantiatedModule::Base(module), + fallback_error_source_location: module.source_location(), + _phantom: PhantomData, + }, + )?; + Ok(RunPass::::run_pass( + &*module, + PassArgs { + state: &mut state, + instantiated_module: InstantiatedModule::Base(module), + fallback_error_source_location: module.source_location(), + _phantom: PhantomData, + }, + )? 
+ .0 + .intern_sized()) +} diff --git a/crates/fayalite/src/module/transform/simplify_enums.rs b/crates/fayalite/src/module/transform/simplify_enums.rs index bb57cf0..bd5f7d5 100644 --- a/crates/fayalite/src/module/transform/simplify_enums.rs +++ b/crates/fayalite/src/module/transform/simplify_enums.rs @@ -5,23 +5,24 @@ use crate::{ bundle::{Bundle, BundleField, BundleType}, enum_::{Enum, EnumType, EnumVariant}, expr::{ - ops::{self, EnumLiteral}, CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr, + ops::{self, EnumLiteral}, }, hdl, int::UInt, - intern::{Intern, Interned, Memoize}, + intern::{Intern, InternSlice, Interned, Memoize}, memory::{DynPortType, Mem, MemPort}, module::{ - transform::visit::{Fold, Folder}, Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire, + transform::visit::{Fold, Folder}, }, source_location::SourceLocation, ty::{CanonicalType, Type}, + util::HashMap, wire::Wire, }; use core::fmt; -use hashbrown::HashMap; +use serde::{Deserialize, Serialize}; #[derive(Debug)] pub enum SimplifyEnumsError { @@ -69,7 +70,9 @@ fn contains_any_enum_types(ty: CanonicalType) -> bool { | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) | CanonicalType::Reset(_) - | CanonicalType::Clock(_) => false, + | CanonicalType::Clock(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => false, } } } @@ -512,7 +515,9 @@ impl State { | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) | CanonicalType::Reset(_) - | CanonicalType::Clock(_) => unreachable!(), + | CanonicalType::Clock(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => unreachable!(), } } } @@ -577,7 +582,9 @@ fn connect_port( | (CanonicalType::Clock(_), _) | (CanonicalType::AsyncReset(_), _) | (CanonicalType::SyncReset(_), _) - | (CanonicalType::Reset(_), _) => unreachable!( + | (CanonicalType::Reset(_), _) + | (CanonicalType::PhantomConst(_), _) + | (CanonicalType::DynSimOnly(_), _) => unreachable!( "trying to connect memory ports:\n{:?}\n{:?}", Expr::ty(lhs), Expr::ty(rhs), @@ -613,7 +620,7 @@ fn match_int_tag( block, Block { memories: Default::default(), - stmts: [Stmt::from(retval)][..].intern(), + stmts: [Stmt::from(retval)].intern_slice(), }, ], }; @@ -665,6 +672,7 @@ impl Folder for State { ExprEnum::UIntLiteral(_) | ExprEnum::SIntLiteral(_) | ExprEnum::BoolLiteral(_) + | ExprEnum::PhantomConst(_) | ExprEnum::BundleLiteral(_) | ExprEnum::ArrayLiteral(_) | ExprEnum::Uninit(_) @@ -764,7 +772,9 @@ impl Folder for State { | ExprEnum::ModuleIO(_) | ExprEnum::Instance(_) | ExprEnum::Wire(_) - | ExprEnum::Reg(_) => op.default_fold(self), + | ExprEnum::Reg(_) + | ExprEnum::RegSync(_) + | ExprEnum::RegAsync(_) => op.default_fold(self), } } @@ -804,7 +814,7 @@ impl Folder for State { .unwrap() .gen_name(&format!( "{}_{}", - memory.scoped_name().1 .0, + memory.scoped_name().1.0, port.port_name() )), port.source_location(), @@ -921,7 +931,9 @@ impl Folder for State { | CanonicalType::Clock(_) | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) - | CanonicalType::Reset(_) => canonical_type.default_fold(self), + | CanonicalType::Reset(_) + | CanonicalType::PhantomConst(_) + | CanonicalType::DynSimOnly(_) => canonical_type.default_fold(self), } } @@ -944,12 +956,15 @@ impl Folder for State { } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum SimplifyEnumsKind { 
SimplifyToEnumsWithNoBody, #[clap(name = "replace-with-bundle-of-uints")] + #[serde(rename = "replace-with-bundle-of-uints")] ReplaceWithBundleOfUInts, #[clap(name = "replace-with-uint")] + #[serde(rename = "replace-with-uint")] ReplaceWithUInt, } @@ -958,8 +973,8 @@ pub fn simplify_enums( kind: SimplifyEnumsKind, ) -> Result>, SimplifyEnumsError> { module.fold(&mut State { - enum_types: HashMap::new(), - replacement_mem_ports: HashMap::new(), + enum_types: HashMap::default(), + replacement_mem_ports: HashMap::default(), kind, module_state_stack: vec![], }) diff --git a/crates/fayalite/src/module/transform/simplify_memories.rs b/crates/fayalite/src/module/transform/simplify_memories.rs index e8f9cbf..35f186d 100644 --- a/crates/fayalite/src/module/transform/simplify_memories.rs +++ b/crates/fayalite/src/module/transform/simplify_memories.rs @@ -9,16 +9,15 @@ use crate::{ intern::{Intern, Interned}, memory::{Mem, MemPort, PortType}, module::{ - transform::visit::{Fold, Folder}, Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire, + transform::visit::{Fold, Folder}, }, source_location::SourceLocation, ty::{CanonicalType, Type}, - util::MakeMutSlice, + util::{HashMap, MakeMutSlice}, wire::Wire, }; use bitvec::{slice::BitSlice, vec::BitVec}; -use hashbrown::HashMap; use std::{ convert::Infallible, fmt::Write, @@ -62,6 +61,7 @@ enum MemSplit { Bundle { fields: Rc<[MemSplit]>, }, + PhantomConst, Single { output_mem: Option, element_type: SingleType, @@ -76,6 +76,7 @@ impl MemSplit { fn mark_changed_element_type(self) -> Self { match self { MemSplit::Bundle { fields: _ } => self, + MemSplit::PhantomConst => self, MemSplit::Single { output_mem, element_type, @@ -97,6 +98,7 @@ impl MemSplit { .map(|field| Self::new(field.ty).mark_changed_element_type()) .collect(), }, + CanonicalType::PhantomConst(_) => MemSplit::PhantomConst, CanonicalType::Array(ty) => { let element = MemSplit::new(ty.element()); if let Self::Single { @@ -192,6 +194,7 @@ impl MemSplit { | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) | CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"), + CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"), } } } @@ -321,6 +324,9 @@ impl SplitMemState<'_, '_> { Expr::field(Expr::::from_canonical(e), &field.name) }, |initial_value_element| { + let Some(field_offset) = field_offset.only_bit_width() else { + todo!("memory containing sim-only values"); + }; &initial_value_element[field_offset..][..field_ty_bit_width] }, ); @@ -339,6 +345,7 @@ impl SplitMemState<'_, '_> { self.split_state_stack.pop(); } } + MemSplit::PhantomConst => {} MemSplit::Single { output_mem, element_type: single_type, @@ -538,7 +545,12 @@ impl ModuleState { }; loop { match input_element_type { - CanonicalType::Bundle(_) => unreachable!("bundle types are always split"), + CanonicalType::Bundle(_) => { + unreachable!("bundle types are always split") + } + CanonicalType::PhantomConst(_) => { + unreachable!("PhantomConst are always removed") + } CanonicalType::Enum(_) if input_array_types .first() @@ -612,6 +624,7 @@ impl ModuleState { | CanonicalType::AsyncReset(_) | CanonicalType::SyncReset(_) | CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"), + CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"), } break; } @@ -626,7 +639,7 @@ impl ModuleState { split_state: &SplitState<'_>, ) -> Mem { let mem_name = NameId( - Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1 
.0)), + Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1.0)), Id::new(), ); let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name); @@ -743,7 +756,8 @@ impl ModuleState { .. } | MemSplit::Bundle { .. } - | MemSplit::Array { .. } => { + | MemSplit::Array { .. } + | MemSplit::PhantomConst => { let mut replacement_ports = Vec::with_capacity(input_mem.ports().len()); let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len()); let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len()); @@ -887,7 +901,7 @@ impl Folder for State { module, ModuleState { output_module: None, - memories: HashMap::new(), + memories: HashMap::default(), }, ); let mut this = PushedState::push_module(self, module); diff --git a/crates/fayalite/src/module/transform/visit.rs b/crates/fayalite/src/module/transform/visit.rs index 2e1e48f..2c33a76 100644 --- a/crates/fayalite/src/module/transform/visit.rs +++ b/crates/fayalite/src/module/transform/visit.rs @@ -11,12 +11,11 @@ use crate::{ clock::Clock, enum_::{Enum, EnumType, EnumVariant}, expr::{ - ops, + Expr, ExprEnum, ops, target::{ Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField, TargetPathDynArrayElement, TargetPathElement, }, - Expr, ExprEnum, }, formal::FormalKind, int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue}, @@ -28,10 +27,15 @@ use crate::{ NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg, StmtWire, }, + phantom_const::PhantomConst, reg::Reg, - reset::{AsyncReset, Reset, SyncReset}, + reset::{AsyncReset, Reset, ResetType, SyncReset}, + sim::{ExternModuleSimulation, value::DynSimOnly}, source_location::SourceLocation, ty::{CanonicalType, Type}, + vendor::xilinx::{ + XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation, + }, wire::Wire, }; use num_bigint::{BigInt, BigUint}; diff --git a/crates/fayalite/src/phantom_const.rs b/crates/fayalite/src/phantom_const.rs new file mode 100644 index 0000000..9f25166 --- /dev/null +++ b/crates/fayalite/src/phantom_const.rs @@ -0,0 +1,485 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + expr::{ + Expr, ToExpr, + ops::{ExprPartialEq, ExprPartialOrd}, + }, + int::Bool, + intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize}, + sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType}, + source_location::SourceLocation, + ty::{ + CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, + StaticType, Type, TypeProperties, impl_match_variant_as_self, + serde_impls::{SerdeCanonicalType, SerdePhantomConst}, + }, +}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error}, +}; +use std::{ + any::Any, + fmt, + hash::{Hash, Hasher}, + marker::PhantomData, + ops::Index, +}; + +#[derive(Clone)] +pub struct PhantomConstCanonicalValue { + parsed: serde_json::Value, + serialized: Interned, +} + +impl PhantomConstCanonicalValue { + pub fn from_json_value(parsed: serde_json::Value) -> Self { + let serialized = Intern::intern_owned( + serde_json::to_string(&parsed) + .expect("conversion from json value to text shouldn't fail"), + ); + Self { parsed, serialized } + } + pub fn as_json_value(&self) -> &serde_json::Value { + &self.parsed + } + pub fn as_str(&self) -> Interned { + self.serialized + } +} + +impl fmt::Debug for PhantomConstCanonicalValue { + fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.serialized) + } +} + +impl fmt::Display for PhantomConstCanonicalValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.serialized) + } +} + +impl PartialEq for PhantomConstCanonicalValue { + fn eq(&self, other: &Self) -> bool { + self.serialized == other.serialized + } +} + +impl Eq for PhantomConstCanonicalValue {} + +impl Hash for PhantomConstCanonicalValue { + fn hash(&self, state: &mut H) { + self.serialized.hash(state); + } +} + +impl Serialize for PhantomConstCanonicalValue { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.parsed.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for PhantomConstCanonicalValue { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Ok(Self::from_json_value(serde_json::Value::deserialize( + deserializer, + )?)) + } +} + +pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug { + fn deserialize_value<'de, D>(deserializer: D) -> Result, D::Error> + where + D: serde::Deserializer<'de>; +} + +impl PhantomConstValue for T +where + T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug, + Interned: DeserializeOwned, +{ + fn deserialize_value<'de, D>(deserializer: D) -> Result, D::Error> + where + D: serde::Deserializer<'de>, + { + as Deserialize<'de>>::deserialize(deserializer) + } +} + +/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`]. +/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json]. +pub struct PhantomConst { + value: LazyInterned, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] +pub struct PhantomConstWithoutGenerics; + +#[allow(non_upper_case_globals)] +pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics; + +impl Index for PhantomConstWithoutGenerics { + type Output = PhantomConst; + + fn index(&self, value: T) -> &Self::Output { + Interned::into_inner(PhantomConst::new(value.intern()).intern_sized()) + } +} + +impl fmt::Debug for PhantomConst { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("PhantomConst").field(&self.get()).finish() + } +} + +impl Clone for PhantomConst { + fn clone(&self) -> Self { + *self + } +} + +impl Copy for PhantomConst {} + +impl PartialEq for PhantomConst { + fn eq(&self, other: &Self) -> bool { + self.get() == other.get() + } +} + +impl Eq for PhantomConst {} + +impl Hash for PhantomConst { + fn hash(&self, state: &mut H) { + self.get().hash(state); + } +} + +struct PhantomConstCanonicalMemoize(PhantomData); + +impl Copy + for PhantomConstCanonicalMemoize +{ +} + +impl Clone + for PhantomConstCanonicalMemoize +{ + fn clone(&self) -> Self { + *self + } +} + +impl Eq + for PhantomConstCanonicalMemoize +{ +} + +impl PartialEq + for PhantomConstCanonicalMemoize +{ + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Hash + for PhantomConstCanonicalMemoize +{ + fn hash(&self, _state: &mut H) {} +} + +impl Memoize for PhantomConstCanonicalMemoize { + type Input = Interned; + type InputOwned = Interned; + type Output = Interned; + + fn inner(self, input: &Self::Input) -> Self::Output { + Intern::intern_sized(PhantomConstCanonicalValue::from_json_value( + serde_json::to_value(input) + .expect("serialization failed when constructing a canonical PhantomConst"), + )) + } +} + +impl Memoize for PhantomConstCanonicalMemoize 
{ + type Input = Interned; + type InputOwned = Interned; + type Output = Interned; + + fn inner(self, input: &Self::Input) -> Self::Output { + PhantomConstValue::deserialize_value(input.as_json_value()) + .expect("deserialization failed ") + } +} + +impl PhantomConst { + pub fn new(value: Interned) -> Self { + Self { + value: LazyInterned::Interned(value), + } + } + pub const fn new_lazy(v: &'static dyn LazyInternedTrait) -> Self { + Self { + value: LazyInterned::new_lazy(v), + } + } + pub fn get(self) -> Interned { + self.value.interned() + } + pub fn type_properties(self) -> TypeProperties { + <()>::TYPE_PROPERTIES + } + pub fn can_connect(self, other: Self) -> bool { + self == other + } + pub fn canonical_phantom_const(self) -> PhantomConst { + if let Some(&retval) = ::downcast_ref::(&self) { + return retval; + } + ::new( + PhantomConstCanonicalMemoize::(PhantomData).get_owned(self.get()), + ) + } + pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self { + if let Some(&retval) = ::downcast_ref::(&canonical_type) { + return retval; + } + Self::new( + PhantomConstCanonicalMemoize::(PhantomData).get_owned(canonical_type.get()), + ) + } +} + +impl Type for PhantomConst { + type BaseType = PhantomConst; + type MaskType = (); + type SimValue = PhantomConst; + impl_match_variant_as_self!(); + + fn mask_type(&self) -> Self::MaskType { + () + } + + fn canonical(&self) -> CanonicalType { + CanonicalType::PhantomConst(self.canonical_phantom_const()) + } + + fn from_canonical(canonical_type: CanonicalType) -> Self { + let CanonicalType::PhantomConst(phantom_const) = canonical_type else { + panic!("expected PhantomConst"); + }; + Self::from_canonical_phantom_const(phantom_const) + } + + fn source_location() -> SourceLocation { + SourceLocation::builtin() + } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert!(opaque.is_empty()); + *self + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert!(opaque.is_empty()); + assert_eq!(*value, *self); + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(*value, *self); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + } +} + +impl Default for PhantomConst +where + Interned: Default, +{ + fn default() -> Self { + Self::TYPE + } +} + +impl StaticType for PhantomConst +where + Interned: Default, +{ + const TYPE: Self = PhantomConst { + value: LazyInterned::new_lazy(&Interned::::default), + }; + const MASK_TYPE: Self::MaskType = (); + const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES; + const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES; +} + +type SerdeType = SerdeCanonicalType>>; + +impl Serialize for PhantomConst { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + SerdeType::::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer) + } +} + +impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + match SerdeType::::deserialize(deserializer)? 
{ + SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)), + ty => Err(Error::invalid_value( + serde::de::Unexpected::Other(ty.as_serde_unexpected_str()), + &"a PhantomConst", + )), + } + } +} + +impl ExprPartialEq for PhantomConst { + fn cmp_eq(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + true.to_expr() + } + + fn cmp_ne(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + false.to_expr() + } +} + +impl ExprPartialOrd for PhantomConst { + fn cmp_lt(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + false.to_expr() + } + + fn cmp_le(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + true.to_expr() + } + + fn cmp_gt(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + false.to_expr() + } + + fn cmp_ge(lhs: Expr, rhs: Expr) -> Expr { + assert_eq!(Expr::ty(lhs), Expr::ty(rhs)); + true.to_expr() + } +} + +impl SimValuePartialEq for PhantomConst { + fn sim_value_eq(this: &SimValue, other: &SimValue) -> bool { + assert_eq!(SimValue::ty(this), SimValue::ty(other)); + true + } +} + +impl ToSimValue for PhantomConst { + type Type = PhantomConst; + + fn to_sim_value(&self) -> SimValue { + SimValue::from_value(*self, *self) + } +} + +impl ToSimValueWithType> for PhantomConst { + fn to_sim_value_with_type(&self, ty: PhantomConst) -> SimValue> { + SimValue::from_value(ty, *self) + } +} + +impl ToSimValueWithType for PhantomConst { + fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue { + SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self)) + } +} + +mod sealed { + pub trait Sealed {} +} + +pub trait PhantomConstGet: sealed::Sealed { + fn get(&self) -> Interned; +} + +impl>> + sealed::Sealed for This +{ +} + +impl>> + PhantomConstGet for This +{ + fn get(&self) -> Interned { + This::Target::get(&**self) + } +} + +macro_rules! impl_phantom_const_get { + ( + impl PhantomConstGet<$T:ident> for $ty:ty { + fn $get:ident(&$get_self:ident) -> _ $get_body:block + } + ) => { + impl<$T: ?Sized + PhantomConstValue> sealed::Sealed<$T> for $ty {} + + impl<$T: ?Sized + PhantomConstValue> PhantomConstGet<$T> for $ty { + fn $get(&$get_self) -> Interned<$T> $get_body + } + }; +} + +impl_phantom_const_get! { + impl PhantomConstGet for PhantomConst { + fn get(&self) -> _ { + PhantomConst::get(*self) + } + } +} + +impl_phantom_const_get! 
{ + impl PhantomConstGet for Expr> { + fn get(&self) -> _ { + PhantomConst::get(Expr::ty(*self)) + } + } +} + +#[doc(hidden)] +pub trait ReturnSelfUnchanged { + type Type: ?Sized; +} + +impl ReturnSelfUnchanged for This { + type Type = This; +} + +#[doc(hidden)] +pub fn type_alias_phantom_const_get_helper( + param: impl PhantomConstGet, + get: impl FnOnce(Interned) -> R, +) -> &'static R { + Interned::into_inner(get(param.get()).intern_sized()) +} diff --git a/crates/fayalite/src/platform.rs b/crates/fayalite/src/platform.rs new file mode 100644 index 0000000..194aa6e --- /dev/null +++ b/crates/fayalite/src/platform.rs @@ -0,0 +1,1923 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + bundle::{Bundle, BundleField, BundleType}, + expr::{Expr, ExprEnum}, + intern::{Intern, Interned}, + module::{Module, ModuleBuilder, ModuleIO, connect_with_loc, instance_with_loc, wire_with_loc}, + source_location::SourceLocation, + ty::{CanonicalType, Type}, + util::{HashMap, HashSet, InternedStrCompareAsStr}, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error}; +use std::{ + any::{Any, TypeId}, + borrow::Cow, + cmp::Ordering, + collections::{BTreeMap, BTreeSet}, + convert::Infallible, + fmt, + hash::{Hash, Hasher}, + iter::FusedIterator, + marker::PhantomData, + mem, + sync::{Arc, Mutex, MutexGuard, OnceLock, RwLock, RwLockWriteGuard}, +}; + +pub mod peripherals; + +trait DynPlatformTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn eq_dyn(&self, other: &dyn DynPlatformTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn name_dyn(&self) -> Interned; + fn new_peripherals_dyn<'builder>( + &self, + builder_factory: PeripheralsBuilderFactory<'builder>, + ) -> (DynPeripherals, PeripheralsBuilderFinished<'builder>); + fn source_location_dyn(&self) -> SourceLocation; + #[track_caller] + fn add_peripherals_in_wrapper_module_dyn(&self, m: &ModuleBuilder, peripherals: DynPeripherals); + fn aspects_dyn(&self) -> PlatformAspectSet; +} + +impl DynPlatformTrait for T { + fn as_any(&self) -> &dyn Any { + self + } + + fn eq_dyn(&self, other: &dyn DynPlatformTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn name_dyn(&self) -> Interned { + self.name() + } + + fn new_peripherals_dyn<'builder>( + &self, + builder_factory: PeripheralsBuilderFactory<'builder>, + ) -> (DynPeripherals, PeripheralsBuilderFinished<'builder>) { + let (peripherals, finished) = self.new_peripherals(builder_factory); + (DynPeripherals(Box::new(peripherals)), finished) + } + + fn source_location_dyn(&self) -> SourceLocation { + self.source_location() + } + + #[track_caller] + fn add_peripherals_in_wrapper_module_dyn( + &self, + m: &ModuleBuilder, + peripherals: DynPeripherals, + ) { + if DynPeripheralsTrait::type_id(&*peripherals.0) != TypeId::of::() { + panic!( + "wrong DynPeripherals value type, expected type: <{}>::Peripherals, got value:\n{peripherals:?}", + std::any::type_name::() + ); + } + let Ok(peripherals) = peripherals.0.into_box_any().downcast() else { + unreachable!(); + }; + self.add_peripherals_in_wrapper_module(m, *peripherals) + } + + fn aspects_dyn(&self) -> PlatformAspectSet { + self.aspects() + } +} + +#[derive(Clone)] +pub struct DynPlatform(Arc); + +impl PartialEq for DynPlatform { + fn eq(&self, other: &Self) -> bool { + DynPlatformTrait::eq_dyn(&*self.0, &*other.0) 
+ } +} + +impl Eq for DynPlatform {} + +impl Hash for DynPlatform { + fn hash(&self, state: &mut H) { + DynPlatformTrait::hash_dyn(&*self.0, state); + } +} + +impl fmt::Debug for DynPlatform { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl DynPlatform { + pub fn new(platform: T) -> Self { + if let Some(platform) = ::downcast_ref::(&platform) { + platform.clone() + } else { + Self(Arc::new(platform)) + } + } +} + +trait DynPeripheralsTrait: fmt::Debug + 'static + Send + Sync { + fn type_id(&self) -> TypeId; + fn into_box_any(self: Box) -> Box; + fn append_peripherals_dyn<'a>( + &'a self, + peripherals: &mut Vec>, + ); +} + +impl DynPeripheralsTrait for T { + fn type_id(&self) -> TypeId { + TypeId::of::() + } + fn into_box_any(self: Box) -> Box { + self + } + fn append_peripherals_dyn<'a>( + &'a self, + peripherals: &mut Vec>, + ) { + self.append_peripherals(peripherals); + } +} + +pub struct DynPeripherals(Box); + +impl fmt::Debug for DynPeripherals { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Peripherals for DynPeripherals { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + self.0.append_peripherals_dyn(peripherals); + } +} + +impl Platform for DynPlatform { + type Peripherals = DynPeripherals; + fn name(&self) -> Interned { + DynPlatformTrait::name_dyn(&*self.0) + } + fn new_peripherals<'a>( + &self, + builder_factory: PeripheralsBuilderFactory<'a>, + ) -> (Self::Peripherals, PeripheralsBuilderFinished<'a>) { + DynPlatformTrait::new_peripherals_dyn(&*self.0, builder_factory) + } + fn source_location(&self) -> SourceLocation { + DynPlatformTrait::source_location_dyn(&*self.0) + } + #[track_caller] + fn add_peripherals_in_wrapper_module(&self, m: &ModuleBuilder, peripherals: Self::Peripherals) { + DynPlatformTrait::add_peripherals_in_wrapper_module_dyn(&*self.0, m, peripherals); + } + fn aspects(&self) -> PlatformAspectSet { + DynPlatformTrait::aspects_dyn(&*self.0) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct PeripheralId { + pub name: Interned, +} + +impl PartialOrd for PeripheralId { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for PeripheralId { + fn cmp(&self, other: &Self) -> Ordering { + if self == other { + Ordering::Equal + } else { + let Self { name } = self; + str::cmp(name, &other.name) + } + } +} + +struct CollectingPeripherals { + conflicts_graph: BTreeMap>, + on_use_state: PeripheralsOnUseState, +} + +pub trait PeripheralsOnUseSharedState: 'static + Send + fmt::Debug { + fn as_any(&mut self) -> &mut dyn Any; +} + +impl PeripheralsOnUseSharedState for T { + fn as_any(&mut self) -> &mut dyn Any { + self + } +} + +type DynPeripheralsOnUse = dyn FnOnce( + &mut dyn PeripheralsOnUseSharedState, + PeripheralRef<'_, CanonicalType>, + Expr, + ) + Send + + 'static; + +struct PeripheralsOnUseState { + shared_state: Box, + main_module_io_fields: Vec, + main_module_io_wires: Vec>, + on_use_functions: BTreeMap>, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum PeripheralAvailability { + Available, + Used, + ConflictsWithUsed(PeripheralId), +} + +impl PeripheralAvailability { + pub fn is_available(self) -> bool { + matches!(self, Self::Available) + } + pub fn is_used(&self) -> bool { + matches!(self, Self::Used) + } +} + +struct PeripheralsStateBuildingModule { + conflicts_graph: Interned>>>, + availabilities: Mutex>, + on_use_state: Mutex, +} + +impl From for PeripheralsStateBuildingModule 
{ + fn from(value: CollectingPeripherals) -> Self { + let CollectingPeripherals { + conflicts_graph, + on_use_state, + } = value; + let conflicts_graph = BTreeMap::from_iter( + conflicts_graph + .into_iter() + .map(|(k, v)| (k, v.intern_sized())), + ) + .intern_sized(); + Self { + conflicts_graph, + availabilities: Mutex::new( + on_use_state + .on_use_functions + .keys() + .map(|&id| (id, PeripheralAvailability::Available)) + .collect(), + ), + on_use_state: Mutex::new(on_use_state), + } + } +} + +struct PeripheralsStateBuildingWrapperModule { + output_module_io: ModuleIO, + output: Option>, +} + +enum PeripheralsStateEnum { + Initial, + CollectingPeripherals(CollectingPeripherals), + BuildingModule, + BuildingWrapperModule(PeripheralsStateBuildingWrapperModule), +} + +struct PeripheralsState { + will_build_wrapper: bool, + state: Mutex, + building_module: OnceLock, +} + +impl PeripheralsState { + fn finish_collecting_peripherals(&self) { + let mut state = self.state.lock().expect("shouldn't be poison"); + let building_module = match mem::replace(&mut *state, PeripheralsStateEnum::BuildingModule) + { + PeripheralsStateEnum::CollectingPeripherals(v) => v.into(), + PeripheralsStateEnum::Initial + | PeripheralsStateEnum::BuildingModule + | PeripheralsStateEnum::BuildingWrapperModule(_) => unreachable!(), + }; + self.building_module.get_or_init(|| building_module); + } +} + +struct PeripheralCommon { + type_id: TypeId, + id: PeripheralId, + is_input: bool, + peripherals_state: Arc, +} + +#[must_use] +pub struct PeripheralsBuilderFactory<'a> { + peripherals_state: Arc, + _phantom: PhantomData &'a ()>, +} + +impl fmt::Debug for PeripheralsBuilderFactory<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("PeripheralsBuilderFactory") + .finish_non_exhaustive() + } +} + +impl PeripheralsBuilderFactory<'_> { + fn new(will_build_wrapper: bool) -> Self { + Self { + peripherals_state: Arc::new(PeripheralsState { + will_build_wrapper, + state: Mutex::new(PeripheralsStateEnum::Initial), + building_module: OnceLock::new(), + }), + _phantom: PhantomData, + } + } +} + +impl<'a> PeripheralsBuilderFactory<'a> { + pub fn builder(self) -> PeripheralsBuilder<'a> { + self.builder_with_default_state() + } + pub fn builder_with_default_state( + self, + ) -> PeripheralsBuilder<'a, S> { + self.builder_with_boxed_state(Box::default()) + } + pub fn builder_with_state( + self, + shared_state: S, + ) -> PeripheralsBuilder<'a, S> { + self.builder_with_boxed_state(Box::new(shared_state)) + } + pub fn builder_with_boxed_state( + self, + shared_state: Box, + ) -> PeripheralsBuilder<'a, S> { + let Self { + peripherals_state, + _phantom: PhantomData, + } = self; + match *peripherals_state.state.lock().expect("shouldn't be poison") { + ref mut state @ PeripheralsStateEnum::Initial => { + *state = PeripheralsStateEnum::CollectingPeripherals(CollectingPeripherals { + conflicts_graph: BTreeMap::new(), + on_use_state: PeripheralsOnUseState { + shared_state, + main_module_io_fields: Vec::new(), + main_module_io_wires: Vec::new(), + on_use_functions: BTreeMap::new(), + }, + }) + } + PeripheralsStateEnum::CollectingPeripherals(_) + | PeripheralsStateEnum::BuildingModule + | PeripheralsStateEnum::BuildingWrapperModule(_) => unreachable!(), + } + PeripheralsBuilder { + peripherals_state, + _phantom: PhantomData, + } + } +} + +#[must_use] +pub struct PeripheralsBuilder<'a, S: PeripheralsOnUseSharedState = ()> { + peripherals_state: Arc, + _phantom: PhantomData<(Arc, fn(&'a ()) -> &'a ())>, +} + 
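+// A minimal sketch of how an implementation of `Platform::new_peripherals` is expected to
+// drive this builder: declare each peripheral, mark mutually-exclusive ones with
+// `add_conflicts`, then call `finish()`. The peripheral ids `"clk25"`/`"led0"` and the tuple
+// `Peripherals` type are hypothetical; `ClockInput` comes from `platform::peripherals`.
+//
+//     fn new_peripherals<'builder>(
+//         &self,
+//         builder_factory: PeripheralsBuilderFactory<'builder>,
+//     ) -> (Self::Peripherals, PeripheralsBuilderFinished<'builder>) {
+//         let mut builder = builder_factory.builder();
+//         let clk = builder.input_peripheral("clk25", ClockInput::new(25e6));
+//         let led = builder.output_peripheral("led0", Bool);
+//         // e.g. when the two signals share a package pin:
+//         builder.add_conflicts([clk.id(), led.id()]);
+//         ((clk, led), builder.finish())
+//     }
+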
+#[must_use] +pub struct PeripheralsBuilderFinished<'a> { + _private: PhantomData &'a ()>, +} + +impl fmt::Debug for PeripheralsBuilderFinished<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("PeripheralsBuilderFinished") + .finish_non_exhaustive() + } +} + +impl<'a, S: PeripheralsOnUseSharedState> PeripheralsBuilder<'a, S> { + fn state_enum(&mut self) -> MutexGuard<'_, PeripheralsStateEnum> { + self.peripherals_state + .state + .lock() + .expect("shouldn't be poison") + } + #[track_caller] + pub fn peripheral( + &mut self, + id_name: impl AsRef, + is_input: bool, + ty: T, + ) -> Peripheral { + self.peripheral_with_on_use(id_name, is_input, ty, |_, _, _| {}) + } + #[track_caller] + pub fn peripheral_with_on_use( + &mut self, + id_name: impl AsRef, + is_input: bool, + ty: T, + on_use: impl FnOnce(&mut S, PeripheralRef<'_, T>, Expr) + Send + 'static, + ) -> Peripheral { + let mut state_enum = self.state_enum(); + let PeripheralsStateEnum::CollectingPeripherals(CollectingPeripherals { + conflicts_graph, + on_use_state: + PeripheralsOnUseState { + shared_state: _, + main_module_io_fields: _, + main_module_io_wires: _, + on_use_functions, + }, + }) = &mut *state_enum + else { + unreachable!(); + }; + let id = PeripheralId { + name: id_name.as_ref().intern(), + }; + let std::collections::btree_map::Entry::Vacant(entry) = conflicts_graph.entry(id) else { + drop(state_enum); // don't poison + panic!("duplicate peripheral: {id:?}"); + }; + entry.insert(BTreeSet::new()); + on_use_functions.insert( + id, + Box::new(move |state, peripheral_ref, wire| { + on_use( + ::downcast_mut::(PeripheralsOnUseSharedState::as_any(state)) + .expect("known to be correct type"), + PeripheralRef::from_canonical(peripheral_ref), + Expr::from_canonical(wire), + ) + }), + ); + drop(state_enum); + Peripheral { + ty, + common: PeripheralCommon { + type_id: TypeId::of::(), + id, + is_input, + peripherals_state: self.peripherals_state.clone(), + }, + } + } + #[track_caller] + pub fn input_peripheral_with_on_use( + &mut self, + id_name: impl AsRef, + ty: T, + on_use: impl FnOnce(&mut S, PeripheralRef<'_, T>, Expr) + Send + 'static, + ) -> Peripheral { + self.peripheral_with_on_use(id_name, true, ty, on_use) + } + #[track_caller] + pub fn output_peripheral_with_on_use( + &mut self, + id_name: impl AsRef, + ty: T, + on_use: impl FnOnce(&mut S, PeripheralRef<'_, T>, Expr) + Send + 'static, + ) -> Peripheral { + self.peripheral_with_on_use(id_name, false, ty, on_use) + } + #[track_caller] + pub fn input_peripheral(&mut self, id_name: impl AsRef, ty: T) -> Peripheral { + self.peripheral(id_name, true, ty) + } + #[track_caller] + pub fn output_peripheral(&mut self, id_name: impl AsRef, ty: T) -> Peripheral { + self.peripheral(id_name, false, ty) + } + #[track_caller] + pub fn add_conflicts(&mut self, conflicts: impl AsRef<[PeripheralId]>) { + let mut state_enum = self.state_enum(); + let PeripheralsStateEnum::CollectingPeripherals(collecting_peripherals) = &mut *state_enum + else { + unreachable!(); + }; + let conflicts = conflicts.as_ref(); + for &id in conflicts { + let Some(conflicts_for_id) = collecting_peripherals.conflicts_graph.get_mut(&id) else { + drop(state_enum); // don't poison + panic!("unknown peripheral: {id:?}"); + }; + conflicts_for_id.extend(conflicts.iter().copied().filter(|&v| v != id)); + } + } + pub fn finish(self) -> PeripheralsBuilderFinished<'a> { + self.peripherals_state.finish_collecting_peripherals(); + PeripheralsBuilderFinished { + _private: PhantomData, + } + } +} + 
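+// The life of a peripheral from the consumer side, as a rough sketch (the id "led0" and the
+// main-module constructor are hypothetical, and the exact `m.add_platform_io(..)` signature
+// lives on `ModuleBuilder`, not in this file): `Platform::wrap_main_module` hands the
+// main-module closure a `PlatformIOBuilder`; calling `use_peripheral()` on a `PeripheralRef`
+// reserves it and returns a wire of its type; the main module may expose no I/O other than
+// the bundle created by `m.add_platform_io(..)`, which the generated wrapper module then
+// connects to the platform's real I/O.
+//
+//     let wrapper = platform.wrap_main_module(|io| {
+//         let led = io.peripherals_by_id()[&PeripheralId { name: "led0".intern() }];
+//         // inside the main module, `led.use_peripheral()` yields the wire to drive,
+//         // and `m.add_platform_io(..)` creates the single platform I/O bundle.
+//         my_main_module(led)
+//     });
+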
+#[must_use] +pub struct Peripheral { + ty: T, + common: PeripheralCommon, +} + +impl Peripheral { + pub fn as_ref<'a>(&'a self) -> PeripheralRef<'a, T> { + let Self { ty, ref common } = *self; + PeripheralRef { ty, common } + } + pub fn ty(&self) -> T { + self.as_ref().ty() + } + pub fn id(&self) -> PeripheralId { + self.as_ref().id() + } + pub fn name(&self) -> Interned { + self.as_ref().name() + } + pub fn is_input(&self) -> bool { + self.as_ref().is_input() + } + pub fn is_output(&self) -> bool { + self.as_ref().is_output() + } + pub fn conflicts_with(&self) -> Interned> { + self.as_ref().conflicts_with() + } + pub fn availability(&self) -> PeripheralAvailability { + self.as_ref().availability() + } + pub fn is_available(&self) -> bool { + self.as_ref().is_available() + } + pub fn is_used(&self) -> bool { + self.as_ref().is_used() + } + pub fn try_into_used(self) -> Result, Self> { + let Some(building_module) = self.common.peripherals_state.building_module.get() else { + return Err(self); + }; + let building_module = building_module + .availabilities + .lock() + .expect("shouldn't be poison"); + match building_module[&self.common.id] { + PeripheralAvailability::Used => {} + PeripheralAvailability::Available | PeripheralAvailability::ConflictsWithUsed(_) => { + drop(building_module); + return Err(self); + } + } + drop(building_module); + let state = self + .common + .peripherals_state + .state + .lock() + .expect("shouldn't be poison"); + let output = match *state { + PeripheralsStateEnum::Initial | PeripheralsStateEnum::CollectingPeripherals(_) => { + unreachable!() + } + PeripheralsStateEnum::BuildingModule => { + drop(state); + return Err(self); + } + PeripheralsStateEnum::BuildingWrapperModule( + PeripheralsStateBuildingWrapperModule { + output: Some(output), + .. 
+ }, + ) => output, + PeripheralsStateEnum::BuildingWrapperModule(_) => unreachable!(), + }; + drop(state); + let Self { ty, common } = self; + let instance_io_field = Expr::field(output, &common.id.name); + assert_eq!(ty, Expr::ty(instance_io_field)); + Ok(UsedPeripheral { + instance_io_field, + common, + }) + } + pub fn into_used(self) -> Option> { + self.try_into_used().ok() + } +} + +impl fmt::Debug for Peripheral { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_ref().debug_common_fields("Peripheral", f).finish() + } +} + +pub struct UsedPeripheral { + instance_io_field: Expr, + common: PeripheralCommon, +} + +impl fmt::Debug for UsedPeripheral { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_ref() + .debug_common_fields("UsedPeripheral", f) + .field("instance_io_field", &self.instance_io_field()) + .finish() + } +} + +impl UsedPeripheral { + pub fn as_ref<'a>(&'a self) -> PeripheralRef<'a, T> { + let Self { + instance_io_field, + ref common, + } = *self; + PeripheralRef { + ty: Expr::ty(instance_io_field), + common, + } + } + pub fn instance_io_field(&self) -> Expr { + self.instance_io_field + } + pub fn ty(&self) -> T { + self.as_ref().ty() + } + pub fn id(&self) -> PeripheralId { + self.as_ref().id() + } + pub fn name(&self) -> Interned { + self.as_ref().name() + } + pub fn is_input(&self) -> bool { + self.as_ref().is_input() + } + pub fn is_output(&self) -> bool { + self.as_ref().is_output() + } + pub fn conflicts_with(&self) -> Interned> { + self.as_ref().conflicts_with() + } +} + +#[derive(Copy, Clone)] +pub struct PeripheralRef<'a, T: Type> { + ty: T, + common: &'a PeripheralCommon, +} + +impl<'a, T: Type> fmt::Debug for PeripheralRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.debug_common_fields("PeripheralRef", f).finish() + } +} + +#[derive(Debug, Clone)] +pub enum PeripheralUnavailableError { + PeripheralAlreadyUsed { + id: PeripheralId, + }, + PeripheralConflict { + id: PeripheralId, + conflicting_id: PeripheralId, + }, + PeripheralsWillNotBeUsedToBuildWrapper, +} + +impl fmt::Display for PeripheralUnavailableError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::PeripheralAlreadyUsed { id } => { + write!(f, "peripherals can only be used once: {id:?}") + } + Self::PeripheralConflict { id, conflicting_id } => { + write!(f, "peripheral {id:?} conflicts with {conflicting_id:?}") + } + Self::PeripheralsWillNotBeUsedToBuildWrapper => { + write!(f, "peripherals will not be used to build wrapper") + } + } + } +} + +impl std::error::Error for PeripheralUnavailableError {} + +impl<'a, T: Type> PeripheralRef<'a, T> { + fn debug_common_fields<'f1, 'f2>( + &self, + struct_name: &str, + f: &'f1 mut fmt::Formatter<'f2>, + ) -> fmt::DebugStruct<'f1, 'f2> { + let Self { + ty, + common: + PeripheralCommon { + type_id: _, + id, + is_input, + peripherals_state: _, + }, + } = self; + let mut retval = f.debug_struct(struct_name); + retval + .field("ty", ty) + .field("id", id) + .field("is_input", is_input) + .field("availability", &self.availability()); + retval + } + pub fn ty(&self) -> T { + self.ty + } + pub fn id(&self) -> PeripheralId { + self.common.id + } + pub fn name(&self) -> Interned { + self.id().name + } + pub fn is_input(&self) -> bool { + self.common.is_input + } + pub fn is_output(&self) -> bool { + !self.common.is_input + } + pub fn conflicts_with(&self) -> Interned> { + match self.common.peripherals_state.building_module.get() { + Some(building_module) => 
building_module.conflicts_graph[&self.common.id], + None => match &*self + .common + .peripherals_state + .state + .lock() + .expect("shouldn't be poison") + { + PeripheralsStateEnum::CollectingPeripherals(v) => { + v.conflicts_graph[&self.common.id].intern() + } + PeripheralsStateEnum::Initial + | PeripheralsStateEnum::BuildingModule + | PeripheralsStateEnum::BuildingWrapperModule(_) => unreachable!(), + }, + } + } + pub fn availability(&self) -> PeripheralAvailability { + match self.common.peripherals_state.building_module.get() { + None => PeripheralAvailability::Available, + Some(building_module) => building_module + .availabilities + .lock() + .expect("shouldn't be poison")[&self.common.id], + } + } + pub fn is_available(&self) -> bool { + match self.common.peripherals_state.building_module.get() { + None => true, + Some(building_module) => match building_module + .availabilities + .lock() + .expect("shouldn't be poison")[&self.common.id] + { + PeripheralAvailability::Available => true, + PeripheralAvailability::Used | PeripheralAvailability::ConflictsWithUsed(_) => { + false + } + }, + } + } + pub fn is_used(&self) -> bool { + match self.common.peripherals_state.building_module.get() { + None => false, + Some(building_module) => match building_module + .availabilities + .lock() + .expect("shouldn't be poison")[&self.common.id] + { + PeripheralAvailability::Used => true, + PeripheralAvailability::Available + | PeripheralAvailability::ConflictsWithUsed(_) => false, + }, + } + } + pub fn canonical(self) -> PeripheralRef<'a, CanonicalType> { + let Self { ty, common } = self; + PeripheralRef { + ty: ty.canonical(), + common, + } + } + pub fn from_canonical(peripheral_ref: PeripheralRef<'a, CanonicalType>) -> Self { + let PeripheralRef { ty, common } = peripheral_ref; + Self { + ty: T::from_canonical(ty), + common, + } + } + #[track_caller] + pub fn try_use_peripheral(self) -> Result, PeripheralUnavailableError> { + self.try_use_peripheral_with_loc(SourceLocation::caller()) + } + #[track_caller] + pub fn try_use_peripheral_with_loc( + self, + source_location: SourceLocation, + ) -> Result, PeripheralUnavailableError> { + let PeripheralsState { + will_build_wrapper, + ref state, + ref building_module, + } = *self.common.peripherals_state; + if !will_build_wrapper { + return Err(PeripheralUnavailableError::PeripheralsWillNotBeUsedToBuildWrapper); + } + let Some(PeripheralsStateBuildingModule { + conflicts_graph, + availabilities, + on_use_state, + }) = building_module.get() + else { + panic!("can't use peripherals in a module before the PeripheralsBuilder is finished"); + }; + let state = state.lock().expect("shouldn't be poison"); + match *state { + PeripheralsStateEnum::Initial | PeripheralsStateEnum::CollectingPeripherals(_) => { + unreachable!() + } + PeripheralsStateEnum::BuildingModule => {} + PeripheralsStateEnum::BuildingWrapperModule(_) => { + panic!("can't add new peripherals after calling m.add_platform_io()") + } + } + drop(state); + let mut availabilities = availabilities.lock().expect("shouldn't be poison"); + let Some(availability) = availabilities.get_mut(&self.common.id) else { + unreachable!(); + }; + match *availability { + PeripheralAvailability::Available => { + *availability = PeripheralAvailability::Used; + } + PeripheralAvailability::Used => { + return Err(PeripheralUnavailableError::PeripheralAlreadyUsed { + id: self.common.id, + }); + } + PeripheralAvailability::ConflictsWithUsed(conflicting_id) => { + return Err(PeripheralUnavailableError::PeripheralConflict { + 
id: self.common.id, + conflicting_id, + }); + } + } + for conflict in conflicts_graph[&self.common.id].iter() { + let Some(availability) = availabilities.get_mut(conflict) else { + unreachable!(); + }; + *availability = PeripheralAvailability::ConflictsWithUsed(self.common.id); + } + drop(availabilities); + let wire = wire_with_loc(&self.name(), source_location, self.ty()); + let mut on_use_state = on_use_state.lock().expect("shouldn't be poison"); + let PeripheralsOnUseState { + shared_state, + main_module_io_fields, + main_module_io_wires, + on_use_functions, + } = &mut *on_use_state; + let Some(on_use_function) = on_use_functions.remove(&self.common.id) else { + unreachable!(); + }; + for conflict in conflicts_graph[&self.common.id].iter() { + on_use_functions.remove(conflict); + } + let canonical_wire = Expr::canonical(wire); + main_module_io_wires.push(canonical_wire); + main_module_io_fields.push(BundleField { + name: self.name(), + flipped: self.is_input(), + ty: Expr::ty(canonical_wire), + }); + on_use_function(&mut **shared_state, self.canonical(), canonical_wire); + drop(on_use_state); + Ok(wire) + } + #[track_caller] + pub fn use_peripheral_with_loc(self, source_location: SourceLocation) -> Expr { + match self.try_use_peripheral_with_loc(source_location) { + Ok(wire) => wire, + Err(e) => panic!("{e}"), + } + } + #[track_caller] + pub fn use_peripheral(self) -> Expr { + match self.try_use_peripheral() { + Ok(wire) => wire, + Err(e) => panic!("{e}"), + } + } +} + +pub trait Peripherals: 'static + Send + Sync + fmt::Debug { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>); + fn to_peripherals_vec<'a>(&'a self) -> Vec> { + let mut peripherals = Vec::new(); + self.append_peripherals(&mut peripherals); + peripherals + } +} + +impl Peripherals for Peripheral { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + peripherals.push(self.as_ref().canonical()); + } +} + +impl Peripherals for Vec { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + for v in self { + v.append_peripherals(peripherals); + } + } +} + +impl Peripherals for Box { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + T::append_peripherals(self, peripherals); + } +} + +impl Peripherals for [T] { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + for v in self { + v.append_peripherals(peripherals); + } + } +} + +impl Peripherals for [T; N] { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + for v in self { + v.append_peripherals(peripherals); + } + } +} + +macro_rules! impl_peripherals { + (@impl $(($v:ident: $T:ident),)*) => { + impl<$($T: Peripherals),*> Peripherals for ($($T,)*) { + fn append_peripherals<'a>(&'a self, peripherals: &mut Vec>) { + #![allow(unused_variables)] + let ($($v,)*) = self; + $(Peripherals::append_peripherals($v, peripherals);)* + } + } + }; + ($($first:tt, $($rest:tt,)*)?) => { + impl_peripherals!(@impl $($first, $($rest,)*)?); + $(impl_peripherals!($($rest,)*);)? + }; +} + +impl_peripherals! 
{ + (v0: T0), + (v1: T1), + (v2: T2), + (v3: T3), + (v4: T4), + (v5: T5), + (v6: T6), + (v7: T7), + (v8: T8), + (v9: T9), + (v10: T10), + (v11: T11), +} + +pub struct PlatformIOBuilder<'a> { + peripherals: Vec>, + peripherals_by_type_id: HashMap>>, + peripherals_by_id: BTreeMap>, + peripherals_state: &'a PeripheralsState, +} + +impl<'a> PlatformIOBuilder<'a> { + pub fn peripherals(&self) -> &[PeripheralRef<'a, CanonicalType>] { + &self.peripherals + } + pub fn peripherals_with_type(&self) -> Vec> { + let Some(peripherals) = self.peripherals_by_type_id.get(&TypeId::of::()) else { + return Vec::new(); + }; + peripherals + .iter() + .map(|&peripheral_ref| PeripheralRef::from_canonical(peripheral_ref)) + .collect() + } + pub fn peripherals_by_id(&self) -> &BTreeMap> { + &self.peripherals_by_id + } + #[track_caller] + pub(crate) fn add_platform_io( + self, + name: &str, + source_location: SourceLocation, + m: &ModuleBuilder, + ) -> Expr { + if !ModuleBuilder::with(|m2| std::ptr::eq(m, m2)) { + panic!("m.add_platform_io() must be called in the same module as m"); + } + let PeripheralsState { + will_build_wrapper: _, + state, + building_module, + } = self.peripherals_state; + let building_module = building_module.get().expect("shouldn't be None"); + let mut on_use_state = building_module + .on_use_state + .lock() + .expect("shouldn't be poison"); + let output_ty = + Bundle::new(mem::take(&mut on_use_state.main_module_io_fields).intern_deref()); + let main_module_wires = mem::take(&mut on_use_state.main_module_io_wires); + drop(on_use_state); + let output = m.output_with_loc(name, source_location, output_ty); + for (field, wire_expr) in output_ty.fields().iter().zip(main_module_wires) { + let ExprEnum::Wire(wire) = *Expr::expr_enum(wire_expr) else { + unreachable!(); + }; + let field_expr = Expr::field(output, &field.name); + if field.flipped { + connect_with_loc(wire, field_expr, wire.source_location()); + } else { + connect_with_loc(field_expr, wire, wire.source_location()); + } + } + let ExprEnum::ModuleIO(output_module_io) = *Expr::expr_enum(output) else { + unreachable!(); + }; + *state.lock().expect("shouldn't be poison") = + PeripheralsStateEnum::BuildingWrapperModule(PeripheralsStateBuildingWrapperModule { + output_module_io, + output: None, + }); + output + } +} + +impl<'a> fmt::Debug for PlatformIOBuilder<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + peripherals, + peripherals_by_type_id: _, + peripherals_by_id: _, + peripherals_state: _, + } = self; + f.debug_struct("PlatformIOBuilder") + .field("peripherals", peripherals) + .finish_non_exhaustive() + } +} + +trait PlatformAspectTrait: 'static + Send + Sync + fmt::Debug { + fn any_ref(&self) -> &dyn Any; + fn any_arc(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn PlatformAspectTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); +} + +impl PlatformAspectTrait for T { + fn any_ref(&self) -> &dyn Any { + self + } + + fn any_arc(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn PlatformAspectTrait) -> bool { + other + .any_ref() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } +} + +#[derive(Clone)] +pub struct PlatformAspect { + type_id: TypeId, + type_name: &'static str, + value: Arc, +} + +impl Hash for PlatformAspect { + fn hash(&self, state: &mut H) { + PlatformAspectTrait::hash_dyn(&*self.value, state); + } +} + +impl Eq for PlatformAspect {} + +impl PartialEq for PlatformAspect 
{ + fn eq(&self, other: &Self) -> bool { + self.type_id == other.type_id && PlatformAspectTrait::eq_dyn(&*self.value, &*other.value) + } +} + +impl fmt::Debug for PlatformAspect { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + type_id: _, + type_name, + value, + } = self; + write!(f, "PlatformAspect<{type_name}>")?; + f.debug_tuple("").field(value).finish() + } +} + +impl PlatformAspect { + pub fn new_arc(value: Arc) -> Self { + Self { + type_id: TypeId::of::(), + type_name: std::any::type_name::(), + value, + } + } + pub fn new(value: T) -> Self { + Self::new_arc(Arc::new(value)) + } + pub fn type_id(&self) -> TypeId { + self.type_id + } + pub fn downcast_arc(self) -> Result, Self> { + if self.type_id == TypeId::of::() { + let Ok(retval) = self.value.any_arc().downcast() else { + unreachable!(); + }; + Ok(retval) + } else { + Err(self) + } + } + pub fn downcast_unwrap_or_clone( + self, + ) -> Result { + Ok(Arc::unwrap_or_clone(self.downcast_arc()?)) + } + pub fn downcast_ref(&self) -> Option<&T> { + PlatformAspectTrait::any_ref(&*self.value).downcast_ref() + } +} + +#[derive(Clone, Default)] +pub struct PlatformAspectSet { + aspects_by_type_id: Arc>>, + aspects: Arc>, +} + +impl PlatformAspectSet { + pub fn new() -> Self { + Self::default() + } + pub fn insert_new( + &mut self, + value: T, + ) -> bool { + self.insert(PlatformAspect::new(value)) + } + pub fn insert_new_arc( + &mut self, + value: Arc, + ) -> bool { + self.insert(PlatformAspect::new_arc(value)) + } + fn insert_inner( + aspects_by_type_id: &mut HashMap>, + aspects: &mut Vec, + value: PlatformAspect, + ) -> bool { + if aspects_by_type_id + .entry(value.type_id) + .or_default() + .insert(value.clone()) + { + aspects.push(value); + true + } else { + false + } + } + pub fn insert(&mut self, value: PlatformAspect) -> bool { + Self::insert_inner( + Arc::make_mut(&mut self.aspects_by_type_id), + Arc::make_mut(&mut self.aspects), + value, + ) + } + pub fn contains(&self, value: &PlatformAspect) -> bool { + self.aspects_by_type_id + .get(&value.type_id) + .is_some_and(|aspects| aspects.contains(value)) + } + pub fn get_aspects_by_type<'a, T: 'static + Send + Sync + fmt::Debug + Hash + Eq>( + &'a self, + ) -> impl Clone + Iterator + FusedIterator + ExactSizeIterator + 'a + { + self.aspects_by_type_id + .get(&TypeId::of::()) + .map(|aspects| aspects.iter()) + .unwrap_or_default() + } + pub fn get_by_type<'a, T: 'static + Send + Sync + fmt::Debug + Hash + Eq>( + &'a self, + ) -> impl Clone + Iterator + FusedIterator + ExactSizeIterator + 'a { + self.get_aspects_by_type::() + .map(|aspect| aspect.downcast_ref().expect("already checked type")) + } + pub fn get_single_by_type<'a, T: 'static + Send + Sync + fmt::Debug + Hash + Eq>( + &'a self, + ) -> Option<&'a T> { + let mut aspects = self.get_by_type::(); + if aspects.len() == 1 { + aspects.next() + } else { + None + } + } + pub fn get_arcs_by_type<'a, T: 'static + Send + Sync + fmt::Debug + Hash + Eq>( + &'a self, + ) -> impl Clone + Iterator> + FusedIterator + ExactSizeIterator + 'a { + self.get_aspects_by_type::().map(|aspect| { + aspect + .clone() + .downcast_arc() + .ok() + .expect("already checked type") + }) + } +} + +impl<'a> Extend<&'a PlatformAspect> for PlatformAspectSet { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().cloned()); + } +} + +impl Extend for PlatformAspectSet { + fn extend>(&mut self, iter: T) { + let Self { + aspects_by_type_id, + aspects, + } = self; + let aspects_by_type_id = Arc::make_mut(aspects_by_type_id); + 
let aspects = Arc::make_mut(aspects); + iter.into_iter().for_each(|value| { + Self::insert_inner(aspects_by_type_id, aspects, value); + }); + } +} + +impl<'a> FromIterator<&'a PlatformAspect> for PlatformAspectSet { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } +} + +impl FromIterator for PlatformAspectSet { + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } +} + +impl std::ops::Deref for PlatformAspectSet { + type Target = [PlatformAspect]; + + fn deref(&self) -> &Self::Target { + &self.aspects + } +} + +impl fmt::Debug for PlatformAspectSet { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_set().entries(self).finish() + } +} + +impl IntoIterator for PlatformAspectSet { + type Item = PlatformAspect; + type IntoIter = PlatformAspectsIntoIter; + + fn into_iter(self) -> Self::IntoIter { + PlatformAspectsIntoIter { + indexes: 0..self.aspects.len(), + aspects: self.aspects, + } + } +} + +impl<'a> IntoIterator for &'a PlatformAspectSet { + type Item = &'a PlatformAspect; + type IntoIter = std::slice::Iter<'a, PlatformAspect>; + + fn into_iter(self) -> Self::IntoIter { + self.aspects.iter() + } +} + +#[derive(Clone, Debug)] +pub struct PlatformAspectsIntoIter { + aspects: Arc>, + indexes: std::ops::Range, +} + +impl Iterator for PlatformAspectsIntoIter { + type Item = PlatformAspect; + + fn next(&mut self) -> Option { + self.indexes.next().map(|index| self.aspects[index].clone()) + } + + fn size_hint(&self) -> (usize, Option) { + self.indexes.size_hint() + } + + fn count(self) -> usize { + self.indexes.len() + } + + fn last(mut self) -> Option { + self.next_back() + } + + fn nth(&mut self, n: usize) -> Option { + self.indexes.nth(n).map(|index| self.aspects[index].clone()) + } + + fn fold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes + .fold(init, |v, index| f(v, self.aspects[index].clone())) + } +} + +impl FusedIterator for PlatformAspectsIntoIter {} + +impl ExactSizeIterator for PlatformAspectsIntoIter {} + +impl DoubleEndedIterator for PlatformAspectsIntoIter { + fn next_back(&mut self) -> Option { + self.indexes + .next_back() + .map(|index| self.aspects[index].clone()) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.indexes + .nth_back(n) + .map(|index| self.aspects[index].clone()) + } + + fn rfold(self, init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.indexes + .rfold(init, |v, index| f(v, self.aspects[index].clone())) + } +} + +pub trait Platform: Clone + 'static + Send + Sync + fmt::Debug + Hash + Eq { + type Peripherals: Peripherals; + fn name(&self) -> Interned; + fn new_peripherals<'builder>( + &self, + builder_factory: PeripheralsBuilderFactory<'builder>, + ) -> (Self::Peripherals, PeripheralsBuilderFinished<'builder>); + /// gets peripherals that can be used for inspecting them, but not for building a main module + fn get_peripherals(&self) -> Self::Peripherals { + let ( + retval, + PeripheralsBuilderFinished { + _private: PhantomData, + }, + ) = self.new_peripherals(PeripheralsBuilderFactory::new(false)); + retval + } + fn source_location(&self) -> SourceLocation; + fn add_peripherals_in_wrapper_module(&self, m: &ModuleBuilder, peripherals: Self::Peripherals); + #[track_caller] + fn try_wrap_main_module< + T: BundleType, + E, + M: AsRef>, + F: for<'a> FnOnce(PlatformIOBuilder<'a>) -> Result, + >( + &self, + make_main_module: F, + ) -> Result>, E> { + let 
builder_factory = PeripheralsBuilderFactory::new(true); + let peripherals_state = builder_factory.peripherals_state.clone(); + let ( + peripherals, + PeripheralsBuilderFinished { + _private: PhantomData, + }, + ) = self.new_peripherals(builder_factory); + let peripherals_vec = peripherals.to_peripherals_vec(); + let mut peripherals_by_id = BTreeMap::new(); + let mut peripherals_by_type_id = HashMap::<_, Vec<_>>::default(); + for &peripheral in &peripherals_vec { + peripherals_by_id.insert(peripheral.id(), peripheral); + peripherals_by_type_id + .entry(peripheral.common.type_id) + .or_default() + .push(peripheral); + } + let main_module = Module::canonical( + *make_main_module(PlatformIOBuilder { + peripherals: peripherals_vec, + peripherals_by_type_id, + peripherals_by_id, + peripherals_state: &peripherals_state, + })? + .as_ref(), + ); + let state = peripherals_state.state.lock().expect("shouldn't be poison"); + let PeripheralsStateEnum::BuildingWrapperModule(PeripheralsStateBuildingWrapperModule { + output_module_io, + output: _, + }) = *state + else { + drop(state); + panic!( + "you need to call m.add_platform_io() inside the main module you're trying to use peripherals in.\nat: {}", + main_module.source_location() + ); + }; + drop(state); + for module_io in main_module.module_io() { + if module_io.module_io != output_module_io { + panic!( + "when you're using m.add_platform_io(), you can't have any other inputs/outputs.\nat: {}", + module_io.module_io.source_location() + ); + } + } + Ok(ModuleBuilder::run_with_loc( + &main_module.name(), + self.source_location(), + crate::module::ModuleKind::Normal, + |m| { + let instance = + instance_with_loc("main", main_module.intern(), self.source_location()); + let output_expr = Expr::field(instance, &output_module_io.bundle_field().name); + let mut state = peripherals_state.state.lock().expect("shouldn't be poison"); + let PeripheralsStateEnum::BuildingWrapperModule( + PeripheralsStateBuildingWrapperModule { + output_module_io: _, + output, + }, + ) = &mut *state + else { + unreachable!(); + }; + *output = Some(output_expr); + drop(state); + self.add_peripherals_in_wrapper_module(m, peripherals) + }, + )) + } + #[track_caller] + fn wrap_main_module< + T: BundleType, + M: AsRef>, + F: for<'a> FnOnce(PlatformIOBuilder<'a>) -> M, + >( + &self, + make_main_module: F, + ) -> Interned> { + self.try_wrap_main_module(|p| Ok(make_main_module(p))) + .unwrap_or_else(|e: Infallible| match e {}) + } + fn aspects(&self) -> PlatformAspectSet; +} + +impl DynPlatform { + pub fn registry() -> PlatformRegistrySnapshot { + PlatformRegistrySnapshot(PlatformRegistry::get()) + } + #[track_caller] + pub fn register(self) { + PlatformRegistry::register(PlatformRegistry::lock(), self); + } +} + +#[derive(Clone, Debug)] +struct PlatformRegistry { + platforms: BTreeMap, +} + +enum PlatformRegisterError { + SameName { + name: InternedStrCompareAsStr, + old_platform: DynPlatform, + new_platform: DynPlatform, + }, +} + +impl fmt::Display for PlatformRegisterError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::SameName { + name, + old_platform, + new_platform, + } => write!( + f, + "two different `Platform` can't share the same name:\n\ + {name:?}\n\ + old platform:\n\ + {old_platform:?}\n\ + new platform:\n\ + {new_platform:?}", + ), + } + } +} + +trait PlatformRegistryRegisterLock { + type Locked; + fn lock(self) -> Self::Locked; + fn make_mut(locked: &mut Self::Locked) -> &mut PlatformRegistry; +} + +impl PlatformRegistryRegisterLock 
for &'static RwLock> { + type Locked = RwLockWriteGuard<'static, Arc>; + fn lock(self) -> Self::Locked { + self.write().expect("shouldn't be poisoned") + } + fn make_mut(locked: &mut Self::Locked) -> &mut PlatformRegistry { + Arc::make_mut(locked) + } +} + +impl PlatformRegistryRegisterLock for &'_ mut PlatformRegistry { + type Locked = Self; + + fn lock(self) -> Self::Locked { + self + } + + fn make_mut(locked: &mut Self::Locked) -> &mut PlatformRegistry { + locked + } +} + +impl PlatformRegistry { + fn lock() -> &'static RwLock> { + static REGISTRY: OnceLock>> = OnceLock::new(); + REGISTRY.get_or_init(Default::default) + } + fn try_register( + lock: L, + platform: DynPlatform, + ) -> Result<(), PlatformRegisterError> { + use std::collections::btree_map::Entry; + let name = InternedStrCompareAsStr(platform.name()); + // run user code only outside of lock + let mut locked = lock.lock(); + let this = L::make_mut(&mut locked); + let result = match this.platforms.entry(name) { + Entry::Occupied(entry) => Err(PlatformRegisterError::SameName { + name, + old_platform: entry.get().clone(), + new_platform: platform, + }), + Entry::Vacant(entry) => { + entry.insert(platform); + Ok(()) + } + }; + drop(locked); + // outside of lock now, so we can test if it's the same DynPlatform + match result { + Err(PlatformRegisterError::SameName { + name: _, + old_platform, + new_platform, + }) if old_platform == new_platform => Ok(()), + result => result, + } + } + #[track_caller] + fn register(lock: L, platform: DynPlatform) { + match Self::try_register(lock, platform) { + Err(e) => panic!("{e}"), + Ok(()) => {} + } + } + fn get() -> Arc { + Self::lock().read().expect("shouldn't be poisoned").clone() + } +} + +impl Default for PlatformRegistry { + fn default() -> Self { + let mut retval = Self { + platforms: BTreeMap::new(), + }; + for platform in built_in_platforms() { + Self::register(&mut retval, platform); + } + retval + } +} + +#[derive(Clone, Debug)] +pub struct PlatformRegistrySnapshot(Arc); + +impl PlatformRegistrySnapshot { + pub fn get() -> Self { + PlatformRegistrySnapshot(PlatformRegistry::get()) + } + pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynPlatform> { + self.0.platforms.get(name) + } + pub fn iter_with_names(&self) -> PlatformRegistryIterWithNames<'_> { + PlatformRegistryIterWithNames(self.0.platforms.iter()) + } + pub fn iter(&self) -> PlatformRegistryIter<'_> { + PlatformRegistryIter(self.0.platforms.values()) + } +} + +impl<'a> IntoIterator for &'a PlatformRegistrySnapshot { + type Item = &'a DynPlatform; + type IntoIter = PlatformRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut PlatformRegistrySnapshot { + type Item = &'a DynPlatform; + type IntoIter = PlatformRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +#[derive(Clone, Debug)] +pub struct PlatformRegistryIter<'a>( + std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynPlatform>, +); + +impl<'a> Iterator for PlatformRegistryIter<'a> { + type Item = &'a DynPlatform; + + fn next(&mut self) -> Option { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last() + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.fold(init, f) + } +} + +impl<'a> 
std::iter::FusedIterator for PlatformRegistryIter<'a> {} + +impl<'a> ExactSizeIterator for PlatformRegistryIter<'a> {} + +impl<'a> DoubleEndedIterator for PlatformRegistryIter<'a> { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0.nth_back(n) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.rfold(init, f) + } +} + +#[derive(Clone, Debug)] +pub struct PlatformRegistryIterWithNames<'a>( + std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynPlatform>, +); + +impl<'a> Iterator for PlatformRegistryIterWithNames<'a> { + type Item = (Interned, &'a DynPlatform); + + fn next(&mut self) -> Option { + self.0.next().map(|(name, platform)| (name.0, platform)) + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last().map(|(name, platform)| (name.0, platform)) + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n).map(|(name, platform)| (name.0, platform)) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, platform)| (name.0, platform)) + .fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for PlatformRegistryIterWithNames<'a> {} + +impl<'a> ExactSizeIterator for PlatformRegistryIterWithNames<'a> {} + +impl<'a> DoubleEndedIterator for PlatformRegistryIterWithNames<'a> { + fn next_back(&mut self) -> Option { + self.0 + .next_back() + .map(|(name, platform)| (name.0, platform)) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0 + .nth_back(n) + .map(|(name, platform)| (name.0, platform)) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, platform)| (name.0, platform)) + .rfold(init, f) + } +} + +#[track_caller] +pub fn register_platform(kind: K) { + DynPlatform::new(kind).register(); +} + +impl Serialize for DynPlatform { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.name().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynPlatform { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = Cow::::deserialize(deserializer)?; + match Self::registry().get_by_name(&name) { + Some(retval) => Ok(retval.clone()), + None => Err(D::Error::custom(format_args!( + "unknown platform: name not found in registry: {name:?}" + ))), + } + } +} + +#[derive(Copy, Clone, Debug, Default)] +pub struct DynPlatformValueParser; + +#[derive(Clone, PartialEq, Eq, Hash)] +struct DynPlatformValueEnum { + name: Interned, + platform: DynPlatform, +} + +impl clap::ValueEnum for DynPlatformValueEnum { + fn value_variants<'a>() -> &'a [Self] { + Interned::into_inner( + PlatformRegistrySnapshot::get() + .iter_with_names() + .map(|(name, platform)| Self { + name, + platform: platform.clone(), + }) + .collect(), + ) + } + + fn to_possible_value(&self) -> Option { + Some(clap::builder::PossibleValue::new(Interned::into_inner( + self.name, + ))) + } +} + +impl clap::builder::TypedValueParser for DynPlatformValueParser { + type Value = DynPlatform; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> clap::error::Result { + clap::builder::EnumValueParser::::new() + .parse_ref(cmd, arg, value) + .map(|v| v.platform) + } + + fn possible_values( + &self, + ) -> Option + '_>> { + static 
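`DynPlatform` round-trips through serde by name only, with the registry as the source of truth on the way back in. A self-contained sketch of the same approach, assuming `serde` and `serde_json` are available and using a hypothetical `PlatformHandle` in place of `DynPlatform`:

use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error as _};
use std::{borrow::Cow, collections::BTreeMap, sync::OnceLock};

// hypothetical stand-in for a registry-backed handle such as DynPlatform
#[derive(Clone, Debug, PartialEq)]
struct PlatformHandle {
    name: &'static str,
}

fn registry() -> &'static BTreeMap<&'static str, PlatformHandle> {
    static REGISTRY: OnceLock<BTreeMap<&'static str, PlatformHandle>> = OnceLock::new();
    REGISTRY.get_or_init(|| BTreeMap::from([("demo", PlatformHandle { name: "demo" })]))
}

impl Serialize for PlatformHandle {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // only the registry key crosses the serialization boundary
        self.name.serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for PlatformHandle {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let name = Cow::<str>::deserialize(deserializer)?;
        registry()
            .get(&*name)
            .cloned()
            .ok_or_else(|| D::Error::custom(format_args!("unknown platform: {name:?}")))
    }
}

fn main() {
    let json = serde_json::to_string(&PlatformHandle { name: "demo" }).unwrap();
    assert_eq!(json, "\"demo\"");
    let parsed: PlatformHandle = serde_json::from_str(&json).unwrap();
    assert_eq!(parsed, PlatformHandle { name: "demo" });
    assert!(serde_json::from_str::<PlatformHandle>("\"nope\"").is_err());
}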
ENUM_VALUE_PARSER: OnceLock> = + OnceLock::new(); + ENUM_VALUE_PARSER + .get_or_init(clap::builder::EnumValueParser::::new) + .possible_values() + } +} + +impl clap::builder::ValueParserFactory for DynPlatform { + type Parser = DynPlatformValueParser; + + fn value_parser() -> Self::Parser { + DynPlatformValueParser::default() + } +} + +pub(crate) fn built_in_platforms() -> impl IntoIterator { + crate::vendor::built_in_platforms() +} diff --git a/crates/fayalite/src/platform/peripherals.rs b/crates/fayalite/src/platform/peripherals.rs new file mode 100644 index 0000000..90c6640 --- /dev/null +++ b/crates/fayalite/src/platform/peripherals.rs @@ -0,0 +1,62 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{intern::Intern, prelude::*}; +use ordered_float::NotNan; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub struct ClockInputProperties { + pub frequency: NotNan, +} + +#[hdl(no_runtime_generics, no_static)] +pub struct ClockInput { + pub clk: Clock, + pub properties: PhantomConst, +} + +impl ClockInput { + #[track_caller] + pub fn new(frequency: f64) -> Self { + assert!( + frequency > 0.0 && frequency.is_finite(), + "invalid clock frequency: {frequency}" + ); + Self { + clk: Clock, + properties: PhantomConst::new( + ClockInputProperties { + frequency: NotNan::new(frequency).expect("just checked"), + } + .intern_sized(), + ), + } + } + pub fn frequency(self) -> f64 { + self.properties.get().frequency.into_inner() + } +} + +#[hdl] +pub struct Led { + pub on: Bool, +} + +#[hdl] +pub struct RgbLed { + pub r: Bool, + pub g: Bool, + pub b: Bool, +} + +#[hdl] +/// UART, used as an output from the FPGA +pub struct Uart { + /// transmit from the FPGA's perspective + pub tx: Bool, + /// receive from the FPGA's perspective + #[hdl(flip)] + pub rx: Bool, +} diff --git a/crates/fayalite/src/prelude.rs b/crates/fayalite/src/prelude.rs index 9e7a85e..4cc173e 100644 --- a/crates/fayalite/src/prelude.rs +++ b/crates/fayalite/src/prelude.rs @@ -1,36 +1,45 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information pub use crate::{ + __, annotations::{ BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation, DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, }, array::{Array, ArrayType}, + build::{BuildCli, JobParams, RunBuild}, bundle::Bundle, - cli::Cli, clock::{Clock, ClockDomain, ToClock}, enum_::{Enum, HdlNone, HdlOption, HdlSome}, expr::{ - repeat, CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr, - ReduceBits, ToExpr, + CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr, + ReduceBits, ToExpr, repeat, }, formal::{ - all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, hdl_assert, - hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover, - hdl_cover_with_enable, MakeFormalExpr, + MakeFormalExpr, all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, + hdl_assert, hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover, + hdl_cover_with_enable, }, hdl, hdl_module, - int::{Bool, DynSize, KnownSize, SInt, SIntType, Size, UInt, UIntType}, + int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue}, memory::{Mem, MemBuilder, ReadUnderWrite}, module::{ - annotate, connect, connect_any, incomplete_wire, instance, memory, memory_array, - 
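`ClockInput::new` validates the frequency once and then stores it as `NotNan<f64>`, which is what lets the property bundle derive `Eq` and `Hash`. A small sketch of that validate-then-wrap step, assuming the `ordered_float` crate and a hypothetical `ClockFrequency` type:

use ordered_float::NotNan;

// hypothetical stand-in for ClockInputProperties
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ClockFrequency(NotNan<f64>);

impl ClockFrequency {
    #[track_caller]
    fn new(frequency: f64) -> Self {
        // reject NaN, infinities, zero, and negative values up front so the
        // wrapped value can implement Eq/Ord/Hash without surprises
        assert!(
            frequency > 0.0 && frequency.is_finite(),
            "invalid clock frequency: {frequency}"
        );
        Self(NotNan::new(frequency).expect("just checked"))
    }
    fn hz(self) -> f64 {
        self.0.into_inner()
    }
}

fn main() {
    let clk = ClockFrequency::new(100e6);
    assert_eq!(clk.hz(), 100e6);
}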
memory_with_init, reg_builder, wire, Instance, Module, ModuleBuilder, + Instance, Module, ModuleBuilder, annotate, connect, connect_any, incomplete_wire, instance, + memory, memory_array, memory_with_init, reg_builder, wire, }, + phantom_const::{PhantomConst, PhantomConstGet}, + platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals}, reg::Reg, reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset}, + sim::{ + ExternModuleSimulationState, Simulation, + time::{SimDuration, SimInstant}, + value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType}, + }, source_location::SourceLocation, + testing::{FormalMode, assert_formal}, ty::{AsMask, CanonicalType, Type}, util::{ConstUsize, GenericConstUsize}, wire::Wire, - __, }; +pub use bitvec::{slice::BitSlice, vec::BitVec}; diff --git a/crates/fayalite/src/reg.rs b/crates/fayalite/src/reg.rs index 8f757f2..20e0b94 100644 --- a/crates/fayalite/src/reg.rs +++ b/crates/fayalite/src/reg.rs @@ -5,21 +5,22 @@ use crate::{ expr::{Expr, Flow}, intern::Interned, module::{NameId, ScopedNameId}, + reset::{Reset, ResetType}, source_location::SourceLocation, ty::{CanonicalType, Type}, }; use std::fmt; #[derive(Copy, Clone, Eq, PartialEq, Hash)] -pub struct Reg { +pub struct Reg { name: ScopedNameId, source_location: SourceLocation, ty: T, - clock_domain: Expr, + clock_domain: Expr>, init: Option>, } -impl fmt::Debug for Reg { +impl fmt::Debug for Reg { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let Self { name, @@ -37,8 +38,8 @@ impl fmt::Debug for Reg { } } -impl Reg { - pub fn canonical(&self) -> Reg { +impl Reg { + pub fn canonical(&self) -> Reg { let Self { name, source_location, @@ -59,7 +60,7 @@ impl Reg { scoped_name: ScopedNameId, source_location: SourceLocation, ty: T, - clock_domain: Expr, + clock_domain: Expr>, init: Option>, ) -> Self { assert!( @@ -98,7 +99,7 @@ impl Reg { pub fn scoped_name(&self) -> ScopedNameId { self.name } - pub fn clock_domain(&self) -> Expr { + pub fn clock_domain(&self) -> Expr> { self.clock_domain } pub fn init(&self) -> Option> { diff --git a/crates/fayalite/src/reset.rs b/crates/fayalite/src/reset.rs index 70d5f02..5dff278 100644 --- a/crates/fayalite/src/reset.rs +++ b/crates/fayalite/src/reset.rs @@ -1,26 +1,55 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - expr::{Expr, ToExpr}, - int::Bool, + clock::Clock, + expr::{Expr, ToExpr, ops}, + int::{Bool, SInt, UInt}, source_location::SourceLocation, - ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties}, + ty::{ + CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter, + OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self, + }, }; +use bitvec::{bits, order::Lsb0}; mod sealed { pub trait ResetTypeSealed {} } -pub trait ResetType: StaticType + sealed::ResetTypeSealed {} +pub trait ResetType: + StaticType + + sealed::ResetTypeSealed + + ops::ExprCastTo + + ops::ExprCastTo + + ops::ExprCastTo + + ops::ExprCastTo + + ops::ExprCastTo + + ops::ExprCastTo> + + ops::ExprCastTo> + + ops::ExprCastTo + + ops::ExprCastTo +{ + fn dispatch(input: D::Input, dispatch: D) -> D::Output; +} + +pub trait ResetTypeDispatch: Sized { + type Input; + type Output; + + fn reset(self, input: Self::Input) -> Self::Output; + fn sync_reset(self, input: Self::Input) -> Self::Output; + fn async_reset(self, input: Self::Input) -> Self::Output; +} macro_rules! 
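`ResetType::dispatch` together with `ResetTypeDispatch` forms a compile-time visitor over the closed set of reset kinds, avoiding any runtime tag. A standalone sketch of the pattern with hypothetical names (the marker types here are simplified stand-ins, not fayalite's actual types):

// the closed set of marker types; stand-ins for Reset / SyncReset / AsyncReset
struct Reset;
struct SyncReset;
struct AsyncReset;

// visitor trait: one method per concrete kind
trait ResetKindDispatch: Sized {
    type Input;
    type Output;
    fn reset(self, input: Self::Input) -> Self::Output;
    fn sync_reset(self, input: Self::Input) -> Self::Output;
    fn async_reset(self, input: Self::Input) -> Self::Output;
}

// each kind routes a generic caller to its own visitor method at compile time
trait ResetKind {
    fn dispatch<D: ResetKindDispatch>(input: D::Input, dispatch: D) -> D::Output;
}

impl ResetKind for Reset {
    fn dispatch<D: ResetKindDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.reset(input)
    }
}
impl ResetKind for SyncReset {
    fn dispatch<D: ResetKindDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.sync_reset(input)
    }
}
impl ResetKind for AsyncReset {
    fn dispatch<D: ResetKindDispatch>(input: D::Input, dispatch: D) -> D::Output {
        dispatch.async_reset(input)
    }
}

// example dispatcher: choose a label without any runtime discriminant
struct NameOf;
impl ResetKindDispatch for NameOf {
    type Input = ();
    type Output = &'static str;
    fn reset(self, _: ()) -> &'static str {
        "Reset"
    }
    fn sync_reset(self, _: ()) -> &'static str {
        "SyncReset"
    }
    fn async_reset(self, _: ()) -> &'static str {
        "AsyncReset"
    }
}

fn name_of<T: ResetKind>() -> &'static str {
    T::dispatch((), NameOf)
}

fn main() {
    assert_eq!(name_of::<Reset>(), "Reset");
    assert_eq!(name_of::<SyncReset>(), "SyncReset");
    assert_eq!(name_of::<AsyncReset>(), "AsyncReset");
}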
reset_type { - ($name:ident, $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal) => { + ($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => { #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct $name; impl Type for $name { type BaseType = $name; type MaskType = Bool; + type SimValue = bool; impl_match_variant_as_self!(); @@ -42,6 +71,31 @@ macro_rules! reset_type { }; retval } + + fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + opaque.bits()[0] + } + + fn sim_value_clone_from_opaque( + &self, + value: &mut Self::SimValue, + opaque: OpaqueSimValueSlice<'_>, + ) { + assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1)); + *value = opaque.bits()[0]; + } + + fn sim_value_to_opaque<'w>( + &self, + value: &Self::SimValue, + writer: OpaqueSimValueWriter<'w>, + ) -> OpaqueSimValueWritten<'w> { + assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1)); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice( + [bits![0], bits![1]][*value as usize], + )) + } } impl $name { @@ -61,13 +115,21 @@ macro_rules! reset_type { is_storable: false, is_castable_from_bits: $is_castable_from_bits, bit_width: 1, + sim_only_values_len: 0, }; const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES; } impl sealed::ResetTypeSealed for $name {} - impl ResetType for $name {} + impl ResetType for $name { + fn dispatch( + input: D::Input, + dispatch: D, + ) -> D::Output { + dispatch.$dispatch_fn(input) + } + } pub trait $Trait { fn $trait_fn(&self) -> Expr<$name>; @@ -91,20 +153,21 @@ macro_rules! reset_type { } } - impl $Trait for Expr<$name> { + $($impl_trait $Trait for Expr<$name> { fn $trait_fn(&self) -> Expr<$name> { *self } - } + })? }; } -reset_type!(AsyncReset, ToAsyncReset::to_async_reset, true); -reset_type!(SyncReset, ToSyncReset::to_sync_reset, true); +reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset); +reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset); reset_type!( Reset, ToReset::to_reset, - false // Reset is not castable from bits because we don't know if it's async or sync + false, // Reset is not castable from bits because we don't know if it's async or sync + reset ); impl ToSyncReset for bool { diff --git a/crates/fayalite/src/sim.rs b/crates/fayalite/src/sim.rs new file mode 100644 index 0000000..44030c1 --- /dev/null +++ b/crates/fayalite/src/sim.rs @@ -0,0 +1,3038 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +//! 
Fayalite Simulation + +use crate::{ + bundle::{BundleField, BundleType}, + expr::{ + Flow, ToLiteralBits, + target::{ + GetTarget, Target, TargetPathArrayElement, TargetPathBundleField, TargetPathElement, + }, + }, + int::BoolOrIntType, + intern::{ + Intern, InternSlice, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId, + }, + module::{ + ModuleIO, + transform::visit::{Fold, Folder, Visit, Visitor}, + }, + prelude::*, + reset::ResetType, + sim::{ + compiler::{ + Compiled, CompiledBundleField, CompiledExternModule, CompiledTypeLayoutBody, + CompiledValue, + }, + interpreter::{ + BreakAction, BreakpointsSet, RunResult, SmallUInt, State, + parts::{ + StatePartIndex, StatePartKindBigSlots, StatePartKindMemories, + StatePartKindSimOnlySlots, StatePartKindSmallSlots, TypeIndexRange, TypeLenSingle, + }, + }, + time::{SimDuration, SimInstant}, + value::{DynSimOnly, DynSimOnlyValue, SimValue}, + }, + ty::{ + OpaqueSimValue, OpaqueSimValueSize, OpaqueSimValueSizeRange, OpaqueSimValueSlice, + OpaqueSimValueWriter, + }, + util::{BitSliceWriteWithBase, DebugAsDisplay, HashMap, HashSet}, +}; +use bitvec::{bits, order::Lsb0, slice::BitSlice, vec::BitVec, view::BitView}; +use num_bigint::BigInt; +use num_traits::{Signed, Zero}; +use std::{ + any::Any, + borrow::Cow, + cell::RefCell, + collections::BTreeMap, + fmt, + future::{Future, IntoFuture}, + hash::Hash, + mem, + pin::Pin, + ptr, + rc::Rc, + sync::Arc, + task::Poll, +}; + +pub mod compiler; +mod interpreter; +pub mod time; +pub mod value; +pub mod vcd; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TraceScalarId(usize); + +impl fmt::Debug for TraceScalarId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "TraceScalarId({})", self.0) + } +} + +impl TraceScalarId { + pub fn as_usize(self) -> usize { + self.0 + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TraceMemoryId(usize); + +impl fmt::Debug for TraceMemoryId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "TraceMemoryId({})", self.0) + } +} + +impl TraceMemoryId { + pub fn as_usize(self) -> usize { + self.0 + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct TraceMemoryLocation { + pub id: TraceMemoryId, + pub depth: usize, + pub stride: usize, + pub start: usize, + pub len: usize, +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum TraceLocation { + Scalar(TraceScalarId), + Memory(TraceMemoryLocation), +} + +impl fmt::Debug for TraceLocation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Scalar(v) => v.fmt(f), + Self::Memory(v) => v.fmt(f), + } + } +} + +macro_rules! 
impl_trace_decl { + ( + $( + #[kind = $category_kind:ident] + $(#[$category_meta:meta])* + $category_variant:ident($category_enum:ident { + fn $category_property_fn:ident(self) -> $category_property_fn_ret_ty:ty; + $( + $(#[$meta:meta])* + $variant:ident($struct:ident { + fn $property_fn:ident($property_fn_self:ident) -> _ $property_fn_block:block + $($(#[$field_meta:meta])* + $field_name:ident: $field_ty:ty,)* + }), + )* + }), + )* + ) => { + $( + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] + #[non_exhaustive] + $(#[$category_meta])* + pub enum $category_kind { + $($(#[$meta])* + $variant,)* + } + + impl From<$category_kind> for TraceKind { + fn from(v: $category_kind) -> Self { + TraceKind::$category_variant(v) + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash)] + #[non_exhaustive] + $(#[$category_meta])* + pub enum $category_enum { + $($(#[$meta])* + $variant($struct),)* + } + + impl fmt::Debug for $category_enum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + $(Self::$variant(v) => v.fmt(f),)* + } + } + } + + impl $category_enum { + pub fn kind(self) -> $category_kind { + match self { + $(Self::$variant(_) => $category_kind::$variant,)* + } + } + pub fn name(self) -> Interned { + match self { + $(Self::$variant(v) => v.name,)* + } + } + pub fn $category_property_fn(self) -> $category_property_fn_ret_ty { + match self { + $(Self::$variant(v) => v.$property_fn(),)* + } + } + } + + impl From<$category_enum> for TraceDecl { + fn from(v: $category_enum) -> Self { + TraceDecl::$category_variant(v) + } + } + + $( + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + #[non_exhaustive] + $(#[$meta])* + pub struct $struct { + $($(#[$field_meta])* + pub $field_name: $field_ty,)* + } + + impl $struct { + pub fn $property_fn($property_fn_self) -> $category_property_fn_ret_ty $property_fn_block + } + + impl From<$struct> for $category_enum { + fn from(v: $struct) -> Self { + $category_enum::$variant(v) + } + } + + impl From<$struct> for TraceDecl { + fn from(v: $struct) -> Self { + TraceDecl::$category_variant($category_enum::$variant(v)) + } + } + )* + )* + + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub enum TraceKind { + $($(#[$category_meta])* + $category_variant($category_kind),)* + } + + impl fmt::Debug for TraceKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + $(Self::$category_variant(v) => v.fmt(f),)* + } + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash)] + pub enum TraceDecl { + $($(#[$category_meta])* + $category_variant($category_enum),)* + } + + impl fmt::Debug for TraceDecl { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + $(Self::$category_variant(v) => v.fmt(f),)* + } + } + } + }; +} + +impl_trace_decl! 
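`impl_trace_decl!` generates the per-variant structs, the grouping enums, the `From` conversions, and the `kind()`/`name()` accessors from a single declaration, so they cannot drift apart. A much smaller declarative macro in the same spirit, with hypothetical names:

// one invocation produces the structs, the enum, From impls, and a kind() accessor
macro_rules! decl_events {
    ($enum_name:ident, $kind_name:ident {
        $($variant:ident($struct:ident { $($field:ident: $ty:ty),* $(,)? }),)*
    }) => {
        $(
            #[derive(Clone, Debug, PartialEq)]
            pub struct $struct {
                $(pub $field: $ty,)*
            }

            impl From<$struct> for $enum_name {
                fn from(v: $struct) -> Self {
                    $enum_name::$variant(v)
                }
            }
        )*

        #[derive(Copy, Clone, Debug, PartialEq, Eq)]
        pub enum $kind_name {
            $($variant,)*
        }

        #[derive(Clone, Debug, PartialEq)]
        pub enum $enum_name {
            $($variant($struct),)*
        }

        impl $enum_name {
            pub fn kind(&self) -> $kind_name {
                match self {
                    $(Self::$variant(_) => $kind_name::$variant,)*
                }
            }
        }
    };
}

decl_events! {
    Event, EventKind {
        Clock(ClockEvent { rising: bool }),
        Write(WriteEvent { addr: u32, data: u64 }),
    }
}

fn main() {
    let e: Event = WriteEvent { addr: 4, data: 0xFF }.into();
    assert_eq!(e.kind(), EventKind::Write);
}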
{ + #[kind = TraceScopeKind] + Scope(TraceScope { + fn children(self) -> Interned<[TraceDecl]>; + Module(TraceModule { + fn children(self) -> _ { + self.children + } + name: Interned, + children: Interned<[TraceDecl]>, + }), + Instance(TraceInstance { + fn children(self) -> _ { + [self.instance_io.into(), self.module.into()].intern_slice() + } + name: Interned, + instance_io: TraceBundle, + module: TraceModule, + ty: Bundle, + }), + Mem(TraceMem { + fn children(self) -> _ { + Interned::from_iter([*self.element_type].into_iter().chain(self.ports.iter().map(|&v| v.into()))) + } + id: TraceMemoryId, + name: Interned, + stride: usize, + element_type: Interned, + ports: Interned<[TraceMemPort]>, + array_type: Array, + }), + MemPort(TraceMemPort { + fn children(self) -> _ { + [self.bundle.into()].intern_slice() + } + name: Interned, + bundle: TraceBundle, + ty: Bundle, + }), + Wire(TraceWire { + fn children(self) -> _ { + [*self.child].intern_slice() + } + name: Interned, + child: Interned, + ty: CanonicalType, + }), + Reg(TraceReg { + fn children(self) -> _ { + [*self.child].intern_slice() + } + name: Interned, + child: Interned, + ty: CanonicalType, + }), + ModuleIO(TraceModuleIO { + fn children(self) -> _ { + [*self.child].intern_slice() + } + name: Interned, + child: Interned, + ty: CanonicalType, + flow: Flow, + }), + Bundle(TraceBundle { + fn children(self) -> _ { + self.fields + } + name: Interned, + fields: Interned<[TraceDecl]>, + ty: Bundle, + flow: Flow, + }), + Array(TraceArray { + fn children(self) -> _ { + self.elements + } + name: Interned, + elements: Interned<[TraceDecl]>, + ty: Array, + flow: Flow, + }), + EnumWithFields(TraceEnumWithFields { + fn children(self) -> _ { + Interned::from_iter([self.discriminant.into()].into_iter().chain(self.non_empty_fields)) + } + name: Interned, + discriminant: TraceEnumDiscriminant, + non_empty_fields: Interned<[TraceDecl]>, + ty: Enum, + flow: Flow, + }), + }), + #[kind = TraceScalarKind] + Scalar(TraceScalar { + fn location(self) -> TraceLocation; + UInt(TraceUInt { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + ty: UInt, + flow: Flow, + }), + SInt(TraceSInt { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + ty: SInt, + flow: Flow, + }), + Bool(TraceBool { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + flow: Flow, + }), + FieldlessEnum(TraceFieldlessEnum { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + ty: Enum, + flow: Flow, + }), + EnumDiscriminant(TraceEnumDiscriminant { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + ty: Enum, + flow: Flow, + }), + Clock(TraceClock { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + flow: Flow, + }), + SyncReset(TraceSyncReset { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + flow: Flow, + }), + AsyncReset(TraceAsyncReset { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + flow: Flow, + }), + SimOnly(TraceSimOnly { + fn location(self) -> _ { + self.location + } + location: TraceLocation, + name: Interned, + ty: DynSimOnly, + flow: Flow, + }), + }), +} + +pub trait TraceWriterDecls: fmt::Debug + 'static + Sized { + type Error: std::error::Error + Send + Sync + 'static; + type TraceWriter: TraceWriter; + fn write_decls( + self, + module: 
TraceModule, + trace_scalar_id_count: usize, + trace_memory_id_count: usize, + ) -> Result; +} + +trait TraceWriterDeclsDynTrait: fmt::Debug { + fn write_decls_dyn( + self: Box, + module: TraceModule, + trace_scalar_id_count: usize, + trace_memory_id_count: usize, + ) -> std::io::Result; +} + +fn err_into_io(e: E) -> std::io::Error { + match ::downcast::(Box::new(e)) { + Ok(retval) => *retval, + Err(e) => std::io::Error::other(e), + } +} + +impl TraceWriterDeclsDynTrait for T { + fn write_decls_dyn( + self: Box, + module: TraceModule, + trace_scalar_id_count: usize, + trace_memory_id_count: usize, + ) -> std::io::Result { + Ok(DynTraceWriter(Box::new( + TraceWriterDecls::write_decls( + *self, + module, + trace_scalar_id_count, + trace_memory_id_count, + ) + .map_err(err_into_io)?, + ))) + } +} + +pub trait TraceWriter: fmt::Debug + 'static { + type Error: std::error::Error + Send + Sync + 'static; + fn finish_init(&mut self) -> Result<(), Self::Error> { + Ok(()) + } + fn change_time_to(&mut self, instant: SimInstant) -> Result<(), Self::Error> { + let _ = instant; + Ok(()) + } + fn flush(&mut self) -> Result<(), Self::Error> { + Ok(()) + } + fn close(self) -> Result<(), Self::Error> + where + Self: Sized, + { + Ok(()) + } + fn set_memory_element( + &mut self, + memory: TraceMemoryId, + element_index: usize, + element_data: &BitSlice, + ) -> Result<(), Self::Error>; + fn set_signal_uint(&mut self, id: TraceScalarId, value: &BitSlice) -> Result<(), Self::Error>; + fn set_signal_sint(&mut self, id: TraceScalarId, value: &BitSlice) -> Result<(), Self::Error>; + fn set_signal_bool(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + if value { + self.set_signal_uint(id, bits![1]) + } else { + self.set_signal_uint(id, bits![0]) + } + } + fn set_signal_clock(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + self.set_signal_bool(id, value) + } + fn set_signal_sync_reset(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + self.set_signal_bool(id, value) + } + fn set_signal_async_reset( + &mut self, + id: TraceScalarId, + value: bool, + ) -> Result<(), Self::Error> { + self.set_signal_bool(id, value) + } + fn set_signal_enum_discriminant( + &mut self, + id: TraceScalarId, + variant_index: usize, + ty: Enum, + ) -> Result<(), Self::Error>; + fn set_signal_sim_only_value( + &mut self, + id: TraceScalarId, + value: &DynSimOnlyValue, + ) -> Result<(), Self::Error>; +} + +pub struct DynTraceWriterDecls(Box); + +impl DynTraceWriterDecls { + pub fn new(writer: W) -> Self { + Self(Box::new(writer)) + } +} + +impl fmt::Debug for DynTraceWriterDecls { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl TraceWriterDecls for DynTraceWriterDecls { + type Error = std::io::Error; + type TraceWriter = DynTraceWriter; + fn write_decls( + self, + module: TraceModule, + trace_scalar_id_count: usize, + trace_memory_id_count: usize, + ) -> Result { + self.0 + .write_decls_dyn(module, trace_scalar_id_count, trace_memory_id_count) + } +} + +trait TraceWriterDynTrait: fmt::Debug + 'static { + fn finish_init_dyn(&mut self) -> std::io::Result<()>; + fn change_time_to_dyn(&mut self, instant: SimInstant) -> std::io::Result<()>; + fn flush_dyn(&mut self) -> std::io::Result<()>; + fn close_dyn(self: Box) -> std::io::Result<()>; + fn set_memory_element_dyn( + &mut self, + memory: TraceMemoryId, + element_index: usize, + element_data: &BitSlice, + ) -> std::io::Result<()>; + fn set_signal_uint_dyn(&mut self, id: 
TraceScalarId, value: &BitSlice) -> std::io::Result<()>; + fn set_signal_sint_dyn(&mut self, id: TraceScalarId, value: &BitSlice) -> std::io::Result<()>; + fn set_signal_bool_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()>; + fn set_signal_clock_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()>; + fn set_signal_sync_reset_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()>; + fn set_signal_async_reset_dyn(&mut self, id: TraceScalarId, value: bool) + -> std::io::Result<()>; + fn set_signal_enum_discriminant_dyn( + &mut self, + id: TraceScalarId, + variant_index: usize, + ty: Enum, + ) -> std::io::Result<()>; + fn set_signal_sim_only_value_dyn( + &mut self, + id: TraceScalarId, + value: &DynSimOnlyValue, + ) -> std::io::Result<()>; +} + +impl TraceWriterDynTrait for T { + fn finish_init_dyn(&mut self) -> std::io::Result<()> { + Ok(TraceWriter::finish_init(self).map_err(err_into_io)?) + } + fn change_time_to_dyn(&mut self, instant: SimInstant) -> std::io::Result<()> { + Ok(TraceWriter::change_time_to(self, instant).map_err(err_into_io)?) + } + fn flush_dyn(&mut self) -> std::io::Result<()> { + Ok(TraceWriter::flush(self).map_err(err_into_io)?) + } + fn close_dyn(self: Box) -> std::io::Result<()> { + Ok(TraceWriter::close(*self).map_err(err_into_io)?) + } + fn set_memory_element_dyn( + &mut self, + memory: TraceMemoryId, + element_index: usize, + element_data: &BitSlice, + ) -> std::io::Result<()> { + Ok( + TraceWriter::set_memory_element(self, memory, element_index, element_data) + .map_err(err_into_io)?, + ) + } + fn set_signal_uint_dyn(&mut self, id: TraceScalarId, value: &BitSlice) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_uint(self, id, value).map_err(err_into_io)?) + } + fn set_signal_sint_dyn(&mut self, id: TraceScalarId, value: &BitSlice) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_sint(self, id, value).map_err(err_into_io)?) + } + fn set_signal_bool_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_bool(self, id, value).map_err(err_into_io)?) + } + fn set_signal_clock_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_clock(self, id, value).map_err(err_into_io)?) + } + fn set_signal_sync_reset_dyn(&mut self, id: TraceScalarId, value: bool) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_sync_reset(self, id, value).map_err(err_into_io)?) + } + fn set_signal_async_reset_dyn( + &mut self, + id: TraceScalarId, + value: bool, + ) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_async_reset(self, id, value).map_err(err_into_io)?) + } + fn set_signal_enum_discriminant_dyn( + &mut self, + id: TraceScalarId, + variant_index: usize, + ty: Enum, + ) -> std::io::Result<()> { + Ok( + TraceWriter::set_signal_enum_discriminant(self, id, variant_index, ty) + .map_err(err_into_io)?, + ) + } + fn set_signal_sim_only_value_dyn( + &mut self, + id: TraceScalarId, + value: &DynSimOnlyValue, + ) -> std::io::Result<()> { + Ok(TraceWriter::set_signal_sim_only_value(self, id, value).map_err(err_into_io)?) 
+ } +} + +pub struct DynTraceWriter(Box); + +impl fmt::Debug for DynTraceWriter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl TraceWriter for DynTraceWriter { + type Error = std::io::Error; + fn finish_init(&mut self) -> Result<(), Self::Error> { + self.0.finish_init_dyn() + } + fn flush(&mut self) -> Result<(), Self::Error> { + self.0.flush_dyn() + } + fn close(self) -> Result<(), Self::Error> { + self.0.close_dyn() + } + fn set_memory_element( + &mut self, + memory: TraceMemoryId, + element_index: usize, + element_data: &BitSlice, + ) -> Result<(), Self::Error> { + self.0 + .set_memory_element_dyn(memory, element_index, element_data) + } + fn change_time_to(&mut self, instant: SimInstant) -> Result<(), Self::Error> { + self.0.change_time_to_dyn(instant) + } + fn set_signal_uint(&mut self, id: TraceScalarId, value: &BitSlice) -> Result<(), Self::Error> { + self.0.set_signal_uint_dyn(id, value) + } + fn set_signal_sint(&mut self, id: TraceScalarId, value: &BitSlice) -> Result<(), Self::Error> { + self.0.set_signal_sint_dyn(id, value) + } + fn set_signal_bool(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + self.0.set_signal_bool_dyn(id, value) + } + fn set_signal_clock(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + self.0.set_signal_clock_dyn(id, value) + } + fn set_signal_sync_reset(&mut self, id: TraceScalarId, value: bool) -> Result<(), Self::Error> { + self.0.set_signal_sync_reset_dyn(id, value) + } + fn set_signal_async_reset( + &mut self, + id: TraceScalarId, + value: bool, + ) -> Result<(), Self::Error> { + self.0.set_signal_async_reset_dyn(id, value) + } + fn set_signal_enum_discriminant( + &mut self, + id: TraceScalarId, + variant_index: usize, + ty: Enum, + ) -> Result<(), Self::Error> { + self.0 + .set_signal_enum_discriminant_dyn(id, variant_index, ty) + } + fn set_signal_sim_only_value( + &mut self, + id: TraceScalarId, + value: &DynSimOnlyValue, + ) -> Result<(), Self::Error> { + self.0.set_signal_sim_only_value_dyn(id, value) + } +} + +#[derive(Debug)] +enum TraceWriterState { + Decls(T), + Init(T::TraceWriter), + Running(T::TraceWriter), + Errored(Option), +} + +trait SimTraceDebug { + fn fmt(&self, id: I, f: &mut fmt::Formatter<'_>) -> fmt::Result; +} + +struct SimTraceDebugAsDebug(T, I); + +impl fmt::Debug for SimTraceDebugAsDebug<&'_ T, I> +where + T: SimTraceDebug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(self.1, f) + } +} + +impl SimTraceDebug for Vec +where + [T]: SimTraceDebug, +{ + fn fmt(&self, id: I, f: &mut fmt::Formatter<'_>) -> fmt::Result { + <[T]>::fmt(&**self, id, f) + } +} + +impl SimTraceDebug for Interned +where + T: SimTraceDebug, +{ + fn fmt(&self, id: I, f: &mut fmt::Formatter<'_>) -> fmt::Result { + T::fmt(&**self, id, f) + } +} + +impl SimTraceDebug for Box +where + T: SimTraceDebug, +{ + fn fmt(&self, id: I, f: &mut fmt::Formatter<'_>) -> fmt::Result { + T::fmt(&**self, id, f) + } +} + +impl SimTraceDebug<()> for [T] +where + T: SimTraceDebug, +{ + fn fmt(&self, _id: (), f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list() + .entries( + self.iter() + .enumerate() + .map(|(id, v)| SimTraceDebugAsDebug(v, TraceScalarId(id))), + ) + .finish() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct SimTrace { + kind: K, + state: S, + last_state: S, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub(crate) struct SimTraces(T); + +impl fmt::Debug for SimTraces +where + T: SimTraceDebug<()>, 
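`DynTraceWriter` erases the concrete `TraceWriter` behind an object-safe shadow trait whose error type is pinned to `std::io::Error`, and `err_into_io` unwraps errors that already were `io::Error` instead of double-boxing them. A reduced sketch of that erasure pattern, using a hypothetical single-method `Writer` trait rather than the real `TraceWriter` surface:

use std::{fmt, io};

// the generic trait: each implementation picks its own error type
trait Writer: fmt::Debug + 'static {
    type Error: std::error::Error + Send + Sync + 'static;
    fn write_u32(&mut self, value: u32) -> Result<(), Self::Error>;
}

// funnel any error into io::Error, unwrapping it again if it already was one
fn err_into_io<E: std::error::Error + Send + Sync + 'static>(e: E) -> io::Error {
    let boxed: Box<dyn std::error::Error + Send + Sync> = Box::new(e);
    match boxed.downcast::<io::Error>() {
        Ok(e) => *e,
        Err(e) => io::Error::other(e),
    }
}

// object-safe shadow trait with the error type fixed to io::Error
trait WriterDyn: fmt::Debug {
    fn write_u32_dyn(&mut self, value: u32) -> io::Result<()>;
}

impl<T: Writer> WriterDyn for T {
    fn write_u32_dyn(&mut self, value: u32) -> io::Result<()> {
        self.write_u32(value).map_err(err_into_io)
    }
}

// the erased handle still implements the original trait, so generic code keeps working
#[derive(Debug)]
struct DynWriter(Box<dyn WriterDyn>);

impl Writer for DynWriter {
    type Error = io::Error;
    fn write_u32(&mut self, value: u32) -> Result<(), Self::Error> {
        self.0.write_u32_dyn(value)
    }
}

#[derive(Debug, Default)]
struct CountingWriter {
    written: Vec<u32>,
}

impl Writer for CountingWriter {
    type Error = std::convert::Infallible;
    fn write_u32(&mut self, value: u32) -> Result<(), Self::Error> {
        self.written.push(value);
        Ok(())
    }
}

fn main() -> io::Result<()> {
    let mut writer = DynWriter(Box::new(CountingWriter::default()));
    writer.write_u32(42)?;
    Ok(())
}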
+{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt((), f) + } +} + +impl SimTraceDebug for SimTrace { + fn fmt(&self, id: TraceScalarId, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + kind, + state, + last_state, + } = self; + f.debug_struct("SimTrace") + .field("id", &id) + .field("kind", kind) + .field("state", state) + .field("last_state", last_state) + .finish() + } +} + +impl SimTraceDebug for SimTrace { + fn fmt(&self, id: TraceScalarId, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + kind, + state, + last_state, + } = self; + f.debug_struct("SimTrace") + .field("id", &id) + .field("kind", kind) + .field("state", state) + .field("last_state", last_state) + .finish() + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) enum SimTraceKind { + BigUInt { + index: StatePartIndex, + ty: UInt, + }, + BigSInt { + index: StatePartIndex, + ty: SInt, + }, + BigBool { + index: StatePartIndex, + }, + BigAsyncReset { + index: StatePartIndex, + }, + BigSyncReset { + index: StatePartIndex, + }, + BigClock { + index: StatePartIndex, + }, + SmallUInt { + index: StatePartIndex, + ty: UInt, + }, + SmallSInt { + index: StatePartIndex, + ty: SInt, + }, + SmallBool { + index: StatePartIndex, + }, + SmallAsyncReset { + index: StatePartIndex, + }, + SmallSyncReset { + index: StatePartIndex, + }, + SmallClock { + index: StatePartIndex, + }, + EnumDiscriminant { + index: StatePartIndex, + ty: Enum, + }, + SimOnly { + index: StatePartIndex, + ty: DynSimOnly, + }, +} + +#[derive(PartialEq, Eq)] +pub(crate) enum SimTraceState { + Bits(BitVec), + SimOnly(DynSimOnlyValue), +} + +impl Clone for SimTraceState { + fn clone(&self) -> Self { + match self { + Self::Bits(v) => Self::Bits(v.clone()), + Self::SimOnly(v) => Self::SimOnly(v.clone()), + } + } + fn clone_from(&mut self, source: &Self) { + match (&mut *self, source) { + (SimTraceState::Bits(dest), SimTraceState::Bits(source)) => { + dest.clone_from_bitslice(source); + } + _ => *self = source.clone(), + } + } +} + +impl SimTraceState { + fn unwrap_bits_ref(&self) -> &BitVec { + if let SimTraceState::Bits(v) = self { + v + } else { + unreachable!() + } + } + fn unwrap_bits_mut(&mut self) -> &mut BitVec { + if let SimTraceState::Bits(v) = self { + v + } else { + unreachable!() + } + } + fn unwrap_sim_only_ref(&self) -> &DynSimOnlyValue { + if let SimTraceState::SimOnly(v) = self { + v + } else { + unreachable!() + } + } + fn unwrap_sim_only_mut(&mut self) -> &mut DynSimOnlyValue { + if let SimTraceState::SimOnly(v) = self { + v + } else { + unreachable!() + } + } +} + +impl fmt::Debug for SimTraceState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SimTraceState::Bits(v) => BitSliceWriteWithBase(v).fmt(f), + SimTraceState::SimOnly(v) => v.fmt(f), + } + } +} + +impl SimTraceKind { + fn make_state(self) -> SimTraceState { + match self { + SimTraceKind::BigUInt { index: _, ty } | SimTraceKind::SmallUInt { index: _, ty } => { + SimTraceState::Bits(BitVec::repeat(false, ty.width)) + } + SimTraceKind::BigSInt { index: _, ty } | SimTraceKind::SmallSInt { index: _, ty } => { + SimTraceState::Bits(BitVec::repeat(false, ty.width)) + } + SimTraceKind::BigBool { index: _ } + | SimTraceKind::BigAsyncReset { index: _ } + | SimTraceKind::BigSyncReset { index: _ } + | SimTraceKind::BigClock { index: _ } + | SimTraceKind::SmallBool { index: _ } + | SimTraceKind::SmallAsyncReset { index: _ } + | SimTraceKind::SmallSyncReset { index: _ } + | SimTraceKind::SmallClock { index: _ } => 
{ + SimTraceState::Bits(BitVec::repeat(false, 1)) + } + SimTraceKind::EnumDiscriminant { index: _, ty } => { + SimTraceState::Bits(BitVec::repeat(false, ty.discriminant_bit_width())) + } + SimTraceKind::SimOnly { index: _, ty } => SimTraceState::SimOnly(ty.default_value()), + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +enum MaybeNeedsSettle { + NeedsSettle(S), + NoSettleNeeded(N), +} + +impl MaybeNeedsSettle { + fn map(self, f: impl FnOnce(T) -> U) -> MaybeNeedsSettle { + match self { + MaybeNeedsSettle::NeedsSettle(v) => MaybeNeedsSettle::NeedsSettle(f(v)), + MaybeNeedsSettle::NoSettleNeeded(v) => MaybeNeedsSettle::NoSettleNeeded(f(v)), + } + } +} + +// workaround implementing FnOnce not being stable +trait MaybeNeedsSettleFn { + type Output; + + fn call(self, arg: A) -> Self::Output; +} + +impl O, A, O> MaybeNeedsSettleFn for T { + type Output = O; + + fn call(self, arg: A) -> Self::Output { + self(arg) + } +} + +impl MaybeNeedsSettle { + fn apply_no_settle(self, arg: T) -> MaybeNeedsSettle + where + N: MaybeNeedsSettleFn, + { + match self { + MaybeNeedsSettle::NeedsSettle(v) => MaybeNeedsSettle::NeedsSettle(v), + MaybeNeedsSettle::NoSettleNeeded(v) => MaybeNeedsSettle::NoSettleNeeded(v.call(arg)), + } + } +} + +struct SimulationModuleState { + base_targets: Vec, + uninitialized_ios: HashMap>, + io_targets: HashMap>, + did_initial_settle: bool, +} + +impl fmt::Debug for SimulationModuleState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + base_targets, + uninitialized_ios, + io_targets, + did_initial_settle, + } = self; + f.debug_struct("SimulationModuleState") + .field("base_targets", base_targets) + .field("uninitialized_ios", &SortedSetDebug(uninitialized_ios)) + .field("io_targets", &SortedSetDebug(io_targets)) + .field("did_initial_settle", did_initial_settle) + .finish() + } +} + +impl SimulationModuleState { + fn new(base_targets: impl IntoIterator)>) -> Self { + let mut retval = Self { + base_targets: Vec::new(), + uninitialized_ios: HashMap::default(), + io_targets: HashMap::default(), + did_initial_settle: false, + }; + for (base_target, value) in base_targets { + retval.base_targets.push(base_target); + retval.parse_io(base_target, value); + } + retval + } + /// returns `true` if `target` or any sub-targets are uninitialized inputs + fn parse_io(&mut self, target: Target, value: CompiledValue) -> bool { + self.io_targets.insert(target, value); + match value.layout.body { + CompiledTypeLayoutBody::Scalar => match target.flow() { + Flow::Source => false, + Flow::Sink => { + self.uninitialized_ios.insert(target, vec![]); + true + } + Flow::Duplex => unreachable!(), + }, + CompiledTypeLayoutBody::Array { .. } => { + let value = value.map_ty(Array::from_canonical); + let mut sub_targets = Vec::new(); + for index in 0..value.layout.ty.len() { + let sub_target = target.join( + TargetPathElement::from(TargetPathArrayElement { index }).intern_sized(), + ); + if self.parse_io(sub_target, value.element(index)) { + sub_targets.push(sub_target); + } + } + if sub_targets.is_empty() { + false + } else { + self.uninitialized_ios.insert(target, sub_targets); + true + } + } + CompiledTypeLayoutBody::Bundle { .. } => { + let value = value.map_ty(Bundle::from_canonical); + let mut sub_targets = Vec::new(); + for BundleField { name, .. 
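`MaybeNeedsSettleFn` exists because user types can't implement `FnOnce` on stable Rust: the blanket impl keeps closures ergonomic, while named structs such as `ReadBitFn` implement the trait directly so they can be stored and replayed later. A standalone sketch of the workaround with hypothetical names:

// the one-shot callable trait; a blanket impl covers ordinary closures
trait OnceCall<A> {
    type Output;
    fn call(self, arg: A) -> Self::Output;
}

impl<T: FnOnce(A) -> O, A, O> OnceCall<A> for T {
    type Output = O;
    fn call(self, arg: A) -> Self::Output {
        self(arg)
    }
}

// a named implementation: unlike a closure type, it can be stored in a field,
// named in a signature, and constructed long before the argument exists
struct AddOffset {
    offset: i64,
}

impl OnceCall<i64> for AddOffset {
    type Output = i64;
    fn call(self, arg: i64) -> i64 {
        arg + self.offset
    }
}

fn apply<F: OnceCall<i64>>(f: F, x: i64) -> F::Output {
    f.call(x)
}

fn main() {
    assert_eq!(apply(|x| x * 2, 21), 42);
    assert_eq!(apply(AddOffset { offset: 2 }, 40), 42);
}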
} in value.layout.ty.fields() { + let sub_target = target.join( + TargetPathElement::from(TargetPathBundleField { name }).intern_sized(), + ); + if self.parse_io(sub_target, value.field_by_name(name)) { + sub_targets.push(sub_target); + } + } + if sub_targets.is_empty() { + false + } else { + self.uninitialized_ios.insert(target, sub_targets); + true + } + } + } + } + fn mark_target_as_initialized(&mut self, mut target: Target) { + fn remove_target_and_children( + uninitialized_ios: &mut HashMap>, + target: Target, + ) { + let Some(children) = uninitialized_ios.remove(&target) else { + return; + }; + for child in children { + remove_target_and_children(uninitialized_ios, child); + } + } + remove_target_and_children(&mut self.uninitialized_ios, target); + while let Some(target_child) = target.child() { + let parent = target_child.parent(); + for child in self + .uninitialized_ios + .get(&*parent) + .map(|v| &**v) + .unwrap_or(&[]) + { + if self.uninitialized_ios.contains_key(child) { + return; + } + } + target = *parent; + self.uninitialized_ios.remove(&target); + } + } + #[track_caller] + fn get_io( + &self, + mut target: Target, + which_module: WhichModule, + ) -> CompiledValue { + assert!( + target.canonical_ty().is_passive(), + "simulator read/write expression must have a passive type \ + (recursively contains no fields with `#[hdl(flip)]`)" + ); + if let Some(&retval) = self.io_targets.get(&target) { + return retval; + } + loop { + target = match target { + Target::Base(_) => break, + Target::Child(child) => { + match *child.path_element() { + TargetPathElement::BundleField(_) | TargetPathElement::ArrayElement(_) => {} + TargetPathElement::DynArrayElement(_) => panic!( + "simulator read/write expression must not have dynamic array indexes" + ), + } + *child.parent() + } + }; + } + match which_module { + WhichModule::Main => panic!( + "simulator read/write expression must be \ + an array element/field of `Simulation::io()`" + ), + WhichModule::Extern { .. } => panic!( + "simulator read/write expression must be \ + one of this module's inputs/outputs or an \ + array element/field of one of this module's inputs/outputs" + ), + } + } + #[track_caller] + fn read_helper( + &self, + io: Expr, + which_module: WhichModule, + ) -> MaybeNeedsSettle> { + let Some(target) = io.target() else { + match which_module { + WhichModule::Main => panic!( + "can't read from an expression that's not a field/element of `Simulation::io()`" + ), + WhichModule::Extern { .. } => panic!( + "can't read from an expression that's not based on one of this module's inputs/outputs" + ), + } + }; + let compiled_value = self.get_io(*target, which_module); + match target.flow() { + Flow::Source => { + if !self.uninitialized_ios.is_empty() { + match which_module { + WhichModule::Main => { + panic!("can't read from an output before initializing all inputs"); + } + WhichModule::Extern { .. } => { + panic!("can't read from an input before initializing all outputs"); + } + } + } + MaybeNeedsSettle::NeedsSettle(compiled_value) + } + Flow::Sink => { + if self.uninitialized_ios.contains_key(&*target) { + match which_module { + WhichModule::Main => panic!("can't read from an uninitialized input"), + WhichModule::Extern { .. 
} => { + panic!("can't read from an uninitialized output"); + } + } + } + MaybeNeedsSettle::NoSettleNeeded(compiled_value) + } + Flow::Duplex => unreachable!(), + } + } + #[track_caller] + fn write_helper( + &mut self, + io: Expr, + which_module: WhichModule, + ) -> CompiledValue { + let Some(target) = io.target() else { + match which_module { + WhichModule::Main => panic!( + "can't write to an expression that's not a field/element of `Simulation::io()`" + ), + WhichModule::Extern { .. } => panic!( + "can't write to an expression that's not based on one of this module's outputs" + ), + } + }; + let compiled_value = self.get_io(*target, which_module); + match target.flow() { + Flow::Source => match which_module { + WhichModule::Main => panic!("can't write to an output"), + WhichModule::Extern { .. } => panic!("can't write to an input"), + }, + Flow::Sink => {} + Flow::Duplex => unreachable!(), + } + if !self.did_initial_settle { + self.mark_target_as_initialized(*target); + } + compiled_value + } +} + +#[derive(Copy, Clone, Debug)] +enum WaitTarget { + Settle, + Instant(SimInstant), + Change { key: ChangeKey, value: ChangeValue }, +} + +#[derive(Clone)] +struct EarliestWaitTargets { + settle: bool, + instant: Option, + changes: HashMap, SimValue>, +} + +impl fmt::Debug for EarliestWaitTargets { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_set().entries(self.iter()).finish() + } +} + +impl Default for EarliestWaitTargets { + fn default() -> Self { + Self { + settle: false, + instant: None, + changes: HashMap::default(), + } + } +} + +impl EarliestWaitTargets { + fn settle() -> Self { + Self { + settle: true, + instant: None, + changes: HashMap::default(), + } + } + fn instant(instant: SimInstant) -> Self { + Self { + settle: false, + instant: Some(instant), + changes: HashMap::default(), + } + } + fn len(&self) -> usize { + self.settle as usize + self.instant.is_some() as usize + self.changes.len() + } + fn is_empty(&self) -> bool { + self.len() == 0 + } + fn clear(&mut self) { + let Self { + settle, + instant, + changes, + } = self; + *settle = false; + *instant = None; + changes.clear(); + } + fn insert( + &mut self, + value: impl std::borrow::Borrow, ChangeValue>>, + ) where + ChangeValue: std::borrow::Borrow>, + { + let value = value.borrow(); + match value { + WaitTarget::Settle => self.settle = true, + WaitTarget::Instant(instant) => { + if self.instant.is_none_or(|v| v > *instant) { + self.instant = Some(*instant); + } + } + WaitTarget::Change { key, value } => { + self.changes + .entry(*key) + .or_insert_with(|| value.borrow().clone()); + } + } + } + fn convert_earlier_instants_to_settle(&mut self, instant: SimInstant) { + if self.instant.is_some_and(|v| v <= instant) { + self.settle = true; + self.instant = None; + } + } + fn iter<'a>( + &'a self, + ) -> impl Clone + + Iterator, &'a SimValue>> + + 'a { + self.settle + .then_some(WaitTarget::Settle) + .into_iter() + .chain(self.instant.map(|instant| WaitTarget::Instant(instant))) + .chain( + self.changes + .iter() + .map(|(&key, value)| WaitTarget::Change { key, value }), + ) + } +} + +impl>> + Extend, ChangeValue>> for EarliestWaitTargets +{ + fn extend, ChangeValue>>>( + &mut self, + iter: T, + ) { + iter.into_iter().for_each(|v| self.insert(v)) + } +} + +impl<'a, ChangeValue: std::borrow::Borrow>> + Extend<&'a WaitTarget, ChangeValue>> for EarliestWaitTargets +{ + fn extend, ChangeValue>>>( + &mut self, + iter: T, + ) { + iter.into_iter().for_each(|v| self.insert(v)) + } +} + +impl FromIterator for 
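`EarliestWaitTargets` only ever needs the earliest pending deadline, and a deadline that has already passed degenerates into a plain settle request. A simplified sketch of that merging logic, with a hypothetical `Instant` standing in for `SimInstant` and the change-tracking half omitted:

// hypothetical simulation timestamp standing in for SimInstant
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Instant(u64);

#[derive(Debug, Default)]
struct EarliestWaits {
    settle: bool,
    instant: Option<Instant>,
}

impl EarliestWaits {
    fn insert_settle(&mut self) {
        self.settle = true;
    }
    fn insert_instant(&mut self, instant: Instant) {
        // keep only the earliest requested deadline
        if self.instant.is_none_or(|v| v > instant) {
            self.instant = Some(instant);
        }
    }
    fn convert_earlier_instants_to_settle(&mut self, now: Instant) {
        // a deadline at or before "now" is equivalent to asking for a settle pass
        if self.instant.is_some_and(|v| v <= now) {
            self.settle = true;
            self.instant = None;
        }
    }
}

fn main() {
    let mut waits = EarliestWaits::default();
    waits.insert_instant(Instant(300));
    waits.insert_instant(Instant(100));
    assert_eq!(waits.instant, Some(Instant(100)));
    waits.convert_earlier_instants_to_settle(Instant(100));
    assert!(waits.settle);
    assert_eq!(waits.instant, None);
    waits.insert_settle();
}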
EarliestWaitTargets +where + Self: Extend, +{ + fn from_iter>(iter: T) -> Self { + let mut retval = Self::default(); + retval.extend(iter); + retval + } +} + +struct SimulationExternModuleState { + module_state: SimulationModuleState, + sim: ExternModuleSimulation, + running_generator: Option + 'static>>>, + wait_targets: EarliestWaitTargets, +} + +impl fmt::Debug for SimulationExternModuleState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + module_state, + sim, + running_generator, + wait_targets, + } = self; + f.debug_struct("SimulationExternModuleState") + .field("module_state", module_state) + .field("sim", sim) + .field( + "running_generator", + &running_generator.as_ref().map(|_| DebugAsDisplay("...")), + ) + .field("wait_targets", wait_targets) + .finish() + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +enum WhichModule { + Main, + Extern { module_index: usize }, +} + +struct ReadBitFn { + compiled_value: CompiledValue, +} + +impl MaybeNeedsSettleFn<&'_ mut interpreter::State> for ReadBitFn { + type Output = bool; + + fn call(self, state: &mut interpreter::State) -> Self::Output { + match self.compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => { + state.small_slots[self.compiled_value.range.small_slots.start] != 0 + } + Some(TypeLenSingle::BigSlot) => !state.big_slots + [self.compiled_value.range.big_slots.start] + .clone() + .is_zero(), + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + } + } +} + +struct ReadBoolOrIntFn { + compiled_value: CompiledValue, + io: Expr, +} + +impl MaybeNeedsSettleFn<&'_ mut interpreter::State> for ReadBoolOrIntFn { + type Output = I::Value; + + fn call(self, state: &mut interpreter::State) -> Self::Output { + let Self { compiled_value, io } = self; + match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => Expr::ty(io) + .value_from_int_wrapping(state.small_slots[compiled_value.range.small_slots.start]), + Some(TypeLenSingle::BigSlot) => Expr::ty(io).value_from_int_wrapping( + state.big_slots[compiled_value.range.big_slots.start].clone(), + ), + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + } + } +} + +struct ReadFn { + compiled_value: CompiledValue, + io: Expr, +} + +impl MaybeNeedsSettleFn<&'_ mut interpreter::State> for ReadFn { + type Output = SimValue; + + fn call(self, state: &mut interpreter::State) -> Self::Output { + let Self { compiled_value, io } = self; + SimulationImpl::read_no_settle_helper(state, io, compiled_value) + } +} + +struct GeneratorWaker; + +impl std::task::Wake for GeneratorWaker { + fn wake(self: Arc) { + panic!("can't await other kinds of futures in function passed to ExternalModuleSimulation"); + } +} + +#[derive(Default)] +struct ReadyToRunSet { + state_ready_to_run: bool, + extern_modules_ready_to_run: Vec, +} + +impl ReadyToRunSet { + fn clear(&mut self) { + let Self { + state_ready_to_run, + extern_modules_ready_to_run, + } = self; + *state_ready_to_run = false; + extern_modules_ready_to_run.clear(); + } +} + +struct SimulationImpl { + state: interpreter::State, + io: Expr, + main_module: SimulationModuleState, + extern_modules: Box<[SimulationExternModuleState]>, + state_ready_to_run: bool, + trace_decls: TraceModule, + traces: SimTraces]>>, + trace_memories: BTreeMap, TraceMem>, + trace_writers: Vec>, + instant: SimInstant, + clocks_triggered: Interned<[StatePartIndex]>, + breakpoints: Option, + generator_waker: std::task::Waker, +} + +impl fmt::Debug for SimulationImpl { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + self.debug_fmt(None, f) + } +} + +impl SimulationImpl { + fn debug_fmt(&self, io: Option<&dyn fmt::Debug>, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + state, + io: self_io, + main_module, + extern_modules, + state_ready_to_run, + trace_decls, + traces, + trace_memories, + trace_writers, + instant, + clocks_triggered, + breakpoints: _, + generator_waker: _, + } = self; + f.debug_struct("Simulation") + .field("state", state) + .field("io", io.unwrap_or(self_io)) + .field("main_module", main_module) + .field("extern_modules", extern_modules) + .field("state_ready_to_run", state_ready_to_run) + .field("trace_decls", trace_decls) + .field("traces", traces) + .field("trace_memories", trace_memories) + .field("trace_writers", trace_writers) + .field("instant", instant) + .field("clocks_triggered", clocks_triggered) + .finish_non_exhaustive() + } + fn new(compiled: Compiled) -> Self { + let io_target = Target::from(compiled.io); + let extern_modules = Box::from_iter(compiled.extern_modules.iter().map( + |&CompiledExternModule { + module_io_targets, + module_io, + simulation, + }| { + SimulationExternModuleState { + module_state: SimulationModuleState::new( + module_io_targets + .iter() + .copied() + .zip(module_io.iter().copied()), + ), + sim: simulation, + running_generator: None, + wait_targets: EarliestWaitTargets::settle(), + } + }, + )); + Self { + state: State::new(compiled.insns), + io: compiled.io.to_expr(), + main_module: SimulationModuleState::new( + compiled + .io + .ty() + .fields() + .into_iter() + .zip(compiled.base_module.module_io) + .map(|(BundleField { name, .. }, value)| { + ( + io_target.join( + TargetPathElement::from(TargetPathBundleField { name }) + .intern_sized(), + ), + value, + ) + }), + ), + extern_modules, + state_ready_to_run: true, + trace_decls: compiled.base_module.trace_decls, + traces: SimTraces(Box::from_iter(compiled.traces.0.iter().map( + |&SimTrace { + kind, + state: _, + last_state: _, + }| SimTrace { + kind, + state: kind.make_state(), + last_state: kind.make_state(), + }, + ))), + trace_memories: BTreeMap::from_iter(compiled.trace_memories.iter().copied()), + trace_writers: vec![], + instant: SimInstant::START, + clocks_triggered: compiled.clocks_triggered, + breakpoints: None, + generator_waker: Arc::new(GeneratorWaker).into(), + } + } + fn write_traces( + &mut self, + mut trace_writer: DynTraceWriter, + ) -> std::io::Result { + let mut set_memory_element = |memory: StatePartIndex, + trace_mem: &TraceMem, + element_index: usize| { + let start = trace_mem.stride * element_index; + let end = start + trace_mem.stride; + trace_writer.set_memory_element( + self.trace_memories[&memory].id, + element_index, + &self.state.memories[memory].data[start..end], + ) + }; + if ONLY_IF_CHANGED { + for &(memory, element_index) in &self.state.memory_write_log { + set_memory_element(memory, &self.trace_memories[&memory], element_index)?; + } + } else { + for (&memory, trace_mem) in &self.trace_memories { + for element_index in 0..trace_mem.array_type.len() { + set_memory_element(memory, trace_mem, element_index)?; + } + } + } + for ( + id, + &SimTrace { + kind, + ref state, + ref last_state, + }, + ) in self.traces.0.iter().enumerate() + { + if ONLY_IF_CHANGED && state == last_state { + continue; + } + let id = TraceScalarId(id); + match kind { + SimTraceKind::BigUInt { .. } | SimTraceKind::SmallUInt { .. } => { + trace_writer.set_signal_uint(id, state.unwrap_bits_ref())?; + } + SimTraceKind::BigSInt { .. 
} | SimTraceKind::SmallSInt { .. } => { + trace_writer.set_signal_sint(id, state.unwrap_bits_ref())?; + } + SimTraceKind::BigBool { .. } | SimTraceKind::SmallBool { .. } => { + trace_writer.set_signal_bool(id, state.unwrap_bits_ref()[0])?; + } + SimTraceKind::BigAsyncReset { .. } | SimTraceKind::SmallAsyncReset { .. } => { + trace_writer.set_signal_async_reset(id, state.unwrap_bits_ref()[0])?; + } + SimTraceKind::BigSyncReset { .. } | SimTraceKind::SmallSyncReset { .. } => { + trace_writer.set_signal_sync_reset(id, state.unwrap_bits_ref()[0])?; + } + SimTraceKind::BigClock { .. } | SimTraceKind::SmallClock { .. } => { + trace_writer.set_signal_clock(id, state.unwrap_bits_ref()[0])?; + } + SimTraceKind::EnumDiscriminant { ty, .. } => { + let state = state.unwrap_bits_ref(); + let mut variant_index = [0; mem::size_of::()]; + variant_index.view_bits_mut::()[0..state.len()] + .clone_from_bitslice(state); + trace_writer.set_signal_enum_discriminant( + id, + usize::from_le_bytes(variant_index), + ty, + )?; + } + SimTraceKind::SimOnly { .. } => { + trace_writer.set_signal_sim_only_value(id, state.unwrap_sim_only_ref())? + } + } + } + Ok(trace_writer) + } + fn init_trace_writer( + &mut self, + trace_writer: DynTraceWriter, + ) -> std::io::Result { + let mut trace_writer = self.write_traces::(trace_writer)?; + trace_writer.finish_init()?; + Ok(trace_writer) + } + fn update_trace_writer( + &mut self, + trace_writer: DynTraceWriter, + ) -> std::io::Result { + self.write_traces::(trace_writer) + } + #[inline(never)] + fn read_traces(&mut self) { + for &mut SimTrace { + kind, + ref mut state, + ref mut last_state, + } in &mut self.traces.0 + { + if !IS_INITIAL_STEP { + mem::swap(state, last_state); + } + match kind { + SimTraceKind::BigUInt { index, ty: _ } | SimTraceKind::BigSInt { index, ty: _ } => { + let state = state.unwrap_bits_mut(); + let bigint = &self.state.big_slots[index]; + let mut bytes = bigint.to_signed_bytes_le(); + bytes.resize( + state.len().div_ceil(8), + if bigint.is_negative() { 0xFF } else { 0 }, + ); + let bitslice = BitSlice::::from_slice(&bytes); + let bitslice = &bitslice[..state.len()]; + state.clone_from_bitslice(bitslice); + } + SimTraceKind::BigBool { index } + | SimTraceKind::BigAsyncReset { index } + | SimTraceKind::BigSyncReset { index } + | SimTraceKind::BigClock { index } => { + state + .unwrap_bits_mut() + .set(0, !self.state.big_slots[index].is_zero()); + } + SimTraceKind::SmallUInt { index, ty: _ } + | SimTraceKind::SmallSInt { index, ty: _ } + | SimTraceKind::EnumDiscriminant { index, ty: _ } => { + let state = state.unwrap_bits_mut(); + let bytes = self.state.small_slots[index].to_le_bytes(); + let bitslice = BitSlice::::from_slice(&bytes); + let bitslice = &bitslice[..state.len()]; + state.clone_from_bitslice(bitslice); + } + SimTraceKind::SmallBool { index } + | SimTraceKind::SmallAsyncReset { index } + | SimTraceKind::SmallSyncReset { index } + | SimTraceKind::SmallClock { index } => { + state + .unwrap_bits_mut() + .set(0, self.state.small_slots[index] != 0); + } + SimTraceKind::SimOnly { index, ty: _ } => { + state + .unwrap_sim_only_mut() + .clone_from(&self.state.sim_only_slots[index]); + } + } + if IS_INITIAL_STEP { + last_state.clone_from(state); + } + } + } + #[track_caller] + fn advance_time(this_ref: &Rc>, duration: SimDuration) { + let run_target = this_ref.borrow().instant + duration; + Self::run_until(this_ref, run_target); + } + /// clears `targets` + #[must_use] + fn yield_wait<'a>( + this: Rc>, + module_index: usize, + targets: &'a mut 
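`write_traces` and `read_traces` appear to be parameterized on `const` booleans (`ONLY_IF_CHANGED`, `IS_INITIAL_STEP`), so each call site gets a specialized body with the unused branch compiled out. A minimal sketch of that const-generic flag technique, with hypothetical names:

// one function body, two compiled variants selected at the call site
fn dump<const ONLY_IF_CHANGED: bool>(values: &[u32], last: &[u32]) -> Vec<usize> {
    let mut emitted = Vec::new();
    for (i, (v, l)) in values.iter().zip(last).enumerate() {
        // in the full-dump variant this check is compiled away entirely
        if ONLY_IF_CHANGED && v == l {
            continue;
        }
        emitted.push(i);
    }
    emitted
}

fn main() {
    let last = [1, 2, 3];
    let now = [1, 5, 3];
    assert_eq!(dump::<true>(&now, &last), vec![1]);
    assert_eq!(dump::<false>(&now, &last), vec![0, 1, 2]);
}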
EarliestWaitTargets, + ) -> impl Future + 'a { + struct MyGenerator<'a> { + sim: Rc>, + yielded_at_all: bool, + module_index: usize, + targets: &'a mut EarliestWaitTargets, + } + impl Future for MyGenerator<'_> { + type Output = (); + + fn poll( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll { + let this = &mut *self; + let mut sim = this.sim.borrow_mut(); + let sim = &mut *sim; + assert!( + cx.waker().will_wake(&sim.generator_waker), + "can't use ExternModuleSimulationState's methods outside of ExternModuleSimulation" + ); + this.targets.convert_earlier_instants_to_settle(sim.instant); + if this.targets.is_empty() { + this.targets.settle = true; + } + if this.targets.settle { + if this.yielded_at_all { + this.targets.clear(); + return Poll::Ready(()); + } + } + sim.extern_modules[this.module_index] + .wait_targets + .extend(this.targets.iter()); + this.targets.clear(); + this.yielded_at_all = true; + Poll::Pending + } + } + MyGenerator { + sim: this, + yielded_at_all: false, + module_index, + targets, + } + } + async fn yield_advance_time_or_settle( + this: Rc>, + module_index: usize, + duration: Option, + ) { + let mut targets = duration.map_or(EarliestWaitTargets::settle(), |duration| { + EarliestWaitTargets::instant(this.borrow().instant + duration) + }); + Self::yield_wait(this, module_index, &mut targets).await; + } + fn is_extern_module_ready_to_run(&mut self, module_index: usize) -> Option { + let module = &self.extern_modules[module_index]; + let mut retval = None; + for wait_target in module.wait_targets.iter() { + retval = match (wait_target, retval) { + (WaitTarget::Settle, _) => Some(self.instant), + (WaitTarget::Instant(instant), _) if instant <= self.instant => Some(self.instant), + (WaitTarget::Instant(instant), None) => Some(instant), + (WaitTarget::Instant(instant), Some(retval)) => Some(instant.min(retval)), + (WaitTarget::Change { key, value }, retval) => { + if Self::value_changed(&mut self.state, key, SimValue::opaque(value)) { + Some(self.instant) + } else { + retval + } + } + }; + if retval == Some(self.instant) { + break; + } + } + retval + } + fn get_ready_to_run_set(&mut self, ready_to_run_set: &mut ReadyToRunSet) -> Option { + ready_to_run_set.clear(); + let mut retval = None; + if self.state_ready_to_run { + ready_to_run_set.state_ready_to_run = true; + retval = Some(self.instant); + } + for module_index in 0..self.extern_modules.len() { + let Some(instant) = self.is_extern_module_ready_to_run(module_index) else { + continue; + }; + if let Some(retval) = &mut retval { + match instant.cmp(retval) { + std::cmp::Ordering::Less => ready_to_run_set.clear(), + std::cmp::Ordering::Equal => {} + std::cmp::Ordering::Greater => continue, + } + } else { + retval = Some(instant); + } + ready_to_run_set + .extern_modules_ready_to_run + .push(module_index); + } + retval + } + fn set_instant_no_sim(&mut self, instant: SimInstant) { + self.instant = instant; + self.for_each_trace_writer_storing_error(|this, mut trace_writer_state| { + match &mut trace_writer_state { + TraceWriterState::Decls(_) | TraceWriterState::Init(_) => unreachable!(), + TraceWriterState::Running(trace_writer) => { + trace_writer.change_time_to(this.instant)?; + } + TraceWriterState::Errored(_) => {} + } + Ok(trace_writer_state) + }); + } + #[must_use] + #[track_caller] + fn run_state_settle_cycle(&mut self) -> bool { + self.state_ready_to_run = false; + self.state.setup_call(0); + if self.breakpoints.is_some() { + loop { + match self + .state + 
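// NOTE (illustrative sketch, not part of the original patch): `run_state_settle_cycle`
// executes the compiled instruction list once from entry point 0 (`setup_call(0)`). When
// breakpoints are installed it loops on `RunResult::Break`, printing the instruction at the
// current `pc` and, for `BreakAction::DumpStateAndContinue`, the whole simulation state.
// After the run returns, the `clocks_triggered` small slots report whether any clock edge
// fired during this pass; if so the function returns `true` and `state_ready_to_run` stays
// set, so `run_until` schedules another settle pass before the extern modules get to
// observe the newly clocked register values.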
.run(self.breakpoints.as_mut().expect("just checked")) + { + RunResult::Break(break_action) => { + println!( + "hit breakpoint at:\n{:?}", + self.state.debug_insn_at(self.state.pc), + ); + match break_action { + BreakAction::DumpStateAndContinue => { + println!("{self:#?}"); + } + BreakAction::Continue => {} + } + } + RunResult::Return(()) => break, + } + } + } else { + let RunResult::Return(()) = self.state.run(()); + } + if self + .clocks_triggered + .iter() + .any(|i| self.state.small_slots[*i] != 0) + { + self.state_ready_to_run = true; + true + } else { + false + } + } + #[track_caller] + fn run_extern_modules_cycle( + this_ref: &Rc>, + generator_waker: &std::task::Waker, + extern_modules_ready_to_run: &[usize], + ) { + let mut this = this_ref.borrow_mut(); + for module_index in extern_modules_ready_to_run.iter().copied() { + let extern_module = &mut this.extern_modules[module_index]; + extern_module.wait_targets.clear(); + let mut generator = if !extern_module.module_state.did_initial_settle { + let sim = extern_module.sim; + drop(this); + Box::into_pin(sim.run(ExternModuleSimulationState { + sim_impl: this_ref.clone(), + module_index, + wait_for_changes_wait_targets: EarliestWaitTargets::default(), + })) + } else if let Some(generator) = extern_module.running_generator.take() { + drop(this); + generator + } else { + continue; + }; + let generator = match generator + .as_mut() + .poll(&mut std::task::Context::from_waker(generator_waker)) + { + Poll::Ready(()) => None, + Poll::Pending => Some(generator), + }; + this = this_ref.borrow_mut(); + this.extern_modules[module_index] + .module_state + .did_initial_settle = true; + if !this.extern_modules[module_index] + .module_state + .uninitialized_ios + .is_empty() + { + panic!( + "extern module didn't initialize all outputs before \ + waiting, settling, or reading any inputs: {}", + this.extern_modules[module_index].sim.source_location + ); + } + this.extern_modules[module_index].running_generator = generator; + } + } + /// clears `targets` + #[track_caller] + fn run_until(this_ref: &Rc>, run_target: SimInstant) { + let mut this = this_ref.borrow_mut(); + let mut ready_to_run_set = ReadyToRunSet::default(); + let generator_waker = this.generator_waker.clone(); + assert!( + this.main_module.uninitialized_ios.is_empty(), + "didn't initialize all inputs", + ); + let run_target = run_target.max(this.instant); + let mut settle_cycle = 0; + let mut run_extern_modules = true; + loop { + assert!(settle_cycle < 100000, "settle(): took too many steps"); + settle_cycle += 1; + let next_wait_target = match this.get_ready_to_run_set(&mut ready_to_run_set) { + Some(next_wait_target) if next_wait_target <= run_target => next_wait_target, + _ => break, + }; + if next_wait_target > this.instant { + settle_cycle = 0; + this.set_instant_no_sim(next_wait_target); + } + if run_extern_modules { + drop(this); + Self::run_extern_modules_cycle( + this_ref, + &generator_waker, + &ready_to_run_set.extern_modules_ready_to_run, + ); + this = this_ref.borrow_mut(); + } + if ready_to_run_set.state_ready_to_run { + if this.run_state_settle_cycle() { + // wait for clocks to settle before running extern modules again + run_extern_modules = false; + } else { + run_extern_modules = true; + } + } + if this.main_module.did_initial_settle { + this.read_traces::(); + } else { + this.read_traces::(); + } + this.state.memory_write_log.sort_unstable(); + this.state.memory_write_log.dedup(); + this.main_module.did_initial_settle = true; + 
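// NOTE (illustrative sketch, not part of the original patch): the enclosing `run_until`
// loop is the simulator's event loop; each iteration roughly does:
//
//     loop {
//         // 1. find the earliest instant anything wants to run: the main state, or an
//         //    extern module whose wait target is due or whose watched value changed
//         // 2. stop once nothing is due at or before `run_target`
//         // 3. advance `instant` (and notify trace writers) if that instant is in the future
//         // 4. poll the ready extern-module generators
//         // 5. run one state settle cycle; keep extern modules paused while clocks are
//         //    still propagating
//         // 6. snapshot trace values (`read_traces`) and flush them to the trace writers
//     }
//
// The `settle_cycle < 100000` assert is the guard against combinational loops / designs
// that never settle.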
this.for_each_trace_writer_storing_error(|this, trace_writer_state| { + Ok(match trace_writer_state { + TraceWriterState::Decls(trace_writer_decls) => TraceWriterState::Running( + this.init_trace_writer(trace_writer_decls.write_decls( + this.trace_decls, + this.traces.0.len(), + this.trace_memories.len(), + )?)?, + ), + TraceWriterState::Init(trace_writer) => { + TraceWriterState::Running(this.init_trace_writer(trace_writer)?) + } + TraceWriterState::Running(trace_writer) => { + TraceWriterState::Running(this.update_trace_writer(trace_writer)?) + } + TraceWriterState::Errored(e) => TraceWriterState::Errored(e), + }) + }); + this.state.memory_write_log.clear(); + } + if run_target > this.instant { + this.set_instant_no_sim(run_target); + } + } + #[track_caller] + fn settle(this_ref: &Rc>) { + let run_target = this_ref.borrow().instant; + Self::run_until(this_ref, run_target); + } + fn get_module(&self, which_module: WhichModule) -> &SimulationModuleState { + match which_module { + WhichModule::Main => &self.main_module, + WhichModule::Extern { module_index } => &self.extern_modules[module_index].module_state, + } + } + fn get_module_mut(&mut self, which_module: WhichModule) -> &mut SimulationModuleState { + match which_module { + WhichModule::Main => &mut self.main_module, + WhichModule::Extern { module_index } => { + &mut self.extern_modules[module_index].module_state + } + } + } + #[track_caller] + fn read_bit( + &mut self, + io: Expr, + which_module: WhichModule, + ) -> MaybeNeedsSettle { + self.get_module(which_module) + .read_helper(Expr::canonical(io), which_module) + .map(|compiled_value| ReadBitFn { compiled_value }) + .apply_no_settle(&mut self.state) + } + #[track_caller] + fn write_bit(&mut self, io: Expr, value: bool, which_module: WhichModule) { + let compiled_value = self + .get_module_mut(which_module) + .write_helper(io, which_module); + self.state_ready_to_run = true; + match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => { + self.state.small_slots[compiled_value.range.small_slots.start] = value as _; + } + Some(TypeLenSingle::BigSlot) => { + self.state.big_slots[compiled_value.range.big_slots.start] = value.into() + } + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + } + } + #[track_caller] + fn read_bool_or_int( + &mut self, + io: Expr, + which_module: WhichModule, + ) -> MaybeNeedsSettle, I::Value> { + self.get_module(which_module) + .read_helper(Expr::canonical(io), which_module) + .map(|compiled_value| ReadBoolOrIntFn { compiled_value, io }) + .apply_no_settle(&mut self.state) + } + #[track_caller] + fn write_bool_or_int( + &mut self, + io: Expr, + value: I::Value, + which_module: WhichModule, + ) { + let compiled_value = self + .get_module_mut(which_module) + .write_helper(Expr::canonical(io), which_module); + self.state_ready_to_run = true; + let value: BigInt = value.into(); + match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => { + let mut small_value = value.iter_u64_digits().next().unwrap_or(0); + if value.is_negative() { + small_value = small_value.wrapping_neg(); + } + self.state.small_slots[compiled_value.range.small_slots.start] = small_value; + } + Some(TypeLenSingle::BigSlot) => { + self.state.big_slots[compiled_value.range.big_slots.start] = value + } + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + } + } + #[track_caller] + fn read_write_sim_value_helper( + state: &mut interpreter::State, + compiled_value: CompiledValue, + start_index: OpaqueSimValueSize, + opaque: &mut 
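// NOTE (illustrative sketch, not part of the original patch): `read_write_sim_value_helper`
// (whose parameter list this comment interrupts) is the one traversal shared by `read`,
// `write`, and `value_changed`. It walks a `CompiledTypeLayoutBody` (Scalar / Array /
// Bundle) in lockstep with an opaque sim-value buffer and, at every scalar leaf, calls one
// of the three supplied closures with either the leaf's bit range (small-slot or big-slot
// storage) or its sim-only slot index. The callers differ only in what those closures do:
// copy slot -> buffer (read), buffer -> slot (write), or compare and set a flag
// (value_changed).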
Opaque, + read_write_big_scalar: impl Fn(bool, std::ops::Range, &mut Opaque, &mut BigInt) + Copy, + read_write_small_scalar: impl Fn(bool, std::ops::Range, &mut Opaque, &mut SmallUInt) + + Copy, + read_write_sim_only_scalar: impl Fn(usize, &mut Opaque, &mut DynSimOnlyValue) + Copy, + ) { + match compiled_value.layout.body { + CompiledTypeLayoutBody::Scalar => { + let signed = match compiled_value.layout.ty { + CanonicalType::UInt(_) => false, + CanonicalType::SInt(_) => true, + CanonicalType::Bool(_) => false, + CanonicalType::Array(_) => unreachable!(), + CanonicalType::Enum(_) => false, + CanonicalType::Bundle(_) => unreachable!(), + CanonicalType::AsyncReset(_) => false, + CanonicalType::SyncReset(_) => false, + CanonicalType::Reset(_) => false, + CanonicalType::Clock(_) => false, + CanonicalType::PhantomConst(_) => unreachable!(), + CanonicalType::DynSimOnly(_) => false, + }; + let indexes = OpaqueSimValueSizeRange::from( + start_index..start_index + compiled_value.layout.ty.size(), + ); + match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => read_write_small_scalar( + signed, + indexes.bit_width, + opaque, + &mut state.small_slots[compiled_value.range.small_slots.start], + ), + Some(TypeLenSingle::BigSlot) => read_write_big_scalar( + signed, + indexes.bit_width, + opaque, + &mut state.big_slots[compiled_value.range.big_slots.start], + ), + Some(TypeLenSingle::SimOnlySlot) => read_write_sim_only_scalar( + indexes.sim_only_values_len.start, + opaque, + &mut state.sim_only_slots[compiled_value.range.sim_only_slots.start], + ), + None => unreachable!(), + } + } + CompiledTypeLayoutBody::Array { element } => { + let ty = ::from_canonical(compiled_value.layout.ty); + let element_size = ty.element().size(); + for element_index in 0..ty.len() { + Self::read_write_sim_value_helper( + state, + CompiledValue { + layout: *element, + range: compiled_value + .range + .index_array(element.layout.len(), element_index), + write: None, + }, + start_index + element_index * element_size, + opaque, + read_write_big_scalar, + read_write_small_scalar, + read_write_sim_only_scalar, + ); + } + } + CompiledTypeLayoutBody::Bundle { fields } => { + let ty = Bundle::from_canonical(compiled_value.layout.ty); + for ( + (_field, offset), + CompiledBundleField { + offset: layout_offset, + ty: field_layout, + }, + ) in ty.fields().iter().zip(ty.field_offsets()).zip(fields) + { + Self::read_write_sim_value_helper( + state, + CompiledValue { + layout: field_layout, + range: compiled_value.range.slice(TypeIndexRange::new( + layout_offset, + field_layout.layout.len(), + )), + write: None, + }, + start_index + offset, + opaque, + read_write_big_scalar, + read_write_small_scalar, + read_write_sim_only_scalar, + ); + } + } + } + } + #[track_caller] + fn read_no_settle_helper( + state: &mut interpreter::State, + io: Expr, + compiled_value: CompiledValue, + ) -> SimValue { + #[track_caller] + fn read_write_sim_only_scalar( + index: usize, + writer: &mut OpaqueSimValueWriter<'_>, + value: &mut DynSimOnlyValue, + ) { + assert_eq!(writer.sim_only_values_range().start, index); + writer.fill_prefix_with( + OpaqueSimValueSize { + bit_width: 0, + sim_only_values_len: 1, + }, + |writer| { + writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_parts( + BitSlice::empty(), + std::array::from_ref::(value), + )) + }, + ); + } + let size = Expr::ty(io).size(); + let mut opaque = OpaqueSimValue::with_capacity(size); + opaque.rewrite_with(size, |mut writer| { + SimulationImpl::read_write_sim_value_helper( + 
state, + compiled_value, + OpaqueSimValueSize::empty(), + &mut writer, + |_signed, bit_range, writer, value| { + writer.fill_prefix_with( + OpaqueSimValueSize::from_bit_width(bit_range.len()), + |writer| { + writer.fill_with_bits_with(|bits| { + ::copy_bits_from_bigint_wrapping(value, bits); + }) + }, + ); + }, + |_signed, bit_range, writer, value| { + let bytes = value.to_le_bytes(); + let bitslice = BitSlice::::from_slice(&bytes); + let bitslice = &bitslice[..bit_range.len()]; + writer.fill_prefix_with( + OpaqueSimValueSize::from_bit_width(bit_range.len()), + |writer| { + writer.fill_with_bits_with(|bits| bits.clone_from_bitslice(bitslice)) + }, + ); + }, + read_write_sim_only_scalar, + ); + writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty()) + }); + SimValue::from_opaque(Expr::ty(io), opaque) + } + /// doesn't modify `opaque` + fn value_changed( + state: &mut interpreter::State, + compiled_value: CompiledValue, + mut opaque: &OpaqueSimValue, + ) -> bool { + assert_eq!(opaque.size(), compiled_value.layout.ty.size()); + let any_change = std::cell::Cell::new(false); + SimulationImpl::read_write_sim_value_helper( + state, + compiled_value, + OpaqueSimValueSize::empty(), + &mut opaque, + |_signed, bit_range, opaque, value| { + if !::bits_equal_bigint_wrapping(value, &opaque.bits().bits()[bit_range]) { + any_change.set(true); + } + }, + |_signed, bit_range, opaque, value| { + let bytes = value.to_le_bytes(); + let bitslice = BitSlice::::from_slice(&bytes); + let bitslice = &bitslice[..bit_range.len()]; + if opaque.bits().bits()[bit_range] != *bitslice { + any_change.set(true); + } + }, + |index, opaque, value| { + if opaque.sim_only_values()[index] != *value { + any_change.set(true); + } + }, + ); + any_change.get() + } + #[track_caller] + fn read( + &mut self, + io: Expr, + which_module: WhichModule, + ) -> ( + CompiledValue, + MaybeNeedsSettle>, + ) { + let compiled_value = self.get_module(which_module).read_helper(io, which_module); + let value = compiled_value + .map(|compiled_value| ReadFn { compiled_value, io }) + .apply_no_settle(&mut self.state); + let (MaybeNeedsSettle::NeedsSettle(compiled_value) + | MaybeNeedsSettle::NoSettleNeeded(compiled_value)) = compiled_value; + (compiled_value, value) + } + #[track_caller] + fn write( + &mut self, + io: Expr, + value: &SimValue, + which_module: WhichModule, + ) { + let compiled_value = self + .get_module_mut(which_module) + .write_helper(io, which_module); + self.state_ready_to_run = true; + assert_eq!(Expr::ty(io), SimValue::ty(value)); + Self::read_write_sim_value_helper( + &mut self.state, + compiled_value, + OpaqueSimValueSize::empty(), + &mut SimValue::opaque(value), + |signed, bit_range, opaque, value| { + if signed { + *value = SInt::bits_to_bigint(&opaque.bits().bits()[bit_range]); + } else { + *value = UInt::bits_to_bigint(&opaque.bits().bits()[bit_range]); + } + }, + |signed, bit_range, opaque, value| { + let mut small_value = [0; mem::size_of::()]; + if signed + && opaque.bits().bits()[bit_range.clone()] + .last() + .as_deref() + .copied() + == Some(true) + { + small_value.fill(u8::MAX); + } + small_value.view_bits_mut::()[0..bit_range.len()] + .clone_from_bitslice(&opaque.bits().bits()[bit_range]); + *value = SmallUInt::from_le_bytes(small_value); + }, + |index, opaque, value: &mut DynSimOnlyValue| { + value.clone_from(&opaque.sim_only_values()[index]); + }, + ); + } + #[track_caller] + fn settle_if_needed(this_ref: &Rc>, v: MaybeNeedsSettle) -> O + where + for<'a> F: MaybeNeedsSettleFn<&'a mut interpreter::State, Output 
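// NOTE (illustrative sketch, not part of the original patch): `settle_if_needed` (whose
// `where` clause this comment interrupts) resolves a deferred read. `read_bit`,
// `read_bool_or_int`, and `read` return `MaybeNeedsSettle::NoSettleNeeded(value)` when the
// signal's value is already valid, and `NeedsSettle(read_fn)` when the design still has
// pending updates; in the latter case the simulation is settled first and the stored read
// function is then applied to the interpreter state. The async twin
// `yield_settle_if_needed` does the same for extern modules, awaiting a settle instead of
// running it synchronously.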
= O>, + { + match v { + MaybeNeedsSettle::NeedsSettle(v) => { + Self::settle(this_ref); + v.call(&mut this_ref.borrow_mut().state) + } + MaybeNeedsSettle::NoSettleNeeded(v) => v, + } + } + async fn yield_settle_if_needed( + this_ref: &Rc>, + module_index: usize, + v: MaybeNeedsSettle, + ) -> O + where + for<'a> F: MaybeNeedsSettleFn<&'a mut interpreter::State, Output = O>, + { + match v { + MaybeNeedsSettle::NeedsSettle(v) => { + Self::yield_advance_time_or_settle(this_ref.clone(), module_index, None).await; + v.call(&mut this_ref.borrow_mut().state) + } + MaybeNeedsSettle::NoSettleNeeded(v) => v, + } + } + fn close_all_trace_writers(&mut self) -> std::io::Result<()> { + let trace_writers = mem::take(&mut self.trace_writers); + let mut retval = Ok(()); + let close_trace_writer = + |trace_writer: TraceWriterState| match trace_writer { + TraceWriterState::Decls(v) => v + .write_decls( + self.trace_decls, + self.traces.0.len(), + self.trace_memories.len(), + )? + .close(), + TraceWriterState::Init(v) => v.close(), + TraceWriterState::Running(v) => v.close(), + TraceWriterState::Errored(Some(e)) => Err(e), + TraceWriterState::Errored(None) => Ok(()), + }; + for trace_writer in trace_writers { + retval = retval.and(close_trace_writer(trace_writer)); + } + retval + } + fn for_each_trace_writer_storing_error( + &mut self, + mut f: impl FnMut( + &mut Self, + TraceWriterState, + ) -> std::io::Result>, + ) { + let mut trace_writers = mem::take(&mut self.trace_writers); + for trace_writer in &mut trace_writers { + *trace_writer = match f( + self, + mem::replace(trace_writer, TraceWriterState::Errored(None)), + ) { + Ok(v) => v, + Err(e) => TraceWriterState::Errored(Some(e)), + }; + } + self.trace_writers = trace_writers; + } + fn for_each_trace_writer_getting_error( + &mut self, + mut f: impl FnMut( + &mut Self, + TraceWriterState, + ) -> std::io::Result>, + ) -> std::io::Result<()> { + let mut retval = Ok(()); + let mut trace_writers = mem::take(&mut self.trace_writers); + for trace_writer in &mut trace_writers { + *trace_writer = match f( + self, + mem::replace(trace_writer, TraceWriterState::Errored(None)), + ) { + Ok(v) => v, + Err(e) => { + if retval.is_ok() { + retval = Err(e); + TraceWriterState::Errored(None) + } else { + TraceWriterState::Errored(Some(e)) + } + } + }; + } + self.trace_writers = trace_writers; + retval + } + fn close(this: Rc>) -> std::io::Result<()> { + if this.borrow().main_module.did_initial_settle { + Self::settle(&this); + } + this.borrow_mut().close_all_trace_writers() + } + fn flush_traces(this_ref: &Rc>) -> std::io::Result<()> { + if this_ref.borrow().main_module.did_initial_settle { + Self::settle(this_ref); + } + this_ref.borrow_mut().for_each_trace_writer_getting_error( + |this, trace_writer: TraceWriterState| match trace_writer { + TraceWriterState::Decls(v) => { + let mut v = v.write_decls( + this.trace_decls, + this.traces.0.len(), + this.trace_memories.len(), + )?; + v.flush()?; + Ok(TraceWriterState::Init(v)) + } + TraceWriterState::Init(mut v) => { + v.flush()?; + Ok(TraceWriterState::Init(v)) + } + TraceWriterState::Running(mut v) => { + v.flush()?; + Ok(TraceWriterState::Running(v)) + } + TraceWriterState::Errored(Some(e)) => Err(e), + TraceWriterState::Errored(None) => Ok(TraceWriterState::Errored(None)), + }, + ) + } +} + +impl Drop for SimulationImpl { + fn drop(&mut self) { + self.close_all_trace_writers() + .expect("error closing trace writers"); + } +} + +pub struct Simulation { + sim_impl: Rc>, + io: Expr, +} + +struct SortedSetDebug<'a, T, V>(&'a 
HashMap); + +impl fmt::Debug for SortedSetDebug<'_, T, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut entries = Vec::from_iter(self.0.iter().map(|(v, _)| { + if f.alternate() { + format!("{v:#?}") + } else { + format!("{v:?}") + } + })); + entries.sort(); + f.debug_set() + .entries(entries.iter().map(DebugAsDisplay)) + .finish() + } +} + +struct SortedMapDebug<'a, K: 'static + Send + Sync, V>(&'a BTreeMap, V>); + +impl fmt::Debug for SortedMapDebug<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut entries = Vec::from_iter(self.0.iter().map(|(k, v)| { + if f.alternate() { + (format!("{k:#?}"), format!("{v:#?}")) + } else { + (format!("{k:?}"), format!("{v:?}")) + } + })); + entries.sort(); + f.debug_map() + .entries( + entries + .iter() + .map(|(k, v)| (DebugAsDisplay(k), DebugAsDisplay(v))), + ) + .finish() + } +} + +struct SliceAsMapDebug<'a, T>(&'a [Option]); + +impl fmt::Debug for SliceAsMapDebug<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map() + .entries( + self.0 + .iter() + .enumerate() + .filter_map(|(k, v)| Some((k, v.as_ref()?))), + ) + .finish() + } +} + +impl fmt::Debug for Simulation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { sim_impl, io } = self; + sim_impl.borrow().debug_fmt(Some(io), f) + } +} + +macro_rules! impl_simulation_methods { + ( + async_await = ($($async:tt, $await:tt)?), + track_caller = ($(#[$track_caller:tt])?), + which_module = |$self:ident| $which_module:expr, + ) => { + $(#[$track_caller])? + pub $($async)? fn read_bool_or_int(&mut $self, io: Expr) -> I::Value { + let retval = $self + .sim_impl + .borrow_mut() + .read_bool_or_int(io, $which_module); + $self.settle_if_needed(retval)$(.$await)? + } + $(#[$track_caller])? + pub $($async)? fn write_bool_or_int( + &mut $self, + io: Expr, + value: impl ToExpr, + ) { + let value = value.to_expr(); + assert_eq!(Expr::ty(io), Expr::ty(value), "type mismatch"); + let value = value + .to_literal_bits() + .expect("the value that is being written to an input must be a literal"); + $self.sim_impl.borrow_mut().write_bool_or_int( + io, + I::bits_to_value(Cow::Borrowed(&value)), + $which_module, + ); + } + $(#[$track_caller])? + pub $($async)? fn write_clock(&mut $self, io: Expr, value: bool) { + $self.sim_impl + .borrow_mut() + .write_bit(Expr::canonical(io), value, $which_module); + } + $(#[$track_caller])? + pub $($async)? fn read_clock(&mut $self, io: Expr) -> bool { + let retval = $self + .sim_impl + .borrow_mut() + .read_bit(Expr::canonical(io), $which_module); + $self.settle_if_needed(retval)$(.$await)? + } + $(#[$track_caller])? + pub $($async)? fn write_bool(&mut $self, io: Expr, value: bool) { + $self.sim_impl + .borrow_mut() + .write_bit(Expr::canonical(io), value, $which_module); + } + $(#[$track_caller])? + pub $($async)? fn read_bool(&mut $self, io: Expr) -> bool { + let retval = $self + .sim_impl + .borrow_mut() + .read_bit(Expr::canonical(io), $which_module); + $self.settle_if_needed(retval)$(.$await)? + } + $(#[$track_caller])? + pub $($async)? fn write_reset(&mut $self, io: Expr, value: bool) { + $self.sim_impl + .borrow_mut() + .write_bit(Expr::canonical(io), value, $which_module); + } + $(#[$track_caller])? + pub $($async)? fn read_reset(&mut $self, io: Expr) -> bool { + let retval = $self + .sim_impl + .borrow_mut() + .read_bit(Expr::canonical(io), $which_module); + $self.settle_if_needed(retval)$(.$await)? + } + $(#[$track_caller])? + pub $($async)? 
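// NOTE (illustrative sketch, not part of the original patch): `impl_simulation_methods!` is
// instantiated twice: with `async_await = ()` for the blocking, `#[track_caller]` methods
// on `Simulation`, and with `async_await = (async, await)` for the awaitable methods on
// `ExternModuleSimulationState`, so both share one definition of the read/write helpers.
// A typical blocking testbench (the module constructor and I/O field names below are
// hypothetical, for illustration only) might look like:
//
//     let mut sim = Simulation::new(my_module());
//     sim.write_clock(sim.io().clk, false);
//     sim.settle();                                  // initial settle
//     for _ in 0..4 {
//         sim.write_clock(sim.io().clk, true);       // rising edge
//         sim.advance_time(duration);                // some SimDuration
//         sim.write_clock(sim.io().clk, false);
//         sim.advance_time(duration);
//     }
//     let out = sim.read_bool_or_int(sim.io().out);  // settles first if needed
//     sim.close()?;                                  // flush and close any trace writers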
fn read(&mut $self, io: Expr) -> SimValue { + let retval = $self + .sim_impl + .borrow_mut() + .read(Expr::canonical(io), $which_module).1; + SimValue::from_canonical($self.settle_if_needed(retval)$(.$await)?) + } + $(#[$track_caller])? + pub $($async)? fn write>(&mut $self, io: Expr, value: V) { + $self.sim_impl.borrow_mut().write( + Expr::canonical(io), + &SimValue::into_canonical(value.into_sim_value_with_type(Expr::ty(io))), + $which_module, + ); + } + }; +} + +impl Simulation { + pub fn new(module: Interned>) -> Self { + Self::from_compiled(Compiled::new(module)) + } + pub fn add_trace_writer(&mut self, writer: W) { + self.sim_impl + .borrow_mut() + .trace_writers + .push(TraceWriterState::Decls(DynTraceWriterDecls::new(writer))); + } + pub fn flush_traces(&mut self) -> std::io::Result<()> { + SimulationImpl::flush_traces(&self.sim_impl) + } + pub fn close(self) -> std::io::Result<()> { + SimulationImpl::close(self.sim_impl) + } + pub fn canonical(self) -> Simulation { + let Self { sim_impl, io } = self; + Simulation { + sim_impl, + io: Expr::as_bundle(io), + } + } + pub fn from_canonical(canonical: Simulation) -> Self { + let Simulation { sim_impl, io } = canonical; + Self { + sim_impl, + io: Expr::from_bundle(io), + } + } + pub fn io(&self) -> Expr { + self.io.to_expr() + } + pub fn from_compiled(compiled: Compiled) -> Self { + let sim_impl = SimulationImpl::new(compiled.canonical()); + Self { + io: Expr::from_bundle(sim_impl.io), + sim_impl: Rc::new(RefCell::new(sim_impl)), + } + } + #[track_caller] + pub fn settle(&mut self) { + SimulationImpl::settle(&self.sim_impl); + } + #[track_caller] + pub fn advance_time(&mut self, duration: SimDuration) { + SimulationImpl::advance_time(&self.sim_impl, duration); + } + #[track_caller] + fn settle_if_needed(&mut self, v: MaybeNeedsSettle) -> O + where + for<'a> F: MaybeNeedsSettleFn<&'a mut interpreter::State, Output = O>, + { + SimulationImpl::settle_if_needed(&self.sim_impl, v) + } + impl_simulation_methods!( + async_await = (), + track_caller = (#[track_caller]), + which_module = |self| WhichModule::Main, + ); + #[doc(hidden)] + /// This is explicitly unstable and may be changed/removed at any time + pub fn set_breakpoints_unstable(&mut self, pcs: HashSet, trace: bool) { + self.sim_impl.borrow_mut().breakpoints = Some(BreakpointsSet { + last_was_break: false, + set: pcs, + trace, + }); + } +} + +pub struct ExternModuleSimulationState { + sim_impl: Rc>, + module_index: usize, + wait_for_changes_wait_targets: EarliestWaitTargets, +} + +impl fmt::Debug for ExternModuleSimulationState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + sim_impl: _, + module_index, + wait_for_changes_wait_targets: _, + } = self; + f.debug_struct("ExternModuleSimulationState") + .field("sim_impl", &DebugAsDisplay("...")) + .field("module_index", module_index) + .finish_non_exhaustive() + } +} + +impl ExternModuleSimulationState { + pub async fn settle(&mut self) { + SimulationImpl::yield_advance_time_or_settle(self.sim_impl.clone(), self.module_index, None) + .await + } + pub async fn advance_time(&mut self, duration: SimDuration) { + SimulationImpl::yield_advance_time_or_settle( + self.sim_impl.clone(), + self.module_index, + Some(duration), + ) + .await + } + pub async fn wait_for_changes>( + &mut self, + iter: I, + timeout: Option, + ) { + self.wait_for_changes_wait_targets.clear(); + let which_module = WhichModule::Extern { + module_index: self.module_index, + }; + for io in iter { + let io = Expr::canonical(io.to_expr()); + let 
(key, value) = self.sim_impl.borrow_mut().read(io, which_module); + let value = self.settle_if_needed(value).await; + self.wait_for_changes_wait_targets + .insert(WaitTarget::Change { key, value }); + } + if let Some(timeout) = timeout { + self.wait_for_changes_wait_targets.instant = + Some(self.sim_impl.borrow().instant + timeout); + } + SimulationImpl::yield_wait( + self.sim_impl.clone(), + self.module_index, + &mut self.wait_for_changes_wait_targets, + ) + .await; + } + pub async fn wait_for_clock_edge(&mut self, clk: impl ToExpr) { + let clk = clk.to_expr(); + while self.read_clock(clk).await { + self.wait_for_changes([clk], None).await; + } + while !self.read_clock(clk).await { + self.wait_for_changes([clk], None).await; + } + } + async fn settle_if_needed(&mut self, v: MaybeNeedsSettle) -> O + where + for<'a> F: MaybeNeedsSettleFn<&'a mut interpreter::State, Output = O>, + { + SimulationImpl::yield_settle_if_needed(&self.sim_impl, self.module_index, v).await + } + impl_simulation_methods!( + async_await = (async, await), + track_caller = (), + which_module = |self| WhichModule::Extern { module_index: self.module_index }, + ); +} + +pub trait ExternModuleSimGenerator: Clone + Eq + Hash + Any + Send + Sync + fmt::Debug { + fn run<'a>(&'a self, sim: ExternModuleSimulationState) -> impl IntoFuture + 'a; +} + +pub struct SimGeneratorFn { + pub args: Args, + pub f: fn(Args, ExternModuleSimulationState) -> Fut, +} + +impl fmt::Debug for SimGeneratorFn { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { args, f: _ } = self; + f.debug_struct("SimGeneratorFn") + .field("args", args) + .field("f", &DebugAsDisplay("...")) + .finish() + } +} + +impl Hash for SimGeneratorFn { + fn hash(&self, state: &mut H) { + let Self { args, f } = self; + args.hash(state); + f.hash(state); + } +} + +impl Eq for SimGeneratorFn {} + +impl PartialEq for SimGeneratorFn { + fn eq(&self, other: &Self) -> bool { + let Self { args, f } = self; + *args == other.args && ptr::fn_addr_eq(*f, other.f) + } +} + +impl Clone for SimGeneratorFn { + fn clone(&self) -> Self { + Self { + args: self.args.clone(), + f: self.f, + } + } +} + +impl Copy for SimGeneratorFn {} + +impl< + T: fmt::Debug + Clone + Eq + Hash + Send + Sync + 'static, + Fut: IntoFuture + 'static, +> ExternModuleSimGenerator for SimGeneratorFn +{ + fn run<'a>(&'a self, sim: ExternModuleSimulationState) -> impl IntoFuture + 'a { + (self.f)(self.args.clone(), sim) + } +} + +pub(crate) trait DynExternModuleSimGenerator: + Any + Send + Sync + SupportsPtrEqWithTypeId + fmt::Debug +{ + fn dyn_run<'a>(&'a self, sim: ExternModuleSimulationState) + -> Box + 'a>; +} + +impl DynExternModuleSimGenerator for T { + fn dyn_run<'a>( + &'a self, + sim: ExternModuleSimulationState, + ) -> Box + 'a> { + Box::new(self.run(sim).into_future()) + } +} + +impl InternedCompare for dyn DynExternModuleSimGenerator { + type InternedCompareKey = PtrEqWithTypeId; + + fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey { + this.get_ptr_eq_with_type_id() + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct ExternModuleSimulation { + pub(crate) generator: Interned, + /// Map of [`ModuleIO`]s for the containing module to the [`ModuleIO`]s that the generator will use. + /// Used when transforming the containing module's [`ModuleIO`]s to different types, e.g. with + /// [`deduce_resets`][crate::module::transform::deduce_resets::deduce_resets]. + /// + /// only the keys (sim I/O) are [visited][Visit]/[folded][Fold]. 
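// NOTE (illustrative sketch, not part of the original patch): an extern module's behavior
// is supplied as an `ExternModuleSimGenerator`; the simplest way to get one is
// `SimGeneratorFn { args, f }`, whose `f` receives the args plus an
// `ExternModuleSimulationState` and returns a future. The I/O names below are hypothetical,
// and how the I/O expressions reach the generator (here: through `args`, which keeps the
// closure non-capturing so it coerces to the `fn` pointer) is only one possible wiring:
//
//     ExternModuleSimulation::new(SimGeneratorFn {
//         args: my_io,
//         f: |my_io, mut sim| async move {
//             // outputs must be initialized before the first wait/settle/read,
//             // otherwise the simulator panics ("didn't initialize all outputs ...")
//             sim.write_bool(my_io.ready, false).await;
//             loop {
//                 sim.wait_for_clock_edge(my_io.clk).await; // next rising edge
//                 sim.write_bool(my_io.ready, true).await;
//             }
//         },
//     })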
+ pub sim_io_to_generator_map: + Interned>, Interned>>>, + pub source_location: SourceLocation, +} + +impl fmt::Debug for ExternModuleSimulation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ExternModuleSimulation") + .field("generator", &self.generator) + .field( + "sim_io_to_generator_map", + &SortedMapDebug(&self.sim_io_to_generator_map), + ) + .field("source_location", &self.source_location) + .finish() + } +} + +impl ExternModuleSimulation { + pub fn new_with_loc( + source_location: SourceLocation, + generator: G, + ) -> Self { + Self { + generator: Interned::cast_unchecked( + generator.intern(), + |v| -> &dyn DynExternModuleSimGenerator { v }, + ), + sim_io_to_generator_map: Interned::default(), + source_location, + } + } + #[track_caller] + pub fn new(generator: G) -> Self { + Self::new_with_loc(SourceLocation::caller(), generator) + } + fn run(&self, sim: ExternModuleSimulationState) -> Box + 'static> { + Interned::into_inner(self.generator).dyn_run(sim) + } +} + +impl Visit for ExternModuleSimulation { + fn visit(&self, state: &mut State) -> Result<(), State::Error> { + state.visit_extern_module_simulation(self) + } + + fn default_visit(&self, state: &mut State) -> Result<(), State::Error> { + let Self { + generator: _, + sim_io_to_generator_map, + source_location, + } = self; + sim_io_to_generator_map + .keys() + .try_for_each(|k| k.visit(state))?; + source_location.visit(state) + } +} + +impl Fold for ExternModuleSimulation { + fn fold(self, state: &mut State) -> Result::Error> { + state.fold_extern_module_simulation(self) + } + + fn default_fold(self, state: &mut State) -> Result::Error> { + let Self { + generator, + sim_io_to_generator_map, + source_location, + } = self; + Ok(Self { + generator, + sim_io_to_generator_map: Result::, _>::from_iter( + sim_io_to_generator_map + .iter() + .map(|(&sim_io, generator_io)| Ok((sim_io.fold(state)?, *generator_io))), + )? + .intern_sized(), + source_location: source_location.fold(state)?, + }) + } +} diff --git a/crates/fayalite/src/sim/compiler.rs b/crates/fayalite/src/sim/compiler.rs new file mode 100644 index 0000000..fbede7b --- /dev/null +++ b/crates/fayalite/src/sim/compiler.rs @@ -0,0 +1,5161 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +//! 
Compiler to Interpreter IR for Fayalite Simulation + +use crate::{ + bundle::{BundleField, BundleType}, + enum_::{EnumType, EnumVariant}, + expr::{ + ExprEnum, Flow, ops, + target::{ + GetTarget, Target, TargetBase, TargetPathArrayElement, TargetPathBundleField, + TargetPathElement, + }, + }, + int::BoolOrIntType, + intern::{Intern, InternSlice, Interned, Memoize}, + memory::PortKind, + module::{ + AnnotatedModuleIO, Block, ExternModuleBody, Id, InstantiatedModule, ModuleBody, NameId, + NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, + StmtInstance, StmtMatch, StmtReg, StmtWire, TargetInInstantiatedModule, + transform::deduce_resets::deduce_resets, + }, + prelude::*, + reset::{ResetType, ResetTypeDispatch}, + sim::{ + ExternModuleSimulation, SimTrace, SimTraceKind, SimTraces, TraceArray, TraceAsyncReset, + TraceBool, TraceBundle, TraceClock, TraceDecl, TraceEnumDiscriminant, TraceEnumWithFields, + TraceFieldlessEnum, TraceInstance, TraceLocation, TraceMem, TraceMemPort, TraceMemoryId, + TraceMemoryLocation, TraceModule, TraceModuleIO, TraceReg, TraceSInt, TraceScalarId, + TraceScope, TraceSimOnly, TraceSyncReset, TraceUInt, TraceWire, + interpreter::{ + Insn, InsnField, InsnFieldKind, InsnFieldType, InsnOrLabel, Insns, InsnsBuilding, + InsnsBuildingDone, InsnsBuildingKind, Label, SmallUInt, StatePartArrayIndex, + StatePartArrayIndexed, + parts::{ + MemoryData, SlotDebugData, StatePartIndex, StatePartIndexRange, StatePartKind, + StatePartKindBigSlots, StatePartKindMemories, StatePartKindSimOnlySlots, + StatePartKindSmallSlots, StatePartLayout, StatePartLen, TypeIndex, TypeIndexRange, + TypeLayout, TypeLen, TypeLenSingle, get_state_part_kinds, + }, + }, + }, + ty::{OpaqueSimValueSize, StaticType}, + util::{HashMap, chain}, +}; +use bitvec::vec::BitVec; +use num_bigint::BigInt; +use petgraph::{ + data::FromElements, + visit::{ + EdgeRef, GraphBase, IntoEdgeReferences, IntoNeighbors, IntoNeighborsDirected, + IntoNodeIdentifiers, IntoNodeReferences, NodeRef, VisitMap, Visitable, + }, +}; +use std::{collections::BTreeSet, fmt, hash::Hash, mem}; + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +enum CondBody { + IfTrue { + cond: CompiledValue, + }, + IfFalse { + cond: CompiledValue, + }, + MatchArm { + discriminant: StatePartIndex, + variant_index: usize, + }, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +struct Cond { + body: CondBody, + source_location: SourceLocation, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub(crate) struct CompiledBundleField { + pub(crate) offset: TypeIndex, + pub(crate) ty: CompiledTypeLayout, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub(crate) enum CompiledTypeLayoutBody { + Scalar, + Array { + /// debug names are ignored, use parent's layout instead + element: Interned>, + }, + Bundle { + /// debug names are ignored, use parent's layout instead + fields: Interned<[CompiledBundleField]>, + }, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub(crate) struct CompiledTypeLayout { + pub(crate) ty: T, + pub(crate) layout: TypeLayout, + pub(crate) body: CompiledTypeLayoutBody, +} + +impl CompiledTypeLayout { + fn with_prefixed_debug_names(self, prefix: &str) -> Self { + let Self { ty, layout, body } = self; + Self { + ty, + layout: layout.with_prefixed_debug_names(prefix), + body, + } + } + fn with_anonymized_debug_info(self) -> Self { + let Self { ty, layout, body } = self; + Self { + ty, + layout: layout.with_anonymized_debug_info(), + body, + } + } + fn get(ty: T) -> Self { 
+ #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] + struct MyMemoize; + impl Memoize for MyMemoize { + type Input = CanonicalType; + type InputOwned = CanonicalType; + type Output = CompiledTypeLayout; + + fn inner(self, input: &Self::Input) -> Self::Output { + match input { + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::Enum(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) => { + let mut layout = TypeLayout::empty(); + let debug_data = SlotDebugData { + name: Interned::default(), + ty: *input, + }; + layout.big_slots = StatePartLayout::scalar(debug_data, ()); + CompiledTypeLayout { + ty: *input, + layout: layout.into(), + body: CompiledTypeLayoutBody::Scalar, + } + } + CanonicalType::Array(array) => { + let mut layout = TypeLayout::empty(); + let element = CompiledTypeLayout::get(array.element()).intern_sized(); + for index in 0..array.len() { + layout.allocate( + &element + .layout + .with_prefixed_debug_names(&format!("[{index}]")), + ); + } + CompiledTypeLayout { + ty: *input, + layout: layout.into(), + body: CompiledTypeLayoutBody::Array { element }, + } + } + CanonicalType::PhantomConst(_) => { + let unit_layout = CompiledTypeLayout::get(()); + CompiledTypeLayout { + ty: *input, + layout: unit_layout.layout, + body: unit_layout.body, + } + } + CanonicalType::Bundle(bundle) => { + let mut layout = TypeLayout::empty(); + let fields = bundle + .fields() + .iter() + .map( + |BundleField { + name, + flipped: _, + ty, + }| { + let ty = CompiledTypeLayout::get(*ty); + let offset = layout + .allocate( + &ty.layout + .with_prefixed_debug_names(&format!(".{name}")), + ) + .start(); + CompiledBundleField { offset, ty } + }, + ) + .collect(); + CompiledTypeLayout { + ty: *input, + layout: layout.into(), + body: CompiledTypeLayoutBody::Bundle { fields }, + } + } + CanonicalType::DynSimOnly(ty) => { + let mut layout = TypeLayout::empty(); + let debug_data = SlotDebugData { + name: Interned::default(), + ty: *input, + }; + layout.sim_only_slots = StatePartLayout::scalar(debug_data, *ty); + CompiledTypeLayout { + ty: *input, + layout: layout.into(), + body: CompiledTypeLayoutBody::Scalar, + } + } + } + } + } + let CompiledTypeLayout { + ty: _, + layout, + body, + } = MyMemoize.get_owned(ty.canonical()); + Self { ty, layout, body } + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] +pub(crate) struct CompiledValue { + pub(crate) layout: CompiledTypeLayout, + pub(crate) range: TypeIndexRange, + pub(crate) write: Option<(CompiledTypeLayout, TypeIndexRange)>, +} + +impl CompiledValue { + fn write(self) -> (CompiledTypeLayout, TypeIndexRange) { + self.write.unwrap_or((self.layout, self.range)) + } + fn write_value(self) -> Self { + let (layout, range) = self.write(); + Self { + layout, + range, + write: None, + } + } + fn map( + self, + mut f: impl FnMut( + CompiledTypeLayout, + TypeIndexRange, + ) -> (CompiledTypeLayout, TypeIndexRange), + ) -> CompiledValue { + let (layout, range) = f(self.layout, self.range); + CompiledValue { + layout, + range, + write: self.write.map(|(layout, range)| f(layout, range)), + } + } + pub(crate) fn map_ty(self, mut f: impl FnMut(T) -> U) -> CompiledValue { + self.map(|CompiledTypeLayout { ty, layout, body }, range| { + ( + CompiledTypeLayout { + ty: f(ty), + layout, + body, + }, + range, + ) + }) + } +} + +impl CompiledValue { + fn field_by_index(self, field_index: usize) -> CompiledValue { + self.map(|layout, range| { + let 
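// NOTE (illustrative sketch, not part of the original patch): `CompiledTypeLayout::get`
// above memoizes one layout per `CanonicalType`: scalars take a single big slot (sim-only
// types a sim-only slot), arrays repeat the element layout `len` times with "[i]"
// debug-name prefixes, bundles concatenate their field layouts with ".name" prefixes, and
// `PhantomConst` is zero-sized. A `CompiledValue` then pairs such a layout with a
// `TypeIndexRange` into the state parts; `field_by_index` / `element` never copy anything,
// they just re-slice that range using the recorded field offsets or the element stride.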
CompiledTypeLayout { + ty: _, + layout: _, + body: CompiledTypeLayoutBody::Bundle { fields }, + } = layout + else { + unreachable!(); + }; + ( + fields[field_index].ty, + range.slice(TypeIndexRange::new( + fields[field_index].offset, + fields[field_index].ty.layout.len(), + )), + ) + }) + } + pub(crate) fn field_by_name(self, name: Interned) -> CompiledValue { + self.field_by_index(self.layout.ty.name_indexes()[&name]) + } +} + +impl CompiledValue { + pub(crate) fn element(self, index: usize) -> CompiledValue { + self.map(|layout, range| { + let CompiledTypeLayoutBody::Array { element } = layout.body else { + unreachable!(); + }; + (*element, range.index_array(element.layout.len(), index)) + }) + } + fn element_dyn( + self, + index_slot: StatePartIndex, + ) -> CompiledExpr { + CompiledExpr::from(self).element_dyn(index_slot) + } +} + +macro_rules! make_type_array_indexes { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + ) => { + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] + pub(crate) struct TypeArrayIndexes { + $(pub(crate) $type_plural_field: Interned<[StatePartArrayIndex<$type_kind>]>,)* + } + + impl TypeArrayIndexes { + pub(crate) fn as_ref(&self) -> TypeArrayIndexesRef<'_> { + TypeArrayIndexesRef { + $($type_plural_field: &self.$type_plural_field,)* + } + } + #[must_use] + pub(crate) fn join(self, next: TypeArrayIndex) -> TypeArrayIndexes { + TypeArrayIndexes { + $($type_plural_field: Interned::from_iter(self.$type_plural_field.iter().copied().chain([next.$type_plural_field])),)* + } + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + pub(crate) struct TypeArrayIndex { + $(pub(crate) $type_plural_field: StatePartArrayIndex<$type_kind>,)* + } + + impl TypeArrayIndex { + pub(crate) fn from_parts(index: StatePartIndex, len: usize, stride: TypeLen) -> Self { + Self { + $($type_plural_field: StatePartArrayIndex { + index, + len, + stride: stride.$type_plural_field, + },)* + } + } + pub(crate) fn len(self) -> usize { + let len = self.small_slots.len; + $(assert_eq!(self.$type_plural_field.len, len, "array length mismatch");)* + len + } + pub(crate) fn index(self) -> StatePartIndex { + let index = self.small_slots.index; + $(assert_eq!(self.$type_plural_field.index, index, "array index mismatch");)* + index + } + pub(crate) fn is_empty(self) -> bool { + self.len() == 0 + } + pub(crate) fn stride(self) -> TypeLen { + TypeLen { + $($type_plural_field: self.$type_plural_field.stride,)* + } + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] + pub(crate) struct TypeArrayIndexesRef<'a> { + $(pub(crate) $type_plural_field: &'a [StatePartArrayIndex<$type_kind>],)* + } + + impl<'a> TypeArrayIndexesRef<'a> { + pub(crate) fn len(self) -> usize { + let len = self.small_slots.len(); + $(assert_eq!(self.$type_plural_field.len(), len, "indexes count mismatch");)* + len + } + pub(crate) fn is_empty(self) -> bool { + self.len() == 0 + } + pub(crate) fn iter(self) -> impl Iterator + 'a { + (0..self.len()).map(move |i| TypeArrayIndex { + $($type_plural_field: self.$type_plural_field[i],)* + }) + } + pub(crate) fn for_each_offset( + self, + mut f: impl FnMut(TypeIndex), + ) { + self.for_each_offset2(TypeIndex { + $($type_plural_field: StatePartIndex::new(0),)* + }, &mut f); + } + pub(crate) fn split_first(self) -> Option<(TypeArrayIndex, Self)> { + $(let $type_plural_field = self.$type_plural_field.split_first()?;)* + let next = TypeArrayIndex { + $($type_plural_field: *$type_plural_field.0,)* + }; + let rest 
= TypeArrayIndexesRef { + $($type_plural_field: $type_plural_field.1,)* + }; + Some((next, rest)) + } + pub(crate) fn for_each_offset2( + self, + base_offset: TypeIndex, + f: &mut (impl FnMut(TypeIndex) + ?Sized), + ) { + if let Some((next, rest)) = self.split_first() { + let stride = next.stride(); + for index in 0..next.len().try_into().expect("array too big") { + let mut offset = TypeIndex { + $($type_plural_field: StatePartIndex::new( + stride + .$type_plural_field + .value + .checked_mul(index) + .expect("array too big"), + ),)* + }; + $(offset.$type_plural_field.value = + base_offset + .$type_plural_field + .value + .checked_add(offset.$type_plural_field.value) + .expect("array too big");)* + rest.for_each_offset2(offset, f); + } + } else { + $(assert!(self.$type_plural_field.is_empty(), "indexes count mismatch");)* + f(base_offset); + } + } + } + + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + pub(crate) struct TypeArrayIndexed { + $(pub(crate) $type_plural_field: StatePartArrayIndexed<$type_kind>,)* + } + + impl TypeArrayIndexed { + pub(crate) fn from_parts(base: TypeIndex, indexes: TypeArrayIndexes) -> Self { + Self { + $($type_plural_field: StatePartArrayIndexed { + base: base.$type_plural_field, + indexes: indexes.$type_plural_field, + },)* + } + } + pub(crate) fn base(self) -> TypeIndex { + TypeIndex { + $($type_plural_field: self.$type_plural_field.base,)* + } + } + pub(crate) fn indexes(self) -> TypeArrayIndexes { + TypeArrayIndexes { + $($type_plural_field: self.$type_plural_field.indexes,)* + } + } + } + + impl From for TypeArrayIndexed { + fn from(value: TypeIndex) -> Self { + TypeArrayIndexed { + $($type_plural_field: value.$type_plural_field.into(),)* + } + } + } + }; +} + +get_state_part_kinds! { + make_type_array_indexes! 
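// NOTE (illustrative sketch, not part of the original patch): `make_type_array_indexes!` is
// expanded through `get_state_part_kinds!` just below, so every generated struct gets one
// field per state-part kind (small slots, big slots, sim-only slots, ...). A
// `TypeArrayIndex` carries the same runtime index slot and length for every kind but a
// per-kind stride, and `for_each_offset2` recursively walks nested dynamic array indexes,
// yielding the Cartesian product of per-kind offsets -- that is how a dynamically indexed
// element is mapped onto a flat `TypeIndexRange`.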
{ + type_plural_fields; + type_kinds; + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] +struct CompiledExpr { + static_part: CompiledValue, + indexes: TypeArrayIndexes, +} + +impl From> for CompiledExpr { + fn from(static_part: CompiledValue) -> Self { + Self { + static_part, + indexes: TypeArrayIndexes::default(), + } + } +} + +impl CompiledExpr { + fn map_ty(self, f: impl FnMut(T) -> U) -> CompiledExpr { + let Self { + static_part, + indexes, + } = self; + CompiledExpr { + static_part: static_part.map_ty(f), + indexes, + } + } + fn add_target_without_indexes_to_set(self, inputs: &mut SlotSet) { + let Self { + static_part, + indexes, + } = self; + indexes.as_ref().for_each_offset(|offset| { + inputs.extend([static_part.range.offset(offset)]); + }); + } + fn add_target_and_indexes_to_set(self, inputs: &mut SlotSet) { + let Self { + static_part: _, + indexes, + } = self; + self.add_target_without_indexes_to_set(inputs); + inputs.extend(indexes.as_ref().iter()); + } +} + +impl CompiledExpr { + fn field_by_index(self, field_index: usize) -> CompiledExpr { + CompiledExpr { + static_part: self.static_part.field_by_index(field_index), + indexes: self.indexes, + } + } + fn field_by_name(self, name: Interned) -> CompiledExpr { + CompiledExpr { + static_part: self.static_part.field_by_name(name), + indexes: self.indexes, + } + } +} + +impl CompiledExpr { + fn element(self, index: usize) -> CompiledExpr { + CompiledExpr { + static_part: self.static_part.element(index), + indexes: self.indexes, + } + } + fn element_dyn( + self, + index_slot: StatePartIndex, + ) -> CompiledExpr { + let CompiledTypeLayoutBody::Array { element } = self.static_part.layout.body else { + unreachable!(); + }; + let stride = element.layout.len(); + let indexes = self.indexes.join(TypeArrayIndex::from_parts( + index_slot, + self.static_part.layout.ty.len(), + stride, + )); + CompiledExpr { + static_part: self.static_part.map(|layout, range| { + let CompiledTypeLayoutBody::Array { element } = layout.body else { + unreachable!(); + }; + (*element, range.index_array(stride, 0)) + }), + indexes, + } + } +} + +macro_rules! 
make_assignment_graph { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_singular_variants = [$($type_singular_variant:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + array_indexed_variants = [$($array_indexed_variant:ident,)*]; + input_variants = [$($input_variant:ident,)*]; + output_variants = [$($output_variant:ident,)*]; + ) => { + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] + enum AssignmentOrSlotIndex { + AssignmentIndex(usize), + $($type_singular_variant(StatePartIndex<$type_kind>),)* + } + + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] + enum AssignmentIO { + $($input_variant { + assignment_index: usize, + slot: StatePartIndex<$type_kind>, + },)* + $($output_variant { + assignment_index: usize, + slot: StatePartIndex<$type_kind>, + },)* + } + + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] + enum AssignmentsEdge { + IO(AssignmentIO), + AssignmentImmediatePredecessor { + predecessor_assignment_index: usize, + assignment_index: usize, + }, + } + + #[derive(Debug)] + enum Assignments { + Accumulating { + assignments: Vec, + }, + Finalized { + assignments: Box<[Assignment]>, + slots_layout: TypeLayout, + slot_readers: SlotToAssignmentIndexFullMap, + slot_writers: SlotToAssignmentIndexFullMap, + assignment_immediate_predecessors: Box<[Box<[usize]>]>, + assignment_immediate_successors: Box<[Box<[usize]>]>, + }, + } + + impl Default for Assignments { + fn default() -> Self { + Self::Accumulating { + assignments: Vec::new(), + } + } + } + + impl Assignments { + fn finalize(&mut self, slots_layout: TypeLayout) { + let Self::Accumulating { assignments } = self else { + unreachable!("already finalized"); + }; + let assignments = mem::take(assignments).into_boxed_slice(); + let mut slot_readers = SlotToAssignmentIndexFullMap::new(slots_layout.len()); + let mut slot_writers = SlotToAssignmentIndexFullMap::new(slots_layout.len()); + let mut assignment_immediate_predecessors = vec![BTreeSet::new(); assignments.len()]; + let mut assignment_immediate_successors = vec![BTreeSet::new(); assignments.len()]; + for (assignment_index, assignment) in assignments.iter().enumerate() { + slot_readers + .keys_for_assignment(assignment_index) + .extend([&assignment.inputs]); + slot_readers + .keys_for_assignment(assignment_index) + .extend(&assignment.conditions); + $(for &slot in &assignment.outputs.$type_plural_field { + if let Some(&pred) = slot_writers[slot].last() { + assignment_immediate_predecessors[assignment_index].insert(pred); + assignment_immediate_successors[pred].insert(assignment_index); + } + slot_writers[slot].push(assignment_index); + })* + } + *self = Self::Finalized { + assignments, + slots_layout, + slot_readers, + slot_writers, + assignment_immediate_predecessors: assignment_immediate_predecessors + .into_iter() + .map(Box::from_iter) + .collect(), + assignment_immediate_successors: assignment_immediate_successors + .into_iter() + .map(Box::from_iter) + .collect(), + }; + } + fn push(&mut self, v: Assignment) { + let Self::Accumulating { assignments } = self else { + unreachable!("already finalized"); + }; + assignments.push(v); + } + fn assignments(&self) -> &[Assignment] { + let Self::Finalized { assignments, .. } = self else { + unreachable!("Assignments::finalize should have been called"); + }; + assignments + } + fn slots_layout(&self) -> TypeLayout { + let Self::Finalized { slots_layout, .. 
} = self else { + unreachable!("Assignments::finalize should have been called"); + }; + *slots_layout + } + fn slot_readers(&self) -> &SlotToAssignmentIndexFullMap { + let Self::Finalized { slot_readers, .. } = self else { + unreachable!("Assignments::finalize should have been called"); + }; + slot_readers + } + fn slot_writers(&self) -> &SlotToAssignmentIndexFullMap { + let Self::Finalized { slot_writers, .. } = self else { + unreachable!("Assignments::finalize should have been called"); + }; + slot_writers + } + fn assignment_immediate_predecessors(&self) -> &[Box<[usize]>] { + let Self::Finalized { + assignment_immediate_predecessors, + .. + } = self + else { + unreachable!("Assignments::finalize should have been called"); + }; + assignment_immediate_predecessors + } + fn assignment_immediate_successors(&self) -> &[Box<[usize]>] { + let Self::Finalized { + assignment_immediate_successors, + .. + } = self + else { + unreachable!("Assignments::finalize should have been called"); + }; + assignment_immediate_successors + } + fn elements(&self) -> AssignmentsElements<'_> { + let SlotToAssignmentIndexFullMap { + $($type_plural_field,)* + } = self.slot_readers(); + AssignmentsElements { + node_indexes: HashMap::with_capacity_and_hasher( + self.assignments().len() $(+ $type_plural_field.len())*, + Default::default(), + ), + nodes: self.node_references(), + edges: self.edge_references(), + } + } + } + + impl GraphBase for Assignments { + type EdgeId = AssignmentsEdge; + type NodeId = AssignmentOrSlotIndex; + } + + #[derive(Debug, Clone, Copy)] + enum AssignmentsNodeRef<'a> { + Assignment { + index: usize, + #[allow(dead_code, reason = "used in Debug impl")] + assignment: &'a Assignment, + }, + $($type_singular_variant( + StatePartIndex<$type_kind>, + #[allow(dead_code, reason = "used in Debug impl")] SlotDebugData, + ),)* + } + + impl<'a> NodeRef for AssignmentsNodeRef<'a> { + type NodeId = AssignmentOrSlotIndex; + type Weight = AssignmentsNodeRef<'a>; + + fn id(&self) -> Self::NodeId { + match *self { + AssignmentsNodeRef::Assignment { + index, + assignment: _, + } => AssignmentOrSlotIndex::AssignmentIndex(index), + $(AssignmentsNodeRef::$type_singular_variant(slot, _) => AssignmentOrSlotIndex::$type_singular_variant(slot),)* + } + } + + fn weight(&self) -> &Self::Weight { + self + } + } + + impl<'a> petgraph::visit::Data for &'a Assignments { + type NodeWeight = AssignmentsNodeRef<'a>; + type EdgeWeight = AssignmentsEdge; + } + + struct AssignmentsElements<'a> { + node_indexes: HashMap, + nodes: AssignmentsNodes<'a>, + edges: AssignmentsEdges<'a>, + } + + impl<'a> Iterator for AssignmentsElements<'a> { + type Item = petgraph::data::Element< + <&'a Assignments as petgraph::visit::Data>::NodeWeight, + <&'a Assignments as petgraph::visit::Data>::EdgeWeight, + >; + + fn next(&mut self) -> Option { + let Self { + node_indexes, + nodes, + edges, + } = self; + if let Some(node) = nodes.next() { + node_indexes.insert(node.id(), node_indexes.len()); + return Some(petgraph::data::Element::Node { weight: node }); + } + let edge = edges.next()?; + Some(petgraph::data::Element::Edge { + source: node_indexes[&edge.source()], + target: node_indexes[&edge.target()], + weight: *edge.weight(), + }) + } + } + + #[derive(Clone)] + struct AssignmentsNodeIdentifiers { + assignment_indexes: std::ops::Range, + $($type_plural_field: std::ops::Range,)* + } + + impl AssignmentsNodeIdentifiers { + fn internal_iter<'a>(&'a mut self) -> impl Iterator + 'a { + let Self { + assignment_indexes, + $($type_plural_field,)* + } = 
self; + assignment_indexes + .map(AssignmentOrSlotIndex::AssignmentIndex) + $(.chain($type_plural_field.map(|value| { + AssignmentOrSlotIndex::$type_singular_variant(StatePartIndex::new(value)) + })))* + } + } + + impl Iterator for AssignmentsNodeIdentifiers { + type Item = AssignmentOrSlotIndex; + fn next(&mut self) -> Option { + self.internal_iter().next() + } + + fn nth(&mut self, n: usize) -> Option { + self.internal_iter().nth(n) + } + } + + impl<'a> IntoNodeIdentifiers for &'a Assignments { + type NodeIdentifiers = AssignmentsNodeIdentifiers; + + fn node_identifiers(self) -> Self::NodeIdentifiers { + let TypeLen { + $($type_plural_field,)* + } = self.slot_readers().len(); + AssignmentsNodeIdentifiers { + assignment_indexes: 0..self.assignments().len(), + $($type_plural_field: 0..$type_plural_field.value,)* + } + } + } + + struct AssignmentsNodes<'a> { + assignments: &'a Assignments, + nodes: AssignmentsNodeIdentifiers, + } + + impl<'a> Iterator for AssignmentsNodes<'a> { + type Item = AssignmentsNodeRef<'a>; + + fn next(&mut self) -> Option { + self.nodes.next().map(|node| match node { + AssignmentOrSlotIndex::AssignmentIndex(index) => AssignmentsNodeRef::Assignment { + index, + assignment: &self.assignments.assignments()[index], + }, + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => AssignmentsNodeRef::$type_singular_variant( + slot, + *self.assignments.slots_layout().$type_plural_field.debug_data(slot), + ),)* + }) + } + } + + impl<'a> IntoNodeReferences for &'a Assignments { + type NodeRef = AssignmentsNodeRef<'a>; + type NodeReferences = AssignmentsNodes<'a>; + + fn node_references(self) -> Self::NodeReferences { + AssignmentsNodes { + assignments: self, + nodes: self.node_identifiers(), + } + } + } + + #[derive(Default)] + struct AssignmentsNeighborsDirected<'a> { + assignment_indexes: std::slice::Iter<'a, usize>, + $($type_plural_field: std::collections::btree_set::Iter<'a, StatePartIndex<$type_kind>>,)* + } + + impl Iterator for AssignmentsNeighborsDirected<'_> { + type Item = AssignmentOrSlotIndex; + fn next(&mut self) -> Option { + let Self { + assignment_indexes, + $($type_plural_field,)* + } = self; + if let retval @ Some(_) = assignment_indexes + .next() + .copied() + .map(AssignmentOrSlotIndex::AssignmentIndex) + { + retval + } $(else if let retval @ Some(_) = $type_plural_field + .next() + .copied() + .map(AssignmentOrSlotIndex::$type_singular_variant) + { + retval + })* else { + None + } + } + } + + impl<'a> IntoNeighbors for &'a Assignments { + type Neighbors = AssignmentsNeighborsDirected<'a>; + + fn neighbors(self, n: Self::NodeId) -> Self::Neighbors { + self.neighbors_directed(n, petgraph::Direction::Outgoing) + } + } + + impl<'a> IntoNeighborsDirected for &'a Assignments { + type NeighborsDirected = AssignmentsNeighborsDirected<'a>; + + fn neighbors_directed( + self, + n: Self::NodeId, + d: petgraph::Direction, + ) -> Self::NeighborsDirected { + use petgraph::Direction::*; + let slot_map = match d { + Outgoing => self.slot_readers(), + Incoming => self.slot_writers(), + }; + match n { + AssignmentOrSlotIndex::AssignmentIndex(assignment_index) => { + let assignment = &self.assignments()[assignment_index]; + let ( + assignment_indexes, + SlotSet { + $($type_plural_field,)* + }, + ) = match d { + Outgoing => ( + &self.assignment_immediate_successors()[assignment_index], + &assignment.outputs, + ), + Incoming => ( + &self.assignment_immediate_predecessors()[assignment_index], + &assignment.inputs, + ), + }; + AssignmentsNeighborsDirected { + 
assignment_indexes: assignment_indexes.iter(), + $($type_plural_field: $type_plural_field.iter(),)* + } + } + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => AssignmentsNeighborsDirected { + assignment_indexes: slot_map[slot].iter(), + ..Default::default() + },)* + } + } + } + + impl EdgeRef for AssignmentsEdge { + type NodeId = AssignmentOrSlotIndex; + type EdgeId = AssignmentsEdge; + type Weight = AssignmentsEdge; + + fn source(&self) -> Self::NodeId { + match *self { + $(AssignmentsEdge::IO(AssignmentIO::$input_variant { + assignment_index: _, + slot, + }) => AssignmentOrSlotIndex::$type_singular_variant(slot),)* + $(AssignmentsEdge::IO(AssignmentIO::$output_variant { + assignment_index, + slot: _, + }) => AssignmentOrSlotIndex::AssignmentIndex(assignment_index),)* + AssignmentsEdge::AssignmentImmediatePredecessor { + predecessor_assignment_index, + assignment_index: _, + } => AssignmentOrSlotIndex::AssignmentIndex(predecessor_assignment_index), + } + } + + fn target(&self) -> Self::NodeId { + match *self { + $(AssignmentsEdge::IO(AssignmentIO::$input_variant { + assignment_index, + slot: _, + }) => AssignmentOrSlotIndex::AssignmentIndex(assignment_index),)* + $(AssignmentsEdge::IO(AssignmentIO::$output_variant { + assignment_index: _, + slot, + }) => AssignmentOrSlotIndex::$type_singular_variant(slot),)* + AssignmentsEdge::AssignmentImmediatePredecessor { + predecessor_assignment_index: _, + assignment_index, + } => AssignmentOrSlotIndex::AssignmentIndex(assignment_index), + } + } + + fn weight(&self) -> &Self::Weight { + self + } + + fn id(&self) -> Self::EdgeId { + *self + } + } + + struct AssignmentsEdges<'a> { + assignments: &'a Assignments, + nodes: AssignmentsNodeIdentifiers, + outgoing_neighbors: Option<(AssignmentOrSlotIndex, AssignmentsNeighborsDirected<'a>)>, + } + + impl Iterator for AssignmentsEdges<'_> { + type Item = AssignmentsEdge; + + fn next(&mut self) -> Option { + loop { + if let Some((node, outgoing_neighbors)) = &mut self.outgoing_neighbors { + if let Some(outgoing_neighbor) = outgoing_neighbors.next() { + return Some(match (*node, outgoing_neighbor) { + ( + $(AssignmentOrSlotIndex::$type_singular_variant(_))|*, + $(AssignmentOrSlotIndex::$type_singular_variant(_))|*, + ) => unreachable!(), + ( + AssignmentOrSlotIndex::AssignmentIndex(predecessor_assignment_index), + AssignmentOrSlotIndex::AssignmentIndex(assignment_index), + ) => AssignmentsEdge::AssignmentImmediatePredecessor { + predecessor_assignment_index, + assignment_index, + }, + $(( + AssignmentOrSlotIndex::AssignmentIndex(assignment_index), + AssignmentOrSlotIndex::$type_singular_variant(slot), + ) => AssignmentsEdge::IO(AssignmentIO::$output_variant { + assignment_index, + slot, + }),)* + $(( + AssignmentOrSlotIndex::$type_singular_variant(slot), + AssignmentOrSlotIndex::AssignmentIndex(assignment_index), + ) => AssignmentsEdge::IO(AssignmentIO::$input_variant { + assignment_index, + slot, + }),)* + }); + } + } + let node = self.nodes.next()?; + self.outgoing_neighbors = Some(( + node, + self.assignments + .neighbors_directed(node, petgraph::Direction::Outgoing), + )); + } + } + } + + impl<'a> IntoEdgeReferences for &'a Assignments { + type EdgeRef = AssignmentsEdge; + type EdgeReferences = AssignmentsEdges<'a>; + + fn edge_references(self) -> Self::EdgeReferences { + AssignmentsEdges { + assignments: self, + nodes: self.node_identifiers(), + outgoing_neighbors: None, + } + } + } + + struct AssignmentsVisitMap { + assignments: Vec, + slots: DenseSlotSet, + } + + impl VisitMap for 
AssignmentsVisitMap { + fn visit(&mut self, n: AssignmentOrSlotIndex) -> bool { + match n { + AssignmentOrSlotIndex::AssignmentIndex(assignment_index) => { + !mem::replace(&mut self.assignments[assignment_index], true) + } + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => self.slots.insert(slot),)* + } + } + + fn is_visited(&self, n: &AssignmentOrSlotIndex) -> bool { + match *n { + AssignmentOrSlotIndex::AssignmentIndex(assignment_index) => { + self.assignments[assignment_index] + } + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => self.slots.contains(slot),)* + } + } + + fn unvisit(&mut self, n: AssignmentOrSlotIndex) -> bool { + match n { + AssignmentOrSlotIndex::AssignmentIndex(assignment_index) => { + mem::replace(&mut self.assignments[assignment_index], false) + } + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => self.slots.remove(slot),)* + } + } + } + + impl Visitable for Assignments { + type Map = AssignmentsVisitMap; + + fn visit_map(self: &Self) -> Self::Map { + AssignmentsVisitMap { + assignments: vec![false; self.assignments().len()], + slots: DenseSlotSet::new(self.slot_readers().len()), + } + } + + fn reset_map(self: &Self, map: &mut Self::Map) { + let AssignmentsVisitMap { assignments, slots } = map; + assignments.clear(); + assignments.resize(self.assignments().len(), false); + if slots.len() != self.slot_readers().len() { + *slots = DenseSlotSet::new(self.slot_readers().len()); + } else { + slots.clear(); + } + } + } + + #[derive(Debug)] + struct Assignment { + inputs: SlotSet, + outputs: SlotSet, + conditions: Interned<[Cond]>, + insns: Vec, + source_location: SourceLocation, + } + + #[derive(Debug)] + struct SlotToAssignmentIndexFullMap { + $($type_plural_field: Box<[Vec]>,)* + } + + impl SlotToAssignmentIndexFullMap { + fn new(len: TypeLen) -> Self { + Self { + $($type_plural_field: vec![Vec::new(); len.$type_plural_field.value.try_into().expect("length too big")] + .into_boxed_slice(),)* + } + } + fn len(&self) -> TypeLen { + TypeLen { + $($type_plural_field: StatePartLen::new(self.$type_plural_field.len() as _),)* + } + } + fn keys_for_assignment( + &mut self, + assignment_index: usize, + ) -> SlotToAssignmentIndexFullMapKeysForAssignment<'_> { + SlotToAssignmentIndexFullMapKeysForAssignment { + map: self, + assignment_index, + } + } + fn for_each( + &self, + $(mut $type_plural_field: impl FnMut(StatePartIndex<$type_kind>, &[usize]),)* + ) { + $(self.$type_plural_field.iter().enumerate().for_each(|(k, v)| { + $type_plural_field(StatePartIndex::new(k as _), v) + });)* + } + } + + $(impl std::ops::Index> for SlotToAssignmentIndexFullMap { + type Output = Vec; + + fn index(&self, index: StatePartIndex<$type_kind>) -> &Self::Output { + &self.$type_plural_field[index.as_usize()] + } + } + + impl std::ops::IndexMut> for SlotToAssignmentIndexFullMap { + fn index_mut(&mut self, index: StatePartIndex<$type_kind>) -> &mut Self::Output { + &mut self.$type_plural_field[index.as_usize()] + } + })* + + struct SlotToAssignmentIndexFullMapKeysForAssignment<'a> { + map: &'a mut SlotToAssignmentIndexFullMap, + assignment_index: usize, + } + + $(impl<'a> Extend<&'a StatePartIndex<$type_kind>> + for SlotToAssignmentIndexFullMapKeysForAssignment<'_> { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().copied()); + } + })* + + $(impl Extend> + for SlotToAssignmentIndexFullMapKeysForAssignment<'_> { + fn extend>>(&mut self, iter: T) { + iter.into_iter() + .for_each(|slot| self.map[slot].push(self.assignment_index)); + } + })* + + impl<'a> 
Extend<&'a SlotSet> for SlotToAssignmentIndexFullMapKeysForAssignment<'_> { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each( + |set| { + $(self.extend(&set.$type_plural_field);)* + }, + ); + } + } + + impl<'a> Extend<&'a Cond> for SlotToAssignmentIndexFullMapKeysForAssignment<'_> { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|cond| match cond.body { + CondBody::IfTrue { cond } | CondBody::IfFalse { cond } => { + let CompiledValue { + range, + layout: _, + write: _, + } = cond; + $(self.extend(range.$type_plural_field.iter());)* + } + CondBody::MatchArm { + discriminant, + variant_index: _, + } => self.extend([discriminant]), + }); + } + } + + impl Assignment { + fn new( + conditions: Interned<[Cond]>, + insns: Vec, + source_location: SourceLocation, + ) -> Self { + let mut inputs = SlotSet::default(); + let mut outputs = SlotSet::default(); + for insn in &insns { + let insn = match insn { + InsnOrLabel::Insn(insn) => insn, + InsnOrLabel::Label(_) => continue, + }; + for InsnField { ty, kind } in insn.fields() { + match (kind, ty) { + $((InsnFieldKind::Input, InsnFieldType::$type_singular_variant(&slot)) => { + inputs.extend([slot]); + })* + $(( + InsnFieldKind::Input, + InsnFieldType::$array_indexed_variant(&array_indexed), + ) => { + array_indexed.for_each_target(|slot| inputs.extend([slot])); + inputs.extend(array_indexed.indexes); + })* + $((InsnFieldKind::Output, InsnFieldType::$type_singular_variant(&slot)) => { + outputs.extend([slot]); + })* + $(( + InsnFieldKind::Output, + InsnFieldType::$array_indexed_variant(&array_indexed), + ) => { + array_indexed.for_each_target(|slot| { + outputs.extend([slot]); + }); + inputs.extend(array_indexed.indexes); + })* + ( + _, + InsnFieldType::Memory(_) + | InsnFieldType::SmallUInt(_) + | InsnFieldType::SmallSInt(_) + | InsnFieldType::InternedBigInt(_) + | InsnFieldType::U8(_) + | InsnFieldType::USize(_) + | InsnFieldType::Empty(_), + ) + | ( + InsnFieldKind::Immediate + | InsnFieldKind::Memory + | InsnFieldKind::BranchTarget, + _, + ) => {} + } + } + } + Self { + inputs, + outputs, + conditions, + insns, + source_location, + } + } + } + }; +} + +get_state_part_kinds! { + make_assignment_graph! { + type_plural_fields; + type_singular_variants; + type_kinds; + array_indexed_variants; + #[custom] input_variants = [small_slot = SmallInput, big_slot = BigInput, sim_only_slot = SimOnlyInput,]; + #[custom] output_variants = [small_slot = SmallOutput, big_slot = BigOutput, sim_only_slot = SimOnlyOutput,]; + } +} + +macro_rules! 
make_dense_slot_set { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + ) => { + #[derive(Clone, Debug, PartialEq, Eq, Hash)] + struct DenseSlotSet { + $($type_plural_field: Box<[bool]>,)* + } + + impl DenseSlotSet { + fn new(len: TypeLen) -> Self { + Self { + $($type_plural_field: vec![false; len.$type_plural_field.value.try_into().expect("length too big")] + .into_boxed_slice(),)* + } + } + fn len(&self) -> TypeLen { + TypeLen { + $($type_plural_field: StatePartLen::new(self.$type_plural_field.len() as _),)* + } + } + fn clear(&mut self) { + $(self.$type_plural_field.fill(false);)* + } + } + + trait DenseSlotSetMethods: Extend> { + fn contains(&self, k: StatePartIndex) -> bool; + fn remove(&mut self, k: StatePartIndex) -> bool { + self.take(k).is_some() + } + fn take(&mut self, k: StatePartIndex) -> Option>; + fn replace(&mut self, k: StatePartIndex) -> Option>; + fn insert(&mut self, k: StatePartIndex) -> bool { + self.replace(k).is_none() + } + } + + impl Extend> for DenseSlotSet + where + Self: DenseSlotSetMethods, + { + fn extend>>(&mut self, iter: T) { + iter.into_iter().for_each(|v| { + self.insert(v); + }); + } + } + + $(impl DenseSlotSetMethods<$type_kind> for DenseSlotSet { + fn contains(&self, k: StatePartIndex<$type_kind>) -> bool { + self.$type_plural_field[k.as_usize()] + } + + fn take( + &mut self, + k: StatePartIndex<$type_kind>, + ) -> Option> { + mem::replace(self.$type_plural_field.get_mut(k.as_usize())?, false).then_some(k) + } + + fn replace( + &mut self, + k: StatePartIndex<$type_kind>, + ) -> Option> { + mem::replace(&mut self.$type_plural_field[k.as_usize()], true).then_some(k) + } + })* + }; +} + +get_state_part_kinds! { + make_dense_slot_set! { + type_plural_fields; + type_kinds; + } +} + +macro_rules! make_slot_vec { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + ) => { + #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)] + struct SlotVec { + $($type_plural_field: Vec>,)* + } + + impl SlotVec { + fn is_empty(&self) -> bool { + true $(&& self.$type_plural_field.is_empty())* + } + } + }; +} + +get_state_part_kinds! { + make_slot_vec! { + type_plural_fields; + type_kinds; + } +} + +macro_rules! 
make_slot_set { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + ) => { + #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)] + struct SlotSet { + $($type_plural_field: BTreeSet>,)* + } + + impl SlotSet { + fn is_empty(&self) -> bool { + true $(&& self.$type_plural_field.is_empty())* + } + fn for_each( + &self, + $($type_plural_field: impl FnMut(StatePartIndex<$type_kind>),)* + ) { + $(self.$type_plural_field.iter().copied().for_each($type_plural_field);)* + } + fn all( + &self, + $($type_plural_field: impl FnMut(StatePartIndex<$type_kind>) -> bool,)* + ) -> bool { + true $(&& self.$type_plural_field.iter().copied().all($type_plural_field))* + } + } + + $(impl Extend> for SlotSet { + fn extend>>( + &mut self, + iter: T, + ) { + self.$type_plural_field.extend(iter); + } + })* + + $(impl Extend> for SlotSet { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().flat_map(|v| v.iter())); + } + })* + + impl Extend for SlotSet { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each( + |range| { + $(self.extend(range.$type_plural_field.iter());)* + }, + ) + } + } + + impl Extend for SlotSet { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each( + |v| { + $(self.extend([v.$type_plural_field]);)* + }, + ) + } + } + + $(impl Extend> for SlotSet { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(|v| v.index)); + } + })* + + impl Extend for SlotSet { + fn extend>(&mut self, iter: T) { + iter.into_iter().for_each(|cond_body| match cond_body { + CondBody::IfTrue { cond } | CondBody::IfFalse { cond } => { + self.extend([cond.range]); + } + CondBody::MatchArm { + discriminant, + variant_index: _, + } => self.extend([discriminant]), + }) + } + } + + impl Extend for SlotSet { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(|v| v.body)) + } + } + }; +} + +get_state_part_kinds! { + make_slot_set! { + type_plural_fields; + type_kinds; + } +} + +#[derive(Debug)] +struct RegisterReset { + is_async: bool, + init: CompiledValue, + rst: StatePartIndex, +} + +#[derive(Debug, Clone, Copy)] +struct ClockTrigger { + last_clk_was_low: StatePartIndex, + clk: StatePartIndex, + clk_triggered: StatePartIndex, + source_location: SourceLocation, +} + +#[derive(Debug)] +struct Register { + value: CompiledValue, + clk_triggered: StatePartIndex, + reset: Option, + source_location: SourceLocation, +} + +#[derive(Debug)] + +struct MemoryPort { + clk_triggered: StatePartIndex, + addr_delayed: Vec>, + en_delayed: Vec>, + #[allow(dead_code, reason = "used in Debug impl")] + data_layout: CompiledTypeLayout, + read_data_delayed: Vec, + write_data_delayed: Vec, + write_mask_delayed: Vec, + write_mode_delayed: Vec>, + write_insns: Vec, +} + +struct MemoryPortReadInsns<'a> { + addr: StatePartIndex, + en: StatePartIndex, + write_mode: Option>, + data: TypeIndexRange, + insns: &'a mut Vec, +} + +struct MemoryPortWriteInsns<'a> { + addr: StatePartIndex, + en: StatePartIndex, + write_mode: Option>, + data: TypeIndexRange, + mask: TypeIndexRange, + insns: &'a mut Vec, +} + +#[derive(Debug)] +struct Memory { + mem: Mem, + memory: StatePartIndex, + trace: TraceMem, + ports: Vec, +} + +#[derive(Copy, Clone)] +enum MakeTraceDeclTarget { + Expr(Expr), + Memory { + id: TraceMemoryId, + depth: usize, + stride: usize, + start: usize, + ty: CanonicalType, + }, +} + +impl MakeTraceDeclTarget { + fn flow(self) -> Flow { + match self { + MakeTraceDeclTarget::Expr(expr) => Expr::flow(expr), + MakeTraceDeclTarget::Memory { .. 
} => Flow::Duplex, + } + } + fn ty(self) -> CanonicalType { + match self { + MakeTraceDeclTarget::Expr(expr) => Expr::ty(expr), + MakeTraceDeclTarget::Memory { ty, .. } => ty, + } + } +} + +struct DebugOpaque(T); + +impl fmt::Debug for DebugOpaque { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("<...>") + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct CompiledExternModule { + pub(crate) module_io_targets: Interned<[Target]>, + pub(crate) module_io: Interned<[CompiledValue]>, + pub(crate) simulation: ExternModuleSimulation, +} + +#[derive(Debug)] +pub struct Compiler { + insns: Insns, + original_base_module: Interned>, + base_module: Interned>, + modules: HashMap, + extern_modules: Vec, + compiled_values: HashMap>, + compiled_exprs: HashMap, CompiledExpr>, + compiled_exprs_to_values: HashMap, CompiledValue>, + decl_conditions: HashMap>, + compiled_values_to_dyn_array_indexes: + HashMap, StatePartIndex>, + compiled_value_bool_dest_is_small_map: + HashMap, StatePartIndex>, + assignments: Assignments, + clock_triggers: Vec, + compiled_value_to_clock_trigger_map: HashMap, ClockTrigger>, + enum_discriminants: HashMap, StatePartIndex>, + registers: Vec, + traces: SimTraces>>, + memories: Vec, + dump_assignments_dot: Option>>, +} + +macro_rules! impl_compiler { + ( + type_plural_fields = [$($type_plural_field:ident,)*]; + type_singular_fields = [$($type_singular_field:ident,)*]; + type_singular_variants = [$($type_singular_variant:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + copy_insns = [$($copy_insn:ident,)*]; + read_indexed_insns = [$($read_indexed_insn:ident,)*]; + write_indexed_insns = [$($write_indexed_insn:ident,)*]; + ) => { + impl Compiler { + fn make_trace_scalar_helper( + &mut self, + instantiated_module: InstantiatedModule, + target: MakeTraceDeclTarget, + source_location: SourceLocation, + $($type_singular_field: impl FnOnce(StatePartIndex<$type_kind>) -> SimTraceKind,)* + ) -> TraceLocation { + match target { + MakeTraceDeclTarget::Expr(target) => { + let compiled_value = self.compile_expr(instantiated_module, target); + let compiled_value = self.compiled_expr_to_value(compiled_value, source_location); + TraceLocation::Scalar(self.new_sim_trace(match compiled_value.range.len().as_single() { + $(Some(TypeLenSingle::$type_singular_variant) => { + $type_singular_field(compiled_value.range.$type_plural_field.start) + })* + None => unreachable!(), + })) + } + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty, + } => TraceLocation::Memory(TraceMemoryLocation { + id, + depth, + stride, + start, + len: ty.bit_width(), + }), + } + } + fn make_trace_scalar( + &mut self, + instantiated_module: InstantiatedModule, + target: MakeTraceDeclTarget, + name: Interned, + source_location: SourceLocation, + ) -> TraceDecl { + let flow = target.flow(); + match target.ty() { + CanonicalType::UInt(ty) => TraceUInt { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallUInt { index, ty }, + |index| SimTraceKind::BigUInt { index, ty }, + |_| unreachable!(""), + ), + name, + ty, + flow, + } + .into(), + CanonicalType::SInt(ty) => TraceSInt { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallSInt { index, ty }, + |index| SimTraceKind::BigSInt { index, ty }, + |_| unreachable!(""), + ), + name, + ty, + flow, + } + .into(), + CanonicalType::Bool(_) => TraceBool { + location: 
self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallBool { index }, + |index| SimTraceKind::BigBool { index }, + |_| unreachable!(""), + ), + name, + flow, + } + .into(), + CanonicalType::Array(_) => unreachable!(), + CanonicalType::Enum(ty) => { + assert_eq!(ty.discriminant_bit_width(), ty.type_properties().bit_width); + let location = match target { + MakeTraceDeclTarget::Expr(target) => { + let compiled_value = self.compile_expr(instantiated_module, target); + let compiled_value = + self.compiled_expr_to_value(compiled_value, source_location); + let discriminant = self.compile_enum_discriminant( + compiled_value.map_ty(Enum::from_canonical), + source_location, + ); + TraceLocation::Scalar(self.new_sim_trace(SimTraceKind::EnumDiscriminant { + index: discriminant, + ty, + })) + } + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty: _, + } => TraceLocation::Memory(TraceMemoryLocation { + id, + depth, + stride, + start, + len: ty.type_properties().bit_width, + }), + }; + TraceFieldlessEnum { + location, + name, + ty, + flow, + } + .into() + } + CanonicalType::Bundle(_) | CanonicalType::PhantomConst(_) => unreachable!(), + CanonicalType::AsyncReset(_) => TraceAsyncReset { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallAsyncReset { index }, + |index| SimTraceKind::BigAsyncReset { index }, + |_| unreachable!(""), + ), + name, + flow, + } + .into(), + CanonicalType::SyncReset(_) => TraceSyncReset { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallSyncReset { index }, + |index| SimTraceKind::BigSyncReset { index }, + |_| unreachable!(""), + ), + name, + flow, + } + .into(), + CanonicalType::Reset(_) => unreachable!(), + CanonicalType::Clock(_) => TraceClock { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |index| SimTraceKind::SmallClock { index }, + |index| SimTraceKind::BigClock { index }, + |_| unreachable!(""), + ), + name, + flow, + } + .into(), + CanonicalType::DynSimOnly(ty) => TraceSimOnly { + location: self.make_trace_scalar_helper( + instantiated_module, + target, + source_location, + |_| unreachable!(""), + |_| unreachable!(""), + |index| SimTraceKind::SimOnly { index, ty }, + ), + name, + ty, + flow, + } + .into(), + } + } + fn compiled_expr_to_value( + &mut self, + expr: CompiledExpr, + source_location: SourceLocation, + ) -> CompiledValue { + if let Some(&retval) = self.compiled_exprs_to_values.get(&expr) { + return retval; + } + assert!( + expr.static_part.layout.ty.is_passive(), + "invalid expression passed to compiled_expr_to_value -- type must be passive", + ); + let CompiledExpr { + static_part, + indexes, + } = expr; + let retval = if indexes.as_ref().is_empty() { + CompiledValue { + layout: static_part.layout, + range: static_part.range, + write: None, + } + } else { + let layout = static_part.layout.with_anonymized_debug_info(); + let retval = CompiledValue { + layout, + range: self.insns.allocate_variable(&layout.layout), + write: None, + }; + let TypeIndexRange { + $($type_plural_field,)* + } = retval.range; + self.add_assignment( + Interned::default(), + chain!( + $($type_plural_field + .iter() + .zip(static_part.range.$type_plural_field.iter()) + .map(|(dest, base)| Insn::$read_indexed_insn { + dest, + src: StatePartArrayIndexed { + base, + indexes: indexes.$type_plural_field, + }, + }),)* + ), 
+ source_location, + ); + retval + }; + self.compiled_exprs_to_values.insert(expr, retval); + retval + } + fn compile_simple_connect( + &mut self, + conditions: Interned<[Cond]>, + lhs: CompiledExpr, + rhs: CompiledValue, + source_location: SourceLocation, + ) { + let CompiledExpr { + static_part: lhs_static_part, + indexes, + } = lhs; + let (lhs_layout, lhs_range) = lhs_static_part.write(); + assert!( + lhs_layout.ty.is_passive(), + "invalid expression passed to compile_simple_connect -- type must be passive", + ); + self.add_assignment( + conditions, + chain!( + $(lhs_range.$type_plural_field + .iter() + .zip(rhs.range.$type_plural_field.iter()) + .map(|(base, src)| { + if indexes.$type_plural_field.is_empty() { + Insn::$copy_insn { dest: base, src } + } else { + Insn::$write_indexed_insn { + dest: StatePartArrayIndexed { + base, + indexes: indexes.$type_plural_field, + }, + src, + } + } + }),)* + ), + source_location, + ); + } + fn process_assignments(&mut self) { + self.assignments + .finalize(self.insns.state_layout().ty.clone().into()); + if let Some(DebugOpaque(dump_assignments_dot)) = &self.dump_assignments_dot { + let graph = + petgraph::graph::DiGraph::<_, _, usize>::from_elements(self.assignments.elements()); + dump_assignments_dot(&petgraph::dot::Dot::new(&graph)); + } + let assignments_order: Vec<_> = match petgraph::algo::toposort(&self.assignments, None) { + Ok(nodes) => nodes + .into_iter() + .filter_map(|n| match n { + AssignmentOrSlotIndex::AssignmentIndex(v) => Some(v), + _ => None, + }) + .collect(), + Err(e) => match e.node_id() { + AssignmentOrSlotIndex::AssignmentIndex(assignment_index) => panic!( + "combinatorial logic cycle detected at: {}", + self.assignments.assignments()[assignment_index].source_location, + ), + $(AssignmentOrSlotIndex::$type_singular_variant(slot) => panic!( + "combinatorial logic cycle detected through: {}", + self.insns.state_layout().ty.$type_plural_field.debug_data[slot.as_usize()].name, + ),)* + }, + }; + struct CondStackEntry<'a> { + cond: &'a Cond, + end_label: Label, + } + let mut cond_stack = Vec::>::new(); + for assignment_index in assignments_order { + let Assignment { + inputs: _, + outputs: _, + conditions, + insns, + source_location, + } = &self.assignments.assignments()[assignment_index]; + let mut same_len = 0; + for (index, (entry, cond)) in cond_stack.iter().zip(conditions).enumerate() { + if entry.cond != cond { + break; + } + same_len = index + 1; + } + while cond_stack.len() > same_len { + let CondStackEntry { cond: _, end_label } = + cond_stack.pop().expect("just checked len"); + self.insns.define_label_at_next_insn(end_label); + } + for cond in &conditions[cond_stack.len()..] { + let end_label = self.insns.new_label(); + match cond.body { + CondBody::IfTrue { cond: cond_value } + | CondBody::IfFalse { cond: cond_value } => { + let (branch_if_zero, branch_if_non_zero) = match cond_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => ( + Insn::BranchIfSmallZero { + target: end_label.0, + value: cond_value.range.small_slots.start, + }, + Insn::BranchIfSmallNonZero { + target: end_label.0, + value: cond_value.range.small_slots.start, + }, + ), + Some(TypeLenSingle::BigSlot) => ( + Insn::BranchIfZero { + target: end_label.0, + value: cond_value.range.big_slots.start, + }, + Insn::BranchIfNonZero { + target: end_label.0, + value: cond_value.range.big_slots.start, + }, + ), + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + }; + self.insns.push( + if let CondBody::IfTrue { .. 
} = cond.body { + branch_if_zero + } else { + branch_if_non_zero + }, + cond.source_location, + ); + } + CondBody::MatchArm { + discriminant, + variant_index, + } => { + self.insns.push( + Insn::BranchIfSmallNeImmediate { + target: end_label.0, + lhs: discriminant, + rhs: variant_index as _, + }, + cond.source_location, + ); + } + } + cond_stack.push(CondStackEntry { cond, end_label }); + } + self.insns.extend(insns.iter().copied(), *source_location); + } + for CondStackEntry { cond: _, end_label } in cond_stack { + self.insns.define_label_at_next_insn(end_label); + } + } + } + }; +} + +get_state_part_kinds! { + impl_compiler! { + type_plural_fields; + type_singular_fields; + type_singular_variants; + type_kinds; + copy_insns; + read_indexed_insns; + write_indexed_insns; + } +} + +impl Compiler { + pub fn new(base_module: Interned>) -> Self { + let original_base_module = base_module; + let base_module = deduce_resets(base_module, true) + .unwrap_or_else(|e| panic!("failed to deduce reset types: {e}")); + Self { + insns: Insns::new(), + original_base_module, + base_module, + modules: HashMap::default(), + extern_modules: Vec::new(), + compiled_values: HashMap::default(), + compiled_exprs: HashMap::default(), + compiled_exprs_to_values: HashMap::default(), + decl_conditions: HashMap::default(), + compiled_values_to_dyn_array_indexes: HashMap::default(), + compiled_value_bool_dest_is_small_map: HashMap::default(), + assignments: Assignments::default(), + clock_triggers: Vec::new(), + compiled_value_to_clock_trigger_map: HashMap::default(), + enum_discriminants: HashMap::default(), + registers: Vec::new(), + traces: SimTraces(Vec::new()), + memories: Vec::new(), + dump_assignments_dot: None, + } + } + #[doc(hidden)] + /// This is explicitly unstable and may be changed/removed at any time + pub fn dump_assignments_dot(&mut self, callback: Box) { + self.dump_assignments_dot = Some(DebugOpaque(callback)); + } + fn new_sim_trace(&mut self, kind: SimTraceKind) -> TraceScalarId { + let id = TraceScalarId(self.traces.0.len()); + self.traces.0.push(SimTrace { + kind, + state: (), + last_state: (), + }); + id + } + fn make_trace_decl_child( + &mut self, + instantiated_module: InstantiatedModule, + target: MakeTraceDeclTarget, + name: Interned, + source_location: SourceLocation, + ) -> TraceDecl { + match target.ty() { + CanonicalType::Array(ty) => { + let elements = Interned::from_iter((0..ty.len()).map(|index| { + self.make_trace_decl_child( + instantiated_module, + match target { + MakeTraceDeclTarget::Expr(target) => MakeTraceDeclTarget::Expr( + Expr::::from_canonical(target)[index], + ), + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty: _, + } => MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start: start + ty.element().bit_width() * index, + ty: ty.element(), + }, + }, + Intern::intern_owned(format!("[{index}]")), + source_location, + ) + })); + TraceArray { + name, + elements, + ty, + flow: target.flow(), + } + .into() + } + CanonicalType::Enum(ty) => { + if ty.variants().iter().all(|v| v.ty.is_none()) { + self.make_trace_scalar(instantiated_module, target, name, source_location) + } else { + let flow = target.flow(); + let location = match target { + MakeTraceDeclTarget::Expr(target) => { + let compiled_value = self.compile_expr(instantiated_module, target); + let compiled_value = + self.compiled_expr_to_value(compiled_value, source_location); + let discriminant = self.compile_enum_discriminant( + compiled_value.map_ty(Enum::from_canonical), + source_location, + 
); + TraceLocation::Scalar(self.new_sim_trace( + SimTraceKind::EnumDiscriminant { + index: discriminant, + ty, + }, + )) + } + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty: _, + } => TraceLocation::Memory(TraceMemoryLocation { + id, + depth, + stride, + start, + len: ty.discriminant_bit_width(), + }), + }; + let discriminant = TraceEnumDiscriminant { + location, + name: "$tag".intern(), + ty, + flow, + }; + let non_empty_fields = + Interned::from_iter(ty.variants().into_iter().enumerate().flat_map( + |(variant_index, variant)| { + variant.ty.map(|variant_ty| { + self.make_trace_decl_child( + instantiated_module, + match target { + MakeTraceDeclTarget::Expr(target) => { + MakeTraceDeclTarget::Expr( + ops::VariantAccess::new_by_index( + Expr::::from_canonical(target), + variant_index, + ) + .to_expr(), + ) + } + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty: _, + } => MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start: start + ty.discriminant_bit_width(), + ty: variant_ty, + }, + }, + variant.name, + source_location, + ) + }) + }, + )); + TraceEnumWithFields { + name, + discriminant, + non_empty_fields, + ty, + flow, + } + .into() + } + } + CanonicalType::Bundle(ty) => { + let fields = Interned::from_iter(ty.fields().iter().zip(ty.field_offsets()).map( + |(field, field_offset)| { + self.make_trace_decl_child( + instantiated_module, + match target { + MakeTraceDeclTarget::Expr(target) => { + MakeTraceDeclTarget::Expr(Expr::field( + Expr::::from_canonical(target), + &field.name, + )) + } + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start, + ty: _, + } => { + let Some(bit_width) = field_offset.only_bit_width() else { + todo!("memory containing sim-only values"); + }; + MakeTraceDeclTarget::Memory { + id, + depth, + stride, + start: start + bit_width, + ty: field.ty, + } + } + }, + field.name, + source_location, + ) + }, + )); + TraceBundle { + name, + fields, + ty, + flow: target.flow(), + } + .into() + } + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) + | CanonicalType::DynSimOnly(_) => { + self.make_trace_scalar(instantiated_module, target, name, source_location) + } + CanonicalType::PhantomConst(_) => TraceBundle { + name, + fields: Interned::default(), + ty: Bundle::new(Interned::default()), + flow: target.flow(), + } + .into(), + } + } + fn make_trace_decl( + &mut self, + instantiated_module: InstantiatedModule, + target_base: TargetBase, + ) -> TraceDecl { + let target = MakeTraceDeclTarget::Expr(target_base.to_expr()); + match target_base { + TargetBase::ModuleIO(module_io) => TraceModuleIO { + name: module_io.name(), + child: self + .make_trace_decl_child( + instantiated_module, + target, + module_io.name(), + module_io.source_location(), + ) + .intern(), + ty: module_io.ty(), + flow: module_io.flow(), + } + .into(), + TargetBase::MemPort(mem_port) => { + let name = Intern::intern_owned(mem_port.port_name().to_string()); + let TraceDecl::Scope(TraceScope::Bundle(bundle)) = self.make_trace_decl_child( + instantiated_module, + target, + name, + mem_port.source_location(), + ) else { + unreachable!() + }; + TraceMemPort { + name, + bundle, + ty: mem_port.ty(), + } + .into() + } + TargetBase::Reg(reg) => TraceReg { + name: reg.name(), + child: self + .make_trace_decl_child( + instantiated_module, + target, + reg.name(), + reg.source_location(), + ) + .intern(), + ty: 
reg.ty(), + } + .into(), + TargetBase::RegSync(reg) => TraceReg { + name: reg.name(), + child: self + .make_trace_decl_child( + instantiated_module, + target, + reg.name(), + reg.source_location(), + ) + .intern(), + ty: reg.ty(), + } + .into(), + TargetBase::RegAsync(reg) => TraceReg { + name: reg.name(), + child: self + .make_trace_decl_child( + instantiated_module, + target, + reg.name(), + reg.source_location(), + ) + .intern(), + ty: reg.ty(), + } + .into(), + TargetBase::Wire(wire) => TraceWire { + name: wire.name(), + child: self + .make_trace_decl_child( + instantiated_module, + target, + wire.name(), + wire.source_location(), + ) + .intern(), + ty: wire.ty(), + } + .into(), + TargetBase::Instance(instance) => { + let TraceDecl::Scope(TraceScope::Bundle(instance_io)) = self.make_trace_decl_child( + instantiated_module, + target, + instance.name(), + instance.source_location(), + ) else { + unreachable!() + }; + let compiled_module = &self.modules[&InstantiatedModule::Child { + parent: instantiated_module.intern(), + instance: instance.intern(), + }]; + TraceInstance { + name: instance.name(), + instance_io, + module: compiled_module.trace_decls, + ty: instance.ty(), + } + .into() + } + } + } + fn compile_value( + &mut self, + target: TargetInInstantiatedModule, + ) -> CompiledValue { + if let Some(&retval) = self.compiled_values.get(&target) { + return retval; + } + let retval = match target.target { + Target::Base(base) => { + let unprefixed_layout = CompiledTypeLayout::get(base.canonical_ty()); + let layout = unprefixed_layout.with_prefixed_debug_names(&format!( + "{:?}.{:?}", + target.instantiated_module, + base.target_name() + )); + let range = self.insns.allocate_variable(&layout.layout); + let write = match *base { + TargetBase::ModuleIO(_) + | TargetBase::MemPort(_) + | TargetBase::Wire(_) + | TargetBase::Instance(_) => None, + TargetBase::Reg(_) | TargetBase::RegSync(_) | TargetBase::RegAsync(_) => { + let write_layout = unprefixed_layout.with_prefixed_debug_names(&format!( + "{:?}.{:?}$next", + target.instantiated_module, + base.target_name() + )); + Some(( + write_layout, + self.insns.allocate_variable(&write_layout.layout), + )) + } + }; + CompiledValue { + range, + layout, + write, + } + } + Target::Child(target_child) => { + let parent = self.compile_value(TargetInInstantiatedModule { + instantiated_module: target.instantiated_module, + target: *target_child.parent(), + }); + match *target_child.path_element() { + TargetPathElement::BundleField(TargetPathBundleField { name }) => { + parent.map_ty(Bundle::from_canonical).field_by_name(name) + } + TargetPathElement::ArrayElement(TargetPathArrayElement { index }) => { + parent.map_ty(Array::from_canonical).element(index) + } + TargetPathElement::DynArrayElement(_) => unreachable!(), + } + } + }; + self.compiled_values.insert(target, retval); + retval + } + fn add_assignment>( + &mut self, + conditions: Interned<[Cond]>, + insns: impl IntoIterator, + source_location: SourceLocation, + ) { + let insns = Vec::from_iter(insns.into_iter().map(Into::into)); + self.assignments + .push(Assignment::new(conditions, insns, source_location)); + } + fn simple_big_expr_input( + &mut self, + instantiated_module: InstantiatedModule, + input: Expr, + ) -> StatePartIndex { + let input = self.compile_expr(instantiated_module, input); + let input = + self.compiled_expr_to_value(input, instantiated_module.leaf_module().source_location()); + assert_eq!(input.range.len(), TypeLen::big_slot()); + input.range.big_slots.start + } + fn 
compile_expr_helper( + &mut self, + instantiated_module: InstantiatedModule, + dest_ty: CanonicalType, + make_insns: impl FnOnce(&mut Self, TypeIndexRange) -> Vec, + ) -> CompiledValue { + let layout = CompiledTypeLayout::get(dest_ty); + let range = self.insns.allocate_variable(&layout.layout); + let retval = CompiledValue { + layout, + range, + write: None, + }; + let insns = make_insns(self, range); + self.add_assignment( + Interned::default(), + insns, + instantiated_module.leaf_module().source_location(), + ); + retval + } + fn simple_nary_big_expr_helper( + &mut self, + instantiated_module: InstantiatedModule, + dest_ty: CanonicalType, + make_insns: impl FnOnce(StatePartIndex) -> Vec, + ) -> CompiledValue { + self.compile_expr_helper(instantiated_module, dest_ty, |_, dest| { + assert_eq!(dest.len(), TypeLen::big_slot()); + make_insns(dest.big_slots.start) + }) + } + fn simple_nary_big_expr( + &mut self, + instantiated_module: InstantiatedModule, + dest_ty: CanonicalType, + inputs: [Expr; N], + make_insns: impl FnOnce( + StatePartIndex, + [StatePartIndex; N], + ) -> Vec, + ) -> CompiledValue { + let inputs = inputs.map(|input| self.simple_big_expr_input(instantiated_module, input)); + self.simple_nary_big_expr_helper(instantiated_module, dest_ty, |dest| { + make_insns(dest, inputs) + }) + } + fn compiled_value_to_dyn_array_index( + &mut self, + compiled_value: CompiledValue, + source_location: SourceLocation, + ) -> StatePartIndex { + if let Some(&retval) = self + .compiled_values_to_dyn_array_indexes + .get(&compiled_value) + { + return retval; + } + let mut ty = compiled_value.layout.ty; + ty.width = ty.width.min(SmallUInt::BITS as usize); + let retval = match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => compiled_value.range.small_slots.start, + Some(TypeLenSingle::BigSlot) => { + let debug_data = SlotDebugData { + name: Interned::default(), + ty: ty.canonical(), + }; + let dest = self + .insns + .allocate_variable(&TypeLayout { + small_slots: StatePartLayout::scalar(debug_data, ()), + ..TypeLayout::empty() + }) + .small_slots + .start; + self.add_assignment( + Interned::default(), + vec![Insn::CastBigToArrayIndex { + dest, + src: compiled_value.range.big_slots.start, + }], + source_location, + ); + dest + } + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + }; + self.compiled_values_to_dyn_array_indexes + .insert(compiled_value, retval); + retval + } + fn compiled_value_bool_dest_is_small( + &mut self, + compiled_value: CompiledValue, + source_location: SourceLocation, + ) -> StatePartIndex { + if let Some(&retval) = self + .compiled_value_bool_dest_is_small_map + .get(&compiled_value) + { + return retval; + } + let retval = match compiled_value.range.len().as_single() { + Some(TypeLenSingle::SmallSlot) => compiled_value.range.small_slots.start, + Some(TypeLenSingle::BigSlot) => { + let debug_data = SlotDebugData { + name: Interned::default(), + ty: Bool.canonical(), + }; + let dest = self + .insns + .allocate_variable(&TypeLayout { + small_slots: StatePartLayout::scalar(debug_data, ()), + ..TypeLayout::empty() + }) + .small_slots + .start; + self.add_assignment( + Interned::default(), + vec![Insn::IsNonZeroDestIsSmall { + dest, + src: compiled_value.range.big_slots.start, + }], + source_location, + ); + dest + } + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + }; + self.compiled_value_bool_dest_is_small_map + .insert(compiled_value, retval); + retval + } + fn compile_cast_scalar_to_bits( + &mut self, + instantiated_module: 
InstantiatedModule, + arg: Expr, + cast_fn: impl FnOnce(Expr) -> Expr, + ) -> CompiledValue { + let arg = Expr::::from_canonical(arg); + let retval = cast_fn(arg); + let retval = self.compile_expr(instantiated_module, Expr::canonical(retval)); + let retval = self + .compiled_expr_to_value(retval, instantiated_module.leaf_module().source_location()); + retval.map_ty(UInt::from_canonical) + } + fn compile_cast_aggregate_to_bits( + &mut self, + instantiated_module: InstantiatedModule, + parts: impl IntoIterator>, + ) -> CompiledValue { + let retval = parts + .into_iter() + .map(|part| part.cast_to_bits()) + .reduce(|accumulator, part| accumulator | (part << Expr::ty(accumulator).width)) + .unwrap_or_else(|| UInt[0].zero().to_expr()); + let retval = self.compile_expr(instantiated_module, Expr::canonical(retval)); + let retval = self + .compiled_expr_to_value(retval, instantiated_module.leaf_module().source_location()); + retval.map_ty(UInt::from_canonical) + } + fn compile_cast_to_bits( + &mut self, + instantiated_module: InstantiatedModule, + expr: ops::CastToBits, + ) -> CompiledValue { + match Expr::ty(expr.arg()) { + CanonicalType::UInt(_) => { + self.compile_cast_scalar_to_bits(instantiated_module, expr.arg(), |arg| arg) + } + CanonicalType::SInt(ty) => self.compile_cast_scalar_to_bits( + instantiated_module, + expr.arg(), + |arg: Expr| arg.cast_to(ty.as_same_width_uint()), + ), + CanonicalType::Bool(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) => self.compile_cast_scalar_to_bits( + instantiated_module, + expr.arg(), + |arg: Expr| arg.cast_to(UInt[1]), + ), + CanonicalType::Array(ty) => self.compile_cast_aggregate_to_bits( + instantiated_module, + (0..ty.len()).map(|index| Expr::::from_canonical(expr.arg())[index]), + ), + CanonicalType::Enum(ty) => self + .simple_nary_big_expr( + instantiated_module, + UInt[ty.type_properties().bit_width].canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| vec![Insn::Copy { dest, src }], + ) + .map_ty(UInt::from_canonical), + CanonicalType::Bundle(ty) => self.compile_cast_aggregate_to_bits( + instantiated_module, + ty.fields().iter().map(|field| { + Expr::field(Expr::::from_canonical(expr.arg()), &field.name) + }), + ), + CanonicalType::PhantomConst(_) | CanonicalType::DynSimOnly(_) => { + self.compile_cast_aggregate_to_bits(instantiated_module, []) + } + } + } + fn compile_cast_bits_to_or_uninit( + &mut self, + instantiated_module: InstantiatedModule, + arg: Option>, + ty: CanonicalType, + ) -> CompiledValue { + let retval = match ty { + CanonicalType::UInt(ty) => Expr::canonical(arg.unwrap_or_else(|| ty.zero().to_expr())), + CanonicalType::SInt(ty) => { + Expr::canonical(arg.map_or_else(|| ty.zero().to_expr(), |arg| arg.cast_to(ty))) + } + CanonicalType::Bool(ty) => { + Expr::canonical(arg.map_or_else(|| false.to_expr(), |arg| arg.cast_to(ty))) + } + CanonicalType::Array(ty) => { + let stride = ty.element().bit_width(); + Expr::::canonical(match arg { + Some(arg) => ops::ArrayLiteral::new( + ty.element(), + Interned::from_iter((0..ty.len()).map(|index| { + let start = stride * index; + let end = start + stride; + arg[start..end].cast_bits_to(ty.element()) + })), + ) + .to_expr(), + None => repeat(ty.element().uninit(), ty.len()), + }) + } + ty @ CanonicalType::Enum(_) => { + return self.simple_nary_big_expr( + instantiated_module, + ty, + [Expr::canonical(arg.unwrap_or_else(|| { + UInt::new_dyn(ty.bit_width()).zero().to_expr() + }))], + |dest, [src]| 
vec![Insn::Copy { dest, src }], + ); + } + CanonicalType::Bundle(ty) => Expr::canonical( + ops::BundleLiteral::new( + ty, + Interned::from_iter(ty.field_offsets().iter().zip(&ty.fields()).map( + |(&offset, &field)| { + let OpaqueSimValueSize { + bit_width: offset, + sim_only_values_len: 0, + } = offset + else { + unreachable!(); + }; + let end = offset + field.ty.bit_width(); + match arg { + Some(arg) => arg[offset..end].cast_bits_to(field.ty), + None => field.ty.uninit(), + } + }, + )), + ) + .to_expr(), + ), + CanonicalType::AsyncReset(ty) => Expr::canonical( + arg.unwrap_or_else(|| UInt::new_dyn(1).zero().to_expr()) + .cast_to(ty), + ), + CanonicalType::SyncReset(ty) => Expr::canonical( + arg.unwrap_or_else(|| UInt::new_dyn(1).zero().to_expr()) + .cast_to(ty), + ), + CanonicalType::Reset(_) => unreachable!(), + CanonicalType::Clock(ty) => Expr::canonical( + arg.unwrap_or_else(|| UInt::new_dyn(1).zero().to_expr()) + .cast_to(ty), + ), + CanonicalType::PhantomConst(ty) => { + if let Some(arg) = arg { + let _ = self.compile_expr(instantiated_module, Expr::canonical(arg)); + } + Expr::canonical(ty.to_expr()) + } + CanonicalType::DynSimOnly(ty) => { + assert!(arg.is_none(), "can't cast bits to SimOnly"); + return self.compile_expr_helper(instantiated_module, ty.canonical(), |_, dest| { + assert_eq!(dest.len(), TypeLen::sim_only_slot()); + vec![] + }); + } + }; + let retval = self.compile_expr(instantiated_module, Expr::canonical(retval)); + self.compiled_expr_to_value(retval, instantiated_module.leaf_module().source_location()) + } + fn compile_aggregate_literal( + &mut self, + instantiated_module: InstantiatedModule, + dest_ty: CanonicalType, + inputs: Interned<[Expr]>, + ) -> CompiledValue { + self.compile_expr_helper(instantiated_module, dest_ty, |this, dest| { + let mut insns = Vec::new(); + let mut offset = TypeIndex::ZERO; + for input in inputs { + let input = this.compile_expr(instantiated_module, input); + let input = this + .compiled_expr_to_value( + input, + instantiated_module.leaf_module().source_location(), + ) + .range; + insns.extend( + input.insns_for_copy_to(dest.slice(TypeIndexRange::new(offset, input.len()))), + ); + offset = offset.offset(input.len().as_index()); + } + insns + }) + } + fn compile_expr( + &mut self, + instantiated_module: InstantiatedModule, + expr: Expr, + ) -> CompiledExpr { + if let Some(&retval) = self.compiled_exprs.get(&expr) { + return retval; + } + let mut cast_bit = |arg: Expr| { + let src_signed = match Expr::ty(arg) { + CanonicalType::UInt(_) => false, + CanonicalType::SInt(_) => true, + CanonicalType::Bool(_) => false, + CanonicalType::Array(_) => unreachable!(), + CanonicalType::Enum(_) => unreachable!(), + CanonicalType::Bundle(_) => unreachable!(), + CanonicalType::AsyncReset(_) => false, + CanonicalType::SyncReset(_) => false, + CanonicalType::Reset(_) => false, + CanonicalType::Clock(_) => false, + CanonicalType::PhantomConst(_) => unreachable!(), + CanonicalType::DynSimOnly(_) => unreachable!(), + }; + let dest_signed = match Expr::ty(expr) { + CanonicalType::UInt(_) => false, + CanonicalType::SInt(_) => true, + CanonicalType::Bool(_) => false, + CanonicalType::Array(_) => unreachable!(), + CanonicalType::Enum(_) => unreachable!(), + CanonicalType::Bundle(_) => unreachable!(), + CanonicalType::AsyncReset(_) => false, + CanonicalType::SyncReset(_) => false, + CanonicalType::Reset(_) => false, + CanonicalType::Clock(_) => false, + CanonicalType::PhantomConst(_) => unreachable!(), + CanonicalType::DynSimOnly(_) => unreachable!(), + }; + 
self.simple_nary_big_expr(instantiated_module, Expr::ty(expr), [arg], |dest, [src]| { + match (src_signed, dest_signed) { + (false, false) | (true, true) => { + vec![Insn::Copy { dest, src }] + } + (false, true) => vec![Insn::CastToSInt { + dest, + src, + dest_width: 1, + }], + (true, false) => vec![Insn::CastToUInt { + dest, + src, + dest_width: 1, + }], + } + }) + .into() + }; + let retval: CompiledExpr<_> = match *Expr::expr_enum(expr) { + ExprEnum::UIntLiteral(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [], + |dest, []| { + vec![Insn::Const { + dest, + value: expr.to_bigint().intern_sized(), + }] + }, + ) + .into(), + ExprEnum::SIntLiteral(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [], + |dest, []| { + vec![Insn::Const { + dest, + value: expr.to_bigint().intern_sized(), + }] + }, + ) + .into(), + ExprEnum::BoolLiteral(expr) => self + .simple_nary_big_expr(instantiated_module, Bool.canonical(), [], |dest, []| { + vec![Insn::Const { + dest, + value: BigInt::from(expr).intern_sized(), + }] + }) + .into(), + ExprEnum::PhantomConst(_) => self + .compile_aggregate_literal(instantiated_module, Expr::ty(expr), Interned::default()) + .into(), + ExprEnum::BundleLiteral(literal) => self + .compile_aggregate_literal( + instantiated_module, + Expr::ty(expr), + literal.field_values(), + ) + .into(), + ExprEnum::ArrayLiteral(literal) => self + .compile_aggregate_literal( + instantiated_module, + Expr::ty(expr), + literal.element_values(), + ) + .into(), + ExprEnum::EnumLiteral(expr) => { + let enum_bits_ty = UInt[expr.ty().type_properties().bit_width]; + let enum_bits = if let Some(variant_value) = expr.variant_value() { + ( + UInt[expr.ty().discriminant_bit_width()] + .from_int_wrapping(expr.variant_index()), + variant_value, + ) + .cast_to_bits() + .cast_to(enum_bits_ty) + } else { + enum_bits_ty + .from_int_wrapping(expr.variant_index()) + .to_expr() + }; + self.compile_expr( + instantiated_module, + enum_bits.cast_bits_to(expr.ty().canonical()), + ) + } + ExprEnum::Uninit(expr) => self + .compile_cast_bits_to_or_uninit(instantiated_module, None, expr.ty()) + .into(), + ExprEnum::NotU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Expr::ty(expr.arg()).canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::NotU { + dest, + src, + width: Expr::ty(expr.arg()).width(), + }] + }, + ) + .into(), + ExprEnum::NotS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Expr::ty(expr.arg()).canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| vec![Insn::NotS { dest, src }], + ) + .into(), + ExprEnum::NotB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Expr::ty(expr.arg()).canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::NotU { + dest, + src, + width: 1, + }] + }, + ) + .into(), + ExprEnum::Neg(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| vec![Insn::Neg { dest, src }], + ) + .into(), + ExprEnum::BitAndU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::And { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitAndS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::And { dest, 
lhs, rhs }], + ) + .into(), + ExprEnum::BitAndB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::And { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitOrU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Or { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitOrS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Or { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitOrB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Or { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitXorU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Xor { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitXorS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Xor { dest, lhs, rhs }], + ) + .into(), + ExprEnum::BitXorB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Xor { dest, lhs, rhs }], + ) + .into(), + ExprEnum::AddU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Add { dest, lhs, rhs }], + ) + .into(), + ExprEnum::AddS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Add { dest, lhs, rhs }], + ) + .into(), + ExprEnum::SubU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| { + vec![Insn::SubU { + dest, + lhs, + rhs, + dest_width: expr.ty().width(), + }] + }, + ) + .into(), + ExprEnum::SubS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::SubS { dest, lhs, rhs }], + ) + .into(), + ExprEnum::MulU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Mul { dest, lhs, rhs }], + ) + .into(), + ExprEnum::MulS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Mul { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DivU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Div { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DivS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), 
Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Div { dest, lhs, rhs }], + ) + .into(), + ExprEnum::RemU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Rem { dest, lhs, rhs }], + ) + .into(), + ExprEnum::RemS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::Rem { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DynShlU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::DynShl { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DynShlS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::DynShl { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DynShrU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::DynShr { dest, lhs, rhs }], + ) + .into(), + ExprEnum::DynShrS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::DynShr { dest, lhs, rhs }], + ) + .into(), + ExprEnum::FixedShlU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs())], + |dest, [lhs]| { + vec![Insn::Shl { + dest, + lhs, + rhs: expr.rhs(), + }] + }, + ) + .into(), + ExprEnum::FixedShlS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs())], + |dest, [lhs]| { + vec![Insn::Shl { + dest, + lhs, + rhs: expr.rhs(), + }] + }, + ) + .into(), + ExprEnum::FixedShrU(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs())], + |dest, [lhs]| { + vec![Insn::Shr { + dest, + lhs, + rhs: expr.rhs(), + }] + }, + ) + .into(), + ExprEnum::FixedShrS(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.lhs())], + |dest, [lhs]| { + vec![Insn::Shr { + dest, + lhs, + rhs: expr.rhs(), + }] + }, + ) + .into(), + ExprEnum::CmpLtB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpLeB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGtB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGeB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + 
ExprEnum::CmpEqB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpEq { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpNeB(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpNe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpLtU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpLeU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGtU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGeU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpEqU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpEq { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpNeU(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpNe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpLtS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpLeS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGtS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLt { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpGeS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + // swap both comparison direction and lhs/rhs + [Expr::canonical(expr.rhs()), Expr::canonical(expr.lhs())], + |dest, [lhs, rhs]| vec![Insn::CmpLe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpEqS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpEq { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CmpNeS(expr) => self + .simple_nary_big_expr( + instantiated_module, + Bool.canonical(), + [Expr::canonical(expr.lhs()), Expr::canonical(expr.rhs())], + |dest, [lhs, rhs]| vec![Insn::CmpNe { dest, lhs, rhs }], + ) + .into(), + ExprEnum::CastUIntToUInt(expr) => self + 
.simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::CastToUInt { + dest, + src, + dest_width: expr.ty().width(), + }] + }, + ) + .into(), + ExprEnum::CastUIntToSInt(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::CastToSInt { + dest, + src, + dest_width: expr.ty().width(), + }] + }, + ) + .into(), + ExprEnum::CastSIntToUInt(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::CastToUInt { + dest, + src, + dest_width: expr.ty().width(), + }] + }, + ) + .into(), + ExprEnum::CastSIntToSInt(expr) => self + .simple_nary_big_expr( + instantiated_module, + expr.ty().canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::CastToSInt { + dest, + src, + dest_width: expr.ty().width(), + }] + }, + ) + .into(), + ExprEnum::CastBoolToUInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastBoolToSInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastUIntToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSIntToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastBoolToSyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastUIntToSyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSIntToSyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastBoolToAsyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastUIntToAsyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSIntToAsyncReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSyncResetToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSyncResetToUInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSyncResetToSInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSyncResetToReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastAsyncResetToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastAsyncResetToUInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastAsyncResetToSInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastAsyncResetToReset(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastResetToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastResetToUInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastResetToSInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastBoolToClock(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastUIntToClock(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastSIntToClock(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastClockToBool(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastClockToUInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::CastClockToSInt(expr) => cast_bit(Expr::canonical(expr.arg())), + ExprEnum::FieldAccess(expr) => self + .compile_expr(instantiated_module, Expr::canonical(expr.base())) + .map_ty(Bundle::from_canonical) + .field_by_index(expr.field_index()), + ExprEnum::VariantAccess(variant_access) => { + let start = Expr::ty(variant_access.base()).discriminant_bit_width(); + let len = Expr::ty(expr).bit_width(); + self.compile_expr( + instantiated_module, + 
variant_access.base().cast_to_bits()[start..start + len] + .cast_bits_to(Expr::ty(expr)), + ) + } + ExprEnum::ArrayIndex(expr) => self + .compile_expr(instantiated_module, Expr::canonical(expr.base())) + .map_ty(Array::from_canonical) + .element(expr.element_index()), + ExprEnum::DynArrayIndex(expr) => { + let element_index = + self.compile_expr(instantiated_module, Expr::canonical(expr.element_index())); + let element_index = self.compiled_expr_to_value( + element_index, + instantiated_module.leaf_module().source_location(), + ); + let index_slot = self.compiled_value_to_dyn_array_index( + element_index.map_ty(UInt::from_canonical), + instantiated_module.leaf_module().source_location(), + ); + self.compile_expr(instantiated_module, Expr::canonical(expr.base())) + .map_ty(Array::from_canonical) + .element_dyn(index_slot) + } + ExprEnum::ReduceBitAndU(expr) => if Expr::ty(expr.arg()).width() == 0 { + self.compile_expr(instantiated_module, Expr::canonical(true.to_expr())) + } else { + self.compile_expr( + instantiated_module, + Expr::canonical( + expr.arg() + .cmp_eq(Expr::ty(expr.arg()).from_int_wrapping(-1)), + ), + ) + } + .into(), + ExprEnum::ReduceBitAndS(expr) => if Expr::ty(expr.arg()).width() == 0 { + self.compile_expr(instantiated_module, Expr::canonical(true.to_expr())) + } else { + self.compile_expr( + instantiated_module, + Expr::canonical( + expr.arg() + .cmp_eq(Expr::ty(expr.arg()).from_int_wrapping(-1)), + ), + ) + } + .into(), + ExprEnum::ReduceBitOrU(expr) => if Expr::ty(expr.arg()).width() == 0 { + self.compile_expr(instantiated_module, Expr::canonical(false.to_expr())) + } else { + self.compile_expr( + instantiated_module, + Expr::canonical(expr.arg().cmp_ne(Expr::ty(expr.arg()).from_int_wrapping(0))), + ) + } + .into(), + ExprEnum::ReduceBitOrS(expr) => if Expr::ty(expr.arg()).width() == 0 { + self.compile_expr(instantiated_module, Expr::canonical(false.to_expr())) + } else { + self.compile_expr( + instantiated_module, + Expr::canonical(expr.arg().cmp_ne(Expr::ty(expr.arg()).from_int_wrapping(0))), + ) + } + .into(), + ExprEnum::ReduceBitXorU(expr) => self + .simple_nary_big_expr( + instantiated_module, + UInt::<1>::TYPE.canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::ReduceBitXor { + dest, + src, + input_width: Expr::ty(expr.arg()).width(), + }] + }, + ) + .into(), + ExprEnum::ReduceBitXorS(expr) => self + .simple_nary_big_expr( + instantiated_module, + UInt::<1>::TYPE.canonical(), + [Expr::canonical(expr.arg())], + |dest, [src]| { + vec![Insn::ReduceBitXor { + dest, + src, + input_width: Expr::ty(expr.arg()).width(), + }] + }, + ) + .into(), + ExprEnum::SliceUInt(expr) => self + .simple_nary_big_expr( + instantiated_module, + UInt::new_dyn(expr.range().len()).canonical(), + [Expr::canonical(expr.base())], + |dest, [src]| { + vec![Insn::SliceInt { + dest, + src, + start: expr.range().start, + len: expr.range().len(), + }] + }, + ) + .into(), + ExprEnum::SliceSInt(expr) => self + .simple_nary_big_expr( + instantiated_module, + UInt::new_dyn(expr.range().len()).canonical(), + [Expr::canonical(expr.base())], + |dest, [src]| { + vec![Insn::SliceInt { + dest, + src, + start: expr.range().start, + len: expr.range().len(), + }] + }, + ) + .into(), + ExprEnum::CastToBits(expr) => self + .compile_cast_to_bits(instantiated_module, expr) + .map_ty(CanonicalType::UInt) + .into(), + ExprEnum::CastBitsTo(expr) => self + .compile_cast_bits_to_or_uninit(instantiated_module, Some(expr.arg()), expr.ty()) + .into(), + ExprEnum::ModuleIO(expr) => self + 
                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::Instance(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::Wire(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::Reg(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::RegSync(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::RegAsync(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+            ExprEnum::MemPort(expr) => self
+                .compile_value(TargetInInstantiatedModule {
+                    instantiated_module,
+                    target: expr.into(),
+                })
+                .into(),
+        };
+        self.compiled_exprs.insert(expr, retval);
+        retval
+    }
+    fn compile_connect(
+        &mut self,
+        lhs_instantiated_module: InstantiatedModule,
+        lhs_conditions: Interned<[Cond]>,
+        lhs: Expr<CanonicalType>,
+        rhs_instantiated_module: InstantiatedModule,
+        rhs_conditions: Interned<[Cond]>,
+        mut rhs: Expr<CanonicalType>,
+        source_location: SourceLocation,
+    ) {
+        if Expr::ty(lhs) != Expr::ty(rhs) || !Expr::ty(lhs).is_passive() {
+            match Expr::ty(lhs) {
+                CanonicalType::UInt(lhs_ty) => {
+                    rhs = Expr::canonical(Expr::<UInt>::from_canonical(rhs).cast_to(lhs_ty));
+                }
+                CanonicalType::SInt(lhs_ty) => {
+                    rhs = Expr::canonical(Expr::<SInt>::from_canonical(rhs).cast_to(lhs_ty));
+                }
+                CanonicalType::Bool(_) => unreachable!(),
+                CanonicalType::Array(lhs_ty) => {
+                    let CanonicalType::Array(rhs_ty) = Expr::ty(rhs) else {
+                        unreachable!();
+                    };
+                    assert_eq!(lhs_ty.len(), rhs_ty.len());
+                    let lhs = Expr::<Array>::from_canonical(lhs);
+                    let rhs = Expr::<Array>::from_canonical(rhs);
+                    for index in 0..lhs_ty.len() {
+                        self.compile_connect(
+                            lhs_instantiated_module,
+                            lhs_conditions,
+                            lhs[index],
+                            rhs_instantiated_module,
+                            rhs_conditions,
+                            rhs[index],
+                            source_location,
+                        );
+                    }
+                    return;
+                }
+                CanonicalType::Enum(lhs_ty) => {
+                    let CanonicalType::Enum(rhs_ty) = Expr::ty(rhs) else {
+                        unreachable!();
+                    };
+                    todo!("handle connect with different enum types");
+                }
+                CanonicalType::Bundle(lhs_ty) => {
+                    let CanonicalType::Bundle(rhs_ty) = Expr::ty(rhs) else {
+                        unreachable!();
+                    };
+                    assert_eq!(lhs_ty.fields().len(), rhs_ty.fields().len());
+                    let lhs = Expr::<Bundle>::from_canonical(lhs);
+                    let rhs = Expr::<Bundle>::from_canonical(rhs);
+                    for (
+                        field_index,
+                        (
+                            BundleField {
+                                name,
+                                flipped,
+                                ty: _,
+                            },
+                            rhs_field,
+                        ),
+                    ) in lhs_ty.fields().into_iter().zip(rhs_ty.fields()).enumerate()
+                    {
+                        assert_eq!(name, rhs_field.name);
+                        assert_eq!(flipped, rhs_field.flipped);
+                        let lhs_expr = ops::FieldAccess::new_by_index(lhs, field_index).to_expr();
+                        let rhs_expr = ops::FieldAccess::new_by_index(rhs, field_index).to_expr();
+                        if flipped {
+                            // swap lhs/rhs
+                            self.compile_connect(
+                                rhs_instantiated_module,
+                                rhs_conditions,
+                                rhs_expr,
+                                lhs_instantiated_module,
+                                lhs_conditions,
+                                lhs_expr,
+                                source_location,
+                            );
+                        } else {
+                            self.compile_connect(
+                                lhs_instantiated_module,
+                                lhs_conditions,
+                                lhs_expr,
+                                rhs_instantiated_module,
+                                rhs_conditions,
+                                rhs_expr,
+                                source_location,
+                            );
+                        }
+                    }
+                    return;
+                }
+                CanonicalType::AsyncReset(_) => unreachable!(),
+                CanonicalType::SyncReset(_) => unreachable!(),
+                CanonicalType::Reset(_) => unreachable!(),
+                CanonicalType::Clock(_) => unreachable!(),
+                CanonicalType::PhantomConst(_) =>
unreachable!("PhantomConst mismatch"), + CanonicalType::DynSimOnly(_) => { + unreachable!("DynSimOnly mismatch"); + } + } + } + let Some(target) = lhs.target() else { + unreachable!("connect lhs must have target"); + }; + let lhs_decl_conditions = self.decl_conditions[&TargetInInstantiatedModule { + instantiated_module: lhs_instantiated_module, + target: target.base().into(), + }]; + let lhs = self.compile_expr(lhs_instantiated_module, lhs); + let rhs = self.compile_expr(rhs_instantiated_module, rhs); + let rhs = self.compiled_expr_to_value(rhs, source_location); + self.compile_simple_connect( + lhs_conditions[lhs_decl_conditions.len()..].intern(), + lhs, + rhs, + source_location, + ); + } + fn compile_clock( + &mut self, + clk: CompiledValue, + source_location: SourceLocation, + ) -> ClockTrigger { + if let Some(&retval) = self.compiled_value_to_clock_trigger_map.get(&clk) { + return retval; + } + let mut alloc_small_slot = |part_name: &str| { + self.insns + .state_layout + .ty + .small_slots + .allocate(&StatePartLayout::scalar( + SlotDebugData { + name: Interned::default(), + ty: Bool.canonical(), + }, + (), + )) + .start + }; + let last_clk_was_low = alloc_small_slot("last_clk_was_low"); + let clk_triggered = alloc_small_slot("clk_triggered"); + let retval = ClockTrigger { + last_clk_was_low, + clk: self.compiled_value_bool_dest_is_small( + clk.map_ty(CanonicalType::Clock), + source_location, + ), + clk_triggered, + source_location, + }; + self.add_assignment( + Interned::default(), + [Insn::AndSmall { + dest: clk_triggered, + lhs: retval.clk, + rhs: last_clk_was_low, + }], + source_location, + ); + self.clock_triggers.push(retval); + self.compiled_value_to_clock_trigger_map.insert(clk, retval); + retval + } + fn compile_enum_discriminant( + &mut self, + enum_value: CompiledValue, + source_location: SourceLocation, + ) -> StatePartIndex { + if let Some(&retval) = self.enum_discriminants.get(&enum_value) { + return retval; + } + let retval_ty = Enum::new( + enum_value + .layout + .ty + .variants() + .iter() + .map(|variant| EnumVariant { + name: variant.name, + ty: None, + }) + .collect(), + ); + let retval = if retval_ty == enum_value.layout.ty + && enum_value.range.len() == TypeLen::small_slot() + { + enum_value.range.small_slots.start + } else { + let retval = self + .insns + .state_layout + .ty + .small_slots + .allocate(&StatePartLayout::scalar( + SlotDebugData { + name: Interned::default(), + ty: retval_ty.canonical(), + }, + (), + )) + .start; + let discriminant_bit_width = enum_value.layout.ty.discriminant_bit_width(); + let discriminant_mask = !(!0u64 << discriminant_bit_width); + let insn = match enum_value.range.len().as_single() { + Some(TypeLenSingle::BigSlot) => Insn::AndBigWithSmallImmediate { + dest: retval, + lhs: enum_value.range.big_slots.start, + rhs: discriminant_mask, + }, + Some(TypeLenSingle::SmallSlot) => { + if discriminant_bit_width == enum_value.layout.ty.type_properties().bit_width { + Insn::CopySmall { + dest: retval, + src: enum_value.range.small_slots.start, + } + } else { + Insn::AndSmallImmediate { + dest: retval, + lhs: enum_value.range.small_slots.start, + rhs: discriminant_mask, + } + } + } + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + }; + self.add_assignment(Interned::default(), [insn], source_location); + retval + }; + self.enum_discriminants.insert(enum_value, retval); + retval + } + fn compile_stmt_reg( + &mut self, + stmt_reg: StmtReg, + instantiated_module: InstantiatedModule, + value: CompiledValue, + ) { + let StmtReg { 
annotations, reg } = stmt_reg; + let clk = self.compile_expr(instantiated_module, Expr::canonical(reg.clock_domain().clk)); + let clk = self + .compiled_expr_to_value(clk, reg.source_location()) + .map_ty(Clock::from_canonical); + let clk = self.compile_clock(clk, reg.source_location()); + struct Dispatch; + impl ResetTypeDispatch for Dispatch { + type Input = (); + + type Output = bool; + + fn reset(self, _input: Self::Input) -> Self::Output { + unreachable!() + } + + fn sync_reset(self, _input: Self::Input) -> Self::Output { + false + } + + fn async_reset(self, _input: Self::Input) -> Self::Output { + true + } + } + let reset = if let Some(init) = reg.init() { + let init = self.compile_expr(instantiated_module, init); + let init = self.compiled_expr_to_value(init, reg.source_location()); + let rst = + self.compile_expr(instantiated_module, Expr::canonical(reg.clock_domain().rst)); + let rst = self.compiled_expr_to_value(rst, reg.source_location()); + let rst = self.compiled_value_bool_dest_is_small(rst, reg.source_location()); + let is_async = R::dispatch((), Dispatch); + if is_async { + let cond = Expr::canonical(reg.clock_domain().rst.cast_to(Bool)); + let cond = self.compile_expr(instantiated_module, cond); + let cond = self.compiled_expr_to_value(cond, reg.source_location()); + let cond = cond.map_ty(Bool::from_canonical); + // write to the register's current value since asynchronous reset is combinational + let lhs = CompiledValue { + layout: value.layout, + range: value.range, + write: None, + } + .into(); + self.compile_simple_connect( + [Cond { + body: CondBody::IfTrue { cond }, + source_location: reg.source_location(), + }] + .intern_slice(), + lhs, + init, + reg.source_location(), + ); + } + Some(RegisterReset { + is_async, + init, + rst, + }) + } else { + None + }; + self.registers.push(Register { + value, + clk_triggered: clk.clk_triggered, + reset, + source_location: reg.source_location(), + }); + } + fn compile_declaration( + &mut self, + declaration: StmtDeclaration, + parent_module: Interned, + conditions: Interned<[Cond]>, + ) -> TraceDecl { + let target_base: TargetBase = match &declaration { + StmtDeclaration::Wire(v) => v.wire.into(), + StmtDeclaration::Reg(v) => v.reg.into(), + StmtDeclaration::RegSync(v) => v.reg.into(), + StmtDeclaration::RegAsync(v) => v.reg.into(), + StmtDeclaration::Instance(v) => v.instance.into(), + }; + let target = TargetInInstantiatedModule { + instantiated_module: *parent_module, + target: target_base.into(), + }; + self.decl_conditions.insert(target, conditions); + let compiled_value = self.compile_value(target); + match declaration { + StmtDeclaration::Wire(StmtWire { annotations, wire }) => {} + StmtDeclaration::Reg(_) => { + unreachable!("Reset types were already replaced by SyncReset or AsyncReset"); + } + StmtDeclaration::RegSync(stmt_reg) => { + self.compile_stmt_reg(stmt_reg, *parent_module, compiled_value) + } + StmtDeclaration::RegAsync(stmt_reg) => { + self.compile_stmt_reg(stmt_reg, *parent_module, compiled_value) + } + StmtDeclaration::Instance(StmtInstance { + annotations, + instance, + }) => { + let inner_instantiated_module = InstantiatedModule::Child { + parent: parent_module, + instance: instance.intern_sized(), + } + .intern_sized(); + let instance_expr = instance.to_expr(); + self.compile_module(inner_instantiated_module); + for (field_index, module_io) in + instance.instantiated().module_io().into_iter().enumerate() + { + let instance_field = + ops::FieldAccess::new_by_index(instance_expr, field_index).to_expr(); + 
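+                    // Connect each instance port according to its flow as seen
+                    // from the parent: for Flow::Source the child's module_io
+                    // value is copied out to the parent's instance field, and
+                    // for Flow::Sink the parent's instance field is copied into
+                    // the child's module_io. Flipped bundle fields are handled
+                    // inside compile_connect, which swaps lhs/rhs for them.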
match Expr::flow(instance_field) { + Flow::Source => { + // we need to supply the value to the instance since the + // parent module expects to read from the instance + self.compile_connect( + *parent_module, + conditions, + instance_field, + *inner_instantiated_module, + Interned::default(), + module_io.module_io.to_expr(), + instance.source_location(), + ); + } + Flow::Sink => { + // we need to take the value from the instance since the + // parent module expects to write to the instance + self.compile_connect( + *inner_instantiated_module, + Interned::default(), + module_io.module_io.to_expr(), + *parent_module, + conditions, + instance_field, + instance.source_location(), + ); + } + Flow::Duplex => unreachable!(), + } + } + } + } + self.make_trace_decl(*parent_module, target_base) + } + fn allocate_delay_chain( + &mut self, + len: usize, + layout: &TypeLayout, + first: Option, + last: Option, + mut from_allocation: impl FnMut(TypeIndexRange) -> T, + ) -> Vec { + match (len, first, last) { + (0, _, _) => Vec::new(), + (1, Some(v), _) | (1, None, Some(v)) => vec![v], + (2, Some(first), Some(last)) => vec![first, last], + (len, first, last) => { + let inner_len = len - first.is_some() as usize - last.is_some() as usize; + first + .into_iter() + .chain( + (0..inner_len) + .map(|_| from_allocation(self.insns.allocate_variable(layout))), + ) + .chain(last) + .collect() + } + } + } + fn allocate_delay_chain_small( + &mut self, + len: usize, + ty: CanonicalType, + first: Option>, + last: Option>, + ) -> Vec> { + self.allocate_delay_chain( + len, + &TypeLayout { + small_slots: StatePartLayout::scalar( + SlotDebugData { + name: Interned::default(), + ty, + }, + (), + ), + ..TypeLayout::empty() + }, + first, + last, + |range| range.small_slots.start, + ) + } + fn compile_memory_port_rw_helper( + &mut self, + memory: StatePartIndex, + stride: usize, + mut start: usize, + data_layout: CompiledTypeLayout, + mask_layout: CompiledTypeLayout, + mut read: Option>, + mut write: Option>, + ) { + match data_layout.body { + CompiledTypeLayoutBody::Scalar => { + let CompiledTypeLayoutBody::Scalar = mask_layout.body else { + unreachable!(); + }; + let signed = match data_layout.ty { + CanonicalType::UInt(_) => false, + CanonicalType::SInt(_) => true, + CanonicalType::Bool(_) => false, + CanonicalType::Array(_) => unreachable!(), + CanonicalType::Enum(_) => false, + CanonicalType::Bundle(_) => unreachable!(), + CanonicalType::AsyncReset(_) => false, + CanonicalType::SyncReset(_) => false, + CanonicalType::Reset(_) => false, + CanonicalType::Clock(_) => false, + CanonicalType::PhantomConst(_) => unreachable!(), + CanonicalType::DynSimOnly(_) => false, + }; + let width = data_layout.ty.bit_width(); + if let Some(MemoryPortReadInsns { + addr, + en: _, + write_mode: _, + data, + insns, + }) = read + { + insns.push( + match data.len().as_single() { + Some(TypeLenSingle::BigSlot) => { + let dest = data.big_slots.start; + if signed { + Insn::MemoryReadSInt { + dest, + memory, + addr, + stride, + start, + width, + } + } else { + Insn::MemoryReadUInt { + dest, + memory, + addr, + stride, + start, + width, + } + } + } + Some(TypeLenSingle::SmallSlot) => { + let _dest = data.small_slots.start; + todo!("memory ports' data are always big for now"); + } + Some(TypeLenSingle::SimOnlySlot) => { + todo!("memory containing sim-only values"); + } + None => unreachable!(), + } + .into(), + ); + } + if let Some(MemoryPortWriteInsns { + addr, + en: _, + write_mode: _, + data, + mask, + insns, + }) = write + { + let end_label = 
self.insns.new_label(); + insns.push( + match mask.len().as_single() { + Some(TypeLenSingle::BigSlot) => Insn::BranchIfZero { + target: end_label.0, + value: mask.big_slots.start, + }, + Some(TypeLenSingle::SmallSlot) => Insn::BranchIfSmallZero { + target: end_label.0, + value: mask.small_slots.start, + }, + Some(TypeLenSingle::SimOnlySlot) | None => unreachable!(), + } + .into(), + ); + insns.push( + match data.len().as_single() { + Some(TypeLenSingle::BigSlot) => { + let value = data.big_slots.start; + if signed { + Insn::MemoryWriteSInt { + value, + memory, + addr, + stride, + start, + width, + } + } else { + Insn::MemoryWriteUInt { + value, + memory, + addr, + stride, + start, + width, + } + } + } + Some(TypeLenSingle::SmallSlot) => { + let _value = data.small_slots.start; + todo!("memory ports' data are always big for now"); + } + Some(TypeLenSingle::SimOnlySlot) => { + todo!("memory containing sim-only values"); + } + None => unreachable!(), + } + .into(), + ); + insns.push(end_label.into()); + } + } + CompiledTypeLayoutBody::Array { element } => { + let CompiledTypeLayoutBody::Array { + element: mask_element, + } = mask_layout.body + else { + unreachable!(); + }; + let ty = ::from_canonical(data_layout.ty); + let element_bit_width = ty.element().bit_width(); + let element_size = element.layout.len(); + let mask_element_size = mask_element.layout.len(); + for element_index in 0..ty.len() { + self.compile_memory_port_rw_helper( + memory, + stride, + start, + *element, + *mask_element, + read.as_mut().map( + |MemoryPortReadInsns { + addr, + en, + write_mode, + data, + insns, + }| MemoryPortReadInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: data.index_array(element_size, element_index), + insns, + }, + ), + write.as_mut().map( + |MemoryPortWriteInsns { + addr, + en, + write_mode, + data, + mask, + insns, + }| { + MemoryPortWriteInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: data.index_array(element_size, element_index), + mask: mask.index_array(mask_element_size, element_index), + insns, + } + }, + ), + ); + start += element_bit_width; + } + } + CompiledTypeLayoutBody::Bundle { fields } => { + let CompiledTypeLayoutBody::Bundle { + fields: mask_fields, + } = mask_layout.body + else { + unreachable!(); + }; + assert_eq!(fields.len(), mask_fields.len()); + for (field, mask_field) in fields.into_iter().zip(mask_fields) { + let field_index_range = + TypeIndexRange::new(field.offset, field.ty.layout.len()); + let mask_field_index_range = + TypeIndexRange::new(mask_field.offset, mask_field.ty.layout.len()); + self.compile_memory_port_rw_helper( + memory, + stride, + start, + field.ty, + mask_field.ty, + read.as_mut().map( + |MemoryPortReadInsns { + addr, + en, + write_mode, + data, + insns, + }| MemoryPortReadInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: data.slice(field_index_range), + insns, + }, + ), + write.as_mut().map( + |MemoryPortWriteInsns { + addr, + en, + write_mode, + data, + mask, + insns, + }| { + MemoryPortWriteInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: data.slice(field_index_range), + mask: mask.slice(mask_field_index_range), + insns, + } + }, + ), + ); + start = start + field.ty.ty.bit_width(); + } + } + } + } + fn compile_memory_port_rw( + &mut self, + memory: StatePartIndex, + data_layout: CompiledTypeLayout, + mask_layout: CompiledTypeLayout, + mut read: Option>, + mut write: Option>, + ) { + let read_else_label = read.as_mut().map( + |MemoryPortReadInsns { + addr: _, + en, + 
write_mode, + data: _, + insns, + }| { + let else_label = self.insns.new_label(); + insns.push( + Insn::BranchIfSmallZero { + target: else_label.0, + value: *en, + } + .into(), + ); + if let Some(write_mode) = *write_mode { + insns.push( + Insn::BranchIfSmallNonZero { + target: else_label.0, + value: write_mode, + } + .into(), + ); + } + else_label + }, + ); + let write_end_label = write.as_mut().map( + |MemoryPortWriteInsns { + addr: _, + en, + write_mode, + data: _, + mask: _, + insns, + }| { + let end_label = self.insns.new_label(); + insns.push( + Insn::BranchIfSmallZero { + target: end_label.0, + value: *en, + } + .into(), + ); + if let Some(write_mode) = *write_mode { + insns.push( + Insn::BranchIfSmallZero { + target: end_label.0, + value: write_mode, + } + .into(), + ); + } + end_label + }, + ); + self.compile_memory_port_rw_helper( + memory, + data_layout.ty.bit_width(), + 0, + data_layout, + mask_layout, + read.as_mut().map( + |MemoryPortReadInsns { + addr, + en, + write_mode, + data, + insns, + }| MemoryPortReadInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: *data, + insns: *insns, + }, + ), + write.as_mut().map( + |MemoryPortWriteInsns { + addr, + en, + write_mode, + data, + mask, + insns, + }| MemoryPortWriteInsns { + addr: *addr, + en: *en, + write_mode: *write_mode, + data: *data, + mask: *mask, + insns: *insns, + }, + ), + ); + if let ( + Some(else_label), + Some(MemoryPortReadInsns { + addr: _, + en: _, + write_mode: _, + data, + insns, + }), + ) = (read_else_label, read) + { + let end_label = self.insns.new_label(); + insns.push( + Insn::Branch { + target: end_label.0, + } + .into(), + ); + insns.push(else_label.into()); + let TypeIndexRange { + small_slots, + big_slots, + sim_only_slots, + } = data; + for dest in small_slots.iter() { + insns.push(Insn::ConstSmall { dest, value: 0 }.into()); + } + for dest in big_slots.iter() { + insns.push( + Insn::Const { + dest, + value: BigInt::ZERO.intern_sized(), + } + .into(), + ); + } + for _dest in sim_only_slots.iter() { + todo!("memory containing sim-only values"); + } + insns.push(end_label.into()); + } + if let (Some(end_label), Some(write)) = (write_end_label, write) { + write.insns.push(end_label.into()); + } + } + fn compile_memory( + &mut self, + mem: Mem, + instantiated_module: InstantiatedModule, + conditions: Interned<[Cond]>, + trace_decls: &mut Vec, + ) { + let data_layout = CompiledTypeLayout::get(mem.array_type().element()); + let mask_layout = CompiledTypeLayout::get(mem.array_type().element().mask_type()); + let read_latency_plus_1 = mem + .read_latency() + .checked_add(1) + .expect("read latency too big"); + let write_latency_plus_1 = mem + .write_latency() + .get() + .checked_add(1) + .expect("write latency too big"); + let read_cycle = match mem.read_under_write() { + ReadUnderWrite::Old => 0, + ReadUnderWrite::New => mem.read_latency(), + ReadUnderWrite::Undefined => mem.read_latency() / 2, // something other than Old or New + }; + let memory = self + .insns + .state_layout + .memories + .allocate(&StatePartLayout::scalar( + (), + MemoryData { + array_type: mem.array_type(), + data: mem.initial_value().unwrap_or_else(|| { + Intern::intern_owned(BitVec::repeat( + false, + mem.array_type().type_properties().bit_width, + )) + }), + }, + )) + .start; + let (ports, trace_ports) = mem + .ports() + .iter() + .map(|&port| { + let target_base = TargetBase::MemPort(port); + let target = TargetInInstantiatedModule { + instantiated_module, + target: target_base.into(), + }; + 
self.decl_conditions.insert(target, conditions); + let TraceDecl::Scope(TraceScope::MemPort(trace_port)) = + self.make_trace_decl(instantiated_module, target_base) + else { + unreachable!(); + }; + let clk = Expr::field(port.to_expr(), "clk"); + let clk = self.compile_expr(instantiated_module, clk); + let clk = self.compiled_expr_to_value(clk, mem.source_location()); + let clk_triggered = self + .compile_clock(clk.map_ty(Clock::from_canonical), mem.source_location()) + .clk_triggered; + let en = Expr::field(port.to_expr(), "en"); + let en = self.compile_expr(instantiated_module, en); + let en = self.compiled_expr_to_value(en, mem.source_location()); + let en = self.compiled_value_bool_dest_is_small(en, mem.source_location()); + let addr = Expr::field(port.to_expr(), "addr"); + let addr = self.compile_expr(instantiated_module, addr); + let addr = self.compiled_expr_to_value(addr, mem.source_location()); + let addr_ty = addr.layout.ty; + let addr = self.compiled_value_to_dyn_array_index( + addr.map_ty(UInt::from_canonical), + mem.source_location(), + ); + let read_data = port.port_kind().rdata_name().map(|name| { + let read_data = + self.compile_expr(instantiated_module, Expr::field(port.to_expr(), name)); + let read_data = self.compiled_expr_to_value(read_data, mem.source_location()); + read_data.range + }); + let write_data = port.port_kind().wdata_name().map(|name| { + let write_data = + self.compile_expr(instantiated_module, Expr::field(port.to_expr(), name)); + let write_data = self.compiled_expr_to_value(write_data, mem.source_location()); + write_data.range + }); + let write_mask = port.port_kind().wmask_name().map(|name| { + let write_mask = + self.compile_expr(instantiated_module, Expr::field(port.to_expr(), name)); + let write_mask = self.compiled_expr_to_value(write_mask, mem.source_location()); + write_mask.range + }); + let write_mode = port.port_kind().wmode_name().map(|name| { + let write_mode = + self.compile_expr(instantiated_module, Expr::field(port.to_expr(), name)); + let write_mode = self.compiled_expr_to_value(write_mode, mem.source_location()); + self.compiled_value_bool_dest_is_small(write_mode, mem.source_location()) + }); + struct PortParts { + en_delayed_len: usize, + addr_delayed_len: usize, + read_data_delayed_len: usize, + write_data_delayed_len: usize, + write_mask_delayed_len: usize, + write_mode_delayed_len: usize, + read_cycle: Option, + write_cycle: Option, + } + let PortParts { + en_delayed_len, + addr_delayed_len, + read_data_delayed_len, + write_data_delayed_len, + write_mask_delayed_len, + write_mode_delayed_len, + read_cycle, + write_cycle, + } = match port.port_kind() { + PortKind::ReadOnly => PortParts { + en_delayed_len: read_cycle + 1, + addr_delayed_len: read_cycle + 1, + read_data_delayed_len: read_latency_plus_1 - read_cycle, + write_data_delayed_len: 0, + write_mask_delayed_len: 0, + write_mode_delayed_len: 0, + read_cycle: Some(read_cycle), + write_cycle: None, + }, + PortKind::WriteOnly => PortParts { + en_delayed_len: write_latency_plus_1, + addr_delayed_len: write_latency_plus_1, + read_data_delayed_len: 0, + write_data_delayed_len: write_latency_plus_1, + write_mask_delayed_len: write_latency_plus_1, + write_mode_delayed_len: 0, + read_cycle: None, + write_cycle: Some(mem.write_latency().get()), + }, + PortKind::ReadWrite => { + let can_rw_at_end = match mem.read_under_write() { + ReadUnderWrite::Old => false, + ReadUnderWrite::New | ReadUnderWrite::Undefined => true, + }; + let latency_plus_1 = read_latency_plus_1; + if latency_plus_1 != 
write_latency_plus_1 || !can_rw_at_end { + todo!( + "not sure what to do, issue: \ + https://github.com/chipsalliance/firrtl-spec/issues/263" + ); + } + PortParts { + en_delayed_len: latency_plus_1, + addr_delayed_len: latency_plus_1, + read_data_delayed_len: 1, + write_data_delayed_len: latency_plus_1, + write_mask_delayed_len: latency_plus_1, + write_mode_delayed_len: latency_plus_1, + read_cycle: Some(latency_plus_1 - 1), + write_cycle: Some(latency_plus_1 - 1), + } + } + }; + let addr_delayed = self.allocate_delay_chain_small( + addr_delayed_len, + addr_ty.canonical(), + Some(addr), + None, + ); + let en_delayed = self.allocate_delay_chain_small( + en_delayed_len, + Bool.canonical(), + Some(en), + None, + ); + let read_data_delayed = self.allocate_delay_chain( + read_data_delayed_len, + &data_layout.layout, + None, + read_data, + |v| v, + ); + let write_data_delayed = self.allocate_delay_chain( + write_data_delayed_len, + &data_layout.layout, + write_data, + None, + |v| v, + ); + let write_mask_delayed = self.allocate_delay_chain( + write_mask_delayed_len, + &mask_layout.layout, + write_mask, + None, + |v| v, + ); + let write_mode_delayed = self.allocate_delay_chain_small( + write_mode_delayed_len, + Bool.canonical(), + write_mode, + None, + ); + let mut read_insns = Vec::new(); + let mut write_insns = Vec::new(); + self.compile_memory_port_rw( + memory, + data_layout, + mask_layout, + read_cycle.map(|read_cycle| MemoryPortReadInsns { + addr: addr_delayed[read_cycle], + en: en_delayed[read_cycle], + write_mode: write_mode_delayed.get(read_cycle).copied(), + data: read_data_delayed[0], + insns: &mut read_insns, + }), + write_cycle.map(|write_cycle| MemoryPortWriteInsns { + addr: addr_delayed[write_cycle], + en: en_delayed[write_cycle], + write_mode: write_mode_delayed.get(write_cycle).copied(), + data: write_data_delayed[write_cycle], + mask: write_mask_delayed[write_cycle], + insns: &mut write_insns, + }), + ); + self.add_assignment(Interned::default(), read_insns, mem.source_location()); + ( + MemoryPort { + clk_triggered, + addr_delayed, + en_delayed, + data_layout, + read_data_delayed, + write_data_delayed, + write_mask_delayed, + write_mode_delayed, + write_insns, + }, + trace_port, + ) + }) + .unzip(); + let name = mem.scoped_name().1.0; + let id = TraceMemoryId(self.memories.len()); + let stride = mem.array_type().element().bit_width(); + let trace = TraceMem { + id, + name, + stride, + element_type: self + .make_trace_decl_child( + instantiated_module, + MakeTraceDeclTarget::Memory { + id, + depth: mem.array_type().len(), + stride, + start: 0, + ty: mem.array_type().element(), + }, + name, + mem.source_location(), + ) + .intern_sized(), + ports: Intern::intern_owned(trace_ports), + array_type: mem.array_type(), + }; + trace_decls.push(trace.into()); + self.memories.push(Memory { + mem, + memory, + trace, + ports, + }); + } + fn compile_block( + &mut self, + parent_module: Interned, + block: Block, + conditions: Interned<[Cond]>, + trace_decls: &mut Vec, + ) { + let Block { memories, stmts } = block; + for memory in memories { + self.compile_memory(memory, *parent_module, conditions, trace_decls); + } + for stmt in stmts { + match stmt { + Stmt::Connect(StmtConnect { + lhs, + rhs, + source_location, + }) => self.compile_connect( + *parent_module, + conditions, + lhs, + *parent_module, + conditions, + rhs, + source_location, + ), + Stmt::Formal(StmtFormal { .. 
}) => todo!("implement simulating formal statements"), + Stmt::If(StmtIf { + cond, + source_location, + blocks: [then_block, else_block], + }) => { + let cond = self.compile_expr(*parent_module, Expr::canonical(cond)); + let cond = self.compiled_expr_to_value(cond, source_location); + let cond = cond.map_ty(Bool::from_canonical); + self.compile_block( + parent_module, + then_block, + Interned::from_iter(conditions.iter().copied().chain([Cond { + body: CondBody::IfTrue { cond }, + source_location, + }])), + trace_decls, + ); + self.compile_block( + parent_module, + else_block, + Interned::from_iter(conditions.iter().copied().chain([Cond { + body: CondBody::IfFalse { cond }, + source_location, + }])), + trace_decls, + ); + } + Stmt::Match(StmtMatch { + expr, + source_location, + blocks, + }) => { + let enum_expr = self.compile_expr(*parent_module, Expr::canonical(expr)); + let enum_expr = self.compiled_expr_to_value(enum_expr, source_location); + let enum_expr = enum_expr.map_ty(Enum::from_canonical); + let discriminant = self.compile_enum_discriminant(enum_expr, source_location); + for (variant_index, block) in blocks.into_iter().enumerate() { + self.compile_block( + parent_module, + block, + Interned::from_iter(conditions.iter().copied().chain([Cond { + body: CondBody::MatchArm { + discriminant, + variant_index, + }, + source_location, + }])), + trace_decls, + ); + } + } + Stmt::Declaration(declaration) => { + trace_decls.push(self.compile_declaration( + declaration, + parent_module, + conditions, + )); + } + } + } + } + fn compile_module(&mut self, module: Interned) -> &CompiledModule { + let mut trace_decls = Vec::new(); + let module_io = module + .leaf_module() + .module_io() + .iter() + .map( + |&AnnotatedModuleIO { + annotations: _, + module_io, + }| { + let target = TargetInInstantiatedModule { + instantiated_module: *module, + target: Target::from(module_io), + }; + self.decl_conditions.insert(target, Interned::default()); + trace_decls.push(self.make_trace_decl(*module, module_io.into())); + self.compile_value(target) + }, + ) + .collect(); + match module.leaf_module().body() { + ModuleBody::Normal(NormalModuleBody { body }) => { + self.compile_block(module, body, Interned::default(), &mut trace_decls); + } + ModuleBody::Extern(ExternModuleBody { + verilog_name: _, + parameters: _, + simulation, + }) => { + let Some(simulation) = simulation else { + panic!( + "can't simulate extern module without extern_module_simulation: {}", + module.leaf_module().source_location() + ); + }; + let module_io_targets = module + .leaf_module() + .module_io() + .iter() + .map(|v| { + Target::from(*simulation.sim_io_to_generator_map[&v.module_io.intern()]) + }) + .collect(); + self.extern_modules.push(CompiledExternModule { + module_io_targets, + module_io, + simulation, + }); + } + } + let hashbrown::hash_map::Entry::Vacant(entry) = self.modules.entry(*module) else { + unreachable!("compiled same instantiated module twice"); + }; + entry.insert(CompiledModule { + module_io, + trace_decls: TraceModule { + name: module.leaf_module().name(), + children: Intern::intern_owned(trace_decls), + }, + }) + } + fn process_clocks(&mut self) -> Interned<[StatePartIndex]> { + mem::take(&mut self.clock_triggers) + .into_iter() + .map( + |ClockTrigger { + last_clk_was_low, + clk, + clk_triggered, + source_location, + }| { + self.insns.push( + Insn::XorSmallImmediate { + dest: last_clk_was_low, + lhs: clk, + rhs: 1, + }, + source_location, + ); + clk_triggered + }, + ) + .collect() + } + fn process_registers(&mut 
self) { + for Register { + value, + clk_triggered, + reset, + source_location, + } in mem::take(&mut self.registers) + { + match reset { + Some(RegisterReset { + is_async, + init, + rst, + }) => { + let reg_end = self.insns.new_label(); + let reg_reset = self.insns.new_label(); + let branch_if_reset = Insn::BranchIfSmallNonZero { + target: reg_reset.0, + value: rst, + }; + let branch_if_not_triggered = Insn::BranchIfSmallZero { + target: reg_end.0, + value: clk_triggered, + }; + if is_async { + self.insns.push(branch_if_reset, source_location); + self.insns.push(branch_if_not_triggered, source_location); + } else { + self.insns.push(branch_if_not_triggered, source_location); + self.insns.push(branch_if_reset, source_location); + } + self.insns.extend( + value.range.insns_for_copy_from(value.write_value().range), + source_location, + ); + self.insns + .push(Insn::Branch { target: reg_end.0 }, source_location); + self.insns.define_label_at_next_insn(reg_reset); + self.insns + .extend(value.range.insns_for_copy_from(init.range), source_location); + self.insns.define_label_at_next_insn(reg_end); + } + None => { + let reg_end = self.insns.new_label(); + self.insns.push( + Insn::BranchIfSmallZero { + target: reg_end.0, + value: clk_triggered, + }, + source_location, + ); + self.insns.extend( + value.range.insns_for_copy_from(value.write_value().range), + source_location, + ); + self.insns.define_label_at_next_insn(reg_end); + } + } + } + } + fn process_memories(&mut self) { + for memory_index in 0..self.memories.len() { + let Memory { + mem, + memory: _, + trace: _, + ref mut ports, + } = self.memories[memory_index]; + for MemoryPort { + clk_triggered, + addr_delayed, + en_delayed, + data_layout: _, + read_data_delayed, + write_data_delayed, + write_mask_delayed, + write_mode_delayed, + write_insns, + } in mem::take(ports) + { + let port_end = self.insns.new_label(); + let small_shift_reg = + |this: &mut Self, values: &[StatePartIndex]| { + for pair in values.windows(2).rev() { + this.insns.push( + Insn::CopySmall { + dest: pair[1], + src: pair[0], + }, + mem.source_location(), + ); + } + }; + let shift_reg = |this: &mut Self, values: &[TypeIndexRange]| { + for pair in values.windows(2).rev() { + this.insns + .extend(pair[0].insns_for_copy_to(pair[1]), mem.source_location()); + } + }; + self.insns.push( + Insn::BranchIfSmallZero { + target: port_end.0, + value: clk_triggered, + }, + mem.source_location(), + ); + small_shift_reg(self, &addr_delayed); + small_shift_reg(self, &en_delayed); + shift_reg(self, &write_data_delayed); + shift_reg(self, &write_mask_delayed); + small_shift_reg(self, &write_mode_delayed); + shift_reg(self, &read_data_delayed); + self.insns.extend(write_insns, mem.source_location()); + self.insns.define_label_at_next_insn(port_end); + } + } + } + pub fn compile(mut self) -> Compiled { + let base_module = + *self.compile_module(InstantiatedModule::Base(self.base_module).intern_sized()); + self.process_assignments(); + self.process_registers(); + self.process_memories(); + let clocks_triggered = self.process_clocks(); + self.insns + .push(Insn::Return, self.base_module.source_location()); + Compiled { + insns: Insns::from(self.insns).intern_sized(), + base_module, + extern_modules: Intern::intern_owned(self.extern_modules), + io: Instance::new_unchecked( + ScopedNameId( + NameId("".intern(), Id::new()), + self.original_base_module.name_id(), + ), + self.original_base_module, + self.original_base_module.source_location(), + ), + traces: 
SimTraces(Intern::intern_owned(self.traces.0)), + trace_memories: Interned::from_iter(self.memories.iter().map( + |&Memory { + mem: _, + memory, + trace, + ports: _, + }| (memory, trace), + )), + clocks_triggered, + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct CompiledModule { + pub(crate) module_io: Interned<[CompiledValue]>, + pub(crate) trace_decls: TraceModule, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct Compiled { + pub(crate) insns: Interned>, + pub(crate) base_module: CompiledModule, + pub(crate) extern_modules: Interned<[CompiledExternModule]>, + pub(crate) io: Instance, + pub(crate) traces: SimTraces]>>, + pub(crate) trace_memories: Interned<[(StatePartIndex, TraceMem)]>, + pub(crate) clocks_triggered: Interned<[StatePartIndex]>, +} + +impl Compiled { + pub fn new(module: Interned>) -> Self { + Self::from_canonical(Compiler::new(module.canonical().intern()).compile()) + } + pub fn canonical(self) -> Compiled { + let Self { + insns, + base_module, + extern_modules, + io, + traces, + trace_memories, + clocks_triggered, + } = self; + Compiled { + insns, + base_module, + extern_modules, + io: Instance::from_canonical(io.canonical()), + traces, + trace_memories, + clocks_triggered, + } + } + pub fn from_canonical(canonical: Compiled) -> Self { + let Compiled { + insns, + base_module, + extern_modules, + io, + traces, + trace_memories, + clocks_triggered, + } = canonical; + Self { + insns, + base_module, + extern_modules, + io: Instance::from_canonical(io.canonical()), + traces, + trace_memories, + clocks_triggered, + } + } +} diff --git a/crates/fayalite/src/sim/interpreter.rs b/crates/fayalite/src/sim/interpreter.rs new file mode 100644 index 0000000..1a6c269 --- /dev/null +++ b/crates/fayalite/src/sim/interpreter.rs @@ -0,0 +1,2096 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + int::{BoolOrIntType, SInt, UInt}, + intern::{Intern, Interned, Memoize}, + sim::interpreter::parts::{ + StateLayout, StatePartIndex, StatePartKind, StatePartKindBigSlots, StatePartKindMemories, + StatePartKindSimOnlySlots, StatePartKindSmallSlots, StatePartLen, TypeIndexRange, + TypeLayout, get_state_part_kinds, + }, + source_location::SourceLocation, + util::{HashMap, HashSet}, +}; +use bitvec::slice::BitSlice; +use num_bigint::BigInt; +use num_traits::{One, Signed, ToPrimitive, Zero}; +use std::{ + borrow::BorrowMut, + convert::Infallible, + fmt::{self, Write}, + hash::Hash, + marker::PhantomData, + ops::{ControlFlow, Deref, DerefMut, Index, IndexMut}, +}; +use vec_map::VecMap; + +pub(crate) mod parts; + +pub(crate) type SmallUInt = u64; +pub(crate) type SmallSInt = i64; +pub(crate) const MIN_BITS_FOR_NEEDING_BIG: usize = SmallUInt::BITS as usize + 1; + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) enum InsnFieldKind { + Input, + Output, + Memory, + Immediate, + BranchTarget, +} + +pub(crate) trait InsnFieldTypeTransform: Eq + Hash + fmt::Debug + Send + Sync { + type Type: Eq + Hash + fmt::Debug + Send + Sync; + fn empty_type() -> Self::Type<[(); 0]>; +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)] +pub(crate) struct InsnFieldTypeTransformUnit; + +impl InsnFieldTypeTransform for InsnFieldTypeTransformUnit { + type Type = (); + fn empty_type() -> Self::Type<[(); 0]> { + () + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct InsnFieldTypeTransformRef<'a>(PhantomData<&'a ()>); + +impl<'a> InsnFieldTypeTransform for 
InsnFieldTypeTransformRef<'a> { + type Type = &'a FieldType; + fn empty_type() -> Self::Type<[(); 0]> { + &[] + } +} + +#[derive(PartialEq, Eq, Hash, Debug)] +pub(crate) struct InsnFieldTypeTransformRefMut<'a>(PhantomData<&'a mut ()>); + +impl<'a> InsnFieldTypeTransform for InsnFieldTypeTransformRefMut<'a> { + type Type = &'a mut FieldType; + fn empty_type() -> Self::Type<[(); 0]> { + &mut [] + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct InsnFieldTypeTransformValue; + +impl InsnFieldTypeTransform for InsnFieldTypeTransformValue { + type Type = FieldType; + fn empty_type() -> Self::Type<[(); 0]> { + [] + } +} + +pub trait InsnFieldTrait: Send + Sync + 'static + Copy + Eq + Hash + fmt::Debug { + const UNIT: InsnFieldType; + fn variant( + v: Transform::Type, + ) -> InsnFieldType; +} + +macro_rules! insn_field_enum { + ( + $enum_vis:vis enum $InsnFieldType:ident<$Transform:ident: $InsnFieldTypeTransform:ident> { + $($Variant:ident($Transform2:ident::$Type:ident<$variant_ty:ty>),)* + } + ) => { + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] + $enum_vis enum $InsnFieldType<$Transform: $InsnFieldTypeTransform> { + $($Variant($Transform2::$Type<$variant_ty>),)* + } + + $(impl InsnFieldTrait for $variant_ty { + const UNIT: $InsnFieldType = $InsnFieldType::$Variant(()); + fn variant<$Transform2: $InsnFieldTypeTransform>( + v: $Transform2::$Type, + ) -> $InsnFieldType<$Transform2> { + $InsnFieldType::$Variant(v) + } + })* + }; +} + +macro_rules! insn_field_enum2 { + ( + type_singular_variants = [$($type_singular_variant:ident,)*]; + array_indexed_variants = [$($array_indexed_variant:ident,)*]; + type_kinds = [$($type_kind:ident,)*]; + ) => { + insn_field_enum! { + pub(crate) enum InsnFieldType { + Memory(Transform::Type>), + $($type_singular_variant(Transform::Type>),)* + $($array_indexed_variant(Transform::Type>),)* + SmallUInt(Transform::Type), + SmallSInt(Transform::Type), + InternedBigInt(Transform::Type>), + U8(Transform::Type), + USize(Transform::Type), + Empty(Transform::Type<[(); 0]>), + } + } + }; +} + +get_state_part_kinds! { + insn_field_enum2! 
{
+        type_singular_variants;
+        array_indexed_variants;
+        type_kinds;
+    }
+}
+
+impl<Transform: InsnFieldTypeTransform> InsnFieldType<Transform> {
+    pub(crate) fn empty() -> Self {
+        Self::Empty(Transform::empty_type())
+    }
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+pub(crate) struct InsnField<Transform: InsnFieldTypeTransform> {
+    pub(crate) ty: InsnFieldType<Transform>,
+    pub(crate) kind: InsnFieldKind,
+}
+
+impl<Transform: InsnFieldTypeTransform> Clone for InsnField<Transform>
+where
+    InsnFieldType<Transform>: Clone,
+{
+    fn clone(&self) -> Self {
+        Self {
+            ty: self.ty.clone(),
+            kind: self.kind.clone(),
+        }
+    }
+}
+
+impl<Transform: InsnFieldTypeTransform> Copy for InsnField<Transform> where
+    InsnFieldType<Transform>: Copy
+{
+}
+
+fn make_array_into_iter<T, const I: usize, const N: usize>(
+    input: [T; I],
+    mut default: impl FnMut() -> T,
+) -> std::array::IntoIter<T, N> {
+    const {
+        assert!(I <= N);
+    };
+    let mut input = input.into_iter();
+    let array = std::array::from_fn(|_| input.next().unwrap_or_else(&mut default));
+    let mut retval = array.into_iter();
+    // remove unneeded trailing elements
+    if I < N {
+        retval.nth_back(N - I - 1);
+    }
+    retval
+}
+
+impl fmt::Debug for Insn {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.debug_fmt::<InsnsBuildingDone>(f, None, None, None)
+    }
+}
+
+struct PrefixLinesWrapper<'a, W> {
+    writer: W,
+    at_beginning_of_line: bool,
+    blank_line_prefix: &'a str,
+    line_prefix: &'a str,
+}
+
+impl<T: fmt::Write> fmt::Write for PrefixLinesWrapper<'_, T> {
+    fn write_str(&mut self, input: &str) -> fmt::Result {
+        for part in input.split_inclusive('\n') {
+            if part.is_empty() {
+                continue;
+            }
+            if self.at_beginning_of_line {
+                let prefix = match part {
+                    "\n" => self.blank_line_prefix,
+                    _ => self.line_prefix,
+                };
+                if !prefix.is_empty() {
+                    self.writer.write_str(prefix)?;
+                }
+                self.at_beginning_of_line = false;
+            }
+            self.writer.write_str(part)?;
+            self.at_beginning_of_line = part.ends_with('\n');
+        }
+        Ok(())
+    }
+}
+
+impl Insn {
+    fn debug_fmt<BK: InsnsBuildingKind>(
+        &self,
+        f: &mut fmt::Formatter<'_>,
+        labels: Option<&Labels>,
+        state_layout: Option<&StateLayout<BK>>,
+        state: Option<&State>,
+    ) -> fmt::Result {
+        let (insn_name, fields) = self.fields_with_names();
+        write!(f, "{insn_name}")?;
+        if fields.len() == 0 {
+            return Ok(());
+        }
+        let mut f = PrefixLinesWrapper {
+            writer: f,
+            at_beginning_of_line: false,
+            blank_line_prefix: "",
+            line_prefix: "    ",
+        };
+        writeln!(f, " {{")?;
+        for (field_name, field) in fields {
+            write!(f, "{field_name}: ")?;
+            match field.kind {
+                InsnFieldKind::BranchTarget => match field.ty {
+                    InsnFieldType::USize(&label_index) => {
+                        if let Some(labels) = labels {
+                            write!(f, "L{label_index}")?;
+                            if let Some(label) = labels.labels.get(label_index) {
+                                if let Some(address) = label.address {
+                                    write!(f, " (at {address})")?;
+                                } else {
+                                    write!(f, " (not yet defined)")?;
+                                }
+                            } else {
+                                write!(f, " (invalid)")?;
+                            }
+                            writeln!(f, ",")?;
+                            continue;
+                        }
+                    }
+                    InsnFieldType::Memory(_)
+                    | InsnFieldType::SmallSlot(_)
+                    | InsnFieldType::BigSlot(_)
+                    | InsnFieldType::SimOnlySlot(_)
+                    | InsnFieldType::SmallSlotArrayIndexed(_)
+                    | InsnFieldType::BigSlotArrayIndexed(_)
+                    | InsnFieldType::SimOnlySlotArrayIndexed(_)
+                    | InsnFieldType::SmallUInt(_)
+                    | InsnFieldType::SmallSInt(_)
+                    | InsnFieldType::InternedBigInt(_)
+                    | InsnFieldType::U8(_)
+                    | InsnFieldType::Empty(_) => {}
+                },
+                InsnFieldKind::Input
+                | InsnFieldKind::Memory
+                | InsnFieldKind::Output
+                | InsnFieldKind::Immediate => {}
+            }
+            macro_rules! debug_fmt_state_part {
+                ($v:expr) => {
+                    $v.debug_fmt(&mut f, ",", " // ", " // ", "", state_layout, state)
+                };
+            }
+            match field.ty {
+                InsnFieldType::Memory(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::SmallSlot(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::BigSlot(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::SimOnlySlot(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::SmallSlotArrayIndexed(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::BigSlotArrayIndexed(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::SimOnlySlotArrayIndexed(v) => {
+                    debug_fmt_state_part!(v)?;
+                }
+                InsnFieldType::SmallUInt(v) => write!(f, "{v:#x}")?,
+                InsnFieldType::SmallSInt(v) => write!(f, "{v:#x}")?,
+                InsnFieldType::InternedBigInt(v) => write!(f, "{v:#x}")?,
+                InsnFieldType::U8(v) => write!(f, "{v:#x}")?,
+                InsnFieldType::USize(v) => write!(f, "{v}")?,
+                InsnFieldType::Empty(v) => write!(f, "{v:?}")?,
+            }
+            writeln!(f, ",")?;
+        }
+        write!(f.writer, "}}")
+    }
+}
+
+pub(crate) trait Breakpoints {
+    type Break;
+    fn check_for_breakpoint(&mut self, pc: usize) -> ControlFlow<Self::Break>;
+}
+
+impl<T: Breakpoints> Breakpoints for &'_ mut T {
+    type Break = T::Break;
+    fn check_for_breakpoint(&mut self, pc: usize) -> ControlFlow<Self::Break> {
+        T::check_for_breakpoint(self, pc)
+    }
+}
+
+impl<T: Breakpoints> Breakpoints for Box<T> {
+    type Break = T::Break;
+    fn check_for_breakpoint(&mut self, pc: usize) -> ControlFlow<Self::Break> {
+        T::check_for_breakpoint(self, pc)
+    }
+}
+
+impl Breakpoints for () {
+    type Break = Infallible;
+    fn check_for_breakpoint(&mut self, _pc: usize) -> ControlFlow<Self::Break> {
+        ControlFlow::Continue(())
+    }
+}
+
+pub(crate) struct BreakpointsSet {
+    pub(crate) last_was_break: bool,
+    pub(crate) set: HashSet<usize>,
+    pub(crate) trace: bool,
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
+pub(crate) enum BreakAction {
+    DumpStateAndContinue,
+    Continue,
+}
+
+impl Breakpoints for BreakpointsSet {
+    type Break = BreakAction;
+    fn check_for_breakpoint(&mut self, pc: usize) -> ControlFlow<Self::Break> {
+        let retval = if self.last_was_break {
+            ControlFlow::Continue(())
+        } else if self.set.contains(&pc) {
+            ControlFlow::Break(BreakAction::DumpStateAndContinue)
+        } else if self.trace {
+            ControlFlow::Break(BreakAction::Continue)
+        } else {
+            ControlFlow::Continue(())
+        };
+        self.last_was_break = retval.is_break();
+        retval
+    }
+}
+
+pub(crate) enum RunResult<Break, Return> {
+    Break(Break),
+    Return(Return),
+}
+
+macro_rules! impl_insns {
+    (
+        #[insn = $Insn:ident, next_macro = $next_macro:ident, branch_macro = $branch_macro:ident]
+        $vis:vis fn $State:ident::$run:ident(&mut $self:ident) -> $run_ret_ty:ty {
+            #[state]
+            let mut $state:ident = $state_init:expr;
+            setup! {
+                $($setup:tt)*
+            }
+            main_loop!();
+            cleanup! {
+                $($cleanup:tt)*
+            }
+        }
+        $(
+            $(#[$insn_meta:meta])*
+            $insn_name:ident $({
+                $(
+                    #[kind = $field_kind:ident]
+                    $(#[$field_meta:meta])*
+                    $field_name:ident: $field_ty:ty,
+                )*
+            })? => $block:block
+        )*
+    ) => {
+        #[derive(Copy, Clone, Eq, PartialEq, Hash)]
+        $vis enum $Insn {
+            $(
+                $(#[$insn_meta])*
+                $insn_name $({
+                    $(
+                        $(#[$field_meta])*
+                        $field_name: $field_ty,
+                    )*
+                })?,
+            )*
+        }
+
+        impl $Insn {
+            $vis const MAX_FIELDS: usize = {
+                let mut retval = 0;
+                $($(
+                    let fields = [$(stringify!($field_name),)*].len();
+                    if retval < fields {
+                        retval = fields;
+                    }
+                )?)*
+                retval
+            };
+        }
+
+        impl $Insn {
+            $vis const fn fields_unit(&self) -> &'static [InsnField<InsnFieldTypeTransformUnit>] {
+                match self {
+                    $(
+                        $Insn::$insn_name {..} => &[
+                            $($(InsnField {
+                                ty: <$field_ty as InsnFieldTrait>::UNIT,
+                                kind: InsnFieldKind::$field_kind,
+                            },)*)?
+                        ],
+                    )*
+                }
+            }
+            $vis fn fields<'a>(&'a self) -> std::array::IntoIter<InsnField<InsnFieldTypeTransformRef<'a>>, { $Insn::MAX_FIELDS }> {
+                match self {
+                    $(
+                        $Insn::$insn_name $({
+                            $($field_name,)*
+                        })? => make_array_into_iter([
+                                $($(InsnField {
+                                    ty: <$field_ty as InsnFieldTrait>::variant($field_name),
+                                    kind: InsnFieldKind::$field_kind,
+                                },)*)?
+                            ],
+                            || InsnField {
+                                ty: InsnFieldType::empty(),
+                                kind: InsnFieldKind::Immediate,
+                            },
+                        ),
+                    )*
+                }
+            }
+            $vis fn fields_with_names<'a>(&'a self) -> (&'static str, std::array::IntoIter<(&'static str, InsnField<InsnFieldTypeTransformRef<'a>>), { $Insn::MAX_FIELDS }>) {
+                match self {
+                    $(
+                        $Insn::$insn_name $({
+                            $($field_name,)*
+                        })? => (
+                            stringify!($insn_name),
+                            make_array_into_iter([
+                                    $($((stringify!($field_name), InsnField {
+                                        ty: <$field_ty as InsnFieldTrait>::variant($field_name),
+                                        kind: InsnFieldKind::$field_kind,
+                                    }),)*)?
+                                ],
+                                || ("", InsnField {
+                                    ty: InsnFieldType::empty(),
+                                    kind: InsnFieldKind::Immediate,
+                                }),
+                            ),
+                        ),
+                    )*
+                }
+            }
+            $vis fn fields_mut<'a>(&'a mut self) -> std::array::IntoIter<InsnField<InsnFieldTypeTransformRefMut<'a>>, { $Insn::MAX_FIELDS }> {
+                match self {
+                    $(
+                        $Insn::$insn_name $({
+                            $($field_name,)*
+                        })? => make_array_into_iter([
+                                $($(InsnField {
+                                    ty: <$field_ty as InsnFieldTrait>::variant($field_name),
+                                    kind: InsnFieldKind::$field_kind,
+                                },)*)?
+                            ],
+                            || InsnField {
+                                ty: InsnFieldType::empty(),
+                                kind: InsnFieldKind::Immediate,
+                            },
+                        ),
+                    )*
+                }
+            }
+            $vis fn into_fields(self) -> std::array::IntoIter<InsnField<InsnFieldTypeTransformValue>, { $Insn::MAX_FIELDS }> {
+                match self {
+                    $(
+                        $Insn::$insn_name $({
+                            $($field_name,)*
+                        })? => make_array_into_iter([
+                                $($(InsnField {
+                                    ty: <$field_ty as InsnFieldTrait>::variant($field_name),
+                                    kind: InsnFieldKind::$field_kind,
+                                },)*)?
+                            ],
+                            || InsnField {
+                                ty: InsnFieldType::empty(),
+                                kind: InsnFieldKind::Immediate,
+                            },
+                        ),
+                    )*
+                }
+            }
+        }
+
+        impl $State {
+            $vis fn $run<B: Breakpoints>(&mut $self, mut breakpoints: B) -> RunResult<B::Break, $run_ret_ty> {
+                let mut $state = $state_init;
+                $($setup)*
+                let mut insn = $state.insns[$state.pc];
+                let retval = 'main_loop: loop {
+                    if let ControlFlow::Break(b) = breakpoints.check_for_breakpoint($state.pc) {
+                        break RunResult::Break(b);
+                    }
+                    macro_rules! $next_macro {
+                        () => {
+                            $state.pc += 1;
+                            insn = $state.insns[$state.pc];
+                            continue 'main_loop;
+                        };
+                    }
+                    macro_rules! $branch_macro {
+                        ($next_pc:expr) => {
+                            $state.pc = $next_pc;
+                            insn = $state.insns[$state.pc];
+                            continue 'main_loop;
+                        };
+                    }
+                    let _: Infallible = match insn {
+                        $(
+                            $Insn::$insn_name $({
+                                $(
+                                    $field_name,
+                                )*
+                            })? => {
+                                $block
+                            }
+                        )*
+                    };
+                };
+                $($cleanup)*
+                retval
+            }
+        }
+    };
+}
+
+pub(crate) trait InsnsBuildingKind: Copy + Eq + fmt::Debug + Hash + Default {
+    type Vec<T: fmt::Debug + Clone + Eq + Hash + Send + Sync + 'static>: fmt::Debug
+        + Clone
+        + Eq
+        + Hash
+        + Send
+        + Sync
+        + 'static
+        + Default
+        + Deref<Target = [T]>
+        + FromIterator<T>;
+    type Labels: Default;
+    fn labels(labels: &Self::Labels) -> Option<&Labels>;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
+pub(crate) struct InsnsBuildingDone;
+
+impl InsnsBuildingKind for InsnsBuildingDone {
+    type Vec<T: fmt::Debug + Clone + Eq + Hash + Send + Sync + 'static> = Interned<[T]>;
+    type Labels = ();
+    fn labels(_labels: &Self::Labels) -> Option<&Labels> {
+        None
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
+pub(crate) struct InsnsBuilding;
+
+impl InsnsBuildingKind for InsnsBuilding {
+    type Vec<T: fmt::Debug + Clone + Eq + Hash + Send + Sync + 'static> = Vec<T>;
+    type Labels = Labels;
+    fn labels(labels: &Self::Labels) -> Option<&Labels> {
+        Some(labels)
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct Label(pub(crate) usize);
+
+impl fmt::Debug for Label {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "L{}:", self.0)
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub(crate) enum InsnOrLabel {
+    Insn(Insn),
+    Label(Label),
+}
+
+impl From<Insn> for InsnOrLabel {
+    fn from(value: Insn) -> Self {
+        Self::Insn(value)
+    }
+}
+
+impl From