Compare commits
No commits in common. "master" and "square-brackets-generics-experiment" have entirely different histories.
master
...
square-bra
165 changed files with 2875 additions and 86861 deletions
|
|
@ -1,25 +1,19 @@
|
|||
# SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
# See Notices.txt for copyright information
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: debian-12
|
||||
container:
|
||||
image: git.libre-chip.org/libre-chip/fayalite-deps:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: https://code.forgejo.org/actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- run: |
|
||||
scripts/check-copyright.sh
|
||||
- uses: https://git.libre-chip.org/mirrors/rust-cache@v2
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.80.1
|
||||
source "$HOME/.cargo/env"
|
||||
echo "$PATH" >> "$GITHUB_PATH"
|
||||
- uses: https://github.com/Swatinem/rust-cache@v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/master' }}
|
||||
- run: cargo test
|
||||
- run: cargo build --tests --features=unstable-doc
|
||||
- run: cargo test --doc --features=unstable-doc
|
||||
- run: cargo test --features=unstable-doc
|
||||
- run: cargo doc --features=unstable-doc
|
||||
- run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
|
||||
- run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out
|
||||
- run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out
|
||||
|
|
|
|||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -1,4 +1,2 @@
|
|||
# SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
# See Notices.txt for copyright information
|
||||
/target
|
||||
.vscode
|
||||
370
Cargo.lock
generated
370
Cargo.lock
generated
|
|
@ -1,6 +1,18 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "allocator-api2"
|
||||
|
|
@ -25,9 +37,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.13"
|
||||
version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
|
||||
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
|
|
@ -44,7 +56,7 @@ version = "1.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -54,21 +66,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
|
|
@ -81,12 +81,6 @@ version = "0.2.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
|
||||
|
||||
[[package]]
|
||||
name = "basic-toml"
|
||||
version = "0.1.8"
|
||||
|
|
@ -115,20 +109,6 @@ dependencies = [
|
|||
"wyz",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "blake3"
|
||||
version = "1.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
|
||||
dependencies = [
|
||||
"arrayref",
|
||||
"arrayvec",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"constant_time_eq",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
|
|
@ -138,15 +118,6 @@ dependencies = [
|
|||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.1.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
|
|
@ -155,9 +126,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.48"
|
||||
version = "4.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
|
||||
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
|
@ -165,9 +136,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.48"
|
||||
version = "4.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
|
||||
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
|
@ -175,20 +146,11 @@ dependencies = [
|
|||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.5.58"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
|
||||
dependencies = [
|
||||
"clap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.47"
|
||||
version = "4.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
|
||||
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
|
|
@ -198,9 +160,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.5"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
|
||||
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
|
|
@ -208,12 +170,6 @@ version = "1.0.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"
|
||||
|
||||
[[package]]
|
||||
name = "constant_time_eq"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.12"
|
||||
|
|
@ -233,27 +189,6 @@ dependencies = [
|
|||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_destructure2"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
|
|
@ -283,7 +218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -304,41 +239,32 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
|
|||
|
||||
[[package]]
|
||||
name = "fayalite"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bitvec",
|
||||
"blake3",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"ctor",
|
||||
"eyre",
|
||||
"fayalite-proc-macros",
|
||||
"fayalite-visit-gen",
|
||||
"hashbrown",
|
||||
"jobslot",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
"ordered-float",
|
||||
"petgraph",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"trybuild",
|
||||
"vec_map",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"fayalite-proc-macros-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros-impl"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"base16ct",
|
||||
"num-bigint",
|
||||
|
|
@ -352,7 +278,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "fayalite-visit-gen"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"prettyplease",
|
||||
|
|
@ -364,18 +290,6 @@ dependencies = [
|
|||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fixedbitset"
|
||||
version = "0.5.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
|
||||
|
||||
[[package]]
|
||||
name = "foldhash"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
|
|
@ -392,18 +306,6 @@ dependencies = [
|
|||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
|
|
@ -412,13 +314,12 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
|||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.2"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
|
||||
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"allocator-api2",
|
||||
"equivalent",
|
||||
"foldhash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -433,7 +334,7 @@ version = "0.5.9"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -444,9 +345,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
|
|||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.9.0"
|
||||
version = "2.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
|
|
@ -465,25 +366,11 @@ version = "1.0.10"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
||||
|
||||
[[package]]
|
||||
name = "jobslot"
|
||||
version = "0.2.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"derive_destructure2",
|
||||
"getrandom",
|
||||
"libc",
|
||||
"scopeguard",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.176"
|
||||
version = "0.2.153"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
|
||||
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
|
|
@ -493,10 +380,11 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
|
|||
|
||||
[[package]]
|
||||
name = "num-bigint"
|
||||
version = "0.4.6"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
|
||||
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
]
|
||||
|
|
@ -525,29 +413,6 @@ version = "1.19.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "ordered-float"
|
||||
version = "5.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
"rand",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06"
|
||||
dependencies = [
|
||||
"fixedbitset",
|
||||
"hashbrown",
|
||||
"indexmap",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prettyplease"
|
||||
version = "0.2.20"
|
||||
|
|
@ -560,9 +425,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.92"
|
||||
version = "1.0.83"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
|
||||
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
|
@ -576,37 +441,12 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "r-efi"
|
||||
version = "5.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
|
||||
|
||||
[[package]]
|
||||
name = "radium"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||
dependencies = [
|
||||
"rand_core",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.6.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.31"
|
||||
|
|
@ -617,7 +457,7 @@ dependencies = [
|
|||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -626,12 +466,6 @@ version = "1.0.17"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.202"
|
||||
|
|
@ -675,12 +509,6 @@ dependencies = [
|
|||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
|
|
@ -689,9 +517,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.93"
|
||||
version = "2.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058"
|
||||
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -713,7 +541,7 @@ dependencies = [
|
|||
"cfg-if",
|
||||
"fastrand",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -778,36 +606,12 @@ version = "0.2.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||
|
||||
[[package]]
|
||||
name = "vec_map"
|
||||
version = "0.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.14.7+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
|
||||
dependencies = [
|
||||
"wasip2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasip2"
|
||||
version = "1.0.1+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
|
||||
dependencies = [
|
||||
"wit-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "6.0.1"
|
||||
|
|
@ -851,12 +655,6 @@ version = "0.4.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
|
|
@ -866,25 +664,15 @@ dependencies = [
|
|||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.61.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
|
|
@ -893,51 +681,45 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
|
||||
|
||||
[[package]]
|
||||
name = "winsafe"
|
||||
|
|
@ -945,12 +727,6 @@ version = "0.0.19"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
|
||||
|
||||
[[package]]
|
||||
name = "wit-bindgen"
|
||||
version = "0.46.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
|
|
@ -959,3 +735,23 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
|
|||
dependencies = [
|
||||
"tap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
|
|
|||
31
Cargo.toml
31
Cargo.toml
|
|
@ -5,42 +5,31 @@ resolver = "2"
|
|||
members = ["crates/*"]
|
||||
|
||||
[workspace.package]
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
license = "LGPL-3.0-or-later"
|
||||
edition = "2024"
|
||||
edition = "2021"
|
||||
repository = "https://git.libre-chip.org/libre-chip/fayalite"
|
||||
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
|
||||
categories = ["simulation", "development-tools", "compilers"]
|
||||
rust-version = "1.89.0"
|
||||
rust-version = "1.80.1"
|
||||
|
||||
[workspace.dependencies]
|
||||
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
|
||||
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
|
||||
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
|
||||
fayalite-proc-macros = { version = "=0.2.0", path = "crates/fayalite-proc-macros" }
|
||||
fayalite-proc-macros-impl = { version = "=0.2.0", path = "crates/fayalite-proc-macros-impl" }
|
||||
fayalite-visit-gen = { version = "=0.2.0", path = "crates/fayalite-visit-gen" }
|
||||
base16ct = "0.2.0"
|
||||
base64 = "0.22.1"
|
||||
bitvec = { version = "1.0.1", features = ["serde"] }
|
||||
blake3 = { version = "1.5.4", features = ["serde"] }
|
||||
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
|
||||
clap_complete = "4.5.58"
|
||||
ctor = "0.2.8"
|
||||
eyre = "0.6.12"
|
||||
hashbrown = "0.15.2"
|
||||
indexmap = { version = "2.5.0", features = ["serde"] }
|
||||
jobslot = "0.2.23"
|
||||
num-bigint = "0.4.6"
|
||||
hashbrown = "0.14.3"
|
||||
indexmap = { version = "2.2.6", features = ["serde"] }
|
||||
num-bigint = "0.4.4"
|
||||
num-traits = "0.2.16"
|
||||
ordered-float = { version = "5.1.0", features = ["serde"] }
|
||||
petgraph = "0.8.1"
|
||||
prettyplease = "0.2.20"
|
||||
proc-macro2 = "1.0.83"
|
||||
quote = "1.0.36"
|
||||
serde = { version = "1.0.202", features = ["derive"] }
|
||||
serde_json = { version = "1.0.117", features = ["preserve_order"] }
|
||||
sha2 = "0.10.8"
|
||||
syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
|
||||
syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
|
||||
tempfile = "3.10.1"
|
||||
thiserror = "1.0.61"
|
||||
trybuild = "1.0"
|
||||
vec_map = "0.8.2"
|
||||
which = "6.0.1"
|
||||
|
|
|
|||
79
README.md
79
README.md
|
|
@ -1,84 +1,5 @@
|
|||
<!--
|
||||
SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
See Notices.txt for copyright information
|
||||
-->
|
||||
# Fayalite
|
||||
|
||||
Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).
|
||||
|
||||
[FIRRTL]: https://github.com/chipsalliance/firrtl-spec
|
||||
|
||||
# Building the [Blinky example] for the Arty A7 100T on Linux
|
||||
|
||||
[Blinky example]: crates/fayalite/examples/blinky.rs
|
||||
|
||||
This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs, the sources for the container image are in <https://git.libre-chip.org/libre-chip/fayalite-deps>
|
||||
|
||||
Steps:
|
||||
|
||||
Install podman (or docker).
|
||||
|
||||
Run:
|
||||
```bash
|
||||
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
|
||||
```
|
||||
|
||||
To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:
|
||||
|
||||
[openFPGALoader]: https://github.com/trabucayre/openFPGALoader
|
||||
|
||||
On Debian 12:
|
||||
```bash
|
||||
sudo apt update && sudo apt install openfpgaloader
|
||||
```
|
||||
|
||||
Then program the FPGA:
|
||||
```bash
|
||||
sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
|
||||
```
|
||||
|
||||
This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.
|
||||
|
||||
To program the Flash also, so it stays programmed when power-cycling the board:
|
||||
|
||||
```bash
|
||||
sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
|
||||
```
|
||||
|
||||
# Building the [Transmit-only UART example] for the Arty A7 100T on Linux
|
||||
|
||||
[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs
|
||||
|
||||
Follow the steps above of building the Blinky example, but replace `blinky` with `tx_only_uart`.
|
||||
|
||||
View the output using [tio](https://github.com/tio/tio) which you can install in Debian using `apt`.
|
||||
|
||||
Find the correct USB device:
|
||||
```bash
|
||||
sudo tio --list
|
||||
```
|
||||
|
||||
You want the device with a name like (note the `if01`, `if00` is presumably the JTAG port):
|
||||
`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0`
|
||||
|
||||
Connect to the serial port:
|
||||
```bash
|
||||
sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here
|
||||
```
|
||||
|
||||
You'll see (repeating endlessly):
|
||||
```text
|
||||
Hello World from Fayalite!!!
|
||||
Hello World from Fayalite!!!
|
||||
Hello World from Fayalite!!!
|
||||
```
|
||||
|
||||
Press Ctrl+T then `q` to exit tio.
|
||||
|
||||
# Funding
|
||||
|
||||
## NLnet Grants
|
||||
|
||||
* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md)
|
||||
|
||||
This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement № [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI).
|
||||
|
|
|
|||
|
|
@ -13,11 +13,11 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base16ct.workspace = true
|
||||
num-bigint.workspace = true
|
||||
prettyplease.workspace = true
|
||||
proc-macro2.workspace = true
|
||||
quote.workspace = true
|
||||
sha2.workspace = true
|
||||
syn.workspace = true
|
||||
tempfile.workspace = true
|
||||
base16ct = { workspace = true }
|
||||
num-bigint = { workspace = true }
|
||||
prettyplease = { workspace = true }
|
||||
proc-macro2 = { workspace = true }
|
||||
quote = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
syn = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -220,7 +220,6 @@ forward_fold!(syn::ExprArray => fold_expr_array);
|
|||
forward_fold!(syn::ExprCall => fold_expr_call);
|
||||
forward_fold!(syn::ExprIf => fold_expr_if);
|
||||
forward_fold!(syn::ExprMatch => fold_expr_match);
|
||||
forward_fold!(syn::ExprMethodCall => fold_expr_method_call);
|
||||
forward_fold!(syn::ExprPath => fold_expr_path);
|
||||
forward_fold!(syn::ExprRepeat => fold_expr_repeat);
|
||||
forward_fold!(syn::ExprStruct => fold_expr_struct);
|
||||
|
|
|
|||
|
|
@ -1,39 +1,34 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
hdl_type_common::{
|
||||
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, ParsedFieldsNamed, ParsedGenerics,
|
||||
SplitForImpl, TypesParser, WrappedInConst, common_derives, get_target,
|
||||
common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField,
|
||||
ParsedFieldsNamed, ParsedGenerics, SplitForImpl, TypesParser, WrappedInConst,
|
||||
},
|
||||
kw,
|
||||
kw, Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, format_ident, quote_spanned};
|
||||
use quote::{format_ident, quote_spanned, ToTokens};
|
||||
use syn::{
|
||||
AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
|
||||
GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, parse_quote,
|
||||
parse_quote_spanned,
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::Brace,
|
||||
AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
|
||||
GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct ParsedBundle {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<ItemOptions>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) struct_token: Token![struct],
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
pub(crate) fields: MaybeParsed<ParsedFieldsNamed, FieldsNamed>,
|
||||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip, kw::hdl>>>,
|
||||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip>>>,
|
||||
pub(crate) mask_type_ident: Ident,
|
||||
pub(crate) mask_type_match_variant_ident: Ident,
|
||||
pub(crate) mask_type_sim_value_ident: Ident,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
pub(crate) sim_value_ident: Ident,
|
||||
pub(crate) builder_ident: Ident,
|
||||
pub(crate) mask_type_builder_ident: Ident,
|
||||
}
|
||||
|
|
@ -43,7 +38,7 @@ impl ParsedBundle {
|
|||
errors: &mut Errors,
|
||||
field: &mut Field,
|
||||
index: usize,
|
||||
) -> Option<HdlAttr<kw::flip, kw::hdl>> {
|
||||
) -> Option<HdlAttr<kw::flip>> {
|
||||
let Field {
|
||||
attrs,
|
||||
vis: _,
|
||||
|
|
@ -61,7 +56,8 @@ impl ParsedBundle {
|
|||
}
|
||||
*mutability = FieldMutability::None;
|
||||
colon_token.get_or_insert(Token));
|
||||
errors.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs))
|
||||
let options = errors.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs));
|
||||
options
|
||||
}
|
||||
fn parse(item: ItemStruct) -> syn::Result<Self> {
|
||||
let ItemStruct {
|
||||
|
|
@ -75,9 +71,7 @@ impl ParsedBundle {
|
|||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let mut options = errors
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
|
||||
&mut attrs,
|
||||
))
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions>::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
errors.ok(options.body.validate());
|
||||
let ItemOptions {
|
||||
|
|
@ -86,12 +80,7 @@ impl ParsedBundle {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on structs");
|
||||
}
|
||||
let mut fields = match fields {
|
||||
syn::Fields::Named(fields) => fields,
|
||||
syn::Fields::Unnamed(fields) => {
|
||||
|
|
@ -132,9 +121,7 @@ impl ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident: format_ident!("__{}__MaskType", ident),
|
||||
mask_type_match_variant_ident: format_ident!("__{}__MaskType__MatchVariant", ident),
|
||||
mask_type_sim_value_ident: format_ident!("__{}__MaskType__SimValue", ident),
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
mask_type_builder_ident: format_ident!("__{}__MaskType__Builder", ident),
|
||||
builder_ident: format_ident!("__{}__Builder", ident),
|
||||
ident,
|
||||
|
|
@ -349,7 +336,8 @@ impl ToTokens for Builder {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {self.ident.span()=>
|
||||
#[allow(non_camel_case_types, non_snake_case, dead_code)]
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
impl #impl_generics #unfilled_ty
|
||||
#where_clause
|
||||
{
|
||||
|
|
@ -435,25 +423,20 @@ impl ToTokens for ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident,
|
||||
mask_type_match_variant_ident,
|
||||
mask_type_sim_value_ident,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
builder_ident,
|
||||
mask_type_builder_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut item_attrs = attrs.clone();
|
||||
item_attrs.push(common_derives(span));
|
||||
item_attrs.push(common_derives(ident.span()));
|
||||
ItemStruct {
|
||||
attrs: item_attrs,
|
||||
vis: vis.clone(),
|
||||
|
|
@ -475,19 +458,19 @@ impl ToTokens for ParsedBundle {
|
|||
.map(|ParsedField { ident, ty, .. }| {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
let expr = ty.make_hdl_type_expr(context);
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: #expr,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
parse_quote_spanned! {span=>
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#target {
|
||||
#(#fields)*
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let builder = Builder {
|
||||
vis: vis.clone(),
|
||||
|
|
@ -501,8 +484,9 @@ impl ToTokens for ParsedBundle {
|
|||
let unfilled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Unfilled);
|
||||
let filled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Filled);
|
||||
let mut mask_type_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut mask_type_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
for Field { ident, ty, .. } in &mut mask_type_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
<#ty as ::fayalite::ty::Type>::MaskType
|
||||
};
|
||||
}
|
||||
|
|
@ -521,8 +505,8 @@ impl ToTokens for ParsedBundle {
|
|||
mask_type_builder.builder_struct_ty(|_| BuilderFieldState::Filled);
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
|
@ -534,16 +518,17 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_match_variant_fields = mask_type_fields.clone();
|
||||
for Field { ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
let mut mask_type_match_variant_fields = mask_type_fields;
|
||||
for Field { ident, ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
|
@ -556,15 +541,16 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
.to_tokens(tokens);
|
||||
let mut match_variant_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
for Field { ident, ty, .. } in &mut match_variant_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
|
@ -576,72 +562,17 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_sim_value_fields = mask_type_fields;
|
||||
for Field { ty, .. } in &mut mask_type_sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: mask_type_sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(mask_type_sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut sim_value_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let this_token = Ident::new("__this", span);
|
||||
let fields_token = Ident::new("__fields", span);
|
||||
let self_token = Token;
|
||||
let match_variant_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::expr::Expr::field(#this_token, #ident_str),
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: ::fayalite::expr::Expr::field(__this, #ident_str),
|
||||
}
|
||||
}));
|
||||
let mask_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::ty::Type::mask_type(&#self_token.#ident),
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: ::fayalite::ty::Type::mask_type(&self.#ident),
|
||||
}
|
||||
}));
|
||||
let from_canonical_body_fields =
|
||||
|
|
@ -649,16 +580,16 @@ impl ToTokens for ParsedBundle {
|
|||
|((index, field), flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
let not_flipped = flip.is_none().then(|| Token);
|
||||
quote_spanned! {span=>
|
||||
let flipped = flip.is_some();
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: {
|
||||
let ::fayalite::bundle::BundleField {
|
||||
name: __name,
|
||||
flipped: __flipped,
|
||||
ty: __ty,
|
||||
} = #fields_token[#index];
|
||||
} = __fields[#index];
|
||||
::fayalite::__std::assert_eq!(&*__name, #ident_str);
|
||||
::fayalite::__std::assert!(#not_flipped __flipped);
|
||||
::fayalite::__std::assert_eq!(__flipped, #flipped);
|
||||
::fayalite::ty::Type::from_canonical(__ty)
|
||||
},
|
||||
}
|
||||
|
|
@ -669,50 +600,23 @@ impl ToTokens for ParsedBundle {
|
|||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
let flipped = flip.is_some();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::bundle::BundleField {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
flipped: #flipped,
|
||||
ty: ::fayalite::ty::Type::canonical(&#self_token.#ident),
|
||||
ty: ::fayalite::ty::Type::canonical(&self.#ident),
|
||||
},
|
||||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: v.field_from_opaque(),
|
||||
}
|
||||
}));
|
||||
let sim_value_clone_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field_clone_from_opaque(&mut value.#ident);
|
||||
}
|
||||
}));
|
||||
let sim_value_to_opaque_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field(&value.#ident);
|
||||
}
|
||||
}));
|
||||
let to_sim_value_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::sim::value::SimValue::ty(&self.#ident),
|
||||
}
|
||||
}));
|
||||
let fields_len = fields.named().into_iter().len();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #mask_type_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #mask_type_sim_value_ident #type_generics;
|
||||
type MatchVariant = #mask_type_match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -722,7 +626,7 @@ impl ToTokens for ParsedBundle {
|
|||
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
>;
|
||||
fn match_variants(
|
||||
#this_token: ::fayalite::expr::Expr<Self>,
|
||||
__this: ::fayalite::expr::Expr<Self>,
|
||||
__source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
let __retval = #mask_type_match_variant_ident {
|
||||
|
|
@ -730,19 +634,19 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval))
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
*#self_token
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
*self
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self {
|
||||
let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else {
|
||||
::fayalite::__std::panic!("expected bundle");
|
||||
};
|
||||
let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields");
|
||||
let __fields = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(__fields.len(), #fields_len, "bundle has wrong number of fields");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
|
@ -750,35 +654,6 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#mask_type_sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #mask_type_ident #type_generics
|
||||
|
|
@ -786,7 +661,7 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type Builder = #unfilled_mask_type_builder_ty;
|
||||
type FilledBuilder = #filled_mask_type_builder_ty;
|
||||
fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
fn fields(&self) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..])
|
||||
}
|
||||
}
|
||||
|
|
@ -801,57 +676,12 @@ impl ToTokens for ParsedBundle {
|
|||
impl #impl_generics ::fayalite::ty::TypeWithDeref for #mask_type_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn expr_deref(#this_token: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let #this_token = *#this_token;
|
||||
fn expr_deref(__this: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let __this = *__this;
|
||||
let __retval = #mask_type_match_variant_ident {
|
||||
#(#match_variant_body_fields)*
|
||||
};
|
||||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #mask_type_ident #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#mask_type_ident #type_generics>
|
||||
for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
::fayalite::intern::Interned::<_>::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
|
|
@ -860,7 +690,6 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -870,7 +699,7 @@ impl ToTokens for ParsedBundle {
|
|||
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
>;
|
||||
fn match_variants(
|
||||
#this_token: ::fayalite::expr::Expr<Self>,
|
||||
__this: ::fayalite::expr::Expr<Self>,
|
||||
__source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
let __retval = #match_variant_ident {
|
||||
|
|
@ -878,21 +707,21 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval))
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
#mask_type_ident {
|
||||
#(#mask_type_body_fields)*
|
||||
}
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self {
|
||||
let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else {
|
||||
::fayalite::__std::panic!("expected bundle");
|
||||
};
|
||||
let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields");
|
||||
let __fields = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(__fields.len(), #fields_len, "bundle has wrong number of fields");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
|
@ -900,35 +729,6 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #target #type_generics
|
||||
|
|
@ -936,7 +736,7 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type Builder = #unfilled_builder_ty;
|
||||
type FilledBuilder = #filled_builder_ty;
|
||||
fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
fn fields(&self) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..])
|
||||
}
|
||||
}
|
||||
|
|
@ -951,152 +751,16 @@ impl ToTokens for ParsedBundle {
|
|||
impl #impl_generics ::fayalite::ty::TypeWithDeref for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn expr_deref(#this_token: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let #this_token = *#this_token;
|
||||
fn expr_deref(__this: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let __this = *__this;
|
||||
let __retval = #match_variant_ident {
|
||||
#(#match_variant_body_fields)*
|
||||
};
|
||||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #target #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
::fayalite::intern::Interned::<_>::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
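
For orientation, the `ToSimValue`/`ToSimValueWithType` impls emitted above differ only in ownership: the borrowing methods clone `self` before handing it to `SimValue::from_value`, while the consuming methods pass `self` through unchanged. A minimal usage sketch, assuming a hypothetical `#[hdl]` bundle sim value `value` and a matching runtime type `ty` (names not taken from this patch):

// Sketch only: `value` and `ty` are hypothetical; both calls bottom out in
// ::fayalite::sim::value::SimValue::from_value, exactly as in the generated code.
use fayalite::sim::value::ToSimValueWithType;
let by_ref = value.to_sim_value_with_type(ty);   // borrows and clones `value`
let by_val = value.into_sim_value_with_type(ty); // consumes `value`; assumes `ty: Copy`
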
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
let mut expr_where_clause =
|
||||
Generics::from(generics)
|
||||
.where_clause
|
||||
.unwrap_or_else(|| syn::WhereClause {
|
||||
where_token: Token,
|
||||
predicates: Punctuated::new(),
|
||||
});
|
||||
let mut sim_value_where_clause = expr_where_clause.clone();
|
||||
let mut fields_sim_value_eq = vec![];
|
||||
let mut fields_cmp_eq = vec![];
|
||||
let mut fields_cmp_ne = vec![];
|
||||
for field in fields.named() {
|
||||
let field_ident = field.ident();
|
||||
let field_ty = field.ty();
|
||||
expr_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty>
|
||||
});
|
||||
sim_value_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::sim::value::SimValuePartialEq<#field_ty>
|
||||
});
|
||||
fields_sim_value_eq.push(quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValuePartialEq::sim_value_eq(&__lhs.#field_ident, &__rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_eq.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_ne.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
}
|
||||
let sim_value_eq_body;
|
||||
let cmp_eq_body;
|
||||
let cmp_ne_body;
|
||||
if fields_len == 0 {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
true
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&true)
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&false)
|
||||
};
|
||||
} else {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
#(#fields_sim_value_eq)&&*
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_eq)&*
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_ne)|*
|
||||
};
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::expr::ops::ExprPartialEq<Self> for #target #type_generics
|
||||
#expr_where_clause
|
||||
{
|
||||
fn cmp_eq(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_eq_body
|
||||
}
|
||||
fn cmp_ne(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_ne_body
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::SimValuePartialEq<Self> for #target #type_generics
|
||||
#sim_value_where_clause
|
||||
{
|
||||
fn sim_value_eq(
|
||||
__lhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
__rhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
) -> bool {
|
||||
#sim_value_eq_body
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
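
Concretely, for a hypothetical two-field bundle `Pair { a, b }`, the `cmp_eq` template above expands to roughly the following shape inside the generated `ExprPartialEq<Self>` impl, with the per-field comparisons folded by `&` as in `#(#fields_cmp_eq)&*`:

// Rough expansion for a hypothetical bundle `Pair { a, b }`; `__lhs`/`__rhs`
// are the parameter names used by the generated impl.
fn cmp_eq(
    __lhs: ::fayalite::expr::Expr<Pair>,
    __rhs: ::fayalite::expr::Expr<Pair>,
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
    ::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.a, __rhs.a)
        & ::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.b, __rhs.b)
}
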
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
let static_generics = generics.clone().for_static_type();
|
||||
let (static_impl_generics, static_type_generics, static_where_clause) =
|
||||
|
|
@ -1104,7 +768,7 @@ impl ToTokens for ParsedBundle {
|
|||
let static_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: <#ty as ::fayalite::ty::StaticType>::TYPE,
|
||||
}
|
||||
}));
|
||||
|
|
@ -1112,34 +776,28 @@ impl ToTokens for ParsedBundle {
|
|||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: <#ty as ::fayalite::ty::StaticType>::MASK_TYPE,
|
||||
}
|
||||
}));
|
||||
let type_properties = format_ident!("__type_properties", span = span);
|
||||
let type_properties = format_ident!("__type_properties", span = ident.span());
|
||||
let type_properties_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let flipped = field_flip.is_some();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES);
|
||||
}
|
||||
}));
|
||||
let type_properties_mask_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let flipped = field_flip.is_some();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::MASK_TYPE_PROPERTIES);
|
||||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
|
|
@ -1162,15 +820,6 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
@ -1,20 +1,18 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
Errors, HdlAttr, PairsIterExt,
hdl_type_common::{
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, SplitForImpl,
TypesParser, WrappedInConst, common_derives, get_target,
common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics,
ParsedType, SplitForImpl, TypesParser, WrappedInConst,
},
kw,
Errors, HdlAttr, PairsIterExt,
};
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote_spanned};
use quote::{format_ident, quote_spanned, ToTokens};
use syn::{
Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
ItemEnum, ItemStruct, Token, Type, Variant, Visibility, parse_quote_spanned,
parse_quote_spanned,
punctuated::{Pair, Punctuated},
token::{Brace, Paren},
Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
ItemEnum, ItemStruct, Token, Type, Variant, Visibility,
};

crate::options! {
@ -31,7 +29,7 @@ crate::options! {
pub(crate) struct ParsedVariantField {
pub(crate) paren_token: Paren,
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<FieldOptions, kw::hdl>,
pub(crate) options: HdlAttr<FieldOptions>,
pub(crate) ty: MaybeParsed<ParsedType, Type>,
pub(crate) comma_token: Option<Token![,]>,
}
@ -39,7 +37,7 @@ pub(crate) struct ParsedVariantField {
#[derive(Clone, Debug)]
pub(crate) struct ParsedVariant {
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<VariantOptions, kw::hdl>,
pub(crate) options: HdlAttr<VariantOptions>,
pub(crate) ident: Ident,
pub(crate) field: Option<ParsedVariantField>,
}
@ -121,7 +119,7 @@ impl ParsedVariant {
#[derive(Clone, Debug)]
pub(crate) struct ParsedEnum {
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
pub(crate) options: HdlAttr<ItemOptions>,
pub(crate) vis: Visibility,
pub(crate) enum_token: Token![enum],
pub(crate) ident: Ident,
@ -129,9 +127,6 @@ pub(crate) struct ParsedEnum {
pub(crate) brace_token: Brace,
pub(crate) variants: Punctuated<ParsedVariant, Token![,]>,
pub(crate) match_variant_ident: Ident,
pub(crate) sim_value_ident: Ident,
pub(crate) sim_builder_ident: Ident,
pub(crate) sim_builder_ty_field_ident: Ident,
}

impl ParsedEnum {
@ -147,9 +142,7 @@ impl ParsedEnum {
|
|||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let mut options = errors
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
|
||||
&mut attrs,
|
||||
))
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions>::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
errors.ok(options.body.validate());
|
||||
let ItemOptions {
|
||||
|
|
@ -158,15 +151,7 @@ impl ParsedEnum {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums");
|
||||
}
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on enums");
|
||||
}
|
||||
attrs.retain(|attr| {
|
||||
if attr.path().is_ident("repr") {
|
||||
errors.error(attr, "#[repr] is not supported on #[hdl] enums");
|
||||
|
|
@ -197,9 +182,6 @@ impl ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
sim_builder_ident: format_ident!("__{}__SimBuilder", ident),
|
||||
sim_builder_ty_field_ident: format_ident!("__ty", span = ident.span()),
|
||||
ident,
|
||||
})
|
||||
}
|
||||
|
|
@ -217,24 +199,18 @@ impl ToTokens for ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
sim_builder_ident,
|
||||
sim_builder_ty_field_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _, // TODO: implement cmp_eq for enums
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
struct_attrs.push(common_derives(span));
|
||||
struct_attrs.push(parse_quote_spanned! {span=>
|
||||
struct_attrs.push(common_derives(ident.span()));
|
||||
struct_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_snake_case)]
|
||||
});
|
||||
let struct_fields = Punctuated::from_iter(variants.pairs().map_pair_value_ref(
|
||||
|
|
@ -258,8 +234,8 @@ impl ToTokens for ParsedEnum {
|
|||
colon_token = Token);
|
||||
ty.clone().into()
|
||||
} else {
|
||||
colon_token = Token;
|
||||
parse_quote_spanned! {span=>
|
||||
colon_token = Token);
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
()
|
||||
}
|
||||
};
|
||||
|
|
@ -302,30 +278,30 @@ impl ToTokens for ParsedEnum {
|
|||
}) = field
|
||||
{
|
||||
let expr = ty.make_hdl_type_expr(context);
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: #expr,
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: (),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
parse_quote_spanned! {span=>
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#target {
|
||||
#(#fields)*
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
{
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
enum_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(dead_code)]
|
||||
});
|
||||
ItemEnum {
|
||||
|
|
@ -374,7 +350,7 @@ impl ToTokens for ParsedEnum {
|
|||
.to_tokens(tokens);
|
||||
}
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
enum_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
ItemEnum {
|
||||
|
|
@ -409,7 +385,7 @@ impl ToTokens for ParsedEnum {
|
|||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
ty: parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
},
|
||||
},
|
||||
|
|
@ -423,148 +399,21 @@ impl ToTokens for ParsedEnum {
|
|||
)),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
struct_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
ItemStruct {
|
||||
attrs: struct_attrs,
|
||||
vis: vis.clone(),
|
||||
struct_token: Token,
|
||||
ident: sim_builder_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: FieldsNamed {
|
||||
brace_token: *brace_token,
|
||||
named: Punctuated::from_iter([Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(sim_builder_ty_field_ident.clone()),
|
||||
colon_token: Some(Token),
|
||||
ty: parse_quote_spanned! {span=>
|
||||
#target #type_generics
|
||||
},
|
||||
}]),
|
||||
}
|
||||
.into(),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
});
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
let sim_value_has_unknown_variant = !variants.len().is_power_of_two();
|
||||
let sim_value_unknown_variant_name = sim_value_has_unknown_variant.then(|| {
|
||||
let mut name = String::new();
|
||||
let unknown = "Unknown";
|
||||
loop {
|
||||
let orig_len = name.len();
|
||||
name.push_str(unknown);
|
||||
if variants.iter().all(|v| v.ident != name) {
|
||||
break Ident::new(&name, span);
|
||||
}
|
||||
name.truncate(orig_len);
|
||||
name.push('_');
|
||||
}
|
||||
});
|
||||
let sim_value_unknown_variant =
|
||||
sim_value_unknown_variant_name
|
||||
.as_ref()
|
||||
.map(|unknown_variant_name| {
|
||||
Pair::End(parse_quote_spanned! {span=>
|
||||
#unknown_variant_name(::fayalite::enum_::UnknownVariantSimValue)
|
||||
})
|
||||
});
|
||||
ItemEnum {
|
||||
attrs: enum_attrs,
|
||||
vis: vis.clone(),
|
||||
enum_token: *enum_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
brace_token: *brace_token,
|
||||
variants: Punctuated::from_iter(
|
||||
variants
|
||||
.pairs()
|
||||
.map_pair_value_ref(
|
||||
|ParsedVariant {
|
||||
attrs,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
}| Variant {
|
||||
attrs: attrs.clone(),
|
||||
ident: ident.clone(),
|
||||
fields: match field {
|
||||
Some(ParsedVariantField {
|
||||
paren_token,
|
||||
attrs,
|
||||
options: _,
|
||||
ty,
|
||||
comma_token,
|
||||
}) => Fields::Unnamed(FieldsUnnamed {
|
||||
paren_token: *paren_token,
|
||||
unnamed: Punctuated::from_iter([
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: attrs.clone(),
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
},
|
||||
},
|
||||
Some(comma_token.unwrap_or(Token))),
|
||||
),
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::EnumPaddingSimValue
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
]),
|
||||
}),
|
||||
None => Fields::Unnamed(parse_quote_spanned! {span=>
|
||||
(::fayalite::enum_::EnumPaddingSimValue)
|
||||
}),
|
||||
},
|
||||
discriminant: None,
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant),
|
||||
),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let self_token = Token;
|
||||
for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() {
|
||||
if let Some(ParsedVariantField { ty, .. }) = field {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident<__V: ::fayalite::expr::ToExpr<Type = #ty>>(
|
||||
#self_token,
|
||||
self,
|
||||
v: __V,
|
||||
) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::new_by_index(
|
||||
#self_token,
|
||||
self,
|
||||
#index,
|
||||
::fayalite::__std::option::Option::Some(
|
||||
::fayalite::expr::Expr::canonical(
|
||||
|
|
@ -575,98 +424,68 @@ impl ToTokens for ParsedEnum {
|
|||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident<__V: ::fayalite::sim::value::ToSimValueWithType<#ty>>(
|
||||
#self_token,
|
||||
v: __V,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
let v = ::fayalite::sim::value::ToSimValueWithType::into_sim_value_with_type(
|
||||
v,
|
||||
#self_token.#sim_builder_ty_field_ident.#ident,
|
||||
);
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(v, ::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident(#self_token) -> ::fayalite::expr::Expr<Self> {
|
||||
#vis fn #ident(self) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::new_by_index(
|
||||
#self_token,
|
||||
self,
|
||||
#index,
|
||||
::fayalite::__std::option::Option::None,
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident(#self_token) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let variants_token = Ident::new("variants", span);
|
||||
let from_canonical_body_fields = Vec::from_iter(variants.iter().enumerate().map(
|
||||
|(index, ParsedVariant { ident, field, .. })| {
|
||||
let ident_str = ident.to_string();
|
||||
let val = if field.is_some() {
|
||||
let missing_value_msg = format!("expected variant {ident} to have a field");
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::ty::Type::from_canonical(ty.expect(#missing_value_msg))
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::__std::assert!(ty.is_none());
|
||||
}
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: {
|
||||
let ::fayalite::enum_::EnumVariant {
|
||||
name,
|
||||
ty,
|
||||
} = #variants_token[#index];
|
||||
} = variants[#index];
|
||||
::fayalite::__std::assert_eq!(&*name, #ident_str);
|
||||
#val
|
||||
},
|
||||
}
|
||||
},
|
||||
));
|
||||
let variant_access_token = Ident::new("variant_access", span);
|
||||
let match_active_scope_match_arms = Vec::from_iter(variants.iter().enumerate().map(
|
||||
|(index, ParsedVariant { ident, field, .. })| {
|
||||
if field.is_some() {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#index => #match_variant_ident::#ident(
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::VariantAccess::new_by_index(
|
||||
#variant_access_token.base(),
|
||||
#variant_access_token.variant_index(),
|
||||
variant_access.base(),
|
||||
variant_access.variant_index(),
|
||||
),
|
||||
),
|
||||
),
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#index => #match_variant_ident::#ident,
|
||||
}
|
||||
}
|
||||
|
|
@ -684,16 +503,16 @@ impl ToTokens for ParsedEnum {
|
|||
match field {
|
||||
Some(ParsedVariantField { options, .. }) => {
|
||||
let FieldOptions {} = options.body;
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::enum_::EnumVariant {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
ty: ::fayalite::__std::option::Option::Some(
|
||||
::fayalite::ty::Type::canonical(&#self_token.#ident),
|
||||
::fayalite::ty::Type::canonical(&self.#ident),
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
None => quote_spanned! {span=>
|
||||
None => quote_spanned! {ident.span()=>
|
||||
::fayalite::enum_::EnumVariant {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
ty: ::fayalite::__std::option::Option::None,
|
||||
|
|
@ -702,151 +521,14 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_unknown_match_arm = if let Some(sim_value_unknown_variant_name) =
|
||||
&sim_value_unknown_variant_name
|
||||
{
|
||||
quote_spanned! {span=>
|
||||
_ => #sim_value_ident::#sim_value_unknown_variant_name(v.unknown_variant_from_opaque()),
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
#sim_value_ident::#ident(field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
),
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_clone_from_opaque_unknown_match_arm =
|
||||
if let Some(sim_value_unknown_variant_name) = &sim_value_unknown_variant_name {
|
||||
quote_spanned! {span=>
|
||||
_ => if let #sim_value_ident::#sim_value_unknown_variant_name(value) = value {
|
||||
v.unknown_variant_clone_from_opaque(value);
|
||||
} else {
|
||||
*value = #sim_value_ident::#sim_value_unknown_variant_name(
|
||||
v.unknown_variant_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_clone_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(field, padding) = value {
|
||||
v.variant_with_field_clone_from_opaque(field, padding);
|
||||
} else {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
*value = #sim_value_ident::#ident(field, padding);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(padding) = value {
|
||||
v.variant_no_field_clone_from_opaque(padding);
|
||||
} else {
|
||||
*value = #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_clone_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_to_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(field, padding) => {
|
||||
v.variant_with_field_to_opaque(#index, field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(padding) => {
|
||||
v.variant_no_field_to_opaque(#index, padding)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant_name.as_ref().map(
|
||||
|sim_value_unknown_variant_name| {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#sim_value_unknown_variant_name(value) => {
|
||||
v.unknown_variant_to_opaque(value)
|
||||
}
|
||||
}
|
||||
},
|
||||
)),
|
||||
);
|
||||
let variants_len = variants.len();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type BaseType = ::fayalite::enum_::Enum;
|
||||
type MaskType = ::fayalite::int::Bool;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ::fayalite::module::Scope;
|
||||
type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@ -858,11 +540,11 @@ impl ToTokens for ParsedEnum {
|
|||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
::fayalite::module::enum_match_variants_helper(this, source_location)
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
::fayalite::int::Bool
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::CanonicalType::Enum(::fayalite::enum_::Enum::new(::fayalite::enum_::EnumType::variants(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::CanonicalType::Enum(::fayalite::enum_::Enum::new(::fayalite::enum_::EnumType::variants(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
#[allow(non_snake_case)]
|
||||
|
|
@ -870,8 +552,8 @@ impl ToTokens for ParsedEnum {
|
|||
let ::fayalite::ty::CanonicalType::Enum(enum_) = canonical_type else {
|
||||
::fayalite::__std::panic!("expected enum");
|
||||
};
|
||||
let #variants_token = ::fayalite::enum_::EnumType::variants(&enum_);
|
||||
::fayalite::__std::assert_eq!(#variants_token.len(), #variants_len, "enum has wrong number of variants");
|
||||
let variants = ::fayalite::enum_::EnumType::variants(&enum_);
|
||||
::fayalite::__std::assert_eq!(variants.len(), #variants_len, "enum has wrong number of variants");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
|
@ -879,86 +561,29 @@ impl ToTokens for ParsedEnum {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_clone_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
let v = ::fayalite::enum_::EnumSimValueToOpaque::new(*self, writer);
|
||||
match value {
|
||||
#(#sim_value_to_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::enum_::EnumType for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type SimBuilder = #sim_builder_ident #type_generics;
|
||||
fn match_activate_scope(
|
||||
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
) -> (<Self as ::fayalite::ty::Type>::MatchVariant, <Self as ::fayalite::ty::Type>::MatchActiveScope) {
|
||||
let (#variant_access_token, scope) = v.activate();
|
||||
let (variant_access, scope) = v.activate();
|
||||
(
|
||||
match #variant_access_token.variant_index() {
|
||||
match variant_access.variant_index() {
|
||||
#(#match_active_scope_match_arms)*
|
||||
#variants_len.. => ::fayalite::__std::panic!("invalid variant index"),
|
||||
},
|
||||
scope,
|
||||
)
|
||||
}
|
||||
fn variants(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::enum_::EnumVariant]> {
|
||||
fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::EnumVariant]> {
|
||||
::fayalite::intern::Intern::intern(&[
|
||||
#(#variants_body_variants)*
|
||||
][..])
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::convert::From<#target #type_generics>
|
||||
for #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn from(#sim_builder_ty_field_ident: #target #type_generics) -> Self {
|
||||
Self { #sim_builder_ty_field_ident }
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
|
|
@ -967,44 +592,35 @@ impl ToTokens for ParsedEnum {
|
|||
static_generics.split_for_impl();
|
||||
let static_type_body_variants =
|
||||
Vec::from_iter(variants.iter().map(|ParsedVariant { ident, field, .. }| {
|
||||
if field.is_some() {
|
||||
quote_spanned! {span=>
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: ::fayalite::ty::StaticType::TYPE,
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: (),
|
||||
}
|
||||
}
|
||||
}));
|
||||
let type_properties = format_ident!("__type_properties", span = span);
|
||||
let type_properties = format_ident!("__type_properties", span = ident.span());
|
||||
let type_properties_variants =
|
||||
Vec::from_iter(variants.iter().map(|ParsedVariant { field, .. }| {
|
||||
Vec::from_iter(variants.iter().map(|ParsedVariant { ident, field, .. }| {
|
||||
let variant = if let Some(ParsedVariantField { ty, .. }) = field {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::__std::option::Option::Some(
|
||||
<#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::__std::option::Option::None
|
||||
}
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
let #type_properties = #type_properties.variant(#variant);
|
||||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType
|
||||
for #target #static_type_generics
|
||||
|
|
@ -1023,34 +639,6 @@ impl ToTokens for ParsedEnum {
|
|||
const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties =
|
||||
<::fayalite::int::Bool as ::fayalite::ty::StaticType>::TYPE_PROPERTIES;
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::sim::value::ToSimValue
|
||||
for #sim_value_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
type Type = #target #static_type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
::fayalite::__std::clone::Clone::clone(self),
|
||||
)
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
self,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
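
As a concrete reading of the per-variant constructors generated in the loop earlier in this file: for a hypothetical `#[hdl]` enum `MyState` whose variant `Idle` carries no payload and sits at index 0, the no-field branch of that template expands to roughly:

// Rough expansion of the no-field branch; `MyState` and `Idle` are
// hypothetical names, 0 is the variant index.
impl MyState {
    #[allow(non_snake_case, dead_code)]
    pub fn Idle(self) -> ::fayalite::expr::Expr<Self> {
        ::fayalite::expr::ToExpr::to_expr(
            &::fayalite::expr::ops::EnumLiteral::new_by_index(
                self,
                0,
                ::fayalite::__std::option::Option::None,
            ),
        )
    }
}
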
@ -1,493 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
Errors, HdlAttr,
hdl_type_common::{
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
PhantomConstGetBound, TypesParser, WrappedInConst, common_derives, get_target, known_items,
},
kw,
};
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote_spanned};
use syn::{
Attribute, Expr, Fields, GenericParam, Generics, Ident, ItemStruct, ItemType, Token, Type,
TypeGroup, TypeParam, TypeParen, Visibility, parse_quote_spanned, punctuated::Pair,
token::Paren,
};

#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorTypeParam {
|
||||
attrs: Vec<Attribute>,
|
||||
ident: Ident,
|
||||
colon_token: Token![:],
|
||||
phantom_const_get_bound: PhantomConstGetBound,
|
||||
plus_token: Option<Token![+]>,
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for TypeParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
let PhantomConstAccessorTypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
phantom_const_get_bound,
|
||||
plus_token,
|
||||
} = value;
|
||||
TypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token: Some(colon_token),
|
||||
bounds: FromIterator::from_iter([Pair::new(
|
||||
phantom_const_get_bound.into(),
|
||||
plus_token,
|
||||
)]),
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for GenericParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
TypeParam::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorTypeParam {
|
||||
fn parse_opt(generic_param: GenericParam) -> Option<Self> {
|
||||
let GenericParam::Type(TypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
bounds,
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}) = generic_param
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let colon_token = colon_token.unwrap_or(Token));
|
||||
let mut bounds = bounds.into_pairs();
|
||||
let (bound, plus_token) = bounds.next()?.into_tuple();
|
||||
let phantom_const_get_bound = PhantomConstGetBound::parse_type_param_bound(bound)
|
||||
.ok()?
|
||||
.ok()?;
|
||||
let None = bounds.next() else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
phantom_const_get_bound,
|
||||
plus_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorGenerics {
|
||||
lt_token: Token![<],
|
||||
type_param: PhantomConstAccessorTypeParam,
|
||||
comma_token: Option<Token![,]>,
|
||||
gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: PhantomConstAccessorGenerics) -> Self {
|
||||
let PhantomConstAccessorGenerics {
|
||||
lt_token,
|
||||
type_param,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = value;
|
||||
Generics {
|
||||
lt_token: Some(lt_token),
|
||||
params: FromIterator::from_iter([Pair::new(type_param.into(), comma_token)]),
|
||||
gt_token: Some(gt_token),
|
||||
where_clause: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: &'a PhantomConstAccessorGenerics) -> Self {
|
||||
value.clone().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorGenerics {
|
||||
fn parse_opt(generics: Generics) -> Option<Self> {
|
||||
let Generics {
|
||||
lt_token,
|
||||
params,
|
||||
gt_token,
|
||||
where_clause: None,
|
||||
} = generics
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let mut params = params.into_pairs();
|
||||
let (generic_param, comma_token) = params.next()?.into_tuple();
|
||||
let type_param = PhantomConstAccessorTypeParam::parse_opt(generic_param)?;
|
||||
let span = type_param.ident.span();
|
||||
let lt_token = lt_token.unwrap_or(Token);
|
||||
let gt_token = gt_token.unwrap_or(Token);
|
||||
let None = params.next() else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
lt_token,
|
||||
type_param,
|
||||
comma_token,
|
||||
gt_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum ParsedTypeAlias {
|
||||
TypeAlias {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
eq_token: Token![=],
|
||||
ty: MaybeParsed<ParsedType, Type>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
PhantomConstAccessor {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: PhantomConstAccessorGenerics,
|
||||
eq_token: Token![=],
|
||||
ty: Type,
|
||||
ty_is_dyn_size: Option<known_items::DynSize>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
}
|
||||
|
||||
impl ParsedTypeAlias {
|
||||
fn ty_is_dyn_size(ty: &Type) -> Option<known_items::DynSize> {
|
||||
match ty {
|
||||
Type::Group(TypeGroup {
|
||||
group_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Paren(TypeParen {
|
||||
paren_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Path(syn::TypePath { qself: None, path }) => {
|
||||
known_items::DynSize::parse_path(path.clone()).ok()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn parse_phantom_const_accessor(
|
||||
item: ItemType,
|
||||
mut errors: Errors,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = item;
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
ref target,
|
||||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = options.body;
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((target, ..)) = target {
|
||||
errors.error(
|
||||
target,
|
||||
"target is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((no_runtime_generics,)) = no_runtime_generics {
|
||||
errors.error(
|
||||
no_runtime_generics,
|
||||
"no_runtime_generics is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
if let Some((custom_bounds,)) = custom_bounds {
|
||||
errors.error(
|
||||
custom_bounds,
|
||||
"custom_bounds is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
let Some(generics) = PhantomConstAccessorGenerics::parse_opt(generics) else {
|
||||
errors.error(ident, "#[hdl(get(...))] type alias must be of the form:\ntype MyTypeGetter<P: PhantomConstGet<MyType>> = RetType;");
|
||||
errors.finish()?;
|
||||
unreachable!();
|
||||
};
|
||||
errors.finish()?;
|
||||
let ty_is_dyn_size = Self::ty_is_dyn_size(&ty);
|
||||
Ok(Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty: *ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
})
|
||||
}
|
||||
fn parse(item: ItemType) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
mut attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
mut generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let mut options = errors
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
|
||||
&mut attrs,
|
||||
))
|
||||
.unwrap_or_default();
|
||||
errors.ok(options.body.validate());
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target: _,
|
||||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref mut get,
|
||||
} = options.body;
|
||||
if let Some(get) = get.take() {
|
||||
return Self::parse_phantom_const_accessor(
|
||||
ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
},
|
||||
errors,
|
||||
options,
|
||||
get,
|
||||
);
|
||||
}
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
let generics = if custom_bounds.is_some() {
|
||||
MaybeParsed::Unrecognized(generics)
|
||||
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
|
||||
MaybeParsed::Parsed(generics)
|
||||
} else {
|
||||
MaybeParsed::Unrecognized(generics)
|
||||
};
|
||||
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
|
||||
errors.finish()?;
|
||||
Ok(Self::TypeAlias {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ParsedTypeAlias {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
Self::TypeAlias {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: generics.into(),
|
||||
eq_token: *eq_token,
|
||||
ty: Box::new(ty.clone().into()),
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
|
||||
(generics, ty, no_runtime_generics)
|
||||
{
|
||||
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
|
||||
ty.make_hdl_type_expr(context)
|
||||
})
|
||||
}
|
||||
}
|
||||
Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get: (_get_kw, _get_paren, get_expr),
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target: _,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let span = ident.span();
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
let type_param_ident = &generics.type_param.ident;
|
||||
let syn_generics = Generics::from(generics);
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: syn_generics.clone(),
|
||||
eq_token: *eq_token,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
<#ty as ::fayalite::phantom_const::ReturnSelfUnchanged<#type_param_ident>>::Type
|
||||
},
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let generics_accumulation_ident =
|
||||
format_ident!("__{}__GenericsAccumulation", ident);
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: Token,
|
||||
ident: generics_accumulation_ident.clone(),
|
||||
generics: Generics::default(),
|
||||
fields: Fields::Unnamed(parse_quote_spanned! {span=>
|
||||
(())
|
||||
}),
|
||||
semi_token: Some(Token),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals, dead_code)]
|
||||
#vis const #ident: #generics_accumulation_ident = #generics_accumulation_ident(());
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let (impl_generics, _type_generics, where_clause) = syn_generics.split_for_impl();
|
||||
let phantom_const_get_ty = &generics.type_param.phantom_const_get_bound.ty;
|
||||
let index_output = if let Some(ty_is_dyn_size) = ty_is_dyn_size {
|
||||
known_items::usize(ty_is_dyn_size.span).to_token_stream()
|
||||
} else {
|
||||
ty.to_token_stream()
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals)]
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::ops::Index<#type_param_ident>
|
||||
for #generics_accumulation_ident
|
||||
#where_clause
|
||||
{
|
||||
type Output = #index_output;
|
||||
|
||||
fn index(&self, __param: #type_param_ident) -> &Self::Output {
|
||||
::fayalite::phantom_const::type_alias_phantom_const_get_helper::<#phantom_const_get_ty, #index_output>(
|
||||
__param,
|
||||
#get_expr,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
|
||||
let item = ParsedTypeAlias::parse(item)?;
|
||||
let outline_generated = match &item {
|
||||
ParsedTypeAlias::TypeAlias { options, .. }
|
||||
| ParsedTypeAlias::PhantomConstAccessor { options, .. } => options.body.outline_generated,
|
||||
};
|
||||
let mut contents = item.to_token_stream();
|
||||
if outline_generated.is_some() {
|
||||
contents = crate::outline_generated(contents, "hdl-type-alias-");
|
||||
}
|
||||
Ok(contents)
|
||||
}
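
The removed `hdl_type_alias.rs` above handles two alias forms; for the `#[hdl(get(...))]` accessor form, the accepted shape is the one spelled out in its own error message. A hedged sketch of that surface (all names and the `get(...)` argument here are placeholders, not taken from this patch):

// Placeholder illustration of the accepted shape; see the error text in
// parse_phantom_const_accessor above.
#[hdl(get(/* accessor expression */))]
type MyTypeGetter<P: PhantomConstGet<MyType>> = RetType;
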
File diff suppressed because it is too large
@ -2,43 +2,23 @@
// See Notices.txt for copyright information
#![cfg_attr(test, recursion_limit = "512")]
use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, quote};
use std::{
collections::{HashMap, hash_map::Entry},
io::{ErrorKind, Write},
};
use quote::{quote, ToTokens};
use std::io::{ErrorKind, Write};
use syn::{
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed,
ext::IdentExt,
parenthesized,
bracketed, parenthesized,
parse::{Parse, ParseStream, Parser},
parse_quote,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Bracket, Paren},
punctuated::Pair,
AttrStyle, Attribute, Error, Item, Token,
};

mod fold;
mod hdl_bundle;
mod hdl_enum;
mod hdl_type_alias;
mod hdl_type_common;
mod module;
mod process_cfg;

pub(crate) trait CustomToken:
Copy
+ Spanned
+ ToTokens
+ std::fmt::Debug
+ Eq
+ std::hash::Hash
+ Default
+ quote::IdentFragment
+ Parse
{
const IDENT_STR: &'static str;
}
//mod value_derive_common;
//mod value_derive_struct;

mod kw {
pub(crate) use syn::token::Extern as extern_;
@ -58,28 +38,14 @@ mod kw {
}

crate::fold::no_op_fold!($kw);

impl crate::CustomToken for $kw {
const IDENT_STR: &'static str = stringify!($kw);
}
};
}

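For reference, the visible part of the `custom_keyword!` body means an invocation such as `custom_keyword!(cmp_eq)` below contributes at least the following items; the elided part of the hunk presumably defines the `cmp_eq` token type itself (for example via syn's custom-keyword support), which is an assumption rather than something shown in this diff:

// Items produced by the visible template for `custom_keyword!(cmp_eq)`.
crate::fold::no_op_fold!(cmp_eq);

impl crate::CustomToken for cmp_eq {
    const IDENT_STR: &'static str = "cmp_eq";
}
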
custom_keyword!(__evaluated_cfgs);
|
||||
custom_keyword!(add_platform_io);
|
||||
custom_keyword!(all);
|
||||
custom_keyword!(any);
|
||||
custom_keyword!(cfg);
|
||||
custom_keyword!(cfg_attr);
|
||||
custom_keyword!(clock_domain);
|
||||
custom_keyword!(cmp_eq);
|
||||
custom_keyword!(connect_inexact);
|
||||
custom_keyword!(custom_bounds);
|
||||
custom_keyword!(flip);
|
||||
custom_keyword!(get);
|
||||
custom_keyword!(hdl);
|
||||
custom_keyword!(hdl_module);
|
||||
custom_keyword!(incomplete_wire);
|
||||
custom_keyword!(input);
|
||||
custom_keyword!(instance);
|
||||
custom_keyword!(m);
|
||||
|
|
@ -89,12 +55,11 @@ mod kw {
|
|||
custom_keyword!(no_reset);
|
||||
custom_keyword!(no_runtime_generics);
|
||||
custom_keyword!(no_static);
|
||||
custom_keyword!(not);
|
||||
custom_keyword!(outline_generated);
|
||||
custom_keyword!(output);
|
||||
custom_keyword!(reg_builder);
|
||||
custom_keyword!(reset);
|
||||
custom_keyword!(sim);
|
||||
custom_keyword!(reset_default);
|
||||
custom_keyword!(skip);
|
||||
custom_keyword!(target);
|
||||
custom_keyword!(wire);
|
||||
|
|
@ -103,34 +68,34 @@ mod kw {
|
|||
type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlAttr<T, KW> {
|
||||
pub(crate) struct HdlAttr<T> {
|
||||
pub(crate) pound_token: Pound,
|
||||
pub(crate) style: AttrStyle,
|
||||
pub(crate) bracket_token: syn::token::Bracket,
|
||||
pub(crate) kw: KW,
|
||||
pub(crate) hdl: kw::hdl,
|
||||
pub(crate) paren_token: Option<syn::token::Paren>,
|
||||
pub(crate) body: T,
|
||||
}
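HdlAttr is generic over both the body and the keyword so the same plumbing can back #[hdl(...)] and #[hdl_module(...)]. For orientation, roughly the same information can be pulled out with stock syn calls; this is only a stand-in, not the crate's parse_attr:

    use syn::{parse_quote, Attribute, Ident};

    fn demo() -> syn::Result<()> {
        let attr: Attribute = parse_quote!(#[hdl(outline_generated)]);
        // HdlAttr keeps every syntactic piece (#, [ ], the keyword, the
        // optional parens, and the body) so the attribute can be re-emitted
        // verbatim; plain syn only needs this much to read the same data:
        assert!(attr.path().is_ident("hdl"));
        let body: Ident = attr.parse_args()?;
        assert_eq!(body, "outline_generated");
        Ok(())
    }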
|
||||
|
||||
crate::fold::impl_fold! {
|
||||
struct HdlAttr<T, KW,> {
|
||||
struct HdlAttr<T,> {
|
||||
pound_token: Pound,
|
||||
style: AttrStyle,
|
||||
bracket_token: syn::token::Bracket,
|
||||
kw: KW,
|
||||
hdl: kw::hdl,
|
||||
paren_token: Option<syn::token::Paren>,
|
||||
body: T,
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl<T, KW> HdlAttr<T, KW> {
|
||||
pub(crate) fn split_body(self) -> (HdlAttr<(), KW>, T) {
|
||||
impl<T> HdlAttr<T> {
|
||||
pub(crate) fn split_body(self) -> (HdlAttr<()>, T) {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
|
|
@ -139,19 +104,19 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: (),
|
||||
},
|
||||
body,
|
||||
)
|
||||
}
|
||||
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2, KW> {
|
||||
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: _,
|
||||
} = self;
|
||||
|
|
@ -159,20 +124,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_ref(&self) -> HdlAttr<&T, KW>
|
||||
where
|
||||
KW: Clone,
|
||||
{
|
||||
pub(crate) fn as_ref(&self) -> HdlAttr<&T> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
ref kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
ref body,
|
||||
} = *self;
|
||||
|
|
@ -180,20 +142,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: kw.clone(),
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(
|
||||
self,
|
||||
f: F,
|
||||
) -> Result<HdlAttr<R, KW>, E> {
|
||||
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(self, f: F) -> Result<HdlAttr<R>, E> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
|
|
@ -201,17 +160,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body)?,
|
||||
})
|
||||
}
|
||||
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R, KW> {
|
||||
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
|
|
@ -219,7 +178,7 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body),
|
||||
}
|
||||
|
|
@ -227,32 +186,31 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
fn to_attr(&self) -> Attribute
|
||||
where
|
||||
T: ToTokens,
|
||||
KW: ToTokens,
|
||||
{
|
||||
parse_quote! { #self }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Default, KW: Default> Default for HdlAttr<T, KW> {
|
||||
impl<T: Default> Default for HdlAttr<T> {
|
||||
fn default() -> Self {
|
||||
T::default().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, KW: Default> From<T> for HdlAttr<T, KW> {
|
||||
impl<T> From<T> for HdlAttr<T> {
|
||||
fn from(body: T) -> Self {
|
||||
HdlAttr {
|
||||
pound_token: Default::default(),
|
||||
style: AttrStyle::Outer,
|
||||
bracket_token: Default::default(),
|
||||
kw: Default::default(),
|
||||
hdl: Default::default(),
|
||||
paren_token: Default::default(),
|
||||
body,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
||||
impl<T: ToTokens> ToTokens for HdlAttr<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pound_token.to_tokens(tokens);
|
||||
match self.style {
|
||||
|
|
@ -260,7 +218,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
AttrStyle::Outer => {}
|
||||
};
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.kw.to_tokens(tokens);
|
||||
self.hdl.to_tokens(tokens);
|
||||
match self.paren_token {
|
||||
Some(paren_token) => {
|
||||
paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens))
|
||||
|
|
@ -268,7 +226,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
None => {
|
||||
let body = self.body.to_token_stream();
|
||||
if !body.is_empty() {
|
||||
syn::token::Paren(self.kw.span())
|
||||
syn::token::Paren(self.hdl.span)
|
||||
.surround(tokens, |tokens| tokens.extend([body]));
|
||||
}
|
||||
}
|
||||
|
|
@ -277,24 +235,18 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_hdl_attr<KW: CustomToken>(attr: &Attribute) -> bool {
|
||||
attr.path().is_ident(KW::IDENT_STR)
|
||||
fn is_hdl_attr(attr: &Attribute) -> bool {
|
||||
attr.path().is_ident("hdl")
|
||||
}
|
||||
|
||||
impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
||||
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>>
|
||||
where
|
||||
KW: ToTokens,
|
||||
{
|
||||
impl<T: Parse> HdlAttr<T> {
|
||||
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
attrs.retain(|attr| {
|
||||
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(
|
||||
attr,
|
||||
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
|
||||
));
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
false
|
||||
|
|
@ -305,19 +257,13 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
errors.finish()?;
|
||||
Ok(retval)
|
||||
}
|
||||
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>>
|
||||
where
|
||||
KW: ToTokens,
|
||||
{
|
||||
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
for attr in attrs {
|
||||
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(
|
||||
attr,
|
||||
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
|
||||
));
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
}
|
||||
|
|
@ -338,7 +284,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
) -> syn::Result<Self> {
|
||||
let bracket_content;
|
||||
let bracket_token = bracketed!(bracket_content in input);
|
||||
let kw = bracket_content.parse()?;
|
||||
let hdl = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
let body;
|
||||
let paren_token;
|
||||
|
|
@ -359,7 +305,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
})
|
||||
|
|
@ -867,7 +813,6 @@ macro_rules! options {
|
|||
};
|
||||
}
|
||||
|
||||
use crate::hdl_type_alias::hdl_type_alias_impl;
|
||||
pub(crate) use options;
|
||||
|
||||
pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream {
|
||||
|
|
@ -907,372 +852,25 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
|
|||
}
|
||||
}
|
||||
|
||||
fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
|
||||
let func = module::ModuleFn::parse_from_fn(item)?;
|
||||
let options = func.config_options();
|
||||
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let options = syn::parse2::<module::ConfigOptions>(attr)?;
|
||||
let options = HdlAttr::from(options);
|
||||
let func = syn::parse2::<module::ModuleFn>(quote! { #options #item })?;
|
||||
let mut contents = func.generate();
|
||||
if options.outline_generated.is_some() {
|
||||
if options.body.outline_generated.is_some() {
|
||||
contents = outline_generated(contents, "module-");
|
||||
}
|
||||
Ok(contents)
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) enum CfgExpr {
    Option {
        ident: Ident,
        value: Option<(Token![=], LitStr)>,
    },
    All {
        all: kw::all,
        paren: Paren,
        exprs: Punctuated<CfgExpr, Token![,]>,
    },
    Any {
        any: kw::any,
        paren: Paren,
        exprs: Punctuated<CfgExpr, Token![,]>,
    },
    Not {
        not: kw::not,
        paren: Paren,
        expr: Box<CfgExpr>,
        trailing_comma: Option<Token![,]>,
    },
}

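CfgExpr mirrors the cfg predicate grammar: a bare option (possibly name = "value"), all(...), any(...), and not(...), each kept token-for-token so it can later be re-emitted unchanged. The accepted surface forms, checked here with syn::Meta as a stand-in since CfgExpr itself is crate-private:

    use syn::{parse_str, Meta};

    fn demo() -> syn::Result<()> {
        // Each of these is a valid cfg predicate in the grammar CfgExpr models.
        for src in [
            r#"feature = "unstable-doc""#,
            "test",
            r#"all(feature = "unstable-doc", not(test))"#,
            "any(unix, windows)",
        ] {
            let _meta: Meta = parse_str(src)?;
        }
        Ok(())
    }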
impl Parse for CfgExpr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
match input.cursor().ident() {
|
||||
Some((_, cursor)) if cursor.eof() => {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: None,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: Some((input.parse()?, input.parse()?)),
|
||||
});
|
||||
}
|
||||
let contents;
|
||||
if input.peek(kw::all) {
|
||||
Ok(CfgExpr::All {
|
||||
all: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::any) {
|
||||
Ok(CfgExpr::Any {
|
||||
any: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::not) {
|
||||
Ok(CfgExpr::Not {
|
||||
not: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
} else {
|
||||
Err(input.error("expected cfg-pattern"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for CfgExpr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
CfgExpr::Option { ident, value } => {
|
||||
ident.to_tokens(tokens);
|
||||
if let Some((eq, value)) = value {
|
||||
eq.to_tokens(tokens);
|
||||
value.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
CfgExpr::All { all, paren, exprs } => {
|
||||
all.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Any { any, paren, exprs } => {
|
||||
any.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Not {
|
||||
not,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} => {
|
||||
not.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Cfg {
    cfg: kw::cfg,
    paren: Paren,
    expr: CfgExpr,
    trailing_comma: Option<Token![,]>,
}

impl Cfg {
    fn parse_meta(meta: &Meta) -> syn::Result<Self> {
        syn::parse2(meta.to_token_stream())
    }
}

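Cfg::parse_meta relies on a small but useful trick: take an already-parsed syn::Meta, turn it back into tokens, and re-parse it with a dedicated Parse impl that preserves the pieces needed for re-emission. The same move with stock syn types (illustrative only):

    use quote::ToTokens;
    use syn::{parse_quote, Meta};

    fn reparse_demo() -> syn::Result<()> {
        let meta: Meta = parse_quote!(cfg(all(feature = "unstable-doc", not(test))));
        // Re-parse the Meta's own tokens; Cfg's Parse impl does exactly this,
        // just with Cfg as the target type instead of Meta.
        let reparsed: Meta = syn::parse2(meta.to_token_stream())?;
        assert!(matches!(reparsed, Meta::List(_)));
        Ok(())
    }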
impl ToTokens for Cfg {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
cfg,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} = self;
|
||||
cfg.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfg {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) struct CfgAttr {
|
||||
cfg_attr: kw::cfg_attr,
|
||||
paren: Paren,
|
||||
expr: CfgExpr,
|
||||
comma: Token![,],
|
||||
attrs: Punctuated<Meta, Token![,]>,
|
||||
}
|
||||
|
||||
impl CfgAttr {
|
||||
pub(crate) fn to_cfg(&self) -> Cfg {
|
||||
Cfg {
|
||||
cfg: kw::cfg(self.cfg_attr.span),
|
||||
paren: self.paren,
|
||||
expr: self.expr.clone(),
|
||||
trailing_comma: None,
|
||||
}
|
||||
}
|
||||
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
|
||||
syn::parse2(meta.to_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for CfgAttr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg_attr: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
comma: contents.parse()?,
|
||||
attrs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct CfgAndValue {
|
||||
cfg: Cfg,
|
||||
eq_token: Token![=],
|
||||
value: LitBool,
|
||||
}
|
||||
|
||||
impl Parse for CfgAndValue {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
eq_token: input.parse()?,
|
||||
value: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Cfgs<T> {
|
||||
pub(crate) bracket: Bracket,
|
||||
pub(crate) cfgs_map: HashMap<Cfg, T>,
|
||||
pub(crate) cfgs_list: Vec<Cfg>,
|
||||
}
|
||||
|
||||
impl<T> Default for Cfgs<T> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
bracket: Default::default(),
|
||||
cfgs_map: Default::default(),
|
||||
cfgs_list: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Cfgs<T> {
|
||||
fn insert_cfg(&mut self, cfg: Cfg, value: T) {
|
||||
match self.cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
self.cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<bool> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for CfgAndValue {
|
||||
cfg,
|
||||
eq_token,
|
||||
value,
|
||||
} in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
|
||||
{
|
||||
let _ = eq_token;
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<()> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Cfgs<()> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
bracket,
|
||||
cfgs_map: _,
|
||||
cfgs_list,
|
||||
} = self;
|
||||
bracket.surround(tokens, |tokens| {
|
||||
for cfg in cfgs_list {
|
||||
cfg.to_tokens(tokens);
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn hdl_main(
|
||||
kw: impl CustomToken,
|
||||
attr: TokenStream,
|
||||
item: TokenStream,
|
||||
) -> syn::Result<TokenStream> {
|
||||
fn parse_evaluated_cfgs_attr<R>(
|
||||
input: ParseStream,
|
||||
parse_inner: impl FnOnce(ParseStream) -> syn::Result<R>,
|
||||
) -> syn::Result<R> {
|
||||
let _: Token![#] = input.parse()?;
|
||||
let bracket_content;
|
||||
bracketed!(bracket_content in input);
|
||||
let _: kw::__evaluated_cfgs = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
parenthesized!(paren_content in bracket_content);
|
||||
parse_inner(&paren_content)
|
||||
}
|
||||
let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2(
|
||||
|input: ParseStream| {
|
||||
let peek = input.fork();
|
||||
if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() {
|
||||
let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::<bool>::parse)?;
|
||||
Ok((Some(evaluated_cfgs), input.parse()?))
|
||||
} else {
|
||||
Ok((None, input.parse()?))
|
||||
}
|
||||
},
|
||||
item,
|
||||
)?;
|
||||
let cfgs = if let Some(cfgs) = evaluated_cfgs {
|
||||
cfgs
|
||||
} else {
|
||||
let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?;
|
||||
if cfgs.cfgs_list.is_empty() {
|
||||
Cfgs::default()
|
||||
} else {
|
||||
return Ok(quote! {
|
||||
::fayalite::__cfg_expansion_helper! {
|
||||
[]
|
||||
#cfgs
|
||||
{#[::fayalite::#kw(#attr)]} { #item }
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
let item = syn::parse2(quote! { #[#kw(#attr)] #item })?;
|
||||
let Some(item) = process_cfg::process_cfgs(item, cfgs)? else {
|
||||
return Ok(TokenStream::new());
|
||||
};
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let item = syn::parse2::<Item>(quote! { #[hdl(#attr)] #item })?;
|
||||
match item {
|
||||
Item::Enum(item) => hdl_enum::hdl_enum(item),
|
||||
Item::Struct(item) => hdl_bundle::hdl_bundle(item),
|
||||
Item::Fn(item) => hdl_module_impl(item),
|
||||
Item::Type(item) => hdl_type_alias_impl(item),
|
||||
_ => Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
"top-level #[hdl] can only be used on structs, enums, type aliases, or functions",
|
||||
"top-level #[hdl] can only be used on structs or enums",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl_module::default(), attr, item)
|
||||
}
|
||||
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl::default(), attr, item)
|
||||
}
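After cfg evaluation, hdl_main reattaches the attribute to the item and dispatches on the item kind. A condensed sketch of that dispatch using stock syn, with the per-kind handlers stubbed out (they stand in for hdl_bundle, hdl_enum, hdl_module_impl, and hdl_type_alias_impl):

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::Item;

    fn dispatch(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
        // Reattach the attribute so the handlers see the original input.
        let item: Item = syn::parse2(quote! { #[hdl(#attr)] #item })?;
        match item {
            Item::Struct(_) | Item::Enum(_) | Item::Fn(_) | Item::Type(_) => {
                // hand off to the matching handler here
                Ok(TokenStream::new())
            }
            other => Err(syn::Error::new_spanned(
                other,
                "top-level #[hdl] can only be used on structs, enums, type aliases, or functions",
            )),
        }
    }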
|
||||
|
|
|
|||
|
|
@ -1,20 +1,19 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
hdl_type_common::{ParsedGenerics, SplitForImpl},
|
||||
kw,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO},
|
||||
options,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO},
|
||||
options, Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, format_ident, quote, quote_spanned};
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use std::collections::HashSet;
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote,
|
||||
visit::{visit_pat, Visit},
|
||||
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
|
||||
LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate,
|
||||
parse_quote,
|
||||
visit::{Visit, visit_pat},
|
||||
};
|
||||
|
||||
mod transform_body;
|
||||
|
|
@ -39,7 +38,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res
|
|||
if name == "m" {
|
||||
Err(Error::new_spanned(
|
||||
name,
|
||||
"name conflicts with implicit `m: &ModuleBuilder`",
|
||||
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@ -60,37 +59,17 @@ impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> {
|
|||
|
||||
pub(crate) type ModuleIO = HdlLet<HdlLetKindIO>;
|
||||
|
||||
struct ModuleFnModule {
|
||||
pub(crate) struct ModuleFn {
|
||||
attrs: Vec<Attribute>,
|
||||
config_options: HdlAttr<ConfigOptions, kw::hdl_module>,
|
||||
config_options: HdlAttr<ConfigOptions>,
|
||||
module_kind: ModuleKind,
|
||||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
struct_generics: Option<ParsedGenerics>,
|
||||
struct_generics: ParsedGenerics,
|
||||
the_struct: TokenStream,
|
||||
}
|
||||
|
||||
enum ModuleFnImpl {
|
||||
Module(ModuleFnModule),
|
||||
Fn {
|
||||
attrs: Vec<Attribute>,
|
||||
config_options: HdlAttr<ConfigOptions, kw::hdl>,
|
||||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
},
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum HdlOrHdlModule {
|
||||
Hdl(hdl),
|
||||
HdlModule(hdl_module),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ModuleFn(ModuleFnImpl);
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
||||
pub(crate) enum ModuleKind {
|
||||
Extern,
|
||||
|
|
@ -110,25 +89,14 @@ impl Visit<'_> for ContainsSkippedIdent<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn config_options(&self) -> ConfigOptions {
|
||||
let (ModuleFnImpl::Module(ModuleFnModule {
|
||||
config_options: HdlAttr { body, .. },
|
||||
..
|
||||
})
|
||||
| ModuleFnImpl::Fn {
|
||||
config_options: HdlAttr { body, .. },
|
||||
..
|
||||
}) = &self.0;
|
||||
body.clone()
|
||||
}
|
||||
pub(crate) fn parse_from_fn(item: ItemFn) -> syn::Result<Self> {
|
||||
impl Parse for ModuleFn {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let ItemFn {
|
||||
mut attrs,
|
||||
vis,
|
||||
mut sig,
|
||||
block,
|
||||
} = item;
|
||||
} = input.parse()?;
|
||||
let Signature {
|
||||
ref constness,
|
||||
ref asyncness,
|
||||
|
|
@ -143,60 +111,43 @@ impl ModuleFn {
|
|||
ref output,
|
||||
} = sig;
|
||||
let mut errors = Errors::new();
|
||||
let Some(mut config_options) =
|
||||
errors.unwrap_or_default(
|
||||
HdlAttr::<ConfigOptions, HdlOrHdlModule>::parse_and_take_attr(&mut attrs),
|
||||
)
|
||||
else {
|
||||
errors.error(sig.ident, "missing #[hdl] or #[hdl_module] attribute");
|
||||
errors.finish()?;
|
||||
unreachable!();
|
||||
};
|
||||
let config_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_,
|
||||
} = config_options.body;
|
||||
let module_kind = match (config_options.kw, extern_) {
|
||||
(HdlOrHdlModule::Hdl(_), None) => None,
|
||||
(HdlOrHdlModule::Hdl(_), Some(extern2)) => {
|
||||
config_options.body.extern_ = None;
|
||||
errors.error(
|
||||
extern2.0,
|
||||
"extern can only be used as #[hdl_module(extern)]",
|
||||
);
|
||||
None
|
||||
}
|
||||
(HdlOrHdlModule::HdlModule(_), None) => Some(ModuleKind::Normal),
|
||||
(HdlOrHdlModule::HdlModule(_), Some(_)) => Some(ModuleKind::Extern),
|
||||
let module_kind = match extern_ {
|
||||
Some(_) => ModuleKind::Extern,
|
||||
None => ModuleKind::Normal,
|
||||
};
|
||||
if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
|
||||
for fn_arg in inputs {
|
||||
match fn_arg {
|
||||
FnArg::Receiver(_) => {
|
||||
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
|
||||
}
|
||||
FnArg::Typed(fn_arg) => {
|
||||
visit_pat(
|
||||
&mut CheckNameConflictsWithModuleBuilderVisitor {
|
||||
errors: &mut errors,
|
||||
},
|
||||
&fn_arg.pat,
|
||||
);
|
||||
}
|
||||
for fn_arg in inputs {
|
||||
match fn_arg {
|
||||
FnArg::Receiver(_) => {
|
||||
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
|
||||
}
|
||||
FnArg::Typed(fn_arg) => {
|
||||
visit_pat(
|
||||
&mut CheckNameConflictsWithModuleBuilderVisitor {
|
||||
errors: &mut errors,
|
||||
},
|
||||
&fn_arg.pat,
|
||||
);
|
||||
}
|
||||
}
|
||||
if let Some(constness) = constness {
|
||||
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
|
||||
}
|
||||
if let Some(asyncness) = asyncness {
|
||||
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
|
||||
}
|
||||
if let Some(unsafety) = unsafety {
|
||||
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
|
||||
}
|
||||
if let Some(abi) = abi {
|
||||
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
|
||||
}
|
||||
}
|
||||
if let Some(constness) = constness {
|
||||
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
|
||||
}
|
||||
if let Some(asyncness) = asyncness {
|
||||
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
|
||||
}
|
||||
if let Some(unsafety) = unsafety {
|
||||
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
|
||||
}
|
||||
if let Some(abi) = abi {
|
||||
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
|
||||
}
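These checks all feed the same Errors accumulator so that every problem is reported in one pass instead of stopping at the first. A minimal sketch of that accumulation pattern, assuming only syn::Error::combine and not fayalite's actual Errors type:

    // Hypothetical error sink showing the accumulate-then-finish shape.
    #[derive(Default)]
    struct ErrorSink(Option<syn::Error>);

    impl ErrorSink {
        fn push(&mut self, e: syn::Error) {
            match &mut self.0 {
                Some(prev) => prev.combine(e),
                None => self.0 = Some(e),
            }
        }
        fn finish(self) -> syn::Result<()> {
            // Ok if nothing was pushed, otherwise all diagnostics combined.
            self.0.map_or(Ok(()), Err)
        }
    }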
|
||||
let mut skipped_idents = HashSet::new();
|
||||
let struct_generic_params = generics
|
||||
|
|
@ -204,17 +155,14 @@ impl ModuleFn {
|
|||
.pairs_mut()
|
||||
.filter_map_pair_value_mut(|v| match v {
|
||||
GenericParam::Lifetime(LifetimeParam { attrs, .. }) => {
|
||||
errors.unwrap_or_default(
|
||||
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
|
||||
);
|
||||
errors
|
||||
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs));
|
||||
None
|
||||
}
|
||||
GenericParam::Type(TypeParam { attrs, ident, .. })
|
||||
| GenericParam::Const(ConstParam { attrs, ident, .. }) => {
|
||||
if errors
|
||||
.unwrap_or_default(
|
||||
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
|
||||
)
|
||||
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs))
|
||||
.is_some()
|
||||
{
|
||||
skipped_idents.insert(ident.clone());
|
||||
|
|
@ -228,7 +176,6 @@ impl ModuleFn {
|
|||
let struct_where_clause = generics
|
||||
.where_clause
|
||||
.as_mut()
|
||||
.filter(|_| matches!(config_options.kw, HdlOrHdlModule::HdlModule(_)))
|
||||
.map(|where_clause| WhereClause {
|
||||
where_token: where_clause.where_token,
|
||||
predicates: where_clause
|
||||
|
|
@ -251,26 +198,22 @@ impl ModuleFn {
|
|||
})
|
||||
.collect(),
|
||||
});
|
||||
let struct_generics = if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
|
||||
let mut struct_generics = Generics {
|
||||
lt_token: generics.lt_token,
|
||||
params: struct_generic_params,
|
||||
gt_token: generics.gt_token,
|
||||
where_clause: struct_where_clause,
|
||||
};
|
||||
if let Some(variadic) = variadic {
|
||||
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
|
||||
}
|
||||
if !matches!(output, ReturnType::Default) {
|
||||
errors.push(syn::Error::new_spanned(
|
||||
output,
|
||||
"return type not allowed here",
|
||||
));
|
||||
}
|
||||
errors.ok(ParsedGenerics::parse(&mut struct_generics))
|
||||
} else {
|
||||
Some(ParsedGenerics::default())
|
||||
let struct_generics = Generics {
|
||||
lt_token: generics.lt_token,
|
||||
params: struct_generic_params,
|
||||
gt_token: generics.gt_token,
|
||||
where_clause: struct_where_clause,
|
||||
};
|
||||
if let Some(variadic) = variadic {
|
||||
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
|
||||
}
|
||||
if !matches!(output, ReturnType::Default) {
|
||||
errors.push(syn::Error::new_spanned(
|
||||
output,
|
||||
"return type not allowed here",
|
||||
));
|
||||
}
|
||||
let struct_generics = errors.ok(ParsedGenerics::parse(&mut { struct_generics }));
|
||||
let body_results = struct_generics.as_ref().and_then(|struct_generics| {
|
||||
errors.ok(transform_body::transform_body(
|
||||
module_kind,
|
||||
|
|
@ -281,62 +224,6 @@ impl ModuleFn {
|
|||
errors.finish()?;
|
||||
let struct_generics = struct_generics.unwrap();
|
||||
let (block, io) = body_results.unwrap();
|
||||
let config_options = match config_options {
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: HdlOrHdlModule::Hdl((kw,)),
|
||||
paren_token,
|
||||
body,
|
||||
} => {
|
||||
debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty()));
|
||||
return Ok(Self(ModuleFnImpl::Fn {
|
||||
attrs,
|
||||
config_options: HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
paren_token,
|
||||
body,
|
||||
},
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
}));
|
||||
}
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: HdlOrHdlModule::HdlModule((kw,)),
|
||||
paren_token,
|
||||
body,
|
||||
} => HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
paren_token,
|
||||
body,
|
||||
},
|
||||
};
|
||||
let io = match io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io,
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
return Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind: module_kind.unwrap(),
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics: None,
|
||||
the_struct: TokenStream::new(),
|
||||
})));
|
||||
}
|
||||
};
|
||||
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
|
||||
|
|
@ -372,52 +259,31 @@ impl ModuleFn {
|
|||
}
|
||||
};
|
||||
let the_struct = crate::hdl_bundle::hdl_bundle(the_struct)?;
|
||||
Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind: module_kind.unwrap(),
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics: Some(struct_generics),
|
||||
the_struct,
|
||||
})))
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn generate(self) -> TokenStream {
|
||||
let ModuleFnModule {
|
||||
Ok(Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
mut block,
|
||||
block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = match self.0 {
|
||||
ModuleFnImpl::Module(v) => v,
|
||||
ModuleFnImpl::Fn {
|
||||
attrs,
|
||||
config_options,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
} => {
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_: _,
|
||||
} = config_options.body;
|
||||
return ItemFn {
|
||||
attrs,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
}
|
||||
.into_token_stream();
|
||||
}
|
||||
};
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn generate(self) -> TokenStream {
|
||||
let Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = self;
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_: _,
|
||||
|
|
@ -448,24 +314,13 @@ impl ModuleFn {
|
|||
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
|
||||
};
|
||||
let fn_name = &outer_sig.ident;
|
||||
let struct_ty = match struct_generics {
|
||||
Some(struct_generics) => {
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
quote! {#fn_name #struct_type_generics}
|
||||
}
|
||||
None => quote! {::fayalite::bundle::Bundle},
|
||||
};
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_ty = quote! {#fn_name #struct_type_generics};
|
||||
body_sig.ident = parse_quote! {__body};
|
||||
body_sig
|
||||
.inputs
|
||||
.insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder });
|
||||
block.stmts.insert(
|
||||
0,
|
||||
parse_quote! {
|
||||
let _ = m;
|
||||
},
|
||||
);
|
||||
let body_fn = ItemFn {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
|
|
@ -477,21 +332,12 @@ impl ModuleFn {
|
|||
let fn_name_str = fn_name.to_string();
|
||||
let (_, body_type_generics, _) = body_fn.sig.generics.split_for_impl();
|
||||
let body_turbofish_type_generics = body_type_generics.as_turbofish();
|
||||
let body_lambda = if param_names.is_empty() {
|
||||
quote! {
|
||||
__body #body_turbofish_type_generics
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*)
|
||||
}
|
||||
};
|
||||
let block = parse_quote! {{
|
||||
#body_fn
|
||||
::fayalite::module::ModuleBuilder::run(
|
||||
#fn_name_str,
|
||||
#module_kind_value,
|
||||
#body_lambda,
|
||||
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*),
|
||||
)
|
||||
}};
|
||||
let outer_fn = ItemFn {
|
||||
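To make the generated shape concrete: the block assembled above moves the user's body into an inner __body function that takes the implicit builder and forwards the outer function's parameters through a closure handed to the runner (ModuleBuilder::run in the real code). A self-contained sketch of that shape with a stand-in runner; all names below are illustrative, not actual macro output:

    fn module_runner<R>(name: &str, body: impl FnOnce(&str) -> R) -> R {
        // Stand-in for ::fayalite::module::ModuleBuilder::run.
        body(name)
    }

    pub fn blinky(period: u64) -> u64 {
        fn __body(m: &str, period: u64) -> u64 {
            let _ = m; // the generated code inserts `let _ = m;` the same way
            period
        }
        module_runner("blinky", |m| __body(m, period))
    }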
|
|
|
|||
|
|
@ -1,49 +1,39 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr,
|
||||
fold::{DoFold, impl_fold},
|
||||
fold::{impl_fold, DoFold},
|
||||
hdl_type_common::{
|
||||
ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, known_items,
|
||||
known_items, ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser,
|
||||
},
|
||||
is_hdl_attr, kw,
|
||||
module::{ModuleIO, ModuleIOKind, ModuleKind, check_name_conflicts_with_module_builder},
|
||||
options,
|
||||
module::{check_name_conflicts_with_module_builder, ModuleIO, ModuleIOKind, ModuleKind},
|
||||
options, Errors, HdlAttr,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, quote, quote_spanned};
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use std::{borrow::Borrow, convert::Infallible};
|
||||
use syn::{
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
fold::{Fold, fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt},
|
||||
fold::{fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt, Fold},
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream},
|
||||
parse::{Nothing, Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
spanned::Spanned,
|
||||
token::Paren,
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
};
|
||||
|
||||
mod expand_aggregate_literals;
|
||||
mod expand_match;
|
||||
|
||||
options! {
|
||||
#[options = ExprOptions]
|
||||
pub(crate) enum ExprOption {
|
||||
Sim(sim),
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum LetFnKind {
|
||||
Input(input),
|
||||
Output(output),
|
||||
AddPlatformIO(add_platform_io),
|
||||
Instance(instance),
|
||||
RegBuilder(reg_builder),
|
||||
Wire(wire),
|
||||
IncompleteWire(incomplete_wire),
|
||||
Memory(memory),
|
||||
MemoryArray(memory_array),
|
||||
MemoryWithInit(memory_with_init),
|
||||
|
|
@ -217,49 +207,6 @@ impl HdlLetKindToTokens for HdlLetKindInstance {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
pub(crate) struct HdlLetKindAddPlatformIO {
    pub(crate) m: kw::m,
    pub(crate) dot_token: Token![.],
    pub(crate) add_platform_io: kw::add_platform_io,
    pub(crate) paren: Paren,
    pub(crate) platform_io_builder: Box<Expr>,
}

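This kind models a let initializer of the form m.add_platform_io(...). A quick stand-in with stock syn showing the shape being matched (demo only; the real parser splits the pieces itself instead of going through ExprMethodCall):

    use syn::{parse_quote, Expr, ExprMethodCall};

    fn demo() {
        let init: Expr = parse_quote!(m.add_platform_io(platform_io_builder));
        if let Expr::MethodCall(ExprMethodCall { receiver, method, args, .. }) = init {
            assert!(matches!(*receiver, Expr::Path(_))); // the implicit `m`
            assert_eq!(method, "add_platform_io");
            assert_eq!(args.len(), 1); // the platform IO builder expression
        }
    }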
impl ParseTypes<Self> for HdlLetKindAddPlatformIO {
|
||||
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct HdlLetKindAddPlatformIO<> {
|
||||
m: kw::m,
|
||||
dot_token: Token![.],
|
||||
add_platform_io: kw::add_platform_io,
|
||||
paren: Paren,
|
||||
platform_io_builder: Box<Expr>,
|
||||
}
|
||||
}
|
||||
|
||||
impl HdlLetKindToTokens for HdlLetKindAddPlatformIO {
|
||||
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
|
||||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
} = self;
|
||||
m.to_tokens(tokens);
|
||||
dot_token.to_tokens(tokens);
|
||||
add_platform_io.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct RegBuilderClockDomain {
|
||||
pub(crate) dot_token: Token![.],
|
||||
|
|
@ -317,6 +264,11 @@ pub(crate) enum RegBuilderReset {
|
|||
paren: Paren,
|
||||
init_expr: Box<Expr>,
|
||||
},
|
||||
ResetDefault {
|
||||
dot_token: Token![.],
|
||||
reset_default: kw::reset_default,
|
||||
paren: Paren,
|
||||
},
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
|
|
@ -333,6 +285,11 @@ impl_fold! {
|
|||
paren: Paren,
|
||||
init_expr: Box<Expr>,
|
||||
},
|
||||
ResetDefault {
|
||||
dot_token: Token![.],
|
||||
reset_default: kw::reset_default,
|
||||
paren: Paren,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -354,6 +311,11 @@ impl Parse for RegBuilderReset {
|
|||
paren: parenthesized!(paren_contents in input),
|
||||
init_expr: paren_contents.call(parse_single_fn_arg)?,
|
||||
}),
|
||||
RegBuilderMethod::ResetDefault(reset_default) => Ok(Self::ResetDefault {
|
||||
dot_token,
|
||||
reset_default,
|
||||
paren: parenthesized!(paren_contents in input),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -381,6 +343,15 @@ impl ToTokens for RegBuilderReset {
|
|||
reset.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| init_expr.to_tokens(tokens));
|
||||
}
|
||||
RegBuilderReset::ResetDefault {
|
||||
dot_token,
|
||||
reset_default,
|
||||
paren,
|
||||
} => {
|
||||
dot_token.to_tokens(tokens);
|
||||
reset_default.to_tokens(tokens);
|
||||
paren.surround(tokens, |_| {});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -429,6 +400,8 @@ make_builder_method_enum! {
|
|||
NoReset(no_reset),
|
||||
#[cond = need_reset]
|
||||
Reset(reset),
|
||||
#[cond = need_reset]
|
||||
ResetDefault(reset_default),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -471,13 +444,17 @@ impl HdlLetKindRegBuilder {
|
|||
let mut clock_domain = None;
|
||||
match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, true)?.1 {
|
||||
RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?),
|
||||
RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => {}
|
||||
RegBuilderMethod::NoReset(_)
|
||||
| RegBuilderMethod::Reset(_)
|
||||
| RegBuilderMethod::ResetDefault(_) => {}
|
||||
}
|
||||
let reset = input.parse()?;
|
||||
if clock_domain.is_none() {
|
||||
match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, false)?.1 {
|
||||
RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?),
|
||||
RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => unreachable!(),
|
||||
RegBuilderMethod::NoReset(_)
|
||||
| RegBuilderMethod::Reset(_)
|
||||
| RegBuilderMethod::ResetDefault(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
|
|
@ -556,41 +533,6 @@ impl HdlLetKindToTokens for HdlLetKindWire {
|
|||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum LetFnKindIncomplete {
|
||||
IncompleteWire(incomplete_wire),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlLetKindIncomplete {
|
||||
pub(crate) kind: LetFnKindIncomplete,
|
||||
pub(crate) paren: Paren,
|
||||
}
|
||||
|
||||
impl ParseTypes<Self> for HdlLetKindIncomplete {
|
||||
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct HdlLetKindIncomplete<> {
|
||||
kind: LetFnKindIncomplete,
|
||||
paren: Paren,
|
||||
}
|
||||
}
|
||||
|
||||
impl HdlLetKindToTokens for HdlLetKindIncomplete {
|
||||
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
|
||||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { kind, paren } = self;
|
||||
kind.to_tokens(tokens);
|
||||
paren.surround(tokens, |_| {});
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum MemoryFnName {
|
||||
Memory(memory),
|
||||
|
|
@ -755,8 +697,6 @@ impl HdlLetKindMemory {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
Wire(HdlLetKindWire),
|
||||
|
|
@ -766,8 +706,6 @@ pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
|||
impl_fold! {
|
||||
enum HdlLetKind<IOType,> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
Wire(HdlLetKindWire),
|
||||
|
|
@ -782,12 +720,6 @@ impl<T: ParseTypes<I>, I> ParseTypes<HdlLetKind<I>> for HdlLetKind<T> {
|
|||
) -> Result<Self, ParseFailed> {
|
||||
match input {
|
||||
HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO),
|
||||
HdlLetKind::AddPlatformIO(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO)
|
||||
}
|
||||
HdlLetKind::Incomplete(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete)
|
||||
}
|
||||
HdlLetKind::Instance(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Instance)
|
||||
}
|
||||
|
|
@ -910,23 +842,6 @@ impl HdlLetKindParse for HdlLetKind<Type> {
|
|||
ModuleIOKind::Output(output),
|
||||
)
|
||||
.map(Self::IO),
|
||||
LetFnKind::AddPlatformIO((add_platform_io,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
parsed_ty.1,
|
||||
"type annotation not allowed for instance",
|
||||
));
|
||||
}
|
||||
let (m, dot_token) = unwrap_m_dot(m_dot, kind)?;
|
||||
let paren_contents;
|
||||
Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren: parenthesized!(paren_contents in input),
|
||||
platform_io_builder: paren_contents.call(parse_single_fn_arg)?,
|
||||
}))
|
||||
}
|
||||
LetFnKind::Instance((instance,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
|
|
@ -956,20 +871,6 @@ impl HdlLetKindParse for HdlLetKind<Type> {
|
|||
ty_expr: paren_contents.call(parse_optional_fn_arg)?,
|
||||
}))
|
||||
}
|
||||
LetFnKind::IncompleteWire(incomplete_wire) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
parsed_ty.1,
|
||||
"type annotation not allowed for incomplete_wire",
|
||||
));
|
||||
}
|
||||
check_empty_m_dot(m_dot, kind)?;
|
||||
let _paren_contents;
|
||||
Ok(Self::Incomplete(HdlLetKindIncomplete {
|
||||
kind: LetFnKindIncomplete::IncompleteWire(incomplete_wire),
|
||||
paren: parenthesized!(_paren_contents in input),
|
||||
}))
|
||||
}
|
||||
LetFnKind::Memory(fn_name) => HdlLetKindMemory::rest_of_parse(
|
||||
input,
|
||||
parsed_ty,
|
||||
|
|
@ -1002,8 +903,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn ty_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Wire(v) => v.ty_to_tokens(tokens),
|
||||
|
|
@ -1014,8 +913,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Wire(v) => v.expr_to_tokens(tokens),
|
||||
|
|
@ -1028,7 +925,7 @@ with_debug_clone_and_fold! {
|
|||
#[allow(dead_code)]
|
||||
pub(crate) struct HdlLet<Kind = HdlLetKind> {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) hdl_attr: HdlAttr<syn::parse::Nothing, kw::hdl>,
|
||||
pub(crate) hdl_attr: HdlAttr<Nothing>,
|
||||
pub(crate) let_token: Token![let],
|
||||
pub(crate) mut_token: Option<Token![mut]>,
|
||||
pub(crate) name: Ident,
|
||||
|
|
@ -1185,7 +1082,7 @@ fn parse_quote_let_pat<T, R: ToTokens, C: Borrow<Token![:]>>(
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
parse_quote_spanned! {ty.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
}
|
||||
|
|
@ -1215,41 +1112,30 @@ impl<T: ToString> ToTokens for ImplicitName<T> {
|
|||
}
|
||||
|
||||
struct Visitor<'a> {
|
||||
module_kind: Option<ModuleKind>,
|
||||
module_kind: ModuleKind,
|
||||
errors: Errors,
|
||||
io: ModuleIOOrAddPlatformIO,
|
||||
io: Vec<ModuleIO>,
|
||||
block_depth: usize,
|
||||
parsed_generics: &'a ParsedGenerics,
|
||||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
fn take_hdl_attr<T: Parse>(
|
||||
&mut self,
|
||||
attrs: &mut Vec<Attribute>,
|
||||
) -> Option<HdlAttr<T, kw::hdl>> {
|
||||
fn take_hdl_attr<T: Parse>(&mut self, attrs: &mut Vec<Attribute>) -> Option<HdlAttr<T>> {
|
||||
self.errors.unwrap_or(
|
||||
HdlAttr::parse_and_take_attr(attrs),
|
||||
Some(syn::parse2::<T>(quote! {}).unwrap().into()),
|
||||
)
|
||||
}
|
||||
fn require_normal_module_or_fn(&mut self, spanned: impl ToTokens) {
|
||||
fn require_normal_module(&mut self, spanned: impl ToTokens) {
|
||||
match self.module_kind {
|
||||
Some(ModuleKind::Extern) => {
|
||||
ModuleKind::Extern => {
|
||||
self.errors
|
||||
.error(spanned, "not allowed in #[hdl_module(extern)]");
|
||||
}
|
||||
Some(ModuleKind::Normal) | None => {}
|
||||
ModuleKind::Normal => {}
|
||||
}
|
||||
}
|
||||
fn require_module(&mut self, spanned: impl ToTokens) {
|
||||
match self.module_kind {
|
||||
None => {
|
||||
self.errors.error(spanned, "not allowed in #[hdl] fn");
|
||||
}
|
||||
Some(_) => {}
|
||||
}
|
||||
}
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<ExprOptions, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<Nothing>, expr_if: ExprIf) -> Expr {
|
||||
let ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
|
|
@ -1257,10 +1143,10 @@ impl Visitor<'_> {
|
|||
then_branch,
|
||||
else_branch,
|
||||
} = expr_if;
|
||||
let (else_token, else_expr) = else_branch.unzip();
|
||||
let else_expr = else_expr.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => Box::new(self.process_hdl_if(hdl_attr.clone(), expr_if)),
|
||||
_ => else_expr,
|
||||
self.require_normal_module(if_token);
|
||||
let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if),
|
||||
expr => expr,
|
||||
});
|
||||
if let Expr::Let(ExprLet {
|
||||
attrs: let_attrs,
|
||||
|
|
@ -1282,19 +1168,7 @@ impl Visitor<'_> {
|
|||
},
|
||||
);
|
||||
}
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
cond: parse_quote_spanned! {if_token.span=>
|
||||
*::fayalite::sim::value::SimValue::<::fayalite::int::Bool>::value(&::fayalite::sim::value::ToSimValue::into_sim_value(#cond))
|
||||
},
|
||||
then_branch,
|
||||
else_branch: else_token.zip(else_expr),
|
||||
}
|
||||
.into()
|
||||
} else if let Some(else_expr) = else_expr {
|
||||
if let Some(else_expr) = else_expr {
|
||||
parse_quote_spanned! {if_token.span=>
|
||||
#(#attrs)*
|
||||
{
|
||||
|
|
@ -1334,12 +1208,11 @@ impl Visitor<'_> {
|
|||
.to_tokens(expr);
|
||||
});
|
||||
let mut attrs = hdl_let.attrs.clone();
|
||||
self.require_module(kind);
|
||||
match self.module_kind {
|
||||
Some(ModuleKind::Extern) => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=>
|
||||
ModuleKind::Extern => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=>
|
||||
#[allow(unused_variables)]
|
||||
}),
|
||||
Some(ModuleKind::Normal) | None => {}
|
||||
ModuleKind::Normal => {}
|
||||
}
|
||||
let let_stmt = Local {
|
||||
attrs,
|
||||
|
|
@ -1357,81 +1230,7 @@ impl Visitor<'_> {
|
|||
}),
|
||||
semi_token: hdl_let.semi_token,
|
||||
};
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let),
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_add_platform_io(
|
||||
&mut self,
|
||||
hdl_let: HdlLet<HdlLetKindAddPlatformIO>,
|
||||
) -> Local {
|
||||
let HdlLet {
|
||||
mut attrs,
|
||||
hdl_attr: _,
|
||||
let_token,
|
||||
mut_token,
|
||||
ref name,
|
||||
eq_token,
|
||||
kind:
|
||||
HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
},
|
||||
semi_token,
|
||||
} = hdl_let;
|
||||
let mut expr = quote! {#m #dot_token #add_platform_io};
|
||||
paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
name,
|
||||
span: name.span(),
|
||||
};
|
||||
quote_spanned! {name.span()=>
|
||||
#name_str, #platform_io_builder
|
||||
}
|
||||
.to_tokens(expr);
|
||||
});
|
||||
self.require_module(add_platform_io);
|
||||
attrs.push(parse_quote_spanned! {let_token.span=>
|
||||
#[allow(unused_variables)]
|
||||
});
|
||||
let let_stmt = Local {
|
||||
attrs,
|
||||
let_token,
|
||||
pat: parse_quote! { #mut_token #name },
|
||||
init: Some(LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote! { #expr },
|
||||
diverge: None,
|
||||
}),
|
||||
semi_token,
|
||||
};
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => {
|
||||
for io in io {
|
||||
self.errors.error(
|
||||
io.kind.kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
add_platform_io,
|
||||
"can't use m.add_platform_io() more than once in a single module",
|
||||
);
|
||||
}
|
||||
}
|
||||
self.io = ModuleIOOrAddPlatformIO::AddPlatformIO;
|
||||
self.io.push(hdl_let);
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_instance(&mut self, hdl_let: HdlLet<HdlLetKindInstance>) -> Local {
|
||||
|
|
@ -1450,7 +1249,7 @@ impl Visitor<'_> {
|
|||
},
|
||||
semi_token,
|
||||
} = hdl_let;
|
||||
self.require_normal_module_or_fn(instance);
|
||||
self.require_normal_module(instance);
|
||||
let mut expr = instance.to_token_stream();
|
||||
paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
|
|
@ -1477,7 +1276,7 @@ impl Visitor<'_> {
|
|||
fn process_hdl_let_reg_builder(&mut self, hdl_let: HdlLet<HdlLetKindRegBuilder>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let reg_builder = hdl_let.kind.reg_builder;
|
||||
self.require_normal_module_or_fn(reg_builder);
|
||||
self.require_normal_module(reg_builder);
|
||||
let mut expr = reg_builder.to_token_stream();
|
||||
hdl_let.kind.reg_builder_paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
|
|
@ -1502,7 +1301,7 @@ impl Visitor<'_> {
|
|||
no_reset.to_tokens(&mut expr);
|
||||
paren.surround(&mut expr, |expr| ty_expr.to_tokens(expr));
|
||||
}
|
||||
RegBuilderReset::Reset { .. } => {
|
||||
RegBuilderReset::Reset { .. } | RegBuilderReset::ResetDefault { .. } => {
|
||||
hdl_let.kind.reset.to_tokens(&mut expr);
|
||||
}
|
||||
}
|
||||
|
|
@ -1528,7 +1327,7 @@ impl Visitor<'_> {
|
|||
fn process_hdl_let_wire(&mut self, hdl_let: HdlLet<HdlLetKindWire>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let wire = hdl_let.kind.wire;
|
||||
self.require_normal_module_or_fn(wire);
|
||||
self.require_normal_module(wire);
|
||||
let ty_expr = unwrap_or_static_type(hdl_let.kind.ty_expr.as_ref(), wire.span());
|
||||
let mut expr = wire.to_token_stream();
|
||||
hdl_let.kind.paren.surround(&mut expr, |expr| {
|
||||
|
|
@ -1558,36 +1357,11 @@ impl Visitor<'_> {
|
|||
semi_token: hdl_let.semi_token,
|
||||
}
|
||||
}
|
||||
fn process_hdl_let_incomplete(&mut self, hdl_let: HdlLet<HdlLetKindIncomplete>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let kind = hdl_let.kind.kind;
|
||||
self.require_normal_module_or_fn(kind);
|
||||
let mut expr = kind.to_token_stream();
|
||||
hdl_let.kind.paren.surround(&mut expr, |expr| {
|
||||
ImplicitName {
|
||||
name,
|
||||
span: name.span(),
|
||||
}
|
||||
.to_tokens(expr);
|
||||
});
|
||||
let mut_token = &hdl_let.mut_token;
|
||||
Local {
|
||||
attrs: hdl_let.attrs.clone(),
|
||||
let_token: hdl_let.let_token,
|
||||
pat: parse_quote! { #mut_token #name },
|
||||
init: Some(LocalInit {
|
||||
eq_token: hdl_let.eq_token,
|
||||
expr: parse_quote! { #expr },
|
||||
diverge: None,
|
||||
}),
|
||||
semi_token: hdl_let.semi_token,
|
||||
}
|
||||
}
|
||||
fn process_hdl_let_memory(&mut self, hdl_let: HdlLet<HdlLetKindMemory>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let memory_fn = hdl_let.kind.memory_fn;
|
||||
let memory_fn_name = memory_fn.name();
|
||||
self.require_normal_module_or_fn(memory_fn_name);
|
||||
self.require_normal_module(memory_fn_name);
|
||||
let mut expr = memory_fn_name.to_token_stream();
|
||||
let (paren, arg) = match memory_fn {
|
||||
MemoryFn::Memory {
|
||||
|
|
@ -1652,8 +1426,6 @@ impl Visitor<'_> {
|
|||
}
|
||||
the_match! {
|
||||
IO => process_hdl_let_io,
|
||||
AddPlatformIO => process_hdl_let_add_platform_io,
|
||||
Incomplete => process_hdl_let_incomplete,
|
||||
Instance => process_hdl_let_instance,
|
||||
RegBuilder => process_hdl_let_reg_builder,
|
||||
Wire => process_hdl_let_wire,
|
||||
|
|
@ -1749,7 +1521,7 @@ impl Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn empty_let() -> Local {
|
||||
fn empty_let() -> Local {
|
||||
Local {
|
||||
attrs: vec![],
|
||||
let_token: Default::default(),
|
||||
|
|
@ -1771,7 +1543,7 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
|
||||
fn fold_attribute(&mut self, attr: Attribute) -> Attribute {
|
||||
if is_hdl_attr::<kw::hdl>(&attr) {
|
||||
if is_hdl_attr(&attr) {
|
||||
self.errors
|
||||
.error(&attr, "#[hdl] attribute not supported here");
|
||||
}
|
||||
|
|
@ -1831,42 +1603,19 @@ impl Fold for Visitor<'_> {
|
|||
Repeat => process_hdl_repeat,
|
||||
Struct => process_hdl_struct,
|
||||
Tuple => process_hdl_tuple,
|
||||
MethodCall => process_hdl_method_call,
|
||||
Call => process_hdl_call,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
fn fold_local(&mut self, let_stmt: Local) -> Local {
|
||||
match self
|
||||
.errors
|
||||
.ok(HdlAttr::<ExprOptions, kw::hdl>::parse_and_leave_attr(
|
||||
&let_stmt.attrs,
|
||||
)) {
|
||||
.ok(HdlAttr::<Nothing>::parse_and_leave_attr(&let_stmt.attrs))
|
||||
{
|
||||
None => return empty_let(),
|
||||
Some(None) => return fold_local(self, let_stmt),
|
||||
Some(Some(HdlAttr { .. })) => {}
|
||||
};
|
||||
let mut pat = &let_stmt.pat;
|
||||
if let Pat::Type(pat_type) = pat {
|
||||
pat = &pat_type.pat;
|
||||
}
|
||||
let Pat::Ident(syn::PatIdent {
|
||||
attrs: _,
|
||||
by_ref: None,
|
||||
mutability: _,
|
||||
ident: _,
|
||||
subpat: None,
|
||||
}) = pat
|
||||
else {
|
||||
let hdl_attr =
|
||||
HdlAttr::<ExprOptions, kw::hdl>::parse_and_take_attr(&mut let_stmt.attrs)
|
||||
.ok()
|
||||
.flatten()
|
||||
.expect("already checked above");
|
||||
let let_stmt = fold_local(self, let_stmt);
|
||||
return self.process_hdl_let_pat(hdl_attr, let_stmt);
|
||||
};
|
||||
let hdl_let = syn::parse2::<HdlLet<HdlLetKind<Type>>>(let_stmt.into_token_stream());
|
||||
let Some(hdl_let) = self.errors.ok(hdl_let) else {
|
||||
return empty_let();
|
||||
|
|
@ -1896,20 +1645,15 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) enum ModuleIOOrAddPlatformIO {
|
||||
ModuleIO(Vec<ModuleIO>),
|
||||
AddPlatformIO,
|
||||
}
|
||||
|
||||
pub(crate) fn transform_body(
|
||||
module_kind: Option<ModuleKind>,
|
||||
module_kind: ModuleKind,
|
||||
mut body: Box<Block>,
|
||||
parsed_generics: &ParsedGenerics,
|
||||
) -> syn::Result<(Box<Block>, ModuleIOOrAddPlatformIO)> {
|
||||
) -> syn::Result<(Box<Block>, Vec<ModuleIO>)> {
|
||||
let mut visitor = Visitor {
|
||||
module_kind,
|
||||
errors: Errors::new(),
|
||||
io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]),
|
||||
io: vec![],
|
||||
block_depth: 0,
|
||||
parsed_generics,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,102 +1,45 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
HdlAttr, kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor,
|
||||
expand_match::{EnumPath, parse_enum_path},
|
||||
},
|
||||
};
|
||||
use crate::{module::transform_body::Visitor, HdlAttr};
|
||||
use quote::{format_ident, quote_spanned};
|
||||
use std::mem;
|
||||
use syn::{
|
||||
Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat,
|
||||
ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned,
|
||||
punctuated::Punctuated, spanned::Spanned, token::Paren,
|
||||
parse::Nothing, parse_quote, parse_quote_spanned, spanned::Spanned, Expr, ExprArray, ExprPath,
|
||||
ExprRepeat, ExprStruct, ExprTuple, FieldValue, TypePath,
|
||||
};
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_array(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
mut expr_array: ExprArray,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array)
|
||||
}
|
||||
} else {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_array)
|
||||
}
|
||||
self.require_normal_module(hdl_attr);
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)}
|
||||
}
|
||||
pub(crate) fn process_hdl_repeat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
mut expr_repeat: ExprRepeat,
|
||||
) -> Expr {
|
||||
self.require_normal_module(hdl_attr);
|
||||
let repeated_value = &expr_repeat.expr;
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat)
|
||||
}
|
||||
} else {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_repeat)
|
||||
}
|
||||
}
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_repeat)}
|
||||
}
|
||||
pub(crate) fn process_hdl_struct(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_struct: ExprStruct,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_struct: ExprStruct,
|
||||
) -> Expr {
|
||||
self.require_normal_module(&hdl_attr);
|
||||
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
let ty_path = TypePath {
|
||||
qself: expr_struct.qself.take(),
|
||||
path: expr_struct.path,
|
||||
};
|
||||
expr_struct.path = parse_quote_spanned! {name_span=>
|
||||
__SimValue::<#ty_path>
|
||||
};
|
||||
for field in &mut expr_struct.fields {
|
||||
let expr = &field.expr;
|
||||
field.expr = parse_quote_spanned! {field.member.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr))
|
||||
};
|
||||
}
|
||||
return parse_quote_spanned! {name_span=>
|
||||
{
|
||||
type __SimValue<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct);
|
||||
value
|
||||
}
|
||||
};
|
||||
}
|
||||
let builder_ident = format_ident!("__builder", span = name_span);
|
||||
let empty_builder = if expr_struct.qself.is_some()
|
||||
|| expr_struct
|
||||
|
|
@ -148,126 +91,12 @@ impl Visitor<'_> {
|
|||
}
|
||||
pub(crate) fn process_hdl_tuple(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_tuple: ExprTuple,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_tuple: ExprTuple,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
for element in &mut expr_tuple.elems {
|
||||
*element = parse_quote_spanned! {element.span()=>
|
||||
&(#element)
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple)
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
self.require_normal_module(hdl_attr);
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_call: ExprCall,
|
||||
) -> Expr {
|
||||
let span = hdl_attr.kw.span;
|
||||
let mut func = &mut *expr_call.func;
|
||||
let EnumPath {
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
} = loop {
|
||||
match func {
|
||||
Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => {
|
||||
func = &mut **expr;
|
||||
}
|
||||
Expr::Path(_) => {
|
||||
let Expr::Path(ExprPath { attrs, qself, path }) =
|
||||
mem::replace(func, Expr::PLACEHOLDER)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
match parse_enum_path(TypePath { qself, path }) {
|
||||
Ok(path) => break path,
|
||||
Err(path) => {
|
||||
self.errors.error(&path, "unsupported enum variant path");
|
||||
let TypePath { qself, path } = path;
|
||||
*func = ExprPath { attrs, qself, path }.into();
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.errors.error(
|
||||
&expr_call.func,
|
||||
"#[hdl] function call -- function must be a possibly-parenthesized path",
|
||||
);
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
};
|
||||
self.process_hdl_method_call(
|
||||
hdl_attr,
|
||||
ExprMethodCall {
|
||||
attrs: expr_call.attrs,
|
||||
receiver: parse_quote_spanned! {span=>
|
||||
<#enum_path as ::fayalite::ty::StaticType>::TYPE
|
||||
},
|
||||
dot_token: Token![.](span),
|
||||
method: variant_name,
|
||||
turbofish: None,
|
||||
paren_token: expr_call.paren_token,
|
||||
args: expr_call.args,
|
||||
},
|
||||
)
|
||||
}
|
||||
pub(crate) fn process_hdl_method_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_method_call: ExprMethodCall,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
// remove any number of groups and up to one paren
|
||||
let mut receiver = &mut *expr_method_call.receiver;
|
||||
let mut has_group = false;
|
||||
let receiver = loop {
|
||||
match receiver {
|
||||
Expr::Group(ExprGroup { expr, .. }) => {
|
||||
has_group = true;
|
||||
receiver = expr;
|
||||
}
|
||||
Expr::Paren(ExprParen { expr, .. }) => break &mut **expr,
|
||||
receiver @ Expr::Path(_) => break receiver,
|
||||
_ => {
|
||||
if !has_group {
|
||||
self.errors.error(
|
||||
&expr_method_call.receiver,
|
||||
"#[hdl] on a method call needs parenthesized receiver",
|
||||
);
|
||||
}
|
||||
break &mut *expr_method_call.receiver;
|
||||
}
|
||||
}
|
||||
};
|
||||
let func = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::enum_type_to_sim_builder
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::assert_is_enum_type
|
||||
}
|
||||
};
|
||||
*expr_method_call.receiver = ExprCall {
|
||||
attrs: vec![],
|
||||
func,
|
||||
paren_token: Paren(span),
|
||||
args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]),
|
||||
}
|
||||
.into();
|
||||
expr_method_call.into()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,121 +1,23 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
fold::{impl_fold, DoFold},
|
||||
module::transform_body::{with_debug_clone_and_fold, Visitor},
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
fold::{DoFold, impl_fold},
|
||||
kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor, empty_let, with_debug_clone_and_fold, wrap_ty_with_expr,
|
||||
},
|
||||
};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, TokenStreamExt, format_ident, quote_spanned};
|
||||
use std::collections::BTreeSet;
|
||||
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use syn::{
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr,
|
||||
PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment,
|
||||
Token, TypePath,
|
||||
fold::{Fold, fold_arm, fold_expr_match, fold_local, fold_pat},
|
||||
fold::{fold_arm, fold_expr_match, fold_pat, Fold},
|
||||
parse::Nothing,
|
||||
parse_quote_spanned,
|
||||
punctuated::Punctuated,
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Member, Pat, PatIdent, PatOr, PatParen,
|
||||
PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, PathSegment, Token, TypePath,
|
||||
};
|
||||
|
||||
macro_rules! visit_trait {
|
||||
(
|
||||
$($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)*
|
||||
) => {
|
||||
trait VisitMatchPat<'a> {
|
||||
$(fn $fn(&mut self, $value: &'a $Value) {
|
||||
$fn(self, $value);
|
||||
})*
|
||||
}
|
||||
|
||||
$($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)*
|
||||
};
|
||||
}
|
||||
|
||||
visit_trait! {
|
||||
fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) {
|
||||
let MatchPatBinding { ident: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_wild(_state: _, v: &MatchPatWild) {
|
||||
let MatchPatWild { underscore_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_rest(_state: _, v: &MatchPatRest) {
|
||||
let MatchPatRest { dot2_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_paren(state: _, v: &MatchPatParen<MatchPat>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat(pat);
|
||||
}
|
||||
fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen<MatchPatSimple>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_or(state: _, v: &MatchPatOr<MatchPat>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_or_simple(state: _, v: &MatchPatOr<MatchPatSimple>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) {
|
||||
let MatchPatStructField { field_name: _, colon_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_struct(state: _, v: &MatchPatStruct) {
|
||||
let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_struct_field(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) {
|
||||
let MatchPatTuple { paren_token: _, fields } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) {
|
||||
let MatchPatEnumVariant {
|
||||
match_span:_,
|
||||
sim:_,
|
||||
variant_path: _,
|
||||
enum_path: _,
|
||||
variant_name: _,
|
||||
field,
|
||||
} = v;
|
||||
if let Some((_, v)) = field {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_simple(state: _, v: &MatchPatSimple) {
|
||||
match v {
|
||||
MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v),
|
||||
MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v),
|
||||
MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v),
|
||||
MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v),
|
||||
MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v),
|
||||
}
|
||||
}
|
||||
fn visit_match_pat(state: _, v: &MatchPat) {
|
||||
match v {
|
||||
MatchPat::Simple(v) => state.visit_match_pat_simple(v),
|
||||
MatchPat::Or(v) => state.visit_match_pat_or(v),
|
||||
MatchPat::Paren(v) => state.visit_match_pat_paren(v),
|
||||
MatchPat::Struct(v) => state.visit_match_pat_struct(v),
|
||||
MatchPat::Tuple(v) => state.visit_match_pat_tuple(v),
|
||||
MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatBinding<> {
|
||||
ident: Ident,
|
||||
|
|
@ -150,15 +52,6 @@ with_debug_clone_and_fold! {
|
|||
}
|
||||
}
|
||||
|
||||
impl<P> MatchPatOr<P> {
|
||||
/// returns the first `|` between two patterns
|
||||
fn first_inner_vert(&self) -> Option<Token![|]> {
|
||||
let mut pairs = self.cases.pairs();
|
||||
pairs.next_back();
|
||||
pairs.next().and_then(|v| v.into_tuple().1.copied())
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatOr<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
|
|
@ -183,19 +76,6 @@ impl ToTokens for MatchPatWild {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatRest<> {
|
||||
dot2_token: Token![..],
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { dot2_token } = self;
|
||||
dot2_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStructField<> {
|
||||
field_name: Ident,
|
||||
|
|
@ -278,29 +158,9 @@ impl ToTokens for MatchPatStruct {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatTuple<> {
|
||||
paren_token: Paren,
|
||||
fields: Punctuated<MatchPatSimple, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
paren_token,
|
||||
fields,
|
||||
} = self;
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
fields.to_tokens(tokens);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatEnumVariant<> {
|
||||
match_span: Span,
|
||||
sim: Option<(kw::sim,)>,
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
|
|
@ -312,7 +172,6 @@ impl ToTokens for MatchPatEnumVariant {
|
|||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
match_span,
|
||||
sim,
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -322,28 +181,7 @@ impl ToTokens for MatchPatEnumVariant {
|
|||
__MatchTy::<#enum_path>::#variant_name
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if sim.is_some() {
|
||||
if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
field.to_tokens(tokens);
|
||||
match field {
|
||||
MatchPatSimple::Paren(_)
|
||||
| MatchPatSimple::Or(_)
|
||||
| MatchPatSimple::Binding(_)
|
||||
| MatchPatSimple::Wild(_) => quote_spanned! {*match_span=>
|
||||
, _
|
||||
}
|
||||
.to_tokens(tokens),
|
||||
MatchPatSimple::Rest(_) => {}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
quote_spanned! {*match_span=>
|
||||
(_)
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
} else if let Some((paren_token, field)) = field {
|
||||
if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| field.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
|
@ -355,7 +193,6 @@ enum MatchPatSimple {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
|
|
@ -364,7 +201,6 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -375,18 +211,17 @@ impl ToTokens for MatchPatSimple {
|
|||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Binding(v) => v.to_tokens(tokens),
|
||||
Self::Wild(v) => v.to_tokens(tokens),
|
||||
Self::Rest(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct EnumPath {
|
||||
pub(crate) variant_path: Path,
|
||||
pub(crate) enum_path: Path,
|
||||
pub(crate) variant_name: Ident,
|
||||
struct EnumPath {
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
}
|
||||
|
||||
pub(crate) fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
let TypePath {
|
||||
qself: None,
|
||||
path: variant_path,
|
||||
|
|
@ -442,9 +277,8 @@ trait ParseMatchPat: Sized {
|
|||
fn or(v: MatchPatOr<Self>) -> Self;
|
||||
fn paren(v: MatchPatParen<Self>) -> Self;
|
||||
fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result<Self, ()>;
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()>;
|
||||
fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant)
|
||||
-> Result<Self, ()>;
|
||||
-> Result<Self, ()>;
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result<Self, ()> {
|
||||
match pat {
|
||||
Pat::Ident(PatIdent {
|
||||
|
|
@ -478,7 +312,6 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -525,7 +358,6 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -610,7 +442,6 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -630,34 +461,7 @@ trait ParseMatchPat: Sized {
|
|||
}) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild {
|
||||
underscore_token,
|
||||
}))),
|
||||
Pat::Tuple(PatTuple {
|
||||
attrs: _,
|
||||
paren_token,
|
||||
elems,
|
||||
}) => {
|
||||
let fields = elems
|
||||
.into_pairs()
|
||||
.filter_map_pair_value(|field_pat| {
|
||||
if let Pat::Rest(PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
}) = field_pat
|
||||
{
|
||||
Some(MatchPatSimple::Rest(MatchPatRest { dot2_token }))
|
||||
} else {
|
||||
MatchPatSimple::parse(state, field_pat).ok()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Self::tuple(
|
||||
state,
|
||||
MatchPatTuple {
|
||||
paren_token,
|
||||
fields,
|
||||
},
|
||||
)
|
||||
}
|
||||
Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not yet implemented in #[hdl] patterns");
|
||||
|
|
@ -692,14 +496,6 @@ impl ParseMatchPat for MatchPatSimple {
|
|||
Err(())
|
||||
}
|
||||
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
state.errors.push(syn::Error::new(
|
||||
v.paren_token.span.open(),
|
||||
"matching tuples is not yet implemented inside structs/enums in #[hdl] patterns",
|
||||
));
|
||||
Err(())
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
|
@ -718,7 +514,6 @@ enum MatchPat {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
|
||||
|
|
@ -728,7 +523,6 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
}
|
||||
|
|
@ -750,10 +544,6 @@ impl ParseMatchPat for MatchPat {
|
|||
Ok(Self::Struct(v))
|
||||
}
|
||||
|
||||
fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
Ok(Self::Tuple(v))
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
_state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
|
@ -769,7 +559,6 @@ impl ToTokens for MatchPat {
|
|||
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Struct(v) => v.to_tokens(tokens),
|
||||
Self::Tuple(v) => v.to_tokens(tokens),
|
||||
Self::EnumVariant(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
|
|
@ -832,6 +621,10 @@ struct RewriteAsCheckMatch {
|
|||
}
|
||||
|
||||
impl Fold for RewriteAsCheckMatch {
|
||||
fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat {
|
||||
i.colon_token = Some(Token![:](i.member.span()));
|
||||
i
|
||||
}
|
||||
fn fold_pat(&mut self, pat: Pat) -> Pat {
|
||||
match pat {
|
||||
Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) {
|
||||
|
|
@ -946,177 +739,17 @@ impl Fold for RewriteAsCheckMatch {
|
|||
// don't recurse into expressions
|
||||
i
|
||||
}
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
if let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: _,
|
||||
diverge,
|
||||
}) = let_stmt.init.take()
|
||||
{
|
||||
let_stmt.init = Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote_spanned! {self.span=>
|
||||
__match_value
|
||||
},
|
||||
diverge: diverge.map(|(else_, _expr)| {
|
||||
(
|
||||
else_,
|
||||
parse_quote_spanned! {self.span=>
|
||||
match __infallible {}
|
||||
},
|
||||
)
|
||||
}),
|
||||
});
|
||||
}
|
||||
fold_local(self, let_stmt)
|
||||
}
|
||||
}
|
||||
|
||||
struct HdlMatchParseState<'a> {
|
||||
sim: Option<(kw::sim,)>,
|
||||
match_span: Span,
|
||||
errors: &'a mut Errors,
|
||||
}
|
||||
|
||||
struct HdlLetPatVisitState<'a> {
|
||||
errors: &'a mut Errors,
|
||||
bindings: BTreeSet<&'a Ident>,
|
||||
}
|
||||
|
||||
impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> {
|
||||
fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) {
|
||||
self.bindings.insert(&v.ident);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or(&mut self, v: &'a MatchPatOr<MatchPat>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr<MatchPatSimple>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or_simple(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) {
|
||||
self.errors.error(v, "refutable pattern in let statement");
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_let_pat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut let_stmt: Local,
|
||||
) -> Local {
|
||||
let span = let_stmt.let_token.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if let Pat::Type(pat) = &mut let_stmt.pat {
|
||||
*pat.ty = wrap_ty_with_expr((*pat.ty).clone());
|
||||
}
|
||||
let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone());
|
||||
let Local {
|
||||
attrs: _,
|
||||
let_token,
|
||||
pat,
|
||||
init,
|
||||
semi_token,
|
||||
} = let_stmt;
|
||||
let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr,
|
||||
diverge,
|
||||
}) = init
|
||||
else {
|
||||
self.errors
|
||||
.error(let_token, "#[hdl] let must be assigned a value");
|
||||
return empty_let();
|
||||
};
|
||||
if let Some((else_, _)) = diverge {
|
||||
// TODO: implement let-else
|
||||
self.errors
|
||||
.error(else_, "#[hdl] let ... else { ... } is not implemented");
|
||||
return empty_let();
|
||||
}
|
||||
let Ok(pat) = MatchPat::parse(
|
||||
&mut HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
},
|
||||
pat,
|
||||
) else {
|
||||
return empty_let();
|
||||
};
|
||||
let mut state = HdlLetPatVisitState {
|
||||
errors: &mut self.errors,
|
||||
bindings: BTreeSet::new(),
|
||||
};
|
||||
state.visit_match_pat(&pat);
|
||||
let HdlLetPatVisitState {
|
||||
errors: _,
|
||||
bindings,
|
||||
} = state;
|
||||
let retval = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)*) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_value = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#let_token #pat #eq_token ::fayalite::sim::value::SimValue::into_value(__match_value) #semi_token
|
||||
(#(#bindings,)*)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)* __scope,) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(
|
||||
__match_expr,
|
||||
|__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_let_stmt
|
||||
match __infallible {}
|
||||
},
|
||||
);
|
||||
let mut __match_iter = ::fayalite::module::match_(__match_expr);
|
||||
let ::fayalite::__std::option::Option::Some(__match_variant) =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with uninhabited type");
|
||||
};
|
||||
let ::fayalite::__std::option::Option::None =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with refutable pattern");
|
||||
};
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#let_token #pat #eq_token __match_variant #semi_token
|
||||
(#(#bindings,)* __scope,)
|
||||
};
|
||||
}
|
||||
};
|
||||
match retval {
|
||||
syn::Stmt::Local(retval) => retval,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_match(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
_hdl_attr: HdlAttr<Nothing>,
|
||||
expr_match: ExprMatch,
|
||||
) -> Expr {
|
||||
let span = expr_match.match_token.span();
|
||||
|
|
@ -1128,9 +761,8 @@ impl Visitor<'_> {
|
|||
brace_token: _,
|
||||
arms,
|
||||
} = expr_match;
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
self.require_normal_module(match_token);
|
||||
let mut state = HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
};
|
||||
|
|
@ -1138,36 +770,24 @@ impl Visitor<'_> {
|
|||
arms.into_iter()
|
||||
.filter_map(|arm| MatchArm::parse(&mut state, arm).ok()),
|
||||
);
|
||||
let expr = if sim.is_some() {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_expr = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#match_token ::fayalite::sim::value::SimValue::into_value(__match_expr) {
|
||||
let expr = quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
syn::parse2(expr).unwrap()
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -16,4 +16,4 @@ version.workspace = true
|
|||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
fayalite-proc-macros-impl.workspace = true
|
||||
fayalite-proc-macros-impl = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -13,11 +13,11 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
indexmap.workspace = true
|
||||
prettyplease.workspace = true
|
||||
proc-macro2.workspace = true
|
||||
quote.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
syn.workspace = true
|
||||
thiserror.workspace = true
|
||||
indexmap = { workspace = true }
|
||||
prettyplease = { workspace = true }
|
||||
proc-macro2 = { workspace = true }
|
||||
quote = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
syn = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, format_ident, quote};
|
||||
use quote::{format_ident, quote, ToTokens};
|
||||
use std::{collections::BTreeMap, fs};
|
||||
use syn::{fold::Fold, parse_quote};
|
||||
|
||||
|
|
|
|||
|
|
@ -14,36 +14,25 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base64.workspace = true
|
||||
bitvec.workspace = true
|
||||
blake3.workspace = true
|
||||
clap.workspace = true
|
||||
clap_complete.workspace = true
|
||||
ctor.workspace = true
|
||||
eyre.workspace = true
|
||||
fayalite-proc-macros.workspace = true
|
||||
hashbrown.workspace = true
|
||||
jobslot.workspace = true
|
||||
num-bigint.workspace = true
|
||||
num-traits.workspace = true
|
||||
ordered-float.workspace = true
|
||||
petgraph.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde.workspace = true
|
||||
tempfile.workspace = true
|
||||
vec_map.workspace = true
|
||||
which.workspace = true
|
||||
bitvec = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
num-bigint = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
fayalite-proc-macros = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
clap = { version = "4.5.9", features = ["derive", "env"] }
|
||||
eyre = "0.6.12"
|
||||
which = "6.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
trybuild.workspace = true
|
||||
serde = { workspace = true, features = ["rc"] }
|
||||
trybuild = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
fayalite-visit-gen.workspace = true
|
||||
fayalite-visit-gen = { workspace = true }
|
||||
|
||||
[features]
|
||||
unstable-doc = []
|
||||
unstable-test-hasher = []
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
features = ["unstable-doc"]
|
||||
|
|
|
|||
|
|
@ -5,9 +5,6 @@ use std::{env, fs, path::Path};
|
|||
|
||||
fn main() {
|
||||
println!("cargo::rustc-check-cfg=cfg(todo)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)");
|
||||
println!("cargo::rustc-cfg=cfg_true_for_tests");
|
||||
let path = "visit_types.json";
|
||||
println!("cargo::rerun-if-changed={path}");
|
||||
println!("cargo::rerun-if-changed=build.rs");
|
||||
|
|
|
|||
|
|
@ -1,64 +1,45 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use fayalite::prelude::*;
|
||||
use clap::Parser;
|
||||
use fayalite::{cli, prelude::*};
|
||||
|
||||
#[hdl_module]
|
||||
fn blinky(platform_io_builder: PlatformIOBuilder<'_>) {
|
||||
let clk_input =
|
||||
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
fn blinky(clock_frequency: u64) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let rst: SyncReset = m.input();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
clk,
|
||||
rst: rst.to_reset(),
|
||||
};
|
||||
let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1;
|
||||
let max_value = clock_frequency / 2 - 1;
|
||||
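// the counter wraps every frequency/2 cycles, so the LED outputs toggle twice
// per second, i.e. the visible blink rate is about 1 Hz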
let int_ty = UInt::range_inclusive(0..=max_value);
|
||||
#[hdl]
|
||||
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
let counter: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
#[hdl]
|
||||
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
|
||||
#[hdl]
|
||||
let rgb_output_reg = reg_builder().clock_domain(cd).reset(
|
||||
#[hdl]
|
||||
peripherals::RgbLed {
|
||||
r: false,
|
||||
g: false,
|
||||
b: false,
|
||||
},
|
||||
);
|
||||
#[hdl]
|
||||
if counter_reg.cmp_eq(max_value) {
|
||||
connect_any(counter_reg, 0u8);
|
||||
if counter.cmp_eq(max_value) {
|
||||
connect_any(counter, 0u8);
|
||||
connect(output_reg, !output_reg);
|
||||
connect(rgb_output_reg.r, !rgb_output_reg.r);
|
||||
#[hdl]
|
||||
if rgb_output_reg.r {
|
||||
connect(rgb_output_reg.g, !rgb_output_reg.g);
|
||||
#[hdl]
|
||||
if rgb_output_reg.g {
|
||||
connect(rgb_output_reg.b, !rgb_output_reg.b);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
connect_any(counter_reg, counter_reg + 1_hdl_u1);
|
||||
}
|
||||
for led in platform_io_builder.peripherals_with_type::<peripherals::Led>() {
|
||||
if let Ok(led) = led.try_use_peripheral() {
|
||||
connect(led.on, output_reg);
|
||||
}
|
||||
}
|
||||
for rgb_led in platform_io_builder.peripherals_with_type::<peripherals::RgbLed>() {
|
||||
if let Ok(rgb_led) = rgb_led.try_use_peripheral() {
|
||||
connect(rgb_led, rgb_output_reg);
|
||||
}
|
||||
connect_any(counter, counter + 1_hdl_u1);
|
||||
}
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
let led: Bool = m.output();
|
||||
connect(led, output_reg);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
<BuildCli>::main("blinky", |_, platform, _| {
|
||||
Ok(JobParams::new(platform.wrap_main_module(blinky)))
|
||||
});
|
||||
#[derive(Parser)]
|
||||
struct Cli {
|
||||
/// clock frequency in hertz
|
||||
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
|
||||
clock_frequency: u64,
|
||||
#[command(subcommand)]
|
||||
cli: cli::Cli,
|
||||
}
|
||||
|
||||
fn main() -> cli::Result {
|
||||
let cli = Cli::parse();
|
||||
cli.cli.run(blinky(cli.clock_frequency))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,188 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::builder::TypedValueParser;
|
||||
use fayalite::{
|
||||
build::{ToArgs, WriteArgs},
|
||||
platform::PeripheralRef,
|
||||
prelude::*,
|
||||
};
|
||||
use ordered_float::NotNan;
|
||||
|
||||
fn pick_clock<'a>(
|
||||
platform_io_builder: &PlatformIOBuilder<'a>,
|
||||
) -> PeripheralRef<'a, peripherals::ClockInput> {
|
||||
let mut clks = platform_io_builder.peripherals_with_type::<peripherals::ClockInput>();
|
||||
clks.sort_by_key(|clk| {
|
||||
// sort clocks by preference; smaller return values mean higher preference
|
||||
let mut frequency = clk.ty().frequency();
|
||||
let priority;
|
||||
if frequency < 10e6 {
|
||||
frequency = -frequency; // prefer bigger frequencies
|
||||
priority = 1;
|
||||
} else if frequency > 50e6 {
|
||||
// prefer smaller frequencies
|
||||
priority = 2; // least preferred
|
||||
} else {
|
||||
priority = 0; // most preferred
|
||||
frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz
|
||||
}
|
||||
(priority, NotNan::new(frequency).expect("should be valid"))
|
||||
});
|
||||
clks[0]
|
||||
}
|
||||
|
||||
#[hdl_module]
|
||||
fn tx_only_uart(
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
divisor: f64,
|
||||
message: impl AsRef<[u8]>,
|
||||
) {
|
||||
let message = message.as_ref();
|
||||
let clk_input = pick_clock(&platform_io_builder).use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
};
|
||||
let numerator = 1u128 << 16;
|
||||
let denominator = (divisor * numerator as f64).round() as u128;
|
||||
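// Fractional clock divider: each cycle `numerator` (2^16) is added into the
// remainder; once the running sum reaches `denominator` (divisor * 2^16) it is
// subtracted back out and `tick_reg` pulses for one cycle, so ticks average
// out to one every `divisor` clock cycles (the target baud rate).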
|
||||
#[hdl]
|
||||
let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128);
|
||||
|
||||
#[hdl]
|
||||
let sum: UInt<128> = wire();
|
||||
connect_any(sum, remainder_reg + numerator);
|
||||
|
||||
#[hdl]
|
||||
let tick_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
connect(tick_reg, false);
|
||||
|
||||
#[hdl]
|
||||
let next_remainder: UInt<128> = wire();
|
||||
connect(remainder_reg, next_remainder);
|
||||
|
||||
#[hdl]
|
||||
if sum.cmp_ge(denominator) {
|
||||
connect_any(next_remainder, sum - denominator);
|
||||
connect(tick_reg, true);
|
||||
} else {
|
||||
connect(next_remainder, sum);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4);
|
||||
#[hdl]
|
||||
let next_uart_state: UInt<4> = wire();
|
||||
|
||||
connect_any(next_uart_state, uart_state_reg + 1u8);
|
||||
|
||||
#[hdl]
|
||||
let message_mem: Array<UInt<8>> = wire(Array[UInt::new_static()][message.len()]);
|
||||
for (message, message_mem) in message.iter().zip(message_mem) {
|
||||
connect(message_mem, *message);
|
||||
}
|
||||
#[hdl]
|
||||
let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32);
|
||||
#[hdl]
|
||||
let next_addr: UInt<32> = wire();
|
||||
connect(next_addr, addr_reg);
|
||||
|
||||
#[hdl]
|
||||
let tx = reg_builder().clock_domain(cd).reset(true);
|
||||
|
||||
#[hdl]
|
||||
let tx_bits: Array<Bool, 10> = wire();
|
||||
|
||||
connect(tx_bits[0], false); // start bit
|
||||
connect(tx_bits[9], true); // stop bit
|
||||
|
||||
for i in 0..8 {
|
||||
connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits
|
||||
}
|
||||
|
||||
connect(tx, tx_bits[uart_state_reg]);
|
||||
|
||||
#[hdl]
|
||||
if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) {
|
||||
connect(next_uart_state, 0_hdl_u4);
|
||||
let next_addr_val = addr_reg + 1u8;
|
||||
#[hdl]
|
||||
if next_addr_val.cmp_lt(message.len()) {
|
||||
connect_any(next_addr, next_addr_val);
|
||||
} else {
|
||||
connect(next_addr, 0u32);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if tick_reg {
|
||||
connect(uart_state_reg, next_uart_state);
|
||||
connect(addr_reg, next_addr);
|
||||
}
|
||||
|
||||
for uart in platform_io_builder.peripherals_with_type::<peripherals::Uart>() {
|
||||
connect(uart.use_peripheral().tx, tx);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
fn parse_baud_rate(
|
||||
v: impl AsRef<str>,
|
||||
) -> Result<NotNan<f64>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let retval: NotNan<f64> = v
|
||||
.as_ref()
|
||||
.parse()
|
||||
.map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?;
|
||||
if *retval > 0.0 && retval.is_finite() {
|
||||
Ok(retval)
|
||||
} else {
|
||||
Err("baud rate must be finite and positive".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
pub struct ExtraArgs {
|
||||
#[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")]
|
||||
pub baud_rate: NotNan<f64>,
|
||||
#[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())]
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl ToArgs for ExtraArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { baud_rate, message } = self;
|
||||
args.write_display_arg(format_args!("--baud-rate={baud_rate}"));
|
||||
args.write_long_option_eq("message", message);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
type Cli = BuildCli<ExtraArgs>;
|
||||
Cli::main(
|
||||
"tx_only_uart",
|
||||
|_, platform, ExtraArgs { baud_rate, message }| {
|
||||
Ok(JobParams::new(platform.try_wrap_main_module(|io| {
|
||||
let clk = pick_clock(&io).ty();
|
||||
let divisor = clk.frequency() / *baud_rate;
|
||||
let baud_rate_error = |msg| {
|
||||
<Cli as clap::CommandFactory>::command()
|
||||
.error(clap::error::ErrorKind::ValueValidation, msg)
|
||||
};
|
||||
const HUGE_DIVISOR: f64 = u64::MAX as f64;
|
||||
match divisor {
|
||||
divisor if !divisor.is_finite() => {
|
||||
return Err(baud_rate_error("bad baud rate"));
|
||||
}
|
||||
HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")),
|
||||
4.0.. => {}
|
||||
_ => return Err(baud_rate_error("baud rate is too large")),
|
||||
}
|
||||
Ok(tx_only_uart(io, divisor, message))
|
||||
})?))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![doc = include_str!("../README.md")]
|
||||
|
||||
//!
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Fayalite Modules
|
||||
//!
|
||||
//! The [`#[hdl_module]`][`crate::hdl_module`] attribute is applied to a Rust
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! These are for when you want to use modules written in
|
||||
//! some other language, such as Verilog.
|
||||
//!
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Module Function Bodies
|
||||
//!
|
||||
//! The `#[hdl_module]` attribute lets you have statements/expressions with `#[hdl]` annotations
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl]` Array Expressions
|
||||
//!
|
||||
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][type@Array] expression:
|
||||
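As an illustration of the usage this doc comment introduces, here is a minimal sketch built only from constructs that appear elsewhere in this diff (`m.output()`, `connect`, `_hdl`-suffixed literals); the module name and element values are made up:

```
use fayalite::prelude::*;

#[hdl_module]
fn array_example() {
    #[hdl]
    let out: Array<UInt<8>, 3> = m.output();
    // `#[hdl]` on the array expression converts each element with `ToExpr`
    // and yields an `Array` expression that can be connected as a whole
    connect(out, #[hdl] [1_hdl_u8, 2_hdl_u8, 3_hdl_u8]);
}
```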
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl] if` Statements
|
||||
//!
|
||||
//! `#[hdl] if` statements behave similarly to Rust `if` statements, except they end up as muxes
|
||||
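For reference, a small sketch of such a statement, mirroring the destructuring example later in this diff; the module and signal names are illustrative:

```
use fayalite::prelude::*;

#[hdl_module]
fn if_example() {
    #[hdl]
    let en: Bool = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    // both branches become a mux selected by `en` in the generated hardware
    #[hdl]
    if en {
        connect(out, 0x45_hdl_u8);
    } else {
        connect(out, 0_hdl_u8);
    }
}
```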
|
|
|
|||
|
|
@ -1,8 +1,5 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ## `#[hdl] let` statements
|
||||
|
||||
pub mod destructuring;
|
||||
pub mod inputs_outputs;
|
||||
pub mod instances;
|
||||
pub mod memories;
|
||||
|
|
|
|||
|
|
@ -1,33 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Destructuring Let
|
||||
//!
|
||||
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
|
||||
//!
|
||||
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
|
||||
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! #[hdl]
|
||||
//! struct MyStruct {
|
||||
//! a: UInt<8>,
|
||||
//! b: Bool,
|
||||
//! }
|
||||
//!
|
||||
//! #[hdl_module]
|
||||
//! fn my_module() {
|
||||
//! #[hdl]
|
||||
//! let my_input: MyStruct = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_output: UInt<8> = m.input();
|
||||
//! #[hdl]
|
||||
//! let MyStruct { a, b } = my_input;
|
||||
//! #[hdl]
|
||||
//! if b {
|
||||
//! connect(my_output, a);
|
||||
//! } else {
|
||||
//! connect(my_output, 0_hdl_u8);
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
||||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Inputs/Outputs
|
||||
//!
|
||||
//! Inputs/Outputs create a Rust variable with type [`Expr<T>`] where `T` is the type of the input/output.
|
||||
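A minimal sketch of what that looks like in practice, using only the `m.input()`/`m.output()` calls shown elsewhere in this diff; the names are illustrative:

```
use fayalite::prelude::*;

#[hdl_module]
fn io_example() {
    #[hdl]
    let data_in: UInt<8> = m.input(); // `data_in: Expr<UInt<8>>`
    #[hdl]
    let data_out: UInt<8> = m.output(); // `data_out: Expr<UInt<8>>`
    connect(data_out, data_in);
}
```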
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Module Instances
|
||||
//!
|
||||
//! Module instances are roughly the hardware equivalent of calling a function,
|
||||
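A rough sketch of what this is describing. The `instance(...)` helper and the field-style access to the child's IO are assumptions inferred from `process_hdl_let_instance` above, not confirmed by this diff; treat the exact API as hypothetical:

```
use fayalite::prelude::*;

#[hdl_module]
fn child() {
    #[hdl]
    let child_in: UInt<8> = m.input();
    #[hdl]
    let child_out: UInt<8> = m.output();
    connect(child_out, child_in);
}

#[hdl_module]
fn parent() {
    #[hdl]
    let parent_in: UInt<8> = m.input();
    #[hdl]
    let parent_out: UInt<8> = m.output();
    // assumed API: `instance(...)` instantiates the child module
    #[hdl]
    let child_inst = instance(child());
    connect(child_inst.child_in, parent_in);
    connect(parent_out, child_inst.child_out);
}
```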
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Memories
|
||||
//!
|
||||
//! Memories are optimized for storing large amounts of data.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Registers
|
||||
//!
|
||||
//! Registers are memory devices that will change their state only on a clock
|
||||
|
|
@ -9,9 +7,6 @@
|
|||
//!
|
||||
//! Registers follow [connection semantics], which are unlike assignments in software, so be sure to read that page.
|
||||
//!
|
||||
//! By convention, register names end in `_reg` -- this helps you tell which values are written
|
||||
//! immediately or on the next clock edge when connecting to them.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! # #[hdl_module]
|
||||
|
|
@ -21,11 +16,11 @@
|
|||
//! #[hdl]
|
||||
//! let cd: ClockDomain = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_reg: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
|
||||
//! let my_register: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
|
||||
//! #[hdl]
|
||||
//! if v {
|
||||
//! // my_reg is only changed when both `v` is set and `cd`'s clock edge occurs.
|
||||
//! connect(my_reg, 0x45_hdl_u8);
|
||||
//! // my_register is only changed when both `v` is set and `cd`'s clock edge occurs.
|
||||
//! connect(my_register, 0x45_hdl_u8);
|
||||
//! }
|
||||
//! # }
|
||||
//! ```
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Wires
|
||||
//!
|
||||
//! Wires are somewhat like variables, but unlike registers,
|
||||
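A small sketch using the `wire()` helper that appears in the `tx_only_uart` example earlier in this diff; the names are illustrative:

```
use fayalite::prelude::*;

#[hdl_module]
fn wire_example() {
    #[hdl]
    let a: UInt<8> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    #[hdl]
    let tmp: UInt<8> = wire();
    // a wire reflects whatever is connected to it within the same cycle,
    // unlike a register, which only updates on a clock edge
    connect(tmp, a);
    connect(out, tmp);
}
```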
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `_hdl`-suffixed literals
|
||||
//!
|
||||
//! You can have integer literals with an arbitrary number of bits like so:
|
||||
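For instance, using the same suffix forms that appear throughout this diff (`_hdl_u1`, `_hdl_u4`, `_hdl_u8`); the module itself is illustrative:

```
use fayalite::prelude::*;

#[hdl_module]
fn literal_example() {
    #[hdl]
    let out: UInt<4> = m.output();
    // the suffix picks the width: `9_hdl_u4` is a 4-bit UInt literal,
    // `1_hdl_u1` a 1-bit one, `0x45_hdl_u8` an 8-bit one, and so on
    connect(out, 9_hdl_u4);
}
```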
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl] match` Statements
|
||||
//!
|
||||
//! `#[hdl] match` statements behave similarly to Rust `match` statements, except they end up as muxes
|
||||
|
|
@ -7,5 +5,5 @@
|
|||
//!
|
||||
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
|
||||
//!
|
||||
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
|
||||
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
|
||||
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
|
||||
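A minimal sketch of such a match. The `HdlOption` type is an assumption inferred from the `HdlSome`/`HdlNone` patterns mentioned above; everything else uses constructs shown elsewhere in this diff:

```
use fayalite::prelude::*;

#[hdl_module]
fn match_example() {
    #[hdl]
    let input: HdlOption<UInt<8>> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    // each arm body must evaluate to `()`; the arms become mux branches
    #[hdl]
    match input {
        HdlSome(v) => connect(out, v),
        HdlNone => connect(out, 0_hdl_u8),
    }
}
```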
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl]` Struct/Variant Expressions
|
||||
//!
|
||||
//! Note: Structs are also known as [Bundles] when used in Fayalite; the Bundle name comes from [FIRRTL].
|
||||
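A short sketch mirroring the `#[hdl] ClockDomain { ... }` and `#[hdl] peripherals::RgbLed { ... }` expressions used in the examples earlier in this diff; the struct and values are made up:

```
use fayalite::prelude::*;

#[hdl]
struct MyStruct {
    a: UInt<8>,
    b: Bool,
}

#[hdl_module]
fn struct_example() {
    #[hdl]
    let a: UInt<8> = m.input();
    #[hdl]
    let out: MyStruct = m.output();
    // `#[hdl]` on the struct expression converts each field with `ToExpr`
    // and builds a Bundle-typed expression
    connect(out, #[hdl] MyStruct { a, b: true });
}
```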
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Normal Modules
|
||||
//!
|
||||
//! See also: [Extern Modules][`super::extern_module`]
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Fayalite Semantics
|
||||
//!
|
||||
//! Fayalite's semantics are based on [FIRRTL]. Due to their significance, some of the semantics are also documented here.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Connection Semantics
|
||||
//!
|
||||
//! Fayalite's connection semantics are unlike assignments in software, so be careful!
|
||||
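A minimal sketch of the FIRRTL-style last-connect-wins behavior this refers to (assuming Fayalite follows FIRRTL here, as the semantics chapter above states):

```
use fayalite::prelude::*;

#[hdl_module]
fn last_connect_example() {
    #[hdl]
    let out: UInt<8> = m.output();
    connect(out, 1_hdl_u8);
    // the later connection takes priority, so `out` ends up driven with 2;
    // connects are not sequential variable assignments
    connect(out, 2_hdl_u8);
}
```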
|
|
|
|||
|
|
@ -8,11 +8,10 @@ use serde::{Deserialize, Serialize};
|
|||
use std::{
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
ops::Deref,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
struct CustomFirrtlAnnotationFieldsImpl {
|
||||
value: serde_json::Map<String, serde_json::Value>,
|
||||
serialized: Interned<str>,
|
||||
|
|
@ -119,109 +118,11 @@ pub struct CustomFirrtlAnnotation {
|
|||
pub additional_fields: CustomFirrtlAnnotationFields,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct DontTouchAnnotation;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct SVAttributeAnnotation {
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct BlackBoxInlineAnnotation {
|
||||
pub path: Interned<str>,
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct BlackBoxPathAnnotation {
|
||||
pub path: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct DocStringAnnotation {
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
macro_rules! make_annotation_enum {
|
||||
(
|
||||
#[$non_exhaustive:ident]
|
||||
$(#[$meta:meta])*
|
||||
$vis:vis enum $AnnotationEnum:ident {
|
||||
$($Variant:ident($T:ty),)*
|
||||
}
|
||||
) => {
|
||||
crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
|
||||
|
||||
#[$non_exhaustive]
|
||||
$(#[$meta])*
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
$vis enum $AnnotationEnum {
|
||||
$($Variant($T),)*
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for $AnnotationEnum {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.fmt(f),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$(impl From<$T> for crate::annotations::Annotation {
|
||||
fn from(v: $T) -> Self {
|
||||
$AnnotationEnum::$Variant(v).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[self.into()]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(*self)]
|
||||
}
|
||||
})*
|
||||
};
|
||||
(@require_non_exhaustive non_exhaustive) => {};
|
||||
}
|
||||
|
||||
pub(crate) use make_annotation_enum;
|
||||
|
||||
make_annotation_enum! {
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch(DontTouchAnnotation),
|
||||
SVAttribute(SVAttributeAnnotation),
|
||||
BlackBoxInline(BlackBoxInlineAnnotation),
|
||||
BlackBoxPath(BlackBoxPathAnnotation),
|
||||
DocString(DocStringAnnotation),
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
Xilinx(crate::vendor::xilinx::XilinxAnnotation),
|
||||
}
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch,
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
|
|
@ -286,68 +187,10 @@ impl IntoAnnotations for &'_ mut Annotation {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct IterIntoAnnotations<T: Iterator<Item: IntoAnnotations>> {
|
||||
outer: T,
|
||||
inner: Option<<<T::Item as IntoAnnotations>::IntoAnnotations as IntoIterator>::IntoIter>,
|
||||
}
|
||||
|
||||
impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
|
||||
type Item = Annotation;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
if let Some(inner) = &mut self.inner {
|
||||
let Some(retval) = inner.next() else {
|
||||
self.inner = None;
|
||||
continue;
|
||||
};
|
||||
return Some(retval);
|
||||
} else {
|
||||
self.inner = Some(self.outer.next()?.into_annotations().into_iter());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
if let (0, Some(0)) = self.outer.size_hint() {
|
||||
self.inner
|
||||
.as_ref()
|
||||
.map(|v| v.size_hint())
|
||||
.unwrap_or((0, Some(0)))
|
||||
} else {
|
||||
(
|
||||
self.inner.as_ref().map(|v| v.size_hint().0).unwrap_or(0),
|
||||
None,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.inner
|
||||
.into_iter()
|
||||
.chain(self.outer.map(|v| v.into_annotations().into_iter()))
|
||||
.flatten()
|
||||
.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<
|
||||
T: FusedIterator<Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: IntoIterator<Item: IntoAnnotations>> IntoAnnotations for T {
|
||||
type IntoAnnotations = IterIntoAnnotations<T::IntoIter>;
|
||||
impl<T: IntoIterator<Item = Annotation>> IntoAnnotations for T {
|
||||
type IntoAnnotations = Self;
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
IterIntoAnnotations {
|
||||
outer: self.into_iter(),
|
||||
inner: None,
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -2,24 +2,17 @@
// See Notices.txt for copyright information

use crate::{
    expr::{
        CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr,
        ops::{ArrayLiteral, ExprFromIterator, ExprIntoIterator, ExprPartialEq},
    },
    int::{Bool, DYN_SIZE, DynSize, KnownSize, Size, SizeType},
    expr::{ops::ArrayIndex, Expr, ToExpr},
    int::{DynSize, KnownSize, Size, SizeType, DYN_SIZE},
    intern::{Intern, Interned, LazyInterned},
    module::transform::visit::{Fold, Folder, Visit, Visitor},
    sim::value::{SimValue, SimValuePartialEq},
    source_location::SourceLocation,
    ty::{
        CanonicalType, MatchVariantWithoutScope, OpaqueSimValueSlice, OpaqueSimValueWriter,
        OpaqueSimValueWritten, StaticType, Type, TypeProperties, TypeWithDeref,
        serde_impls::SerdeCanonicalType,
        CanonicalType, MatchVariantWithoutScope, StaticType, Type, TypeProperties, TypeWithDeref,
    },
    util::ConstUsize,
};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error};
use std::{iter::FusedIterator, ops::Index};
use std::ops::Index;

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ArrayType<T: Type = CanonicalType, Len: Size = DynSize> {
@@ -48,20 +41,15 @@ impl<T: Type, Len: Size> ArrayType<T, Len> {
            is_storable,
            is_castable_from_bits,
            bit_width,
            sim_only_values_len,
        } = element;
        let Some(bit_width) = bit_width.checked_mul(len) else {
            panic!("array too big");
        };
        let Some(sim_only_values_len) = sim_only_values_len.checked_mul(len) else {
            panic!("array too big");
        };
        TypeProperties {
            is_passive,
            is_storable,
            is_castable_from_bits,
            bit_width,
            sim_only_values_len,
        }
    }
    pub fn new(element: T, len: Len::SizeType) -> Self {
@@ -97,18 +85,12 @@ impl<T: Type, Len: Size> ArrayType<T, Len> {
        }
    }
}

impl<T: Type, Len: KnownSize + Size<SizeType = Len>> ArrayType<T, Len> {
impl<T: Type, Len: KnownSize> ArrayType<T, Len> {
    pub fn new_static(element: T) -> Self {
        Self::new(element, Len::SizeType::default())
    }
}

impl<T: StaticType, Len: KnownSize> Default for ArrayType<T, Len> {
    fn default() -> Self {
        Self::TYPE
    }
}

impl<T: StaticType, Len: KnownSize> StaticType for ArrayType<T, Len> {
    const TYPE: Self = Self {
        element: LazyInterned::new_lazy(&|| T::TYPE.intern_sized()),
@@ -157,7 +139,6 @@ impl<T: Type + Visit<State>, Len: Size, State: Visitor + ?Sized> Visit<State>
impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
    type BaseType = Array;
    type MaskType = ArrayType<T::MaskType, Len>;
    type SimValue = Len::ArraySimValue<T>;
    type MatchVariant = Len::ArrayMatch<T>;
    type MatchActiveScope = ();
    type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Len::ArrayMatch<T>>;
@@ -167,8 +148,10 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
        this: Expr<Self>,
        source_location: SourceLocation,
    ) -> Self::MatchVariantsIter {
        let base = Expr::as_dyn_array(this);
        let base_ty = Expr::ty(base);
        let _ = source_location;
        let retval = Vec::from_iter(this);
        let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
        std::iter::once(MatchVariantWithoutScope(
            Len::ArrayMatch::<T>::try_from(retval)
                .ok()
@@ -194,107 +177,17 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
            Len::from_usize(array.len()),
        )
    }

    fn source_location() -> SourceLocation {
        SourceLocation::builtin()
    }

    fn sim_value_from_opaque(&self, mut opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
        let element_ty = self.element();
        let element_size = element_ty.canonical().size();
        let mut value = Vec::with_capacity(self.len());
        for _ in 0..self.len() {
            let (element_opaque, rest) = opaque.split_at(element_size);
            value.push(SimValue::from_opaque(element_ty, element_opaque.to_owned()));
            opaque = rest;
        }
        value.try_into().ok().expect("used correct length")
    }

    fn sim_value_clone_from_opaque(
        &self,
        value: &mut Self::SimValue,
        mut opaque: OpaqueSimValueSlice<'_>,
    ) {
        let element_ty = self.element();
        let element_size = element_ty.canonical().size();
        let value = AsMut::<[SimValue<T>]>::as_mut(value);
        assert_eq!(self.len(), value.len());
        for element_value in value {
            assert_eq!(SimValue::ty(element_value), element_ty);
            let (element_opaque, rest) = opaque.split_at(element_size);
            SimValue::opaque_mut(element_value).clone_from_slice(element_opaque);
            opaque = rest;
        }
    }

    fn sim_value_to_opaque<'w>(
        &self,
        value: &Self::SimValue,
        mut writer: OpaqueSimValueWriter<'w>,
    ) -> OpaqueSimValueWritten<'w> {
        let element_ty = self.element();
        let element_size = element_ty.canonical().size();
        let value = AsRef::<[SimValue<T>]>::as_ref(value);
        assert_eq!(self.len(), value.len());
        for element_value in value {
            assert_eq!(SimValue::ty(element_value), element_ty);
            writer.fill_prefix_with(element_size, |writer| {
                writer.fill_cloned_from_slice(SimValue::opaque(element_value).as_slice())
            });
        }
        writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
    }
}

impl<T: Type + Serialize, Len: Size> Serialize for ArrayType<T, Len> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        SerdeCanonicalType::<T>::Array {
            element: self.element(),
            len: self.len(),
        }
        .serialize(serializer)
    }
}

impl<'de, T: Type + Deserialize<'de>, Len: Size> Deserialize<'de> for ArrayType<T, Len> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let name = |len| -> String {
            if let Some(len) = len {
                format!("an Array<_, {len}>")
            } else {
                "an Array<_>".to_string()
            }
        };
        match SerdeCanonicalType::<T>::deserialize(deserializer)? {
            SerdeCanonicalType::Array { element, len } => {
                if let Some(len) = Len::try_from_usize(len) {
                    Ok(Self::new(element, len))
                } else {
                    Err(Error::invalid_value(
                        serde::de::Unexpected::Other(&name(Some(len))),
                        &&*name(Len::KNOWN_VALUE),
                    ))
                }
            }
            ty => Err(Error::invalid_value(
                serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
                &&*name(Len::KNOWN_VALUE),
            )),
        }
    }
}

impl<T: Type, Len: Size> TypeWithDeref for ArrayType<T, Len> {
    fn expr_deref(this: &Expr<Self>) -> &Self::MatchVariant {
        let retval = Vec::from_iter(*this);
        Interned::into_inner(Intern::intern_sized(
        let base = Expr::as_dyn_array(*this);
        let base_ty = Expr::ty(base);
        let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
        Interned::<_>::into_inner(Intern::intern_sized(
            Len::ArrayMatch::<T>::try_from(retval)
                .ok()
                .expect("unreachable"),
@@ -309,7 +202,7 @@ impl<T: Type> Index<T> for ArrayWithoutGenerics {
    type Output = ArrayWithoutLen<T>;

    fn index(&self, element: T) -> &Self::Output {
        Interned::into_inner(Intern::intern_sized(ArrayWithoutLen { element }))
        Interned::<_>::into_inner(Intern::intern_sized(ArrayWithoutLen { element }))
    }
}

@@ -322,146 +215,6 @@ impl<T: Type, L: SizeType> Index<L> for ArrayWithoutLen<T> {
    type Output = ArrayType<T, L::Size>;

    fn index(&self, len: L) -> &Self::Output {
        Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
    }
}

impl<Lhs: Type, Rhs: Type, Len: Size> ExprPartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
where
    Lhs: ExprPartialEq<Rhs>,
{
    fn cmp_eq(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
        let lhs_ty = Expr::ty(lhs);
        let rhs_ty = Expr::ty(rhs);
        assert_eq!(lhs_ty.len(), rhs_ty.len());
        lhs.into_iter()
            .zip(rhs)
            .map(|(l, r)| l.cmp_eq(r))
            .collect::<Expr<Array<Bool>>>()
            .cast_to_bits()
            .all_one_bits()
    }

    fn cmp_ne(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
        let lhs_ty = Expr::ty(lhs);
        let rhs_ty = Expr::ty(rhs);
        assert_eq!(lhs_ty.len(), rhs_ty.len());
        lhs.into_iter()
            .zip(rhs)
            .map(|(l, r)| l.cmp_ne(r))
            .collect::<Expr<Array<Bool>>>()
            .cast_to_bits()
            .any_one_bits()
    }
}

impl<Lhs: Type, Rhs: Type, Len: Size> SimValuePartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
where
    Lhs: SimValuePartialEq<Rhs>,
{
    fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<ArrayType<Rhs, Len>>) -> bool {
        AsRef::<[_]>::as_ref(&**this)
            .iter()
            .zip(AsRef::<[_]>::as_ref(&**other))
            .all(|(l, r)| SimValuePartialEq::sim_value_eq(l, r))
    }
}

impl<T: Type, Len: Size> ExprIntoIterator for ArrayType<T, Len> {
    type Item = T;
    type ExprIntoIter = ExprArrayIter<T, Len>;

    fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter {
        ExprArrayIter {
            base: e,
            indexes: 0..Expr::ty(e).len(),
        }
    }
}

#[derive(Clone, Debug)]
pub struct ExprArrayIter<T: Type, Len: Size> {
    base: Expr<ArrayType<T, Len>>,
    indexes: std::ops::Range<usize>,
}

impl<T: Type, Len: Size> ExprArrayIter<T, Len> {
    pub fn base(&self) -> Expr<ArrayType<T, Len>> {
        self.base
    }
    pub fn indexes(&self) -> std::ops::Range<usize> {
        self.indexes.clone()
    }
}

impl<T: Type, Len: Size> Iterator for ExprArrayIter<T, Len> {
    type Item = Expr<T>;

    fn next(&mut self) -> Option<Self::Item> {
        self.indexes.next().map(|i| self.base[i])
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.indexes.size_hint()
    }

    fn count(self) -> usize {
        self.indexes.count()
    }

    fn last(mut self) -> Option<Self::Item> {
        self.next_back()
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.indexes.nth(n).map(|i| self.base[i])
    }

    fn fold<B, F>(self, init: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.indexes.fold(init, |b, i| f(b, self.base[i]))
    }
}

impl<T: Type, Len: Size> DoubleEndedIterator for ExprArrayIter<T, Len> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.indexes.next_back().map(|i| self.base[i])
    }

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.indexes.nth_back(n).map(|i| self.base[i])
    }

    fn rfold<B, F>(self, init: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.indexes.rfold(init, |b, i| f(b, self.base[i]))
    }
}

impl<T: Type, Len: Size> ExactSizeIterator for ExprArrayIter<T, Len> {
    fn len(&self) -> usize {
        self.indexes.len()
    }
}

impl<T: Type, Len: Size> FusedIterator for ExprArrayIter<T, Len> {}

impl<A: StaticType> ExprFromIterator<Expr<A>> for Array<A> {
    fn expr_from_iter<T: IntoIterator<Item = Expr<A>>>(iter: T) -> Expr<Self> {
        ArrayLiteral::new(
            A::TYPE,
            iter.into_iter().map(|v| Expr::canonical(v)).collect(),
        )
        .to_expr()
    }
}

impl<'a, A: StaticType> ExprFromIterator<&'a Expr<A>> for Array<A> {
    fn expr_from_iter<T: IntoIterator<Item = &'a Expr<A>>>(iter: T) -> Expr<Self> {
        iter.into_iter().copied().collect()
        Interned::<_>::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
    }
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,128 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies,
|
||||
JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
|
||||
ToArgs, WriteArgs,
|
||||
},
|
||||
firrtl::{ExportOptions, FileBackend},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
|
||||
pub struct FirrtlJobKind;
|
||||
|
||||
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[group(id = "Firrtl")]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl ToArgs for FirrtlArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { export_options } = self;
|
||||
export_options.to_args(args);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Firrtl {
|
||||
base: BaseJob,
|
||||
export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl Firrtl {
|
||||
fn make_firrtl_file_backend(&self) -> FileBackend {
|
||||
FileBackend {
|
||||
dir_path: PathBuf::from(&*self.base.output_dir()),
|
||||
top_fir_file_stem: Some(self.base.file_stem().into()),
|
||||
circuit_name: None,
|
||||
}
|
||||
}
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.base.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for FirrtlJobKind {
|
||||
type Args = FirrtlArgs;
|
||||
type Job = Firrtl;
|
||||
type Dependencies = JobKindAndDependencies<BaseJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
JobKindAndDependencies::new(BaseJobKind)
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(
|
||||
params,
|
||||
global_params,
|
||||
|_kind, FirrtlArgs { export_options }, dependencies| {
|
||||
Ok(Firrtl {
|
||||
base: dependencies.get_job::<BaseJob, _>().clone(),
|
||||
export_options,
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.base.output_dir(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.firrtl_file(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"firrtl".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
let [JobItem::Path { path: input_path }] = *inputs else {
|
||||
panic!("wrong inputs, expected a single `Path`");
|
||||
};
|
||||
assert_eq!(input_path, job.base.output_dir());
|
||||
crate::firrtl::export(
|
||||
job.make_firrtl_file_backend(),
|
||||
params.main_module(),
|
||||
job.export_options,
|
||||
)?;
|
||||
Ok(vec![JobItem::Path {
|
||||
path: job.firrtl_file(),
|
||||
}])
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[DynJobKind::new(FirrtlJobKind)]
|
||||
}
|
||||
|
|
@@ -1,388 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams,
|
||||
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
|
||||
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
module::NameId,
|
||||
testing::FormalMode,
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::Context;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt::{self, Write},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[arg(long = "sby-extra-arg", value_name = "ARG")]
|
||||
pub sby_extra_args: Vec<OsString>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub formal_mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub formal_depth: u64,
|
||||
#[arg(long, default_value = Self::DEFAULT_SOLVER)]
|
||||
pub formal_solver: String,
|
||||
#[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
|
||||
pub smtbmc_extra_args: Vec<OsString>,
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
pub const DEFAULT_SOLVER: &'static str = "z3";
|
||||
}
|
||||
|
||||
impl ToArgs for FormalArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = self;
|
||||
for arg in sby_extra_args {
|
||||
args.write_long_option_eq("sby-extra-arg", arg);
|
||||
}
|
||||
args.write_display_args([
|
||||
format_args!("--formal-mode={formal_mode}"),
|
||||
format_args!("--formal-depth={formal_depth}"),
|
||||
format_args!("--formal-solver={formal_solver}"),
|
||||
]);
|
||||
for arg in smtbmc_extra_args {
|
||||
args.write_long_option_eq("smtbmc-extra-arg", arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct WriteSbyFileJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
|
||||
pub struct WriteSbyFileJob {
|
||||
sby_extra_args: Interned<[Interned<OsStr>]>,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
formal_solver: Interned<str>,
|
||||
smtbmc_extra_args: Interned<[Interned<OsStr>]>,
|
||||
sby_file: Interned<Path>,
|
||||
output_dir: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl WriteSbyFileJob {
|
||||
pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.sby_extra_args
|
||||
}
|
||||
pub fn formal_mode(&self) -> FormalMode {
|
||||
self.formal_mode
|
||||
}
|
||||
pub fn formal_depth(&self) -> u64 {
|
||||
self.formal_depth
|
||||
}
|
||||
pub fn formal_solver(&self) -> Interned<str> {
|
||||
self.formal_solver
|
||||
}
|
||||
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.smtbmc_extra_args
|
||||
}
|
||||
pub fn sby_file(&self) -> Interned<Path> {
|
||||
self.sby_file
|
||||
}
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
fn write_sby(
|
||||
&self,
|
||||
output: &mut OsString,
|
||||
additional_files: &[Interned<Path>],
|
||||
main_module_name_id: NameId,
|
||||
) -> eyre::Result<()> {
|
||||
let Self {
|
||||
sby_extra_args: _,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file: _,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
} = self;
|
||||
write!(
|
||||
output,
|
||||
"[options]\n\
|
||||
mode {formal_mode}\n\
|
||||
depth {formal_depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {formal_solver} -- --"
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
for i in smtbmc_extra_args {
|
||||
output.push(" ");
|
||||
output.push(i);
|
||||
}
|
||||
output.push(
|
||||
"\n\
|
||||
\n\
|
||||
[script]\n",
|
||||
);
|
||||
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
|
||||
output.push("read_verilog -sv -formal \"");
|
||||
output.push(verilog_file);
|
||||
output.push("\"\n");
|
||||
}
|
||||
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(
|
||||
output,
|
||||
"hierarchy -top {circuit_name}\n\
|
||||
proc\n\
|
||||
setattr -set keep 1 w:\\*\n\
|
||||
prep",
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
Ok(())
|
||||
}
|
||||
}
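Aside: for orientation, the write_sby method above assembles a SymbiYosys script of roughly the following shape. The concrete mode, solver, file names, and circuit name are placeholders here (shown with the default depth of 20 and the default z3 solver); the real values come from the job's arguments, and any --smtbmc-extra-arg values are appended to the smtbmc line.

[options]
mode <formal_mode>
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal "<main or additional verilog file>"
hierarchy -top <circuit_name>
proc
setattr -set keep 1 w:\*
prep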
|
||||
|
||||
impl JobKind for WriteSbyFileJobKind {
|
||||
type Args = FormalArgs;
|
||||
type Job = WriteSbyFileJob;
|
||||
type Dependencies = JobKindAndDependencies<VerilogJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
mut args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.dependencies
|
||||
.dependencies
|
||||
.args
|
||||
.args
|
||||
.additional_args
|
||||
.verilog_dialect
|
||||
.get_or_insert(VerilogDialect::Yosys);
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let FormalArgs {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(WriteSbyFileJob {
|
||||
sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: formal_solver.intern_deref(),
|
||||
smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
|
||||
sby_file: base_job.file_with_ext("sby"),
|
||||
output_dir: base_job.output_dir(),
|
||||
main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path { path: job.sby_file }].intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"write-sby-file".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||
let [additional_files] = inputs else {
|
||||
unreachable!();
|
||||
};
|
||||
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
|
||||
let mut contents = OsString::new();
|
||||
job.write_sby(
|
||||
&mut contents,
|
||||
additional_files,
|
||||
params.main_module().name_id(),
|
||||
)?;
|
||||
let path = job.sby_file;
|
||||
std::fs::write(path, contents.as_encoded_bytes())
|
||||
.wrap_err_with(|| format!("writing {path:?} failed"))?;
|
||||
Ok(vec![JobItem::Path { path }])
|
||||
}
|
||||
|
||||
fn subcommand_hidden(self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Formal {
|
||||
#[serde(flatten)]
|
||||
write_sby_file: WriteSbyFileJob,
|
||||
sby_file_name: Interned<OsStr>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for Formal {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
write_sby_file:
|
||||
WriteSbyFileJob {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
},
|
||||
sby_file_name,
|
||||
} = self;
|
||||
f.debug_struct("Formal")
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("formal_mode", formal_mode)
|
||||
.field("formal_depth", formal_depth)
|
||||
.field("formal_solver", formal_solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("sby_file", sby_file)
|
||||
.field("sby_file_name", sby_file_name)
|
||||
.field("main_verilog_file", main_verilog_file)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Symbiyosys;
|
||||
|
||||
impl ExternalProgramTrait for Symbiyosys {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"sby".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
|
||||
pub struct FormalAdditionalArgs {}
|
||||
|
||||
impl ToArgs for FormalAdditionalArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for Formal {
|
||||
type AdditionalArgs = FormalAdditionalArgs;
|
||||
type AdditionalJobData = Formal;
|
||||
type BaseJobPosition = GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
|
||||
>,
|
||||
>;
|
||||
type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
|
||||
type ExternalProgram = Symbiyosys;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let FormalAdditionalArgs {} = args.additional_args;
|
||||
let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
|
||||
Ok(Formal {
|
||||
sby_file_name: write_sby_file
|
||||
.sby_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
write_sby_file,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.sby_file(),
|
||||
},
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.main_verilog_file(),
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
Interned::default()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
// args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
args.write_arg("-f");
|
||||
args.write_interned_arg(job.additional_job_data().sby_file_name);
|
||||
args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"formal".intern()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[
|
||||
DynJobKind::new(WriteSbyFileJobKind),
|
||||
DynJobKind::new(ExternalCommandJobKind::<Formal>::new()),
|
||||
]
|
||||
}
|
||||
|
|
@@ -1,855 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs,
|
||||
},
|
||||
intern::Interned,
|
||||
platform::DynPlatform,
|
||||
util::{HashMap, HashSet, job_server::AcquiredJob},
|
||||
};
|
||||
use eyre::{ContextCompat, eyre};
|
||||
use petgraph::{
|
||||
algo::{DfsSpace, kosaraju_scc, toposort},
|
||||
graph::DiGraph,
|
||||
visit::{GraphBase, Visitable},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
|
||||
use std::{
|
||||
cell::OnceCell,
|
||||
collections::{BTreeMap, BTreeSet, VecDeque},
|
||||
convert::Infallible,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Write},
|
||||
panic,
|
||||
rc::Rc,
|
||||
str::Utf8Error,
|
||||
sync::mpsc,
|
||||
thread::{self, ScopedJoinHandle},
|
||||
};
|
||||
|
||||
macro_rules! write_str {
|
||||
($s:expr, $($rest:tt)*) => {
|
||||
write!($s, $($rest)*).expect("String::write_fmt can't fail")
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum JobGraphNode {
|
||||
Job(DynJob),
|
||||
Item {
|
||||
#[allow(dead_code, reason = "name used for debugging")]
|
||||
name: JobItemName,
|
||||
source_job: Option<DynJob>,
|
||||
},
|
||||
}
|
||||
|
||||
type JobGraphInner = DiGraph<JobGraphNode, ()>;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct JobGraph {
|
||||
jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
|
||||
items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
|
||||
graph: JobGraphInner,
|
||||
topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
|
||||
space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for JobGraph {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
jobs: _,
|
||||
items: _,
|
||||
graph,
|
||||
topological_order,
|
||||
space: _,
|
||||
} = self;
|
||||
f.debug_struct("JobGraph")
|
||||
.field("graph", graph)
|
||||
.field("topological_order", topological_order)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum JobGraphError {
|
||||
CycleError {
|
||||
job: DynJob,
|
||||
output: JobItemName,
|
||||
},
|
||||
MultipleJobsCreateSameOutput {
|
||||
output_item: JobItemName,
|
||||
existing_job: DynJob,
|
||||
new_job: DynJob,
|
||||
},
|
||||
}
|
||||
|
||||
impl std::error::Error for JobGraphError {}
|
||||
|
||||
impl fmt::Display for JobGraphError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::CycleError { job, output } => write!(
|
||||
f,
|
||||
"job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
|
||||
{output:?}\n\
|
||||
job:\n{job:?}",
|
||||
),
|
||||
JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item,
|
||||
existing_job,
|
||||
new_job,
|
||||
} => write!(
|
||||
f,
|
||||
"job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
|
||||
conflicting output:\n\
|
||||
{output_item:?}\n\
|
||||
existing job:\n\
|
||||
{existing_job:?}\n\
|
||||
new job:\n\
|
||||
{new_job:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum EscapeForUnixShellState {
|
||||
DollarSingleQuote,
|
||||
SingleQuote,
|
||||
Unquoted,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EscapeForUnixShell<'a> {
|
||||
state: EscapeForUnixShellState,
|
||||
prefix: [u8; 3],
|
||||
bytes: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for c in self.clone() {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixShell<'a> {
|
||||
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
|
||||
Self::from_bytes(s.as_ref().as_encoded_bytes())
|
||||
}
|
||||
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
|
||||
let mut prefix = [0; 3];
|
||||
prefix[..bytes.len()].copy_from_slice(bytes);
|
||||
prefix
|
||||
}
|
||||
pub fn from_bytes(bytes: &'a [u8]) -> Self {
|
||||
let mut needs_single_quote = bytes.is_empty();
|
||||
for &b in bytes {
|
||||
match b {
|
||||
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
|
||||
0..0x20 | 0x7F.. => {
|
||||
return Self {
|
||||
state: EscapeForUnixShellState::DollarSingleQuote,
|
||||
prefix: Self::make_prefix(b"$'"),
|
||||
bytes,
|
||||
};
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if needs_single_quote {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::SingleQuote,
|
||||
prefix: Self::make_prefix(b"'"),
|
||||
bytes,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::Unquoted,
|
||||
prefix: Self::make_prefix(b""),
|
||||
bytes,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for EscapeForUnixShell<'_> {
|
||||
type Item = char;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match &mut self.prefix {
|
||||
[0, 0, 0] => {}
|
||||
[0, 0, v] | // find first
|
||||
[0, v, _] | // non-zero byte
|
||||
[v, _, _] => {
|
||||
let retval = *v as char;
|
||||
*v = 0;
|
||||
return Some(retval);
|
||||
}
|
||||
}
|
||||
let Some(&next_byte) = self.bytes.split_off_first() else {
|
||||
return match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote
|
||||
| EscapeForUnixShellState::SingleQuote => {
|
||||
self.state = EscapeForUnixShellState::Unquoted;
|
||||
Some('\'')
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => None,
|
||||
};
|
||||
};
|
||||
match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
|
||||
b'\'' | b'\\' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
b'\t' => {
|
||||
self.prefix = Self::make_prefix(b"t");
|
||||
Some('\\')
|
||||
}
|
||||
b'\n' => {
|
||||
self.prefix = Self::make_prefix(b"n");
|
||||
Some('\\')
|
||||
}
|
||||
b'\r' => {
|
||||
self.prefix = Self::make_prefix(b"r");
|
||||
Some('\\')
|
||||
}
|
||||
0x20..=0x7E => Some(next_byte as char),
|
||||
_ => {
|
||||
self.prefix = [
|
||||
b'x',
|
||||
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
|
||||
as u8,
|
||||
char::from_digit(next_byte as u32 & 0xF, 0x10)
|
||||
.expect("known to be in range") as u8,
|
||||
];
|
||||
Some('\\')
|
||||
}
|
||||
},
|
||||
EscapeForUnixShellState::SingleQuote => {
|
||||
if next_byte == b'\'' {
|
||||
self.prefix = Self::make_prefix(b"\\''");
|
||||
Some('\'')
|
||||
} else {
|
||||
Some(next_byte as char)
|
||||
}
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => match next_byte {
|
||||
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
|
||||
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
|
||||
| b'}' | b'~' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
_ => Some(next_byte as char),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
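Aside: the iterator above streams escaped characters lazily and chooses between unquoted, single-quoted, and $'...'-quoted forms. A much simpler, self-contained sketch of just the single-quote rule it applies (close the quote, emit an escaped quote, reopen) is shown below; this helper is illustrative only and always single-quotes, unlike the three-state original.

// Simplified stand-in for the single-quote strategy above (not the crate API):
// wrap the whole argument in '...' and turn each embedded ' into '\''.
fn escape_for_unix_shell(arg: &str) -> String {
    let mut out = String::from("'");
    for c in arg.chars() {
        if c == '\'' {
            out.push_str(r#"'\''"#);
        } else {
            out.push(c);
        }
    }
    out.push('\'');
    out
}

fn main() {
    assert_eq!(escape_for_unix_shell("it's a test"), r#"'it'\''s a test'"#);
}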
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum UnixMakefileEscapeKind {
|
||||
NonRecipe,
|
||||
RecipeWithoutShellEscaping,
|
||||
RecipeWithShellEscaping,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct EscapeForUnixMakefile<'a> {
|
||||
s: &'a OsStr,
|
||||
kind: UnixMakefileEscapeKind,
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.do_write(
|
||||
f,
|
||||
fmt::Write::write_str,
|
||||
fmt::Write::write_char,
|
||||
|_, _| Ok(()),
|
||||
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixMakefile<'a> {
|
||||
fn do_write<S: ?Sized, E>(
|
||||
&self,
|
||||
state: &mut S,
|
||||
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
|
||||
write_char: impl Fn(&mut S, char) -> Result<(), E>,
|
||||
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
|
||||
utf8_error: impl Fn(Utf8Error) -> E,
|
||||
) -> Result<(), E> {
|
||||
let escape_recipe_char = |c| match c {
|
||||
'$' => write_str(state, "$$"),
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
};
|
||||
match self.kind {
|
||||
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(|c| match c {
|
||||
'=' => {
|
||||
add_variable(state, "EQUALS = =")?;
|
||||
write_str(state, "$(EQUALS)")
|
||||
}
|
||||
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
|
||||
'$' => write_str(state, "$$"),
|
||||
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
|
||||
write_char(state, '\\')?;
|
||||
write_char(state, c)
|
||||
}
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
}),
|
||||
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
|
||||
str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(escape_recipe_char)
|
||||
}
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
|
||||
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn new(
|
||||
s: &'a (impl ?Sized + AsRef<OsStr>),
|
||||
kind: UnixMakefileEscapeKind,
|
||||
needed_variables: &mut BTreeSet<&'static str>,
|
||||
) -> Result<Self, Utf8Error> {
|
||||
let s = s.as_ref();
|
||||
let retval = Self { s, kind };
|
||||
retval.do_write(
|
||||
needed_variables,
|
||||
|_, _| Ok(()),
|
||||
|_, _| Ok(()),
|
||||
|needed_variables, variable| {
|
||||
needed_variables.insert(variable);
|
||||
Ok(())
|
||||
},
|
||||
|e| e,
|
||||
)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobGraph {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
fn try_add_item_node(
|
||||
&mut self,
|
||||
name: JobItemName,
|
||||
new_source_job: Option<DynJob>,
|
||||
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
|
||||
use hashbrown::hash_map::Entry;
|
||||
match self.items.entry(name) {
|
||||
Entry::Occupied(item_entry) => {
|
||||
let node_id = *item_entry.get();
|
||||
let JobGraphNode::Item {
|
||||
name: _,
|
||||
source_job,
|
||||
} = &mut self.graph[node_id]
|
||||
else {
|
||||
unreachable!("known to be an item");
|
||||
};
|
||||
if let Some(new_source_job) = new_source_job {
|
||||
if let Some(source_job) = source_job {
|
||||
return Err(JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item: item_entry.key().clone(),
|
||||
existing_job: source_job.clone(),
|
||||
new_job: new_source_job,
|
||||
});
|
||||
} else {
|
||||
*source_job = Some(new_source_job);
|
||||
}
|
||||
}
|
||||
Ok(node_id)
|
||||
}
|
||||
Entry::Vacant(item_entry) => {
|
||||
let node_id = self.graph.add_node(JobGraphNode::Item {
|
||||
name,
|
||||
source_job: new_source_job,
|
||||
});
|
||||
new_nodes.insert(node_id);
|
||||
item_entry.insert(node_id);
|
||||
Ok(node_id)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
|
||||
&mut self,
|
||||
jobs: I,
|
||||
) -> Result<(), JobGraphError> {
|
||||
use hashbrown::hash_map::Entry;
|
||||
let jobs = jobs.into_iter();
|
||||
struct RemoveNewNodesOnError<'a> {
|
||||
this: &'a mut JobGraph,
|
||||
new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl Drop for RemoveNewNodesOnError<'_> {
|
||||
fn drop(&mut self) {
|
||||
for node in self.new_nodes.drain() {
|
||||
self.this.graph.remove_node(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
|
||||
this: self,
|
||||
new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
|
||||
};
|
||||
let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
|
||||
let this = &mut *remove_new_nodes_on_error.this;
|
||||
for job in jobs {
|
||||
let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
|
||||
continue;
|
||||
};
|
||||
let job_node_id = this
|
||||
.graph
|
||||
.add_node(JobGraphNode::Job(job_entry.key().clone()));
|
||||
new_nodes.insert(job_node_id);
|
||||
job_entry.insert(job_node_id);
|
||||
for name in job.outputs() {
|
||||
let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
|
||||
this.graph.add_edge(job_node_id, item_node_id, ());
|
||||
}
|
||||
for name in job.inputs() {
|
||||
let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
|
||||
this.graph.add_edge(item_node_id, job_node_id, ());
|
||||
}
|
||||
}
|
||||
match toposort(&this.graph, Some(&mut this.space)) {
|
||||
Ok(v) => {
|
||||
this.topological_order = v;
|
||||
// no need to remove any of the new nodes on drop since we didn't encounter any errors
|
||||
remove_new_nodes_on_error.new_nodes.clear();
|
||||
Ok(())
|
||||
}
|
||||
Err(_) => {
|
||||
// there's at least one cycle, find one!
|
||||
let cycle = kosaraju_scc(&this.graph)
|
||||
.into_iter()
|
||||
.find_map(|scc| {
|
||||
if scc.len() <= 1 {
|
||||
// can't be a cycle since our graph is bipartite --
|
||||
// jobs only connect to items, never jobs to jobs or items to items
|
||||
None
|
||||
} else {
|
||||
Some(scc)
|
||||
}
|
||||
})
|
||||
.expect("we know there's a cycle");
|
||||
let cycle_set = HashSet::from_iter(cycle.iter().copied());
|
||||
let job = cycle
|
||||
.into_iter()
|
||||
.find_map(|node_id| {
|
||||
if let JobGraphNode::Job(job) = &this.graph[node_id] {
|
||||
Some(job.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.expect("a job must be part of the cycle");
|
||||
let output = job
|
||||
.outputs()
|
||||
.into_iter()
|
||||
.find(|output| cycle_set.contains(&this.items[output]))
|
||||
.expect("an output must be part of the cycle");
|
||||
Err(JobGraphError::CycleError { job, output })
|
||||
}
|
||||
}
|
||||
}
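Aside: try_add_jobs above relies on petgraph's toposort to order the bipartite job/item graph and falls back to kosaraju_scc to pinpoint a cycle for the error message. A minimal illustration of the toposort-or-report-cycle part, assuming the petgraph crate as a dependency, is:

// Build a tiny job/item graph, order it, then add a back edge to force a cycle.
use petgraph::{algo::toposort, graph::DiGraph};

fn main() {
    let mut graph = DiGraph::<&str, ()>::new();
    let job = graph.add_node("job");
    let item = graph.add_node("item");
    graph.add_edge(job, item, ()); // job produces item
    assert!(toposort(&graph, None).is_ok());

    graph.add_edge(item, job, ()); // item also feeds job: a cycle
    assert!(toposort(&graph, None).is_err());
}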
|
||||
#[track_caller]
|
||||
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
|
||||
match self.try_add_jobs(jobs) {
|
||||
Ok(()) => {}
|
||||
Err(e) => panic!("error: {e}"),
|
||||
}
|
||||
}
|
||||
pub fn to_unix_makefile(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
self.to_unix_makefile_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_makefile_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
let mut retval = String::new();
|
||||
let mut needed_variables = BTreeSet::new();
|
||||
let mut phony_targets = BTreeSet::new();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let outputs = job.outputs();
|
||||
if outputs.is_empty() {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
for output in job.outputs() {
|
||||
match output {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
if outputs.len() == 1 {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
retval.push_str("&:");
|
||||
}
|
||||
}
|
||||
for input in job.inputs() {
|
||||
match input {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
retval.push_str("\n\t");
|
||||
job.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| {
|
||||
write_str!(
|
||||
output,
|
||||
"{}",
|
||||
EscapeForUnixMakefile::new(
|
||||
arg,
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
Ok(())
|
||||
})?;
|
||||
retval.push_str("\n\n");
|
||||
}
|
||||
if !phony_targets.is_empty() {
|
||||
retval.push_str("\n.PHONY:");
|
||||
for phony_target in phony_targets {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
phony_target,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
retval.push_str("\n");
|
||||
}
|
||||
if !needed_variables.is_empty() {
|
||||
retval.insert_str(
|
||||
0,
|
||||
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
|
||||
);
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
pub fn to_unix_shell_script(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
self.to_unix_shell_script_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_shell_script_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
let mut retval = String::from(
|
||||
"#!/bin/sh\n\
|
||||
set -ex\n",
|
||||
);
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let Ok(()) = job
|
||||
.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
|
||||
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
|
||||
Ok(())
|
||||
});
|
||||
retval.push_str("\n");
|
||||
}
|
||||
retval
|
||||
}
|
||||
pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> {
|
||||
// use scope to auto-join threads on errors
|
||||
thread::scope(|scope| {
|
||||
struct WaitingJobState {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
|
||||
}
|
||||
let mut ready_jobs = VecDeque::new();
|
||||
let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let waiting_job = WaitingJobState {
|
||||
job_node_id: node_id,
|
||||
job: job.clone(),
|
||||
inputs: job
|
||||
.inputs()
|
||||
.iter()
|
||||
.map(|&name| (name, OnceCell::new()))
|
||||
.collect(),
|
||||
};
|
||||
if waiting_job.inputs.is_empty() {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
} else {
|
||||
let waiting_job = Rc::new(waiting_job);
|
||||
for &input_item in waiting_job.inputs.keys() {
|
||||
item_name_to_waiting_jobs_map
|
||||
.entry(input_item)
|
||||
.or_default()
|
||||
.push(waiting_job.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
struct RunningJob<'scope> {
|
||||
job: DynJob,
|
||||
thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
|
||||
}
|
||||
let mut running_jobs = HashMap::default();
|
||||
let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
|
||||
let mut next_finished_job = None;
|
||||
loop {
|
||||
if let Some(finished_job) = next_finished_job
|
||||
.take()
|
||||
.or_else(|| finished_jobs_receiver.try_recv().ok())
|
||||
{
|
||||
let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
let output_items = thread.join().map_err(panic::resume_unwind)??;
|
||||
assert!(
|
||||
output_items.iter().map(JobItem::name).eq(job.outputs()),
|
||||
"job's run() method returned the wrong output items:\n\
|
||||
output items:\n\
|
||||
{output_items:?}\n\
|
||||
expected outputs:\n\
|
||||
{:?}\n\
|
||||
job:\n\
|
||||
{job:?}",
|
||||
job.outputs(),
|
||||
);
|
||||
for output_item in output_items {
|
||||
for waiting_job in item_name_to_waiting_jobs_map
|
||||
.remove(&output_item.name())
|
||||
.unwrap_or_default()
|
||||
{
|
||||
let Ok(()) =
|
||||
waiting_job.inputs[&output_item.name()].set(output_item.clone())
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
if let Some(waiting_job) = Rc::into_inner(waiting_job) {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Some(WaitingJobState {
|
||||
job_node_id,
|
||||
job,
|
||||
inputs,
|
||||
}) = ready_jobs.pop_front()
|
||||
{
|
||||
struct RunningJobInThread<'a> {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: Vec<JobItem>,
|
||||
params: &'a JobParams,
|
||||
global_params: &'a GlobalParams,
|
||||
acquired_job: AcquiredJob,
|
||||
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl RunningJobInThread<'_> {
|
||||
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
|
||||
self.job.run(
|
||||
&self.inputs,
|
||||
self.params,
|
||||
self.global_params,
|
||||
&mut self.acquired_job,
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Drop for RunningJobInThread<'_> {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.finished_jobs_sender.send(self.job_node_id);
|
||||
}
|
||||
}
|
||||
let name = job.kind().name();
|
||||
let running_job_in_thread = RunningJobInThread {
|
||||
job_node_id,
|
||||
job: job.clone(),
|
||||
inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
|
||||
inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
|
||||
eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
|
||||
})
|
||||
}))?,
|
||||
params,
|
||||
global_params,
|
||||
acquired_job: AcquiredJob::acquire()?,
|
||||
finished_jobs_sender: finished_jobs_sender.clone(),
|
||||
};
|
||||
running_jobs.insert(
|
||||
job_node_id,
|
||||
RunningJob {
|
||||
job,
|
||||
thread: thread::Builder::new()
|
||||
.name(format!("job:{name}"))
|
||||
.spawn_scoped(scope, move || running_job_in_thread.run())
|
||||
.expect("failed to spawn thread for job"),
|
||||
},
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if running_jobs.is_empty() {
|
||||
assert!(item_name_to_waiting_jobs_map.is_empty());
|
||||
assert!(ready_jobs.is_empty());
|
||||
return Ok(());
|
||||
}
|
||||
// nothing to do yet, block to avoid busy waiting
|
||||
next_finished_job = finished_jobs_receiver.recv().ok();
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
|
||||
self.add_jobs(iter);
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
|
||||
let mut retval = Self::new();
|
||||
retval.add_jobs(iter);
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for JobGraph {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
serializer.serialize_element(job)?;
|
||||
}
|
||||
serializer.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for JobGraph {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
|
||||
let mut retval = JobGraph::new();
|
||||
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,313 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{DynJobKind, JobKind, built_in_job_kinds},
|
||||
intern::Interned,
|
||||
util::InternedStrCompareAsStr,
|
||||
};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
fmt,
|
||||
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
impl DynJobKind {
|
||||
pub fn registry() -> JobKindRegistrySnapshot {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn register(self) {
|
||||
JobKindRegistry::register(JobKindRegistry::lock(), self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct JobKindRegistry {
|
||||
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
|
||||
}
|
||||
|
||||
enum JobKindRegisterError {
|
||||
SameName {
|
||||
name: InternedStrCompareAsStr,
|
||||
old_job_kind: DynJobKind,
|
||||
new_job_kind: DynJobKind,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for JobKindRegisterError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::SameName {
|
||||
name,
|
||||
old_job_kind,
|
||||
new_job_kind,
|
||||
} => write!(
|
||||
f,
|
||||
"two different `JobKind` can't share the same name:\n\
|
||||
{name:?}\n\
|
||||
old job kind:\n\
|
||||
{old_job_kind:?}\n\
|
||||
new job kind:\n\
|
||||
{new_job_kind:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait JobKindRegistryRegisterLock {
|
||||
type Locked;
|
||||
fn lock(self) -> Self::Locked;
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
|
||||
type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
|
||||
fn lock(self) -> Self::Locked {
|
||||
self.write().expect("shouldn't be poisoned")
|
||||
}
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
Arc::make_mut(locked)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
|
||||
type Locked = Self;
|
||||
|
||||
fn lock(self) -> Self::Locked {
|
||||
self
|
||||
}
|
||||
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
locked
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistry {
|
||||
fn lock() -> &'static RwLock<Arc<Self>> {
|
||||
static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
|
||||
REGISTRY.get_or_init(Default::default)
|
||||
}
|
||||
    fn try_register<L: JobKindRegistryRegisterLock>(
        lock: L,
        job_kind: DynJobKind,
    ) -> Result<(), JobKindRegisterError> {
        use std::collections::btree_map::Entry;
        let name = InternedStrCompareAsStr(job_kind.name());
        // run user code only outside of lock
        let mut locked = lock.lock();
        let this = L::make_mut(&mut locked);
        let result = match this.job_kinds.entry(name) {
            Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
                name,
                old_job_kind: entry.get().clone(),
                new_job_kind: job_kind,
            }),
            Entry::Vacant(entry) => {
                entry.insert(job_kind);
                Ok(())
            }
        };
        drop(locked);
        // outside of lock now, so we can test if it's the same DynJobKind
        match result {
            Err(JobKindRegisterError::SameName {
                name: _,
                old_job_kind,
                new_job_kind,
            }) if old_job_kind == new_job_kind => Ok(()),
            result => result,
        }
    }
|
||||
#[track_caller]
|
||||
fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
|
||||
match Self::try_register(lock, job_kind) {
|
||||
Err(e) => panic!("{e}"),
|
||||
Ok(()) => {}
|
||||
}
|
||||
}
|
||||
fn get() -> Arc<Self> {
|
||||
Self::lock().read().expect("shouldn't be poisoned").clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for JobKindRegistry {
|
||||
fn default() -> Self {
|
||||
let mut retval = Self {
|
||||
job_kinds: BTreeMap::new(),
|
||||
};
|
||||
for job_kind in built_in_job_kinds() {
|
||||
Self::register(&mut retval, job_kind);
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
|
||||
|
||||
impl JobKindRegistrySnapshot {
|
||||
pub fn get() -> Self {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
|
||||
self.0.job_kinds.get(name)
|
||||
}
|
||||
pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
|
||||
JobKindRegistryIterWithNames(self.0.job_kinds.iter())
|
||||
}
|
||||
pub fn iter(&self) -> JobKindRegistryIter<'_> {
|
||||
JobKindRegistryIter(self.0.job_kinds.values())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIter<'a>(
|
||||
std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIter<'a> {
|
||||
type Item = &'a DynJobKind;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last()
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n)
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0.next_back()
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth_back(n)
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
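JobKindRegistryIter above is a thin newtype over the BTreeMap values iterator that forwards each Iterator method, so the wrapper keeps the inner iterator's exact size and double-ended behavior. A generic, std-only sketch of that delegation pattern follows; `Wrapper` is an illustrative name, not part of this crate.

// Sketch: a newtype iterator that forwards to the wrapped BTreeMap iterator.
struct Wrapper<'a>(std::collections::btree_map::Values<'a, String, u32>);

impl<'a> Iterator for Wrapper<'a> {
    type Item = &'a u32;
    fn next(&mut self) -> Option<Self::Item> { self.0.next() }
    // forwarding size_hint keeps ExactSizeIterator and collect() preallocation accurate
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}

impl<'a> ExactSizeIterator for Wrapper<'a> {}
impl<'a> std::iter::FusedIterator for Wrapper<'a> {}

impl<'a> DoubleEndedIterator for Wrapper<'a> {
    fn next_back(&mut self) -> Option<Self::Item> { self.0.next_back() }
}

fn main() {
    let mut map = std::collections::BTreeMap::new();
    map.insert("a".to_string(), 1u32);
    map.insert("b".to_string(), 2u32);
    let total: u32 = Wrapper(map.values()).copied().sum();
    assert_eq!(total, 3);
}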
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIterWithNames<'a>(
|
||||
std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
|
||||
type Item = (Interned<str>, &'a DynJobKind);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0
|
||||
.next_back()
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0
|
||||
.nth_back(n)
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn register_job_kind<K: JobKind>(kind: K) {
|
||||
DynJobKind::new(kind).register();
|
||||
}
|
||||
|
|
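The deleted registry module above keeps a global `OnceLock<RwLock<Arc<...>>>` and mutates it with `Arc::make_mut`, so readers cheaply clone a snapshot while writers copy-on-write. A condensed, std-only sketch of that locking scheme, with an illustrative `Registry` type standing in for the job-kind map:

use std::collections::BTreeMap;
use std::sync::{Arc, OnceLock, RwLock};

#[derive(Clone, Default, Debug)]
struct Registry {
    entries: BTreeMap<String, String>,
}

fn lock() -> &'static RwLock<Arc<Registry>> {
    static REGISTRY: OnceLock<RwLock<Arc<Registry>>> = OnceLock::new();
    REGISTRY.get_or_init(Default::default)
}

// Writers copy-on-write: existing snapshots keep the old Arc, new readers see the new one.
fn register(name: String, value: String) {
    let mut locked = lock().write().expect("shouldn't be poisoned");
    Arc::make_mut(&mut locked).entries.insert(name, value);
}

// Readers just clone the Arc, so they never block on later writers.
fn snapshot() -> Arc<Registry> {
    lock().read().expect("shouldn't be poisoned").clone()
}

fn main() {
    register("verilog".into(), "generate Verilog".into());
    let snap = snapshot();
    println!("{:?}", snap.entries.get("verilog"));
}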
@ -1,418 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
|
||||
GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem,
|
||||
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
firrtl::{Firrtl, FirrtlJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::{Context, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt, mem,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options][lowering-options]
|
||||
///
|
||||
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
|
||||
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct UnadjustedVerilogArgs {
|
||||
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long)]
|
||||
pub verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl ToArgs for UnadjustedVerilogArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
ref firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *self;
|
||||
for arg in firtool_extra_args {
|
||||
args.write_long_option_eq("firtool-extra-arg", arg);
|
||||
}
|
||||
if let Some(verilog_dialect) = verilog_dialect {
|
||||
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
|
||||
}
|
||||
if verilog_debug {
|
||||
args.write_arg("--verilog-debug");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Firtool;
|
||||
|
||||
impl ExternalProgramTrait for Firtool {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"firtool".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
|
||||
pub struct UnadjustedVerilog {
|
||||
firrtl_file: Interned<Path>,
|
||||
firrtl_file_name: Interned<OsStr>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
unadjusted_verilog_file_name: Interned<OsStr>,
|
||||
firtool_extra_args: Interned<[Interned<OsStr>]>,
|
||||
verilog_dialect: Option<VerilogDialect>,
|
||||
verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl UnadjustedVerilog {
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.firrtl_file
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.firtool_extra_args
|
||||
}
|
||||
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
|
||||
self.verilog_dialect
|
||||
}
|
||||
pub fn verilog_debug(&self) -> bool {
|
||||
self.verilog_debug
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for UnadjustedVerilog {
|
||||
type AdditionalArgs = UnadjustedVerilogArgs;
|
||||
type AdditionalJobData = UnadjustedVerilog;
|
||||
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
|
||||
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
|
||||
type ExternalProgram = Firtool;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let UnadjustedVerilogArgs {
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = args.additional_args;
|
||||
let unadjusted_verilog_file = dependencies
|
||||
.dependencies
|
||||
.job
|
||||
.job
|
||||
.file_with_ext("unadjusted.v");
|
||||
let firrtl_job = dependencies.get_job::<Firrtl, _>();
|
||||
Ok(UnadjustedVerilog {
|
||||
firrtl_file: firrtl_job.firrtl_file(),
|
||||
firrtl_file_name: firrtl_job
|
||||
.firrtl_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
unadjusted_verilog_file,
|
||||
unadjusted_verilog_file_name: unadjusted_verilog_file
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.additional_job_data().firrtl_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
let UnadjustedVerilog {
|
||||
firrtl_file: _,
|
||||
firrtl_file_name,
|
||||
unadjusted_verilog_file: _,
|
||||
unadjusted_verilog_file_name,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *job.additional_job_data();
|
||||
args.write_interned_arg(firrtl_file_name);
|
||||
args.write_arg("-o");
|
||||
args.write_interned_arg(unadjusted_verilog_file_name);
|
||||
if verilog_debug {
|
||||
args.write_args(["-g", "--preserve-values=all"]);
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
args.write_args(dialect.firtool_extra_args().iter().copied());
|
||||
}
|
||||
args.write_interned_args(firtool_extra_args);
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"unadjusted-verilog".intern()
|
||||
}
|
||||
|
||||
fn subcommand_hidden() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run_even_if_cached_arg_name() -> Interned<str> {
|
||||
"firtool-run-even-if-cached".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct VerilogJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogJobArgs {}
|
||||
|
||||
impl ToArgs for VerilogJobArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct VerilogJob {
|
||||
output_dir: Interned<Path>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl VerilogJob {
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
|
||||
match additional_files {
|
||||
JobItem::DynamicPaths {
|
||||
paths,
|
||||
source_job_name,
|
||||
} if *source_job_name == VerilogJobKind.name() => paths,
|
||||
v => panic!("expected VerilogJob's additional files JobItem: {v:?}"),
|
||||
}
|
||||
}
|
||||
pub fn all_verilog_files(
|
||||
main_verilog_file: Interned<Path>,
|
||||
additional_files: &[Interned<Path>],
|
||||
) -> eyre::Result<Interned<[Interned<Path>]>> {
|
||||
let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
|
||||
for verilog_file in [main_verilog_file].iter().chain(additional_files) {
|
||||
if !["v", "sv"]
|
||||
.iter()
|
||||
.any(|extension| verilog_file.extension() == Some(extension.as_ref()))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
|
||||
format!("converting {verilog_file:?} to an absolute path failed")
|
||||
})?;
|
||||
if verilog_file
|
||||
.as_os_str()
|
||||
.as_encoded_bytes()
|
||||
.iter()
|
||||
.any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
|
||||
{
|
||||
bail!("verilog file path contains characters that aren't permitted");
|
||||
}
|
||||
retval.push(verilog_file.intern_deref());
|
||||
}
|
||||
Ok(retval.intern_slice())
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for VerilogJobKind {
|
||||
type Args = VerilogJobArgs;
|
||||
type Job = VerilogJob;
|
||||
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let VerilogJobArgs {} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(VerilogJob {
|
||||
output_dir: base_job.output_dir(),
|
||||
unadjusted_verilog_file: dependencies
|
||||
.job
|
||||
.job
|
||||
.additional_job_data()
|
||||
.unadjusted_verilog_file(),
|
||||
main_verilog_file: base_job.file_with_ext("v"),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.unadjusted_verilog_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.main_verilog_file,
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: self.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"verilog".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        _params: &JobParams,
        _global_params: &GlobalParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
        let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
        let file_separator_suffix = "\" ----- 8< -----\n\n";
        let mut input = &*input;
        let main_verilog_file = job.main_verilog_file();
        let mut file_name = Some(main_verilog_file);
        let mut additional_outputs = Vec::new();
        loop {
            let (chunk, next_file_name) = if let Some((chunk, rest)) =
                input.split_once(file_separator_prefix)
            {
                let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
                    bail!(
                        "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
                    );
                };
                input = rest;
                let next_file_name = job.output_dir.join(next_file_name).intern_deref();
                additional_outputs.push(next_file_name);
                (chunk, Some(next_file_name))
            } else {
                (mem::take(&mut input), None)
            };
            let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
                break;
            };
            std::fs::write(&file_name, chunk)?;
        }
        Ok(vec![
            JobItem::Path {
                path: main_verilog_file,
            },
            JobItem::DynamicPaths {
                paths: additional_outputs,
                source_job_name: self.name(),
            },
        ])
    }
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[
|
||||
DynJobKind::new(ExternalCommandJobKind::<UnadjustedVerilog>::new()),
|
||||
DynJobKind::new(VerilogJobKind),
|
||||
]
|
||||
}
|
||||
|
|
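In the deleted file above, VerilogJobKind::run splits firtool's single output into the main .v file plus additional files by scanning for the `----- 8< ----- FILE "…" ----- 8< -----` separators. Here is a standalone reduction of that splitting loop that works on an in-memory string; the separator constants are copied from the code above, everything else (function name, error type) is illustrative.

use std::mem;

// Splits concatenated firtool-style output into (file name, contents) chunks.
// The chunk before the first separator belongs to `main_name`.
fn split_files(mut input: &str, main_name: &str) -> Result<Vec<(String, String)>, String> {
    let prefix = "\n// ----- 8< ----- FILE \"";
    let suffix = "\" ----- 8< -----\n\n";
    let mut out = Vec::new();
    let mut current = Some(main_name.to_string());
    loop {
        let (chunk, next_name) = if let Some((chunk, rest)) = input.split_once(prefix) {
            let Some((name, rest)) = rest.split_once(suffix) else {
                return Err(format!("found {prefix:?} but no matching {suffix:?}"));
            };
            input = rest;
            (chunk, Some(name.to_string()))
        } else {
            // last chunk: take the remainder and signal that no file name follows
            (mem::take(&mut input), None)
        };
        let Some(name) = mem::replace(&mut current, next_name) else {
            break;
        };
        out.push((name, chunk.to_string()));
    }
    Ok(out)
}

As in the original loop, each chunk is written under the file name announced by the separator that precedes the next chunk, and the iteration stops once the remainder has been consumed.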
@ -2,25 +2,18 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
CastToBits, Expr, ReduceBits, ToExpr,
|
||||
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
||||
},
|
||||
int::{Bool, DynSize},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
expr::{ops::BundleLiteral, Expr, ToExpr},
|
||||
intern::{Intern, Interned},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantWithoutScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref, impl_match_variant_as_self,
|
||||
impl_match_variant_as_self, CanonicalType, MatchVariantWithoutScope, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref,
|
||||
},
|
||||
util::HashMap,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use hashbrown::HashMap;
|
||||
use std::{fmt, marker::PhantomData};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct BundleField {
|
||||
pub name: Interned<str>,
|
||||
pub flipped: bool,
|
||||
|
|
@ -69,7 +62,7 @@ impl fmt::Display for FmtDebugInStruct {
|
|||
struct BundleImpl {
|
||||
fields: Interned<[BundleField]>,
|
||||
name_indexes: HashMap<Interned<str>, usize>,
|
||||
field_offsets: Interned<[OpaqueSimValueSize]>,
|
||||
field_offsets: Interned<[usize]>,
|
||||
type_properties: TypeProperties,
|
||||
}
|
||||
|
||||
|
|
@ -89,9 +82,12 @@ impl std::fmt::Debug for BundleImpl {
|
|||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Bundle ")?;
|
||||
f.debug_set()
|
||||
.entries(self.fields.iter().enumerate().map(|(index, field)| {
|
||||
field.fmt_debug_in_struct(self.field_offsets[index].bit_width)
|
||||
}))
|
||||
.entries(
|
||||
self.fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| field.fmt_debug_in_struct(self.field_offsets[index])),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -116,7 +112,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
})
|
||||
}
|
||||
pub const fn clone(&self) -> Self {
|
||||
|
|
@ -124,12 +119,8 @@ impl BundleTypePropertiesBuilder {
|
|||
}
|
||||
#[must_use]
|
||||
pub const fn field(self, flipped: bool, field_props: TypeProperties) -> Self {
|
||||
let Some(OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = self.0.size().checked_add(field_props.size())
|
||||
else {
|
||||
panic!("bundle is too big: size overflowed");
|
||||
let Some(bit_width) = self.0.bit_width.checked_add(field_props.bit_width) else {
|
||||
panic!("bundle is too big: bit-width overflowed");
|
||||
};
|
||||
if flipped {
|
||||
Self(TypeProperties {
|
||||
|
|
@ -137,7 +128,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: false,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
} else {
|
||||
Self(TypeProperties {
|
||||
|
|
@ -146,7 +136,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_castable_from_bits: self.0.is_castable_from_bits
|
||||
& field_props.is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -155,23 +144,17 @@ impl BundleTypePropertiesBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for BundleTypePropertiesBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Bundle {
|
||||
#[track_caller]
|
||||
pub fn new(fields: Interned<[BundleField]>) -> Self {
|
||||
let mut name_indexes = HashMap::with_capacity_and_hasher(fields.len(), Default::default());
|
||||
let mut name_indexes = HashMap::with_capacity(fields.len());
|
||||
let mut field_offsets = Vec::with_capacity(fields.len());
|
||||
let mut type_props_builder = BundleTypePropertiesBuilder::new();
|
||||
for (index, &BundleField { name, flipped, ty }) in fields.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(name, index) {
|
||||
panic!("duplicate field name {name:?}: at both index {old_index} and {index}");
|
||||
}
|
||||
field_offsets.push(type_props_builder.0.size());
|
||||
field_offsets.push(type_props_builder.0.bit_width);
|
||||
type_props_builder = type_props_builder.field(flipped, ty.type_properties());
|
||||
}
|
||||
Self(Intern::intern_sized(BundleImpl {
|
||||
|
|
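`Bundle::new` above records each field's offset before adding that field's size to the running total, so the offset of field i is the accumulated size of fields 0..i; one side of this hunk tracks a combined size (bit width plus sim-only values) while the other tracks a plain bit count. A tiny illustration of the running-sum bookkeeping with plain integers (the field widths are made up):

// offsets[i] = sum of widths of fields 0..i
fn field_offsets(widths: &[usize]) -> Vec<usize> {
    let mut offsets = Vec::with_capacity(widths.len());
    let mut total = 0usize;
    for &w in widths {
        offsets.push(total);
        total = total
            .checked_add(w)
            .expect("bundle is too big: bit-width overflowed");
    }
    offsets
}

fn main() {
    // e.g. fields of 1, 8, and 32 bits start at offsets 0, 1, and 9
    assert_eq!(field_offsets(&[1, 8, 32]), vec![0, 1, 9]);
}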
@ -187,7 +170,7 @@ impl Bundle {
|
|||
pub fn field_by_name(&self, name: Interned<str>) -> Option<BundleField> {
|
||||
Some(self.0.fields[*self.0.name_indexes.get(&name)?])
|
||||
}
|
||||
pub fn field_offsets(self) -> Interned<[OpaqueSimValueSize]> {
|
||||
pub fn field_offsets(self) -> Interned<[usize]> {
|
||||
self.0.field_offsets
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
|
|
@ -221,7 +204,6 @@ impl Bundle {
|
|||
impl Type for Bundle {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Bundle;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
Self::new(Interned::from_iter(self.0.fields.into_iter().map(
|
||||
|
|
@ -245,28 +227,6 @@ impl Type for Bundle {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait BundleType: Type<BaseType = Bundle> {
|
||||
|
|
@ -275,102 +235,6 @@ pub trait BundleType: Type<BaseType = Bundle> {
|
|||
fn fields(&self) -> Interned<[BundleField]>;
|
||||
}
|
||||
|
||||
pub struct BundleSimValueFromOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
opaque: OpaqueSimValueSlice<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
opaque.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
opaque,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn field_ty_and_opaque<T: Type>(&mut self) -> (T, OpaqueSimValueSlice<'a>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to read too many fields from BundleSimValueFromBits");
|
||||
};
|
||||
let (field_opaque, rest) = self.opaque.split_at(ty.size());
|
||||
self.opaque = rest;
|
||||
(T::from_canonical(ty), field_opaque)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_from_opaque<T: Type>(&mut self) -> SimValue<T> {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
SimValue::from_opaque(field_ty, field_opaque.to_owned())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_clone_from_opaque<T: Type>(&mut self, field_value: &mut SimValue<T>) {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
assert_eq!(field_ty, SimValue::ty(field_value));
|
||||
SimValue::opaque_mut(field_value).clone_from_slice(field_opaque);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BundleSimValueToOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
writer.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field<T: Type>(&mut self, field_value: &SimValue<T>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to write too many fields with BundleSimValueToOpaque");
|
||||
};
|
||||
assert_eq!(T::from_canonical(ty), SimValue::ty(field_value));
|
||||
self.writer.fill_prefix_with(ty.size(), |writer| {
|
||||
writer.fill_cloned_from_slice(SimValue::opaque(field_value).as_slice())
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn finish(mut self) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(
|
||||
self.fields.next(),
|
||||
None,
|
||||
"wrote too few fields with BundleSimValueToOpaque"
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct NoBuilder;
|
||||
|
||||
|
|
@ -453,19 +317,7 @@ macro_rules! impl_tuple_builder_fields {
|
|||
}
|
||||
|
||||
macro_rules! impl_tuples {
|
||||
(
|
||||
[$({
|
||||
#[
|
||||
num = $num:tt,
|
||||
field = $field:ident,
|
||||
ty = $ty_var:ident: $Ty:ident,
|
||||
lhs = $lhs_var:ident: $Lhs:ident,
|
||||
rhs = $rhs_var:ident: $Rhs:ident
|
||||
]
|
||||
$var:ident: $T:ident
|
||||
})*]
|
||||
[]
|
||||
) => {
|
||||
([$({#[num = $num:literal, field = $field:ident] $var:ident: $T:ident})*] []) => {
|
||||
impl_tuple_builder_fields! {
|
||||
{}
|
||||
[$({
|
||||
|
|
@ -477,7 +329,6 @@ macro_rules! impl_tuples {
|
|||
impl<$($T: Type,)*> Type for ($($T,)*) {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ($($T::MaskType,)*);
|
||||
type SimValue = ($(SimValue<$T>,)*);
|
||||
type MatchVariant = ($(Expr<$T>,)*);
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
|
|
@ -491,7 +342,6 @@ macro_rules! impl_tuples {
|
|||
std::iter::once(MatchVariantWithoutScope(($(Expr::field(this, stringify!($num)),)*)))
|
||||
}
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
($($var.mask_type(),)*)
|
||||
}
|
||||
|
|
@ -500,7 +350,6 @@ macro_rules! impl_tuples {
|
|||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
#![allow(clippy::unused_unit)]
|
||||
let CanonicalType::Bundle(bundle) = canonical_type else {
|
||||
panic!("expected bundle");
|
||||
};
|
||||
|
|
@ -509,53 +358,26 @@ macro_rules! impl_tuples {
|
|||
};
|
||||
$(let BundleField { name, flipped, ty } = $var;
|
||||
assert_eq!(&*name, stringify!($num));
|
||||
assert!(!flipped);
|
||||
assert_eq!(flipped, false);
|
||||
let $var = $T::from_canonical(ty);)*
|
||||
($($var,)*)
|
||||
}
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
$(let $var = v.field_from_opaque();)*
|
||||
($($var,)*)
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field_clone_from_opaque($var);)*
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueToOpaque::new(*self, writer);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field($var);)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> BundleType for ($($T,)*) {
|
||||
type Builder = TupleBuilder<($(Unfilled<$T>,)*)>;
|
||||
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let ($($var,)*) = self;
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice()
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
||||
fn expr_deref(this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
let _ = this;
|
||||
Interned::into_inner(($(Expr::field(*this, stringify!($num)),)*).intern_sized())
|
||||
Interned::<_>::into_inner(($(Expr::field(*this, stringify!($num)),)*).intern_sized())
|
||||
}
|
||||
}
|
||||
impl<$($T: StaticType,)*> StaticType for ($($T,)*) {
|
||||
|
|
@ -580,7 +402,7 @@ macro_rules! impl_tuples {
|
|||
$(let $var = $var.to_expr();)*
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
|
||||
|
|
@ -590,107 +412,7 @@ macro_rules! impl_tuples {
|
|||
let ($($var,)*) = self.0;
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::to_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType>
|
||||
{
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::into_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<Bundle> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var.ty);)*
|
||||
ToSimValueWithType::into_sim_value_with_type(($($var,)*), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: Bundle) -> SimValue<Bundle> {
|
||||
#![allow(unused_mut)]
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
let mut opaque = OpaqueSimValue::empty();
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var.ty);
|
||||
assert_eq!(SimValue::ty(&$var), $ty_var.ty);
|
||||
opaque.extend_from_slice(SimValue::opaque(&$var).as_slice());
|
||||
)*
|
||||
SimValue::from_opaque(ty, opaque)
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<$Ty>, $Ty: Type,)*> ToSimValueWithType<($($Ty,)*)> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue,)*> ToSimValue for ($($T,)*) {
|
||||
type Type = ($($T::Type,)*);
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: SimValuePartialEq<$Rhs>, $Rhs: Type,)*> SimValuePartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn sim_value_eq(lhs: &SimValue<Self>, rhs: &SimValue<($($Rhs,)*)>) -> bool {
|
||||
let ($($lhs_var,)*) = &**lhs;
|
||||
let ($($rhs_var,)*) = &**rhs;
|
||||
let retval = true;
|
||||
$(let retval = retval && $lhs_var == $rhs_var;)*
|
||||
retval
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -702,151 +424,17 @@ macro_rules! impl_tuples {
|
|||
|
||||
impl_tuples! {
|
||||
[] [
|
||||
{#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0}
|
||||
{#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1}
|
||||
{#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2}
|
||||
{#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3}
|
||||
{#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4}
|
||||
{#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5}
|
||||
{#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6}
|
||||
{#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7}
|
||||
{#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8}
|
||||
{#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9}
|
||||
{#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10}
|
||||
{#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11}
|
||||
{#[num = 0, field = field_0] v0: T0}
|
||||
{#[num = 1, field = field_1] v1: T1}
|
||||
{#[num = 2, field = field_2] v2: T2}
|
||||
{#[num = 3, field = field_3] v3: T3}
|
||||
{#[num = 4, field = field_4] v4: T4}
|
||||
{#[num = 5, field = field_5] v5: T5}
|
||||
{#[num = 6, field = field_6] v6: T6}
|
||||
{#[num = 7, field = field_7] v7: T7}
|
||||
{#[num = 8, field = field_8] v8: T8}
|
||||
{#[num = 9, field = field_9] v9: T9}
|
||||
{#[num = 10, field = field_10] v10: T10}
|
||||
{#[num = 11, field = field_11] v11: T11}
|
||||
]
|
||||
}
|
||||
|
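The `impl_tuples!` invocation above stamps out the tuple implementations for arities 0 through 12, with each `{#[num = N, field = field_N, …] vN: TN}` entry supplying the per-element names the macro body needs. A minimal, self-contained sketch of the same stamp-out-per-arity technique; the `Describe` trait and all names here are illustrative, not from this crate.

// Sketch: one macro arm generates an impl per listed tuple arity.
trait Describe {
    fn describe() -> String;
}

macro_rules! impl_describe_for_tuples {
    ($( ( $($T:ident),* ) )*) => {
        $(
            impl<$($T: Describe),*> Describe for ($($T,)*) {
                fn describe() -> String {
                    let parts: Vec<String> = vec![$($T::describe()),*];
                    format!("({})", parts.join(", "))
                }
            }
        )*
    };
}

impl Describe for u8 {
    fn describe() -> String { "u8".into() }
}
impl Describe for bool {
    fn describe() -> String { "bool".into() }
}

// one entry per arity, mirroring the long list passed to impl_tuples!
impl_describe_for_tuples! { () (A) (A, B) (A, B, C) }

fn main() {
    assert_eq!(<(u8, bool)>::describe(), "(u8, bool)");
}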
||||
impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomData<T>;
|
||||
type MatchVariant = PhantomData<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
type MatchVariantsIter = std::iter::Once<Self::MatchVariantAndInactiveScope>;
|
||||
fn match_variants(
|
||||
this: Expr<Self>,
|
||||
source_location: SourceLocation,
|
||||
) -> Self::MatchVariantsIter {
|
||||
let _ = this;
|
||||
let _ = source_location;
|
||||
std::iter::once(MatchVariantWithoutScope(PhantomData))
|
||||
}
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
()
|
||||
}
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
Bundle::new(self.fields()).canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let CanonicalType::Bundle(bundle) = canonical_type else {
|
||||
panic!("expected bundle");
|
||||
};
|
||||
assert!(
|
||||
bundle.fields().is_empty(),
|
||||
"bundle has wrong number of fields"
|
||||
);
|
||||
PhantomData
|
||||
}
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
_value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert!(opaque.is_empty());
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
_value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PhantomDataBuilder<T: ?Sized + Send + Sync + 'static>(PhantomData<T>);
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> Default for PhantomDataBuilder<T> {
|
||||
fn default() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomDataBuilder<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
PhantomData.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> BundleType for PhantomData<T> {
|
||||
type Builder = PhantomDataBuilder<T>;
|
||||
type FilledBuilder = PhantomDataBuilder<T>;
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
Interned::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> TypeWithDeref for PhantomData<T> {
|
||||
fn expr_deref(_this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
&PhantomData
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> StaticType for PhantomData<T> {
|
||||
const TYPE: Self = PhantomData;
|
||||
const MASK_TYPE: Self::MaskType = ();
|
||||
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomData<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
BundleLiteral::new(PhantomData, Interned::default()).to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValue for PhantomData<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self> {
|
||||
SimValue::from_value(*self, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValueWithType<Self> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Self) -> SimValue<Self> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<Bundle> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(ty, OpaqueSimValue::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<CanonicalType> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, canonical_ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
let ty = Bundle::from_canonical(canonical_ty);
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(canonical_ty, OpaqueSimValue::empty())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
320
crates/fayalite/src/cli.rs
Normal file
|
|
@ -0,0 +1,320 @@
|
|||
use crate::{
|
||||
bundle::{Bundle, BundleType},
|
||||
firrtl,
|
||||
intern::Interned,
|
||||
module::Module,
|
||||
};
|
||||
use clap::{
|
||||
builder::{OsStringValueParser, TypedValueParser},
|
||||
Args, Parser, Subcommand, ValueEnum, ValueHint,
|
||||
};
|
||||
use eyre::{eyre, Report};
|
||||
use std::{error, ffi::OsString, fmt, io, path::PathBuf, process};
|
||||
|
||||
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
|
||||
|
||||
pub struct CliError(Report);
|
||||
|
||||
impl fmt::Debug for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl error::Error for CliError {}
|
||||
|
||||
impl From<io::Error> for CliError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
CliError(Report::new(value))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait RunPhase<Arg> {
|
||||
type Output;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output>;
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct BaseArgs {
|
||||
/// the directory to put the generated main output file and associated files in
|
||||
#[arg(short, long, value_hint = ValueHint::DirPath)]
|
||||
pub output: PathBuf,
|
||||
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
|
||||
#[arg(long)]
|
||||
pub file_stem: Option<String>,
|
||||
}
|
||||
|
||||
impl BaseArgs {
|
||||
pub fn to_firrtl_file_backend(&self) -> firrtl::FileBackend {
|
||||
firrtl::FileBackend {
|
||||
dir_path: self.output.clone(),
|
||||
top_fir_file_stem: self.file_stem.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub base: BaseArgs,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlOutput {
|
||||
pub file_stem: String,
|
||||
}
|
||||
|
||||
impl FirrtlOutput {
|
||||
pub fn firrtl_file(&self, args: &FirrtlArgs) -> PathBuf {
|
||||
let mut retval = args.base.output.join(&self.file_stem);
|
||||
retval.set_extension("fir");
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl FirrtlArgs {
|
||||
fn run_impl(&self, top_module: Module<Bundle>) -> Result<FirrtlOutput> {
|
||||
let firrtl::FileBackend {
|
||||
top_fir_file_stem, ..
|
||||
} = firrtl::export(self.base.to_firrtl_file_backend(), &top_module)?;
|
||||
Ok(FirrtlOutput {
|
||||
file_stem: top_fir_file_stem.expect(
|
||||
"export is known to set the file stem from the circuit name if not provided",
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run(&self, top_module: Module<T>) -> Result<Self::Output> {
|
||||
self.run_impl(top_module.canonical())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run(&self, top_module: Interned<Module<T>>) -> Result<Self::Output> {
|
||||
self.run(*top_module)
|
||||
}
|
||||
}
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options
|
||||
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogArgs {
|
||||
#[command(flatten)]
|
||||
pub firrtl: FirrtlArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "firtool",
|
||||
env = "FIRTOOL",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub firtool: PathBuf,
|
||||
#[arg(long)]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogOutput {
|
||||
pub firrtl: FirrtlOutput,
|
||||
}
|
||||
|
||||
impl VerilogOutput {
|
||||
pub fn verilog_file(&self, args: &VerilogArgs) -> PathBuf {
|
||||
let mut retval = args.firrtl.base.output.join(&self.firrtl.file_stem);
|
||||
retval.set_extension("v");
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogArgs {
|
||||
fn run_impl(&self, firrtl_output: FirrtlOutput) -> Result<VerilogOutput> {
|
||||
let output = VerilogOutput {
|
||||
firrtl: firrtl_output,
|
||||
};
|
||||
let mut cmd = process::Command::new(&self.firtool);
|
||||
cmd.arg(output.firrtl.firrtl_file(&self.firrtl));
|
||||
cmd.arg("-o");
|
||||
cmd.arg(output.verilog_file(self));
|
||||
if let Some(dialect) = self.verilog_dialect {
|
||||
cmd.args(dialect.firtool_extra_args());
|
||||
}
|
||||
cmd.args(&self.firtool_extra_args);
|
||||
cmd.current_dir(&self.firrtl.base.output);
|
||||
let status = cmd.status()?;
|
||||
if status.success() {
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.firtool.display()
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Arg> RunPhase<Arg> for VerilogArgs
|
||||
where
|
||||
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = VerilogOutput;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
let firrtl_output = self.firrtl.run(arg)?;
|
||||
self.run_impl(firrtl_output)
|
||||
}
|
||||
}
|
||||
|
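`RunPhase` lets a later phase reuse an earlier phase's run for any argument type it accepts: the blanket bound `FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>` means Verilog generation works for every input FIRRTL generation works for. A stripped-down sketch of that chaining pattern; `EmitIr` and `EmitNetlist` are illustrative stand-ins for the real phase types.

// Sketch: phase B runs for any input phase A can handle, then post-processes A's output.
trait RunPhase<Arg> {
    type Output;
    fn run(&self, arg: Arg) -> Result<Self::Output, String>;
}

struct EmitIr;                 // stands in for FirrtlArgs
struct EmitNetlist(EmitIr);    // stands in for VerilogArgs, which embeds the earlier phase

impl RunPhase<&str> for EmitIr {
    type Output = String;
    fn run(&self, design: &str) -> Result<String, String> {
        Ok(format!("ir({design})"))
    }
}

impl<Arg> RunPhase<Arg> for EmitNetlist
where
    EmitIr: RunPhase<Arg, Output = String>,
{
    type Output = String;
    fn run(&self, arg: Arg) -> Result<String, String> {
        let ir = self.0.run(arg)?;       // run the earlier phase first
        Ok(format!("netlist({ir})"))     // then do this phase's own work
    }
}

fn main() {
    assert_eq!(EmitNetlist(EmitIr).run("counter").unwrap(), "netlist(ir(counter))");
}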
||||
#[derive(Subcommand, Debug)]
|
||||
enum CliCommand {
|
||||
/// Generate FIRRTL
|
||||
Firrtl(FirrtlArgs),
|
||||
/// Generate Verilog
|
||||
Verilog(VerilogArgs),
|
||||
}
|
||||
|
||||
/// a simple CLI
|
||||
///
|
||||
/// Use like:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// cli::Cli::parse().run(my_module())
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// You can also use it with a larger [`clap`]-based CLI like so:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use clap::{Subcommand, Parser};
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// #[derive(Subcommand)]
|
||||
/// pub enum Cmd {
|
||||
/// #[command(flatten)]
|
||||
/// Fayalite(cli::Cli),
|
||||
/// MySpecialCommand {
|
||||
/// #[arg(long)]
|
||||
/// foo: bool,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Parser)]
|
||||
/// pub struct Cli {
|
||||
/// #[command(subcommand)]
|
||||
/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
|
||||
/// }
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// match Cli::parse().cmd {
|
||||
/// Cmd::Fayalite(v) => v.run(my_module())?,
|
||||
/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
|
||||
/// }
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Parser, Debug)]
|
||||
// clear things that would be crate-specific
|
||||
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
subcommand: CliCommand,
|
||||
}
|
||||
|
||||
impl clap::Subcommand for Cli {
|
||||
fn augment_subcommands(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands(cmd)
|
||||
}
|
||||
|
||||
fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands_for_update(cmd)
|
||||
}
|
||||
|
||||
fn has_subcommand(name: &str) -> bool {
|
||||
CliCommand::has_subcommand(name)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> RunPhase<T> for Cli
|
||||
where
|
||||
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = ();
|
||||
fn run(&self, arg: T) -> Result<Self::Output> {
|
||||
match &self.subcommand {
|
||||
CliCommand::Firrtl(c) => {
|
||||
c.run(arg)?;
|
||||
}
|
||||
CliCommand::Verilog(c) => {
|
||||
c.run(arg)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
/// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
|
||||
pub fn parse() -> Self {
|
||||
clap::Parser::parse()
|
||||
}
|
||||
/// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
|
||||
pub fn run<T>(&self, top_module: T) -> Result<()>
|
||||
where
|
||||
Self: RunPhase<T, Output = ()>,
|
||||
{
|
||||
RunPhase::run(self, top_module)
|
||||
}
|
||||
}
|
||||
|
|
@ -4,14 +4,10 @@ use crate::{
|
|||
expr::{Expr, ToExpr},
|
||||
hdl,
|
||||
int::Bool,
|
||||
reset::{Reset, ResetType},
|
||||
reset::Reset,
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use bitvec::{bits, order::Lsb0};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct Clock;
|
||||
|
|
@ -19,7 +15,6 @@ pub struct Clock;
|
|||
impl Type for Clock {
|
||||
type BaseType = Clock;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@ -41,31 +36,6 @@ impl Type for Clock {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl Clock {
|
||||
|
|
@ -85,7 +55,6 @@ impl StaticType for Clock {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
|
@@ -119,9 +88,9 @@ impl ToClock for Expr<Clock> {
}

#[hdl]
pub struct ClockDomain<R: ResetType = Reset> {
pub struct ClockDomain {
    pub clk: Clock,
    pub rst: R,
    pub rst: Reset,
}

impl ToClock for bool {

@@ -2,31 +2,21 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, VariantAccess},
|
||||
},
|
||||
expr::{ops::VariantAccess, Expr, ToExpr},
|
||||
hdl,
|
||||
int::{Bool, UIntValue},
|
||||
int::Bool,
|
||||
intern::{Intern, Interned},
|
||||
module::{
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, connect,
|
||||
enum_match_variants_helper, incomplete_wire, wire,
|
||||
enum_match_variants_helper, EnumMatchVariantAndInactiveScopeImpl,
|
||||
EnumMatchVariantsIterImpl, Scope,
|
||||
},
|
||||
sim::value::{SimValue, SimValuePartialEq},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantAndInactiveScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties,
|
||||
},
|
||||
util::HashMap,
|
||||
ty::{CanonicalType, MatchVariantAndInactiveScope, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use bitvec::{order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator, sync::Arc};
|
||||
use hashbrown::HashMap;
|
||||
use std::{fmt, iter::FusedIterator};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct EnumVariant {
|
||||
pub name: Interned<str>,
|
||||
pub ty: Option<CanonicalType>,
|
||||
|
|
@ -121,7 +111,6 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
},
|
||||
variant_count: 0,
|
||||
}
|
||||
|
|
@ -140,14 +129,9 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = field_props
|
||||
{
|
||||
assert!(is_passive, "variant type must be a passive type");
|
||||
assert!(
|
||||
sim_only_values_len == 0,
|
||||
"can't have `SimOnlyValue`s in an Enum"
|
||||
);
|
||||
type_properties = TypeProperties {
|
||||
is_passive: true,
|
||||
is_storable: type_properties.is_storable & is_storable,
|
||||
|
|
@ -158,7 +142,6 @@ impl EnumTypePropertiesBuilder {
|
|||
} else {
|
||||
type_properties.bit_width
|
||||
},
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
}
|
||||
Self {
|
||||
|
|
@ -166,12 +149,6 @@ impl EnumTypePropertiesBuilder {
|
|||
variant_count: variant_count + 1,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn variants(self, variants: impl IntoIterator<Item = EnumVariant>) -> Self {
|
||||
variants.into_iter().fold(self, |this, variant| {
|
||||
this.variant(variant.ty.map(CanonicalType::type_properties))
|
||||
})
|
||||
}
|
||||
pub const fn finish(self) -> TypeProperties {
|
||||
assert!(
|
||||
self.variant_count != 0,
|
||||
|
|
@ -192,17 +169,10 @@ impl EnumTypePropertiesBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for EnumTypePropertiesBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Enum {
|
||||
#[track_caller]
|
||||
pub fn new(variants: Interned<[EnumVariant]>) -> Self {
|
||||
let mut name_indexes =
|
||||
HashMap::with_capacity_and_hasher(variants.len(), Default::default());
|
||||
let mut name_indexes = HashMap::with_capacity(variants.len());
|
||||
let mut type_props_builder = EnumTypePropertiesBuilder::new();
|
||||
for (index, EnumVariant { name, ty }) in variants.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(*name, index) {
|
||||
|
|
@ -262,14 +232,13 @@ impl Enum {
|
|||
|
||||
pub trait EnumType:
|
||||
Type<
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
{
|
||||
type SimBuilder: From<Self>;
|
||||
fn variants(&self) -> Interned<[EnumVariant]>;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
|
|
@ -332,18 +301,7 @@ impl<T: EnumType> DoubleEndedIterator for EnumMatchVariantsIter<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct NoBuilder {
|
||||
_ty: Enum,
|
||||
}
|
||||
|
||||
impl From<Enum> for NoBuilder {
|
||||
fn from(_ty: Enum) -> Self {
|
||||
Self { _ty }
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumType for Enum {
|
||||
type SimBuilder = NoBuilder;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
) -> (Self::MatchVariant, Self::MatchActiveScope) {
|
||||
|
|
@ -358,7 +316,6 @@ impl EnumType for Enum {
|
|||
impl Type for Enum {
|
||||
type BaseType = Enum;
|
||||
type MaskType = Bool;
|
||||
type SimValue = OpaqueSimValue;
|
||||
type MatchVariant = Option<Expr<CanonicalType>>;
|
||||
type MatchActiveScope = Scope;
|
||||
type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@ -389,341 +346,6 @@ impl Type for Enum {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct EnumPaddingSimValue {
|
||||
bits: Option<UIntValue>,
|
||||
}
|
||||
|
||||
impl EnumPaddingSimValue {
|
||||
pub const fn new() -> Self {
|
||||
Self { bits: None }
|
||||
}
|
||||
pub fn bit_width(&self) -> Option<usize> {
|
||||
self.bits.as_ref().map(UIntValue::width)
|
||||
}
|
||||
pub fn bits(&self) -> &Option<UIntValue> {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut Option<UIntValue> {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> Option<UIntValue> {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: Option<UIntValue>) -> Self {
|
||||
Self { bits }
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self {
|
||||
bits: Some(UIntValue::new(Arc::new(v.to_bitvec()))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct UnknownVariantSimValue {
|
||||
discriminant: usize,
|
||||
body_bits: UIntValue,
|
||||
}
|
||||
|
||||
impl UnknownVariantSimValue {
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
pub fn body_bits(&self) -> &UIntValue {
|
||||
&self.body_bits
|
||||
}
|
||||
pub fn body_bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.body_bits
|
||||
}
|
||||
pub fn into_body_bits(self) -> UIntValue {
|
||||
self.body_bits
|
||||
}
|
||||
pub fn into_parts(self) -> (usize, UIntValue) {
|
||||
(self.discriminant, self.body_bits)
|
||||
}
|
||||
pub fn new(discriminant: usize, body_bits: UIntValue) -> Self {
|
||||
Self {
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EnumSimValueFromOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
discriminant: usize,
|
||||
body_bits: &'a BitSlice,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert!(size.only_bit_width().is_some());
|
||||
assert_eq!(size, opaque.size());
|
||||
let (discriminant_bits, body_bits) = opaque
|
||||
.bits()
|
||||
.split_at(discriminant_bit_width_impl(variants.len()));
|
||||
let mut discriminant = 0usize;
|
||||
discriminant.view_bits_mut::<Lsb0>()[..discriminant_bits.len()]
|
||||
.copy_from_bitslice(discriminant_bits);
|
||||
Self {
|
||||
variants,
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
#[track_caller]
|
||||
#[cold]
|
||||
fn usage_error(&self, clone: bool) -> ! {
|
||||
let clone = if clone { "clone_" } else { "" };
|
||||
match self.variants.get(self.discriminant) {
|
||||
None => {
|
||||
panic!("should have called EnumSimValueFromBits::unknown_variant_{clone}from_bits");
|
||||
}
|
||||
Some(EnumVariant { ty: None, .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_no_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
Some(EnumVariant { ty: Some(_), .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_with_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(&self, clone: bool) -> (Option<CanonicalType>, &'a BitSlice, &'a BitSlice) {
|
||||
let Some(EnumVariant { ty, .. }) = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(clone);
|
||||
};
|
||||
let variant_bit_width = ty.map_or(0, CanonicalType::bit_width);
|
||||
let (variant_bits, padding_bits) = self.body_bits.split_at(variant_bit_width);
|
||||
(*ty, variant_bits, padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_from_opaque(self) -> UnknownVariantSimValue {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
UnknownVariantSimValue::new(
|
||||
self.discriminant,
|
||||
UIntValue::new(Arc::new(self.body_bits.to_bitvec())),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_clone_from_opaque(self, value: &mut UnknownVariantSimValue) {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
value.discriminant = self.discriminant;
|
||||
assert_eq!(value.body_bits.width(), self.body_bits.len());
|
||||
value
|
||||
.body_bits
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(self.body_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_from_opaque(self) -> EnumPaddingSimValue {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_from_opaque<T: Type>(self) -> (SimValue<T>, EnumPaddingSimValue) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
(
|
||||
SimValue::from_bitslice(T::from_canonical(variant_ty), variant_bits),
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
fn clone_padding_from_bits(padding: &mut EnumPaddingSimValue, padding_bits: &BitSlice) {
|
||||
match padding.bits_mut() {
|
||||
None => *padding = EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
Some(padding) => {
|
||||
assert_eq!(padding.width(), padding_bits.len());
|
||||
padding.bits_mut().copy_from_bitslice(padding_bits);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_clone_from_opaque(self, padding: &mut EnumPaddingSimValue) {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_clone_from_opaque<T: Type>(
|
||||
self,
|
||||
value: &mut SimValue<T>,
|
||||
padding: &mut EnumPaddingSimValue,
|
||||
) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
SimValue::bits_mut(value)
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(variant_bits);
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EnumSimValueToOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
bit_width: usize,
|
||||
discriminant_bit_width: usize,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert_eq!(size, writer.size());
|
||||
Self {
|
||||
variants,
|
||||
bit_width: size
|
||||
.only_bit_width()
|
||||
.expect("enums should only contain bits"),
|
||||
discriminant_bit_width: discriminant_bit_width_impl(variants.len()),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn write_discriminant(&mut self, mut discriminant: usize) {
|
||||
let orig_discriminant = discriminant;
|
||||
let discriminant_bits =
|
||||
&mut discriminant.view_bits_mut::<Lsb0>()[..self.discriminant_bit_width];
|
||||
self.writer.fill_prefix_with(
|
||||
OpaqueSimValueSize::from_bit_width(self.discriminant_bit_width),
|
||||
|writer| {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(discriminant_bits))
|
||||
},
|
||||
);
|
||||
discriminant_bits.fill(false);
|
||||
assert!(
|
||||
discriminant == 0,
|
||||
"{orig_discriminant:#x} is too big to fit in enum discriminant bits",
|
||||
);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_to_opaque(
|
||||
mut self,
|
||||
value: &UnknownVariantSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(value.discriminant);
|
||||
let None = self.variants.get(value.discriminant) else {
|
||||
panic!("can't use UnknownVariantSimValue to set known discriminant");
|
||||
};
|
||||
assert_eq!(
|
||||
self.bit_width - self.discriminant_bit_width,
|
||||
value.body_bits.width()
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.body_bits.bits()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(
|
||||
mut self,
|
||||
discriminant: usize,
|
||||
value: Option<&OpaqueSimValue>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(discriminant);
|
||||
let variant_ty = self.variants[discriminant].ty;
|
||||
let variant_size = variant_ty.map_or(OpaqueSimValueSize::empty(), CanonicalType::size);
|
||||
if let Some(value) = value {
|
||||
if variant_ty.is_none() {
|
||||
panic!("expected variant to have no field");
|
||||
}
|
||||
self.writer.fill_prefix_with(variant_size, |writer| {
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
});
|
||||
} else if variant_ty.is_some() {
|
||||
panic!("expected variant to have a field");
|
||||
}
|
||||
if let Some(padding) = padding.bits() {
|
||||
assert_eq!(padding.ty().type_properties().size(), self.writer.size());
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(padding.bits()))
|
||||
} else {
|
||||
self.writer.fill_with_zeros()
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_to_opaque(
|
||||
self,
|
||||
discriminant: usize,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.known_variant(discriminant, None, padding)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_to_opaque<T: Type>(
|
||||
self,
|
||||
discriminant: usize,
|
||||
value: &SimValue<T>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
let Some(variant_ty) = self.variants[discriminant].ty else {
|
||||
panic!("expected variant to have no field");
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
self.known_variant(discriminant, Some(SimValue::opaque(value)), padding)
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn assert_is_enum_type<T: EnumType>(v: T) -> T {
|
||||
v
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn enum_type_to_sim_builder<T: EnumType>(v: T) -> T::SimBuilder {
|
||||
v.into()
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
|
|
@ -732,79 +354,6 @@ pub enum HdlOption<T: Type> {
|
|||
HdlSome(T),
|
||||
}
|
||||
|
||||
impl<Lhs: Type + ExprPartialEq<Rhs>, Rhs: Type> ExprPartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
#[hdl]
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_eq = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_eq, ExprPartialEq::cmp_eq(lhs, rhs)),
|
||||
HdlNone => connect(cmp_eq, false),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_eq, false),
|
||||
HdlNone => connect(cmp_eq, true),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_eq
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_ne = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)),
|
||||
HdlNone => connect(cmp_ne, true),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_ne, true),
|
||||
HdlNone => connect(cmp_ne, false),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_ne
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: SimValuePartialEq<Rhs>, Rhs: Type> SimValuePartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<HdlOption<Rhs>>) -> bool {
|
||||
type SimValueMatch<T> = <T as Type>::SimValue;
|
||||
match (&**this, &**other) {
|
||||
(SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_)) => {
|
||||
true
|
||||
}
|
||||
(SimValueMatch::<Self>::HdlSome(..), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_))
|
||||
| (SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlSome(..)) => {
|
||||
false
|
||||
}
|
||||
(
|
||||
SimValueMatch::<Self>::HdlSome(l, _),
|
||||
SimValueMatch::<HdlOption<Rhs>>::HdlSome(r, _),
|
||||
) => l == r,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn HdlNone<T: StaticType>() -> Expr<HdlOption<T>> {
|
||||
HdlOption[T::TYPE].HdlNone()
|
||||
|
|
@ -815,307 +364,3 @@ pub fn HdlSome<T: Type>(value: impl ToExpr<Type = T>) -> Expr<HdlOption<T>> {
|
|||
let value = value.to_expr();
|
||||
HdlOption[Expr::ty(value)].HdlSome(value)
|
||||
}
|
||||
|
||||
impl<T: Type> HdlOption<T> {
|
||||
#[track_caller]
|
||||
pub fn try_map<R: Type, E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<R>, E>,
|
||||
) -> Result<Expr<HdlOption<R>>, E> {
|
||||
Self::try_and_then(expr, |v| Ok(HdlSome(f(v)?)))
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn map<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<HdlOption<R>> {
|
||||
Self::and_then(expr, |v| HdlSome(f(v)))
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_and_then<R: Type, E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<HdlOption<R>>, E>,
|
||||
) -> Result<Expr<HdlOption<R>>, E> {
|
||||
// manually run match steps so we can extract the return type to construct HdlNone
|
||||
type Wrap<T> = T;
|
||||
#[hdl]
|
||||
let mut and_then_out = incomplete_wire();
|
||||
let mut iter = Self::match_variants(expr, SourceLocation::caller());
|
||||
let none = iter.next().unwrap();
|
||||
let some = iter.next().unwrap();
|
||||
assert!(iter.next().is_none());
|
||||
let (Wrap::<<Self as Type>::MatchVariant>::HdlSome(value), some_scope) =
|
||||
Self::match_activate_scope(some)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
let value = f(value).inspect_err(|_| {
|
||||
and_then_out.complete(()); // avoid error
|
||||
})?;
|
||||
let and_then_out = and_then_out.complete(Expr::ty(value));
|
||||
connect(and_then_out, value);
|
||||
drop(some_scope);
|
||||
let (Wrap::<<Self as Type>::MatchVariant>::HdlNone, none_scope) =
|
||||
Self::match_activate_scope(none)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
connect(and_then_out, Expr::ty(and_then_out).HdlNone());
|
||||
drop(none_scope);
|
||||
Ok(and_then_out)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn and_then<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<HdlOption<R>>,
|
||||
) -> Expr<HdlOption<R>> {
|
||||
match Self::try_and_then(expr, |v| Ok::<_, Infallible>(f(v))) {
|
||||
Ok(v) => v,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn and<U: Type>(expr: Expr<Self>, opt_b: Expr<HdlOption<U>>) -> Expr<HdlOption<U>> {
|
||||
#[hdl]
|
||||
let and_out = wire(Expr::ty(opt_b));
|
||||
connect(and_out, Expr::ty(opt_b).HdlNone());
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(and_out, opt_b);
|
||||
}
|
||||
and_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_filter<E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<Bool>, E>,
|
||||
) -> Result<Expr<Self>, E> {
|
||||
#[hdl]
|
||||
let filtered = wire(Expr::ty(expr));
|
||||
connect(filtered, Expr::ty(expr).HdlNone());
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
#[hdl]
|
||||
if f.take().unwrap()(v)? {
|
||||
connect(filtered, HdlSome(v));
|
||||
}
|
||||
}
|
||||
Ok(filtered)
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn filter(expr: Expr<Self>, f: impl FnOnce(Expr<T>) -> Expr<Bool>) -> Expr<Self> {
|
||||
match Self::try_filter(expr, |v| Ok::<_, Infallible>(f(v))) {
|
||||
Ok(v) => v,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_inspect<E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<(), E>,
|
||||
) -> Result<Expr<Self>, E> {
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
f.take().unwrap()(v)?;
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn inspect(expr: Expr<Self>, f: impl FnOnce(Expr<T>)) -> Expr<Self> {
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
f.take().unwrap()(v);
|
||||
}
|
||||
expr
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn is_none(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let is_none_out: Bool = wire();
|
||||
connect(is_none_out, false);
|
||||
#[hdl]
|
||||
if let HdlNone = expr {
|
||||
connect(is_none_out, true);
|
||||
}
|
||||
is_none_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn is_some(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let is_some_out: Bool = wire();
|
||||
connect(is_some_out, false);
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(is_some_out, true);
|
||||
}
|
||||
is_some_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn map_or<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
default: Expr<R>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<R> {
|
||||
#[hdl]
|
||||
let mapped = wire(Expr::ty(default));
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => connect(mapped, f.take().unwrap()(v)),
|
||||
HdlNone => connect(mapped, default),
|
||||
}
|
||||
mapped
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn map_or_else<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
default: impl FnOnce() -> Expr<R>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<R> {
|
||||
#[hdl]
|
||||
let mut mapped = incomplete_wire();
|
||||
let mut default = Some(default);
|
||||
let mut f = Some(f);
|
||||
let mut retval = None;
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => {
|
||||
let v = f.take().unwrap()(v);
|
||||
let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v)));
|
||||
connect(mapped, v);
|
||||
}
|
||||
HdlNone => {
|
||||
let v = default.take().unwrap()();
|
||||
let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v)));
|
||||
connect(mapped, v);
|
||||
}
|
||||
}
|
||||
retval.unwrap()
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn or(expr: Expr<Self>, opt_b: Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let or_out = wire(Expr::ty(expr));
|
||||
connect(or_out, opt_b);
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(or_out, expr);
|
||||
}
|
||||
or_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn or_else(expr: Expr<Self>, f: impl FnOnce() -> Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let or_else_out = wire(Expr::ty(expr));
|
||||
connect(or_else_out, f());
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(or_else_out, expr);
|
||||
}
|
||||
or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unwrap_or(expr: Expr<Self>, default: Expr<T>) -> Expr<T> {
|
||||
#[hdl]
|
||||
let unwrap_or_else_out = wire(Expr::ty(default));
|
||||
connect(unwrap_or_else_out, default);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unwrap_or_else_out, v);
|
||||
}
|
||||
unwrap_or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unwrap_or_else(expr: Expr<Self>, f: impl FnOnce() -> Expr<T>) -> Expr<T> {
|
||||
#[hdl]
|
||||
let unwrap_or_else_out = wire(Expr::ty(expr).HdlSome);
|
||||
connect(unwrap_or_else_out, f());
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unwrap_or_else_out, v);
|
||||
}
|
||||
unwrap_or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn xor(expr: Expr<Self>, opt_b: Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let xor_out = wire(Expr::ty(expr));
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
#[hdl]
|
||||
if let HdlNone = opt_b {
|
||||
connect(xor_out, expr);
|
||||
} else {
|
||||
connect(xor_out, Expr::ty(expr).HdlNone());
|
||||
}
|
||||
} else {
|
||||
connect(xor_out, opt_b);
|
||||
}
|
||||
xor_out
|
||||
}
|
||||
    #[hdl]
    #[track_caller]
    pub fn zip<U: Type>(expr: Expr<Self>, other: Expr<HdlOption<U>>) -> Expr<HdlOption<(T, U)>> {
        #[hdl]
        let zip_out = wire(HdlOption[(Expr::ty(expr).HdlSome, Expr::ty(other).HdlSome)]);
        connect(zip_out, Expr::ty(zip_out).HdlNone());
        #[hdl]
        if let HdlSome(l) = expr {
            #[hdl]
            if let HdlSome(r) = other {
                connect(zip_out, HdlSome((l, r)));
            }
        }
        zip_out
    }
}
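For orientation only, a brief usage sketch of the combinators above; it is not part of the diff, and the bindings `a` and `b` are hypothetical HdlOption-typed expressions inside an #[hdl] module body:

// Hypothetical snippet, assuming `a: Expr<HdlOption<UInt<8>>>` and
// `b: Expr<HdlOption<Bool>>` are already in scope:
let both = HdlOption::zip(a, b);            // Expr<HdlOption<(UInt<8>, Bool)>>
let first = HdlOption::map(both, |t| t.0);  // keep the first element when both are HdlSome
let valid = HdlOption::is_some(first);      // Expr<Bool>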
|
||||
|
||||
impl<T: Type> HdlOption<HdlOption<T>> {
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn flatten(expr: Expr<Self>) -> Expr<HdlOption<T>> {
|
||||
#[hdl]
|
||||
let flattened = wire(Expr::ty(expr).HdlSome);
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => connect(flattened, v),
|
||||
HdlNone => connect(flattened, Expr::ty(expr).HdlSome.HdlNone()),
|
||||
}
|
||||
flattened
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, U: Type> HdlOption<(T, U)> {
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unzip(expr: Expr<Self>) -> Expr<(HdlOption<T>, HdlOption<U>)> {
|
||||
let (t, u) = Expr::ty(expr).HdlSome;
|
||||
#[hdl]
|
||||
let unzipped = wire((HdlOption[t], HdlOption[u]));
|
||||
connect(unzipped, (HdlOption[t].HdlNone(), HdlOption[u].HdlNone()));
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unzipped.0, HdlSome(v.0));
|
||||
connect(unzipped.1, HdlSome(v.1));
|
||||
}
|
||||
unzipped
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,16 +9,14 @@ use crate::{
|
|||
ops::ExprCastTo,
|
||||
target::{GetTarget, Target},
|
||||
},
|
||||
int::{Bool, DynSize, IntType, SIntType, SIntValue, Size, SizeType, UInt, UIntType, UIntValue},
|
||||
int::{Bool, DynSize, IntType, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
|
||||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, MemPort, PortType},
|
||||
module::{
|
||||
Instance, ModuleIO,
|
||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
Instance, ModuleIO,
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
|
||||
ty::{CanonicalType, StaticType, Type, TypeWithDeref},
|
||||
wire::Wire,
|
||||
};
|
||||
|
|
@ -110,11 +108,9 @@ expr_enum! {
|
|||
UIntLiteral(Interned<UIntValue>),
|
||||
SIntLiteral(Interned<SIntValue>),
|
||||
BoolLiteral(bool),
|
||||
PhantomConst(PhantomConst),
|
||||
BundleLiteral(ops::BundleLiteral),
|
||||
ArrayLiteral(ops::ArrayLiteral<CanonicalType, DynSize>),
|
||||
EnumLiteral(ops::EnumLiteral),
|
||||
Uninit(ops::Uninit),
|
||||
NotU(ops::NotU),
|
||||
NotS(ops::NotS),
|
||||
NotB(ops::NotB),
|
||||
|
|
@ -212,9 +208,7 @@ expr_enum! {
|
|||
ModuleIO(ModuleIO<CanonicalType>),
|
||||
Instance(Instance<Bundle>),
|
||||
Wire(Wire<CanonicalType>),
|
||||
Reg(Reg<CanonicalType, Reset>),
|
||||
RegSync(Reg<CanonicalType, SyncReset>),
|
||||
RegAsync(Reg<CanonicalType, AsyncReset>),
|
||||
Reg(Reg<CanonicalType>),
|
||||
MemPort(MemPort<DynPortType>),
|
||||
}
|
||||
}
|
||||
|
|
@ -274,20 +268,6 @@ pub struct Expr<T: Type> {
|
|||
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Expr<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
let Self {
|
||||
__enum,
|
||||
__ty,
|
||||
__flow,
|
||||
} = self;
|
||||
let expr_ty = __ty.canonical();
|
||||
let enum_ty = __enum.to_expr().__ty;
|
||||
assert_eq!(
|
||||
expr_ty, enum_ty,
|
||||
"expr ty mismatch:\nExpr {{\n__enum: {__enum:?},\n__ty: {__ty:?},\n__flow: {__flow:?}\n}}"
|
||||
);
|
||||
}
|
||||
self.__enum.fmt(f)
|
||||
}
|
||||
}
|
||||
|
|
@ -532,7 +512,11 @@ impl Flow {
|
|||
}
|
||||
}
|
||||
pub const fn flip_if(self, flipped: bool) -> Flow {
|
||||
if flipped { self.flip() } else { self }
|
||||
if flipped {
|
||||
self.flip()
|
||||
} else {
|
||||
self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -608,42 +592,25 @@ impl<T: Type> GetTarget for Wire<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type, R: ResetType> ToExpr for Reg<T, R> {
|
||||
impl<T: Type> ToExpr for Reg<T> {
|
||||
type Type = T;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
struct Dispatch;
|
||||
impl ResetTypeDispatch for Dispatch {
|
||||
type Input<T: ResetType> = Reg<CanonicalType, T>;
|
||||
type Output<T: ResetType> = ExprEnum;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
ExprEnum::Reg(input)
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
ExprEnum::RegSync(input)
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
ExprEnum::RegAsync(input)
|
||||
}
|
||||
}
|
||||
Expr {
|
||||
__enum: R::dispatch(self.canonical(), Dispatch).intern_sized(),
|
||||
__enum: ExprEnum::Reg(self.canonical()).intern_sized(),
|
||||
__ty: self.ty(),
|
||||
__flow: self.flow(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, R: ResetType> ToLiteralBits for Reg<T, R> {
|
||||
impl<T: Type> ToLiteralBits for Reg<T> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Err(NotALiteralExpr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, R: ResetType> GetTarget for Reg<T, R> {
|
||||
impl<T: Type> GetTarget for Reg<T> {
|
||||
fn target(&self) -> Option<Interned<Target>> {
|
||||
Some(Intern::intern_sized(self.canonical().into()))
|
||||
}
|
||||
|
|
@ -673,18 +640,6 @@ impl<T: PortType> GetTarget for MemPort<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub trait HdlPartialEq<Rhs> {
|
||||
fn cmp_eq(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_ne(self, rhs: Rhs) -> Expr<Bool>;
|
||||
}
|
||||
|
||||
pub trait HdlPartialOrd<Rhs>: HdlPartialEq<Rhs> {
|
||||
fn cmp_lt(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_le(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_gt(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_ge(self, rhs: Rhs) -> Expr<Bool>;
|
||||
}
|
||||
|
||||
pub trait ReduceBits {
|
||||
type UIntOutput;
|
||||
type BoolOutput;
|
||||
|
|
@ -710,7 +665,6 @@ impl<T: ToExpr + ?Sized> CastToBits for T {
|
|||
}
|
||||
|
||||
pub trait CastBitsTo {
|
||||
#[track_caller]
|
||||
fn cast_bits_to<T: Type>(&self, ty: T) -> Expr<T>;
|
||||
}
|
||||
|
||||
|
|
@ -743,52 +697,3 @@ pub fn check_match_expr<T: Type>(
|
|||
_check_fn: impl FnOnce(T::MatchVariant, Infallible),
|
||||
) {
|
||||
}
|
||||
|
||||
pub trait MakeUninitExpr: Type {
|
||||
fn uninit(self) -> Expr<Self>;
|
||||
}
|
||||
|
||||
impl<T: Type> MakeUninitExpr for T {
|
||||
fn uninit(self) -> Expr<Self> {
|
||||
ops::Uninit::new(self).to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn repeat<T: Type, L: SizeType>(
|
||||
element: impl ToExpr<Type = T>,
|
||||
len: L,
|
||||
) -> Expr<ArrayType<T, L::Size>> {
|
||||
let element = element.to_expr();
|
||||
let canonical_element = Expr::canonical(element);
|
||||
ops::ArrayLiteral::new(
|
||||
Expr::ty(element),
|
||||
std::iter::repeat(canonical_element)
|
||||
.take(L::Size::as_usize(len))
|
||||
.collect(),
|
||||
)
|
||||
.to_expr()
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToExpr for PhantomConst<T> {
|
||||
type Type = Self;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
Expr {
|
||||
__enum: ExprEnum::PhantomConst(self.canonical_phantom_const()).intern_sized(),
|
||||
__ty: *self,
|
||||
__flow: Flow::Source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> GetTarget for PhantomConst<T> {
|
||||
fn target(&self) -> Option<Interned<Target>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToLiteralBits for PhantomConst<T> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Ok(Interned::default())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,23 +7,18 @@ use crate::{
|
|||
clock::{Clock, ToClock},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
CastBitsTo as _, CastTo, CastToBits as _, Expr, ExprEnum, Flow, HdlPartialEq,
|
||||
HdlPartialOrd, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits,
|
||||
target::{
|
||||
GetTarget, Target, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
CastTo, Expr, ExprEnum, Flow, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits,
|
||||
},
|
||||
int::{
|
||||
Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt,
|
||||
UIntType, UIntValue,
|
||||
Bool, BoolOrIntType, DynSize, IntCmp, IntType, KnownSize, SInt, SIntType, SIntValue, Size,
|
||||
UInt, UIntType, UIntValue,
|
||||
},
|
||||
intern::{Intern, Interned},
|
||||
phantom_const::{PhantomConst, PhantomConstValue},
|
||||
reset::{
|
||||
AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset,
|
||||
ToSyncReset,
|
||||
},
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
ty::{CanonicalType, StaticType, Type},
|
||||
util::ConstUsize,
|
||||
};
|
||||
|
|
@ -266,7 +261,7 @@ impl Neg {
|
|||
};
|
||||
let result_ty = retval.ty();
|
||||
retval.literal_bits = arg.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(&-SInt::bits_to_bigint(&bits)))
|
||||
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(-SInt::bits_to_bigint(&bits)))
|
||||
});
|
||||
retval
|
||||
}
|
||||
|
|
@ -373,7 +368,7 @@ fn binary_op_literal_bits<ResultTy: BoolOrIntType, Lhs: BoolOrIntType, Rhs: Bool
|
|||
let rhs = Rhs::bits_to_bigint(&rhs);
|
||||
let result = f(lhs, rhs)?;
|
||||
Ok(Intern::intern_owned(
|
||||
result_ty.bits_from_bigint_wrapping(&result),
|
||||
result_ty.bits_from_bigint_wrapping(result),
|
||||
))
|
||||
}
|
||||
|
||||
|
|
@ -1241,11 +1236,10 @@ macro_rules! impl_dyn_shl {
|
|||
}
|
||||
}
|
||||
|
||||
impl<LhsWidth: Size, RhsWidth: Size> Shl<Expr<UIntType<RhsWidth>>> for Expr<$ty<LhsWidth>> {
|
||||
type Output = Expr<$ty>;
|
||||
|
||||
fn shl(self, rhs: Expr<UIntType<RhsWidth>>) -> Self::Output {
|
||||
$name::new(Expr::as_dyn_int(self), Expr::as_dyn_int(rhs)).to_expr()
|
||||
impl_binary_op_trait! {
|
||||
#[generics(LhsWidth: Size, RhsWidth: Size)]
|
||||
fn Shl::shl(lhs: $ty<LhsWidth>, rhs: UIntType<RhsWidth>) -> $ty {
|
||||
$name::new(Expr::as_dyn_int(lhs), Expr::as_dyn_int(rhs)).to_expr()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -1314,11 +1308,10 @@ macro_rules! impl_dyn_shr {
|
|||
}
|
||||
}
|
||||
|
||||
impl<LhsWidth: Size, RhsWidth: Size> Shr<Expr<UIntType<RhsWidth>>> for Expr<$ty<LhsWidth>> {
|
||||
type Output = Expr<$ty<LhsWidth>>;
|
||||
|
||||
fn shr(self, rhs: Expr<UIntType<RhsWidth>>) -> Self::Output {
|
||||
$name::new(self, Expr::as_dyn_int(rhs)).to_expr()
|
||||
impl_binary_op_trait! {
|
||||
#[generics(LhsWidth: Size, RhsWidth: Size)]
|
||||
fn Shr::shr(lhs: $ty<LhsWidth>, rhs: UIntType<RhsWidth>) -> $ty<LhsWidth> {
|
||||
$name::new(lhs, Expr::as_dyn_int(rhs)).to_expr()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -1348,7 +1341,7 @@ macro_rules! binary_op_fixed_shift {
|
|||
literal_bits: Err(NotALiteralExpr),
|
||||
};
|
||||
retval.literal_bits = lhs.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping(&$Trait::$method(
|
||||
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping($Trait::$method(
|
||||
$ty::bits_to_bigint(&bits),
|
||||
rhs,
|
||||
)))
|
||||
|
|
@@ -1427,45 +1420,36 @@ forward_value_to_expr_binary_op_trait! {
|
|||
Shr::shr
|
||||
}
|
||||
|
||||
pub trait ExprPartialEq<Rhs: Type>: Type {
pub trait IntCmpExpr<Rhs: Type>: Type {
    fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
    fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
}

pub trait ExprPartialOrd<Rhs: Type>: ExprPartialEq<Rhs> {
    fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
    fn cmp_le(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
    fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
    fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
}
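For orientation only, a short usage sketch of the comparison traits above; it is not part of the diff, the operands are hypothetical, and the method-call form comes from the blanket impls shown just below:

// Hypothetical snippet, assuming `a` and `b` are `Expr<UInt<8>>` values and
// `lt_out` is a Bool wire already in scope; each comparison yields an Expr<Bool>:
connect(lt_out, a.cmp_lt(b));
let equal = a.cmp_eq(b);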
|
||||
|
||||
impl<Lhs: ToExpr, Rhs: ToExpr> HdlPartialEq<Rhs> for Lhs
|
||||
impl<Lhs: ToExpr, Rhs: ToExpr> IntCmp<Rhs> for Lhs
|
||||
where
|
||||
Lhs::Type: ExprPartialEq<Rhs::Type>,
|
||||
Lhs::Type: IntCmpExpr<Rhs::Type>,
|
||||
{
|
||||
fn cmp_eq(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialEq::cmp_eq(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_eq(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
fn cmp_ne(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialEq::cmp_ne(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_ne(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: ToExpr, Rhs: ToExpr> HdlPartialOrd<Rhs> for Lhs
|
||||
where
|
||||
Lhs::Type: ExprPartialOrd<Rhs::Type>,
|
||||
{
|
||||
fn cmp_lt(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialOrd::cmp_lt(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_lt(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
fn cmp_le(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialOrd::cmp_le(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_le(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
fn cmp_gt(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialOrd::cmp_gt(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_gt(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
fn cmp_ge(self, rhs: Rhs) -> Expr<Bool> {
|
||||
ExprPartialOrd::cmp_ge(self.to_expr(), rhs.to_expr())
|
||||
IntCmpExpr::cmp_ge(self.to_expr(), rhs.to_expr())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1475,7 +1459,6 @@ macro_rules! impl_compare_op {
|
|||
#[dyn_type($DynTy:ident)]
|
||||
#[to_dyn_type($lhs:ident => $dyn_lhs:expr, $rhs:ident => $dyn_rhs:expr)]
|
||||
#[type($Lhs:ty, $Rhs:ty)]
|
||||
#[trait($Trait:ident)]
|
||||
$(
|
||||
struct $name:ident;
|
||||
fn $method:ident();
|
||||
|
|
@ -1527,7 +1510,7 @@ macro_rules! impl_compare_op {
|
|||
}
|
||||
})*
|
||||
|
||||
impl$(<$LhsWidth: Size, $RhsWidth: Size>)? $Trait<$Rhs> for $Lhs {
|
||||
impl$(<$LhsWidth: Size, $RhsWidth: Size>)? IntCmpExpr<$Rhs> for $Lhs {
|
||||
$(fn $method($lhs: Expr<Self>, $rhs: Expr<$Rhs>) -> Expr<Bool> {
|
||||
$name::new($dyn_lhs, $dyn_rhs).to_expr()
|
||||
})*
|
||||
|
|
@ -1539,16 +1522,8 @@ impl_compare_op! {
|
|||
#[dyn_type(Bool)]
|
||||
#[to_dyn_type(lhs => lhs, rhs => rhs)]
|
||||
#[type(Bool, Bool)]
|
||||
#[trait(ExprPartialEq)]
|
||||
struct CmpEqB; fn cmp_eq(); PartialEq::eq();
|
||||
struct CmpNeB; fn cmp_ne(); PartialEq::ne();
|
||||
}
|
||||
|
||||
impl_compare_op! {
|
||||
#[dyn_type(Bool)]
|
||||
#[to_dyn_type(lhs => lhs, rhs => rhs)]
|
||||
#[type(Bool, Bool)]
|
||||
#[trait(ExprPartialOrd)]
|
||||
struct CmpLtB; fn cmp_lt(); PartialOrd::lt();
|
||||
struct CmpLeB; fn cmp_le(); PartialOrd::le();
|
||||
struct CmpGtB; fn cmp_gt(); PartialOrd::gt();
|
||||
|
|
@ -1560,17 +1535,8 @@ impl_compare_op! {
|
|||
#[dyn_type(UInt)]
|
||||
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
|
||||
#[type(UIntType<LhsWidth>, UIntType<RhsWidth>)]
|
||||
#[trait(ExprPartialEq)]
|
||||
struct CmpEqU; fn cmp_eq(); PartialEq::eq();
|
||||
struct CmpNeU; fn cmp_ne(); PartialEq::ne();
|
||||
}
|
||||
|
||||
impl_compare_op! {
|
||||
#[width(LhsWidth, RhsWidth)]
|
||||
#[dyn_type(UInt)]
|
||||
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
|
||||
#[type(UIntType<LhsWidth>, UIntType<RhsWidth>)]
|
||||
#[trait(ExprPartialOrd)]
|
||||
struct CmpLtU; fn cmp_lt(); PartialOrd::lt();
|
||||
struct CmpLeU; fn cmp_le(); PartialOrd::le();
|
||||
struct CmpGtU; fn cmp_gt(); PartialOrd::gt();
|
||||
|
|
@ -1582,17 +1548,8 @@ impl_compare_op! {
|
|||
#[dyn_type(SInt)]
|
||||
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
|
||||
#[type(SIntType<LhsWidth>, SIntType<RhsWidth>)]
|
||||
#[trait(ExprPartialEq)]
|
||||
struct CmpEqS; fn cmp_eq(); PartialEq::eq();
|
||||
struct CmpNeS; fn cmp_ne(); PartialEq::ne();
|
||||
}
|
||||
|
||||
impl_compare_op! {
|
||||
#[width(LhsWidth, RhsWidth)]
|
||||
#[dyn_type(SInt)]
|
||||
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
|
||||
#[type(SIntType<LhsWidth>, SIntType<RhsWidth>)]
|
||||
#[trait(ExprPartialOrd)]
|
||||
struct CmpLtS; fn cmp_lt(); PartialOrd::lt();
|
||||
struct CmpLeS; fn cmp_le(); PartialOrd::le();
|
||||
struct CmpGtS; fn cmp_gt(); PartialOrd::gt();
|
||||
|
|
@ -1625,7 +1582,7 @@ macro_rules! impl_cast_int_op {
|
|||
ty,
|
||||
literal_bits: arg.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(
|
||||
ty.bits_from_bigint_wrapping(&$from::bits_to_bigint(&bits)),
|
||||
ty.bits_from_bigint_wrapping($from::bits_to_bigint(&bits)),
|
||||
)
|
||||
}),
|
||||
}
|
||||
|
|
@ -1777,11 +1734,11 @@ impl_cast_bit_op!(CastSIntToAsyncReset, SInt<1>, #[dyn] SInt, AsyncReset, #[trai
|
|||
impl_cast_bit_op!(CastSyncResetToBool, SyncReset, Bool);
|
||||
impl_cast_bit_op!(CastSyncResetToUInt, SyncReset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastSyncResetToSInt, SyncReset, SInt<1>, #[dyn] SInt);
|
||||
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset);
|
||||
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset, #[trait] ToReset::to_reset);
|
||||
impl_cast_bit_op!(CastAsyncResetToBool, AsyncReset, Bool);
|
||||
impl_cast_bit_op!(CastAsyncResetToUInt, AsyncReset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastAsyncResetToSInt, AsyncReset, SInt<1>, #[dyn] SInt);
|
||||
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset);
|
||||
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset, #[trait] ToReset::to_reset);
|
||||
impl_cast_bit_op!(CastResetToBool, Reset, Bool);
|
||||
impl_cast_bit_op!(CastResetToUInt, Reset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastResetToSInt, Reset, SInt<1>, #[dyn] SInt);
|
||||
|
|
@ -1792,127 +1749,6 @@ impl_cast_bit_op!(CastClockToBool, Clock, Bool);
|
|||
impl_cast_bit_op!(CastClockToUInt, Clock, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastClockToSInt, Clock, SInt<1>, #[dyn] SInt);
|
||||
|
||||
impl<T: ResetType> ToReset for Expr<T> {
|
||||
fn to_reset(&self) -> Expr<Reset> {
|
||||
struct Dispatch;
|
||||
impl ResetTypeDispatch for Dispatch {
|
||||
type Input<T: ResetType> = Expr<T>;
|
||||
type Output<T: ResetType> = Expr<Reset>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
input
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
input.cast_to_static()
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
input.cast_to_static()
|
||||
}
|
||||
}
|
||||
T::dispatch(*self, Dispatch)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Reset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: Reset) -> Expr<Reset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for Clock {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for Clock {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for Clock {
|
||||
fn cast_to(src: Expr<Self>, _to_type: Clock) -> Expr<Clock> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<()> for PhantomConst<T> {
|
||||
fn cast_to(src: Expr<Self>, to_type: ()) -> Expr<()> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>> for () {
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, U: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>>
|
||||
for PhantomConst<U>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct FieldAccess<FieldType: Type = CanonicalType> {
|
||||
base: Expr<Bundle>,
|
||||
|
|
@ -1937,8 +1773,7 @@ impl<FieldType: Type> FieldAccess<FieldType> {
|
|||
let field = Expr::ty(base).fields()[field_index];
|
||||
let field_type = FieldType::from_canonical(field.ty);
|
||||
let literal_bits = base.to_literal_bits().map(|bits| {
|
||||
bits[Expr::ty(base).field_offsets()[field_index].bit_width..][..field.ty.bit_width()]
|
||||
.intern()
|
||||
bits[Expr::ty(base).field_offsets()[field_index]..][..field.ty.bit_width()].intern()
|
||||
});
|
||||
let target = base.target().map(|base| {
|
||||
Intern::intern_sized(base.join(TargetPathElement::intern_sized(
|
||||
|
|
@ -2183,7 +2018,7 @@ impl<ElementType: Type, Len: Size> ExprIndex<usize> for ArrayType<ElementType, L
|
|||
|
||||
#[track_caller]
|
||||
fn expr_index(this: &Expr<Self>, index: usize) -> &Expr<Self::Output> {
|
||||
Interned::into_inner(
|
||||
Interned::<_>::into_inner(
|
||||
ArrayIndex::<ElementType>::new(Expr::as_dyn_array(*this), index)
|
||||
.to_expr()
|
||||
.intern_sized(),
|
||||
|
|
@ -2280,7 +2115,7 @@ impl<ElementType: Type, Len: Size, Width: Size> ExprIndex<Expr<UIntType<Width>>>
|
|||
type Output = ElementType;
|
||||
|
||||
fn expr_index(this: &Expr<Self>, index: Expr<UIntType<Width>>) -> &Expr<Self::Output> {
|
||||
Interned::into_inner(
|
||||
Interned::<_>::into_inner(
|
||||
DynArrayIndex::<ElementType>::new(Expr::as_dyn_array(*this), Expr::as_dyn_int(index))
|
||||
.to_expr()
|
||||
.intern_sized(),
|
||||
|
|
@ -2405,7 +2240,7 @@ macro_rules! impl_int_slice {
|
|||
let base = Expr::as_dyn_int(*this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let range = base_ty.slice_index_to_range(index);
|
||||
Interned::into_inner($name::new(base, range).to_expr().intern_sized())
|
||||
Interned::<_>::into_inner($name::new(base, range).to_expr().intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2417,7 +2252,7 @@ macro_rules! impl_int_slice {
|
|||
let base = Expr::as_dyn_int(*this);
|
||||
let base_ty = Expr::ty(base);
|
||||
assert!(index < base_ty.width());
|
||||
Interned::into_inner(
|
||||
Interned::<_>::into_inner(
|
||||
$name::new(base, index..(index + 1))
|
||||
.to_expr()
|
||||
.cast_to_static::<Bool>()
|
||||
|
|
@ -2692,85 +2527,3 @@ impl<T: Type> ToExpr for CastBitsTo<T> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct Uninit<T: Type = CanonicalType> {
|
||||
ty: T,
|
||||
}
|
||||
|
||||
impl<T: Type> Uninit<T> {
|
||||
#[track_caller]
|
||||
pub fn new(ty: T) -> Self {
|
||||
Self { ty }
|
||||
}
|
||||
pub fn ty(self) -> T {
|
||||
self.ty
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type> ToLiteralBits for Uninit<T> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Err(NotALiteralExpr)
|
||||
}
|
||||
}
|
||||
|
||||
impl_get_target_none!([T: Type] Uninit<T>);
|
||||
|
||||
impl<T: Type> ToExpr for Uninit<T> {
|
||||
type Type = T;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
Expr {
|
||||
__enum: ExprEnum::Uninit(Uninit {
|
||||
ty: self.ty.canonical(),
|
||||
})
|
||||
.intern(),
|
||||
__ty: self.ty,
|
||||
__flow: Flow::Source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprIntoIterator: Type {
|
||||
type Item: Type;
|
||||
type ExprIntoIter: Iterator<Item = Expr<Self::Item>>;
|
||||
|
||||
fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter;
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ mut Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprFromIterator<A>: Type {
|
||||
fn expr_from_iter<T: IntoIterator<Item = A>>(iter: T) -> Expr<Self>;
|
||||
}
|
||||
|
||||
impl<This: ExprFromIterator<A>, A> FromIterator<A> for Expr<This> {
|
||||
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
|
||||
This::expr_from_iter(iter)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,21 +1,18 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
use crate::{
    array::Array,
    bundle::{Bundle, BundleField},
    expr::{Expr, Flow, ToExpr},
    expr::Flow,
    intern::{Intern, Interned},
    memory::{DynPortType, MemPort},
    module::{Instance, ModuleIO, TargetName},
    reg::Reg,
    reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
    source_location::SourceLocation,
    ty::{CanonicalType, Type},
    wire::Wire,
};
use std::fmt;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathBundleField {
    pub name: Interned<str>,
}
@@ -26,7 +23,7 @@ impl fmt::Display for TargetPathBundleField {
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathArrayElement {
    pub index: usize,
}
@@ -37,7 +34,7 @@ impl fmt::Display for TargetPathArrayElement {
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathDynArrayElement {}

impl fmt::Display for TargetPathDynArrayElement {
@@ -46,7 +43,7 @@ impl fmt::Display for TargetPathDynArrayElement {
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TargetPathElement {
    BundleField(TargetPathBundleField),
    ArrayElement(TargetPathArrayElement),
@@ -128,7 +125,6 @@ macro_rules! impl_target_base {
        $(#[$enum_meta:meta])*
        $enum_vis:vis enum $TargetBase:ident {
            $(
                $(#[from = $from:ident])?
                #[is = $is_fn:ident]
                #[to = $to_fn:ident]
                $(#[$variant_meta:meta])*
@@ -152,19 +148,19 @@ macro_rules! impl_target_base {
        }
    }

        $($(
        $(
            impl From<$VariantTy> for $TargetBase {
                fn $from(value: $VariantTy) -> Self {
                fn from(value: $VariantTy) -> Self {
                    Self::$Variant(value)
                }
            }

            impl From<$VariantTy> for Target {
                fn $from(value: $VariantTy) -> Self {
                fn from(value: $VariantTy) -> Self {
                    $TargetBase::$Variant(value).into()
                }
            }
        )*)?
        )*

        impl $TargetBase {
            $(
@@ -195,79 +191,30 @@ macro_rules! impl_target_base {
                }
            }
        }

        impl ToExpr for $TargetBase {
            type Type = CanonicalType;

            fn to_expr(&self) -> Expr<Self::Type> {
                match self {
                    $(Self::$Variant(v) => Expr::canonical(v.to_expr()),)*
                }
            }
        }
    };
}

impl_target_base! {
    #[derive(Copy, Clone, PartialEq, Eq, Hash)]
    #[derive(Clone, PartialEq, Eq, Hash)]
    pub enum TargetBase {
        #[from = from]
        #[is = is_module_io]
        #[to = module_io]
        ModuleIO(ModuleIO<CanonicalType>),
        #[from = from]
        #[is = is_mem_port]
        #[to = mem_port]
        MemPort(MemPort<DynPortType>),
        #[is = is_reg]
        #[to = reg]
        Reg(Reg<CanonicalType, Reset>),
        #[is = is_reg_sync]
        #[to = reg_sync]
        RegSync(Reg<CanonicalType, SyncReset>),
        #[is = is_reg_async]
        #[to = reg_async]
        RegAsync(Reg<CanonicalType, AsyncReset>),
        #[from = from]
        Reg(Reg<CanonicalType>),
        #[is = is_wire]
        #[to = wire]
        Wire(Wire<CanonicalType>),
        #[from = from]
        #[is = is_instance]
        #[to = instance]
        Instance(Instance<Bundle>),
    }
}

impl<R: ResetType> From<Reg<CanonicalType, R>> for TargetBase {
    fn from(value: Reg<CanonicalType, R>) -> Self {
        struct Dispatch;
        impl ResetTypeDispatch for Dispatch {
            type Input<T: ResetType> = Reg<CanonicalType, T>;
            type Output<T: ResetType> = TargetBase;

            fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
                TargetBase::Reg(input)
            }

            fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
                TargetBase::RegSync(input)
            }

            fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
                TargetBase::RegAsync(input)
            }
        }
        R::dispatch(value, Dispatch)
    }
}

impl<R: ResetType> From<Reg<CanonicalType, R>> for Target {
    fn from(value: Reg<CanonicalType, R>) -> Self {
        TargetBase::from(value).into()
    }
}

impl fmt::Display for TargetBase {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self.target_name())
@@ -280,8 +227,6 @@ impl TargetBase {
            TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None),
            TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())),
            TargetBase::Reg(v) => TargetName(v.scoped_name(), None),
            TargetBase::RegSync(v) => TargetName(v.scoped_name(), None),
            TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None),
            TargetBase::Wire(v) => TargetName(v.scoped_name(), None),
            TargetBase::Instance(v) => TargetName(v.scoped_name(), None),
        }
@@ -291,8 +236,6 @@ impl TargetBase {
            TargetBase::ModuleIO(v) => v.ty(),
            TargetBase::MemPort(v) => v.ty().canonical(),
            TargetBase::Reg(v) => v.ty(),
            TargetBase::RegSync(v) => v.ty(),
            TargetBase::RegAsync(v) => v.ty(),
            TargetBase::Wire(v) => v.ty(),
            TargetBase::Instance(v) => v.ty().canonical(),
        }
@@ -368,7 +311,7 @@ impl TargetChild {
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Target {
    Base(Interned<TargetBase>),
    Child(TargetChild),
File diff suppressed because it is too large
@ -1,247 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
int::BoolOrIntType,
|
||||
intern::{Intern, Interned, Memoize},
|
||||
prelude::*,
|
||||
};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||
pub enum FormalKind {
|
||||
Assert,
|
||||
Assume,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalKind {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Assert => "assert",
|
||||
Self::Assume => "assume",
|
||||
Self::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn formal_stmt_with_enable_and_loc(
|
||||
kind: FormalKind,
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
en: Expr<Bool>,
|
||||
text: &str,
|
||||
source_location: SourceLocation,
|
||||
) {
|
||||
crate::module::add_stmt_formal(crate::module::StmtFormal {
|
||||
kind,
|
||||
clk,
|
||||
pred,
|
||||
en: en & !formal_reset().cast_to_static::<Bool>(),
|
||||
text: text.intern(),
|
||||
source_location,
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn formal_stmt_with_enable(
|
||||
kind: FormalKind,
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
en: Expr<Bool>,
|
||||
text: &str,
|
||||
) {
|
||||
formal_stmt_with_enable_and_loc(kind, clk, pred, en, text, SourceLocation::caller());
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn formal_stmt_with_loc(
|
||||
kind: FormalKind,
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
text: &str,
|
||||
source_location: SourceLocation,
|
||||
) {
|
||||
formal_stmt_with_enable_and_loc(kind, clk, pred, true.to_expr(), text, source_location);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn formal_stmt(kind: FormalKind, clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
|
||||
formal_stmt_with_loc(kind, clk, pred, text, SourceLocation::caller());
|
||||
}
|
||||
|
||||
macro_rules! make_formal {
|
||||
($kind:ident, $formal_stmt_with_enable_and_loc:ident, $formal_stmt_with_enable:ident, $formal_stmt_with_loc:ident, $formal_stmt:ident) => {
|
||||
#[track_caller]
|
||||
pub fn $formal_stmt_with_enable_and_loc(
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
en: Expr<Bool>,
|
||||
text: &str,
|
||||
source_location: SourceLocation,
|
||||
) {
|
||||
formal_stmt_with_enable_and_loc(
|
||||
FormalKind::$kind,
|
||||
clk,
|
||||
pred,
|
||||
en,
|
||||
text,
|
||||
source_location,
|
||||
);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn $formal_stmt_with_enable(
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
en: Expr<Bool>,
|
||||
text: &str,
|
||||
) {
|
||||
formal_stmt_with_enable(FormalKind::$kind, clk, pred, en, text);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn $formal_stmt_with_loc(
|
||||
clk: Expr<Clock>,
|
||||
pred: Expr<Bool>,
|
||||
text: &str,
|
||||
source_location: SourceLocation,
|
||||
) {
|
||||
formal_stmt_with_loc(FormalKind::$kind, clk, pred, text, source_location);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn $formal_stmt(clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
|
||||
formal_stmt(FormalKind::$kind, clk, pred, text);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
make_formal!(
|
||||
Assert,
|
||||
hdl_assert_with_enable_and_loc,
|
||||
hdl_assert_with_enable,
|
||||
hdl_assert_with_loc,
|
||||
hdl_assert
|
||||
);
|
||||
|
||||
make_formal!(
|
||||
Assume,
|
||||
hdl_assume_with_enable_and_loc,
|
||||
hdl_assume_with_enable,
|
||||
hdl_assume_with_loc,
|
||||
hdl_assume
|
||||
);
|
||||
|
||||
make_formal!(
|
||||
Cover,
|
||||
hdl_cover_with_enable_and_loc,
|
||||
hdl_cover_with_enable,
|
||||
hdl_cover_with_loc,
|
||||
hdl_cover
|
||||
);
|
||||
|
||||
pub trait MakeFormalExpr: Type {}
|
||||
|
||||
impl<T: Type> MakeFormalExpr for T {}
|
||||
|
||||
#[hdl]
|
||||
pub fn formal_global_clock() -> Expr<Clock> {
|
||||
#[hdl_module(extern)]
|
||||
fn formal_global_clock() {
|
||||
#[hdl]
|
||||
let clk: Clock = m.output();
|
||||
m.annotate_module(BlackBoxInlineAnnotation {
|
||||
path: "fayalite_formal_global_clock.v".intern(),
|
||||
text: r"module __fayalite_formal_global_clock(output clk);
|
||||
(* gclk *)
|
||||
reg clk;
|
||||
endmodule
|
||||
"
|
||||
.intern(),
|
||||
});
|
||||
m.verilog_name("__fayalite_formal_global_clock");
|
||||
}
|
||||
#[hdl]
|
||||
let formal_global_clock = instance(formal_global_clock());
|
||||
formal_global_clock.clk
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
pub fn formal_reset() -> Expr<SyncReset> {
|
||||
#[hdl_module(extern)]
|
||||
fn formal_reset() {
|
||||
#[hdl]
|
||||
let rst: SyncReset = m.output();
|
||||
m.annotate_module(BlackBoxInlineAnnotation {
|
||||
path: "fayalite_formal_reset.v".intern(),
|
||||
text: r"module __fayalite_formal_reset(output rst);
|
||||
assign rst = $initstate;
|
||||
endmodule
|
||||
"
|
||||
.intern(),
|
||||
});
|
||||
m.verilog_name("__fayalite_formal_reset");
|
||||
}
|
||||
static MOD: OnceLock<Interned<Module<formal_reset>>> = OnceLock::new();
|
||||
#[hdl]
|
||||
let formal_reset = instance(*MOD.get_or_init(formal_reset));
|
||||
formal_reset.rst
|
||||
}
|
||||
|
||||
macro_rules! make_any_const_fn {
|
||||
($ident:ident, $verilog_attribute:literal) => {
|
||||
#[hdl]
|
||||
pub fn $ident<T: BoolOrIntType>(ty: T) -> Expr<T> {
|
||||
#[hdl_module(extern)]
|
||||
pub(super) fn $ident<T: BoolOrIntType>(ty: T) {
|
||||
#[hdl]
|
||||
let out: T = m.output(ty);
|
||||
let width = ty.width();
|
||||
let verilog_bitslice = if width == 1 {
|
||||
String::new()
|
||||
} else {
|
||||
format!(" [{}:0]", width - 1)
|
||||
};
|
||||
m.annotate_module(BlackBoxInlineAnnotation {
|
||||
path: Intern::intern_owned(format!(
|
||||
"fayalite_{}_{width}.v",
|
||||
stringify!($ident),
|
||||
)),
|
||||
text: Intern::intern_owned(format!(
|
||||
r"module __fayalite_{}_{width}(output{verilog_bitslice} out);
|
||||
(* {} *)
|
||||
reg{verilog_bitslice} out;
|
||||
endmodule
|
||||
",
|
||||
stringify!($ident),
|
||||
$verilog_attribute,
|
||||
)),
|
||||
});
|
||||
m.verilog_name(format!("__fayalite_{}_{width}", stringify!($ident)));
|
||||
}
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
struct TheMemoize<T>(T);
|
||||
impl<T: BoolOrIntType> Memoize for TheMemoize<T> {
|
||||
type Input = ();
|
||||
type InputOwned = ();
|
||||
type Output = Option<Interned<Module<$ident<T>>>>;
|
||||
fn inner(self, _input: &Self::Input) -> Self::Output {
|
||||
if self.0.width() == 0 {
|
||||
None
|
||||
} else {
|
||||
Some($ident(self.0))
|
||||
}
|
||||
}
|
||||
}
|
||||
let Some(module) = TheMemoize(ty).get_owned(()) else {
|
||||
return 0_hdl_u0.cast_bits_to(ty);
|
||||
};
|
||||
#[hdl]
|
||||
let $ident = instance(module);
|
||||
$ident.out
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
make_any_const_fn!(any_const, "anyconst");
|
||||
make_any_const_fn!(any_seq, "anyseq");
|
||||
make_any_const_fn!(all_const, "allconst");
|
||||
make_any_const_fn!(all_seq, "allseq");
|
File diff suppressed because it is too large
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd,
|
||||
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
};
|
||||
use bitvec::{order::Lsb0, view::BitView};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{Error, Visitor, value::UsizeDeserializer},
|
||||
};
|
||||
use std::{fmt, marker::PhantomData, ops::Index};
|
||||
|
||||
const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"];
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct UIntInRangeMaskType {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType,
|
||||
}
|
||||
|
||||
impl Type for UIntInRangeMaskType {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Self;
|
||||
type SimValue = bool;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
*self
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected UIntInRangeMaskType");
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = Bool::from_canonical(value);
|
||||
let range = PhantomConstRangeMaskType::from_canonical(range);
|
||||
Self { value, range }
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
Bool.sim_value_from_opaque(opaque)
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
Bool.sim_value_clone_from_opaque(value, opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
Bool.sim_value_to_opaque(value, writer)
|
||||
}
|
||||
}
|
||||
|
||||
impl BundleType for UIntInRangeMaskType {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<UIntInRangeMaskType>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl StaticType for UIntInRangeMaskType {
|
||||
const TYPE: Self = Self {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType::TYPE,
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = Self::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Bool::TYPE_PROPERTIES)
|
||||
.field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl ToSimValueWithType<UIntInRangeMaskType> for bool {
|
||||
fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue<UIntInRangeMaskType> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Bool> for UIntInRangeMaskType {
|
||||
fn cast_to(src: Expr<Self>, to_type: Bool) -> Expr<Bool> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<UIntInRangeMaskType> for Bool {
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntInRangeMaskType) -> Expr<UIntInRangeMaskType> {
|
||||
src.cast_to_static::<UInt<1>>().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprPartialEq<Self> for UIntInRangeMaskType {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl SimValuePartialEq<Self> for UIntInRangeMaskType {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
type PhantomConstRangeMaskType = <PhantomConst<SerdeRange<DynSize, DynSize>> as Type>::MaskType;
|
||||
|
||||
#[derive(Default, Copy, Clone, Debug)]
|
||||
struct RangeParseError;
|
||||
|
||||
macro_rules! define_uint_in_range_type {
|
||||
(
|
||||
$UIntInRange:ident,
|
||||
$UIntInRangeType:ident,
|
||||
$UIntInRangeTypeWithoutGenerics:ident,
|
||||
$UIntInRangeTypeWithStart:ident,
|
||||
$SerdeRange:ident,
|
||||
$range_operator_str:literal,
|
||||
|$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr,
|
||||
) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct $SerdeRange<Start: Size, End: Size> {
|
||||
start: Start::SizeType,
|
||||
end: End::SizeType,
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $SerdeRange<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Start::SIZE,
|
||||
end: End::SIZE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for $SerdeRange<DynSize, DynSize> {
|
||||
type Err = RangeParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let Some((start, end)) = s.split_once($range_operator_str) else {
|
||||
return Err(RangeParseError);
|
||||
};
|
||||
if start.is_empty()
|
||||
|| start.bytes().any(|b| !b.is_ascii_digit())
|
||||
|| end.is_empty()
|
||||
|| end.bytes().any(|b| !b.is_ascii_digit())
|
||||
{
|
||||
return Err(RangeParseError);
|
||||
}
|
||||
let start = start.parse().map_err(|_| RangeParseError)?;
|
||||
let end = end.parse().map_err(|_| RangeParseError)?;
|
||||
let retval = Self { start, end };
|
||||
if retval.is_empty() {
|
||||
Err(RangeParseError)
|
||||
} else {
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Display for $SerdeRange<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { start, end } = *self;
|
||||
write!(
|
||||
f,
|
||||
"{}{}{}",
|
||||
Start::as_usize(start),
|
||||
$range_operator_str,
|
||||
End::as_usize(end),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Serialize for $SerdeRange<Start, End> {
|
||||
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
serializer.collect_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange<Start, End> {
|
||||
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
|
||||
struct SerdeRangeVisitor<Start: Size, End: Size>(PhantomData<(Start, End)>);
|
||||
impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor<Start, End> {
|
||||
type Value = $SerdeRange<Start, End>;
|
||||
|
||||
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("a string with format \"")?;
|
||||
if let Some(start) = Start::KNOWN_VALUE {
|
||||
write!(f, "{start}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str($range_operator_str)?;
|
||||
if let Some(end) = End::KNOWN_VALUE {
|
||||
write!(f, "{end}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str("\" that is a non-empty range")
|
||||
}
|
||||
|
||||
fn visit_str<E: Error>(self, v: &str) -> Result<Self::Value, E> {
|
||||
let $SerdeRange::<DynSize, DynSize> { start, end } =
|
||||
v.parse().map_err(|_| {
|
||||
Error::invalid_value(serde::de::Unexpected::Str(v), &self)
|
||||
})?;
|
||||
let start =
|
||||
Start::SizeType::deserialize(UsizeDeserializer::<E>::new(start))?;
|
||||
let end = End::SizeType::deserialize(UsizeDeserializer::<E>::new(end))?;
|
||||
Ok($SerdeRange { start, end })
|
||||
}
|
||||
|
||||
fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
|
||||
match std::str::from_utf8(v) {
|
||||
Ok(v) => self.visit_str(v),
|
||||
Err(_) => {
|
||||
Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_str(SerdeRangeVisitor(PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct $UIntInRangeType<Start: Size, End: Size> {
|
||||
value: UInt,
|
||||
range: PhantomConst<$SerdeRange<Start, End>>,
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> $UIntInRangeType<Start, End> {
|
||||
fn from_phantom_const_range(range: PhantomConst<$SerdeRange<Start, End>>) -> Self {
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
let $uint_range_usize_start = Start::as_usize(start);
|
||||
let $uint_range_usize_end = End::as_usize(end);
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range,
|
||||
}
|
||||
}
|
||||
pub fn new(start: Start::SizeType, end: End::SizeType) -> Self {
|
||||
Self::from_phantom_const_range(PhantomConst::new(
|
||||
$SerdeRange { start, end }.intern_sized(),
|
||||
))
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.value.width()
|
||||
}
|
||||
pub fn start(self) -> Start::SizeType {
|
||||
self.range.get().start
|
||||
}
|
||||
pub fn end(self) -> End::SizeType {
|
||||
self.range.get().end
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Debug for $UIntInRangeType<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { value, range } = self;
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
f.debug_struct(&format!(
|
||||
"{}<{}, {}>",
|
||||
stringify!($UIntInRange),
|
||||
Start::as_usize(start),
|
||||
End::as_usize(end),
|
||||
))
|
||||
.field("value", value)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Type for $UIntInRangeType<Start, End> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = UIntInRangeMaskType;
|
||||
type SimValue = usize;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
UIntInRangeMaskType::TYPE
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected {}", stringify!($UIntInRange));
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = UInt::from_canonical(value);
|
||||
let range = PhantomConst::<$SerdeRange<Start, End>>::from_canonical(range);
|
||||
let retval = Self::from_phantom_const_range(range);
|
||||
assert_eq!(retval, Self { value, range });
|
||||
retval
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), self.value.type_properties().size());
|
||||
let mut retval = 0usize;
|
||||
retval.view_bits_mut::<Lsb0>()[..opaque.bit_width()]
|
||||
.clone_from_bitslice(opaque.bits());
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
*value = self.sim_value_from_opaque(opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
&value.view_bits::<Lsb0>()[..self.value.width()],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> BundleType for $UIntInRangeType<Start, End> {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<Self>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $UIntInRangeType<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> StaticType for $UIntInRangeType<Start, End> {
|
||||
const TYPE: Self = {
|
||||
let $uint_range_usize_start = Start::VALUE;
|
||||
let $uint_range_usize_end = End::VALUE;
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range: PhantomConst::<$SerdeRange<Start, End>>::TYPE,
|
||||
}
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Self::TYPE.value.type_properties_dyn())
|
||||
.field(
|
||||
false,
|
||||
PhantomConst::<$SerdeRange<Start, End>>::TYPE_PROPERTIES,
|
||||
)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> ToSimValueWithType<$UIntInRangeType<Start, End>> for usize {
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: $UIntInRangeType<Start, End>,
|
||||
) -> SimValue<$UIntInRangeType<Start, End>> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct $UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics =
|
||||
$UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
impl<StartSize: SizeType> Index<StartSize> for $UIntInRangeTypeWithoutGenerics {
|
||||
type Output = $UIntInRangeTypeWithStart<StartSize::Size>;
|
||||
|
||||
fn index(&self, start: StartSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct $UIntInRangeTypeWithStart<Start: Size>(Start::SizeType);
|
||||
|
||||
impl<Start: Size, EndSize: SizeType<Size = End>, End: Size<SizeType = EndSize>>
|
||||
Index<EndSize> for $UIntInRangeTypeWithStart<Start>
|
||||
{
|
||||
type Output = $UIntInRangeType<Start, End>;
|
||||
|
||||
fn index(&self, end: EndSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntType<Width>) -> Expr<UIntType<Width>> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cast_to(
|
||||
src: Expr<Self>,
|
||||
to_type: $UIntInRangeType<Start, End>,
|
||||
) -> Expr<$UIntInRangeType<Start, End>> {
|
||||
src.cast_to(to_type.value).cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_eq(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialOrd<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_lt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
SimValuePartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn sim_value_eq(
|
||||
this: &SimValue<Self>,
|
||||
other: &SimValue<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs)
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs)
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs)
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs)
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
define_uint_in_range_type! {
|
||||
UIntInRange,
|
||||
UIntInRangeType,
|
||||
UIntInRangeTypeWithoutGenerics,
|
||||
UIntInRangeTypeWithStart,
|
||||
SerdeRange,
|
||||
"..",
|
||||
|start, end| UInt::range_usize(start..end),
|
||||
}
|
||||
|
||||
define_uint_in_range_type! {
|
||||
UIntInRangeInclusive,
|
||||
UIntInRangeInclusiveType,
|
||||
UIntInRangeInclusiveTypeWithoutGenerics,
|
||||
UIntInRangeInclusiveTypeWithStart,
|
||||
SerdeRangeInclusive,
|
||||
"..=",
|
||||
|start, end| UInt::range_inclusive_usize(start..=end),
|
||||
}
|
||||
|
||||
impl SerdeRange<DynSize, DynSize> {
|
||||
fn is_empty(self) -> bool {
|
||||
self.start >= self.end
|
||||
}
|
||||
}
|
||||
|
||||
impl SerdeRangeInclusive<DynSize, DynSize> {
|
||||
fn is_empty(self) -> bool {
|
||||
self.start > self.end
|
||||
}
|
||||
}
File diff suppressed because it is too large
@@ -1,8 +1,10 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use hashbrown::HashMap;
use std::{
    any::{Any, TypeId},
    hash::{BuildHasher, Hasher},
    ptr::NonNull,
    sync::RwLock,
};

@@ -73,36 +75,59 @@ impl BuildHasher for TypeIdBuildHasher {
    }
}

pub(crate) struct TypeIdMap(
    RwLock<hashbrown::HashMap<TypeId, &'static (dyn Any + Send + Sync), TypeIdBuildHasher>>,
);
struct Value(NonNull<dyn Any + Send + Sync>);

impl Value {
    unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) {
        unsafe { &*self.0.as_ptr() }
    }
    fn new(v: Box<dyn Any + Send + Sync>) -> Self {
        unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) }
    }
}

unsafe impl Send for Value {}
unsafe impl Sync for Value {}

impl Drop for Value {
    fn drop(&mut self) {
        unsafe { std::ptr::drop_in_place(self.0.as_ptr()) }
    }
}

pub struct TypeIdMap(RwLock<HashMap<TypeId, Value, TypeIdBuildHasher>>);

impl TypeIdMap {
    pub(crate) const fn new() -> Self {
        Self(RwLock::new(hashbrown::HashMap::with_hasher(
            TypeIdBuildHasher,
        )))
    pub const fn new() -> Self {
        Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher)))
    }
    #[cold]
    fn insert_slow(
    unsafe fn insert_slow(
        &self,
        type_id: TypeId,
        make: fn() -> Box<dyn Any + Sync + Send>,
    ) -> &'static (dyn Any + Sync + Send) {
        let value = Box::leak(make());
    ) -> &(dyn Any + Sync + Send) {
        let value = Value::new(make());
        let mut write_guard = self.0.write().unwrap();
        *write_guard.entry(type_id).or_insert(value)
        unsafe {
            write_guard
                .entry(type_id)
                .or_insert(value)
                .get_transmute_lifetime()
        }
    }
    pub(crate) fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
    pub fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
        let type_id = TypeId::of::<T>();
        let read_guard = self.0.read().unwrap();
        let retval = read_guard.get(&type_id).map(|v| *v);
        let retval = read_guard
            .get(&type_id)
            .map(|v| unsafe { Value::get_transmute_lifetime(v) });
        drop(read_guard);
        let retval = match retval {
            Some(retval) => retval,
            None => self.insert_slow(type_id, move || Box::new(T::default())),
            None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) },
        };
        retval.downcast_ref().expect("known to have correct TypeId")
        unsafe { &*(retval as *const dyn Any as *const T) }
    }
}
@ -4,18 +4,6 @@
|
|||
// TODO: enable:
|
||||
// #![warn(missing_docs)]
|
||||
|
||||
#![deny(
|
||||
rustdoc::bare_urls,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
rustdoc::invalid_codeblock_attributes,
|
||||
rustdoc::invalid_html_tags,
|
||||
rustdoc::invalid_rust_codeblocks,
|
||||
rustdoc::private_doc_tests,
|
||||
rustdoc::private_intra_doc_links,
|
||||
rustdoc::redundant_explicit_links,
|
||||
rustdoc::unescaped_backticks
|
||||
)]
|
||||
|
||||
//! [Main Documentation][_docs]
|
||||
|
||||
extern crate self as fayalite;
|
||||
|
|
@ -23,59 +11,6 @@ extern crate self as fayalite;
|
|||
#[doc(hidden)]
|
||||
pub use std as __std;
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! __cfg_expansion_helper {
|
||||
(
|
||||
[
|
||||
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
|
||||
]
|
||||
[
|
||||
$cfg:ident($($expr:tt)*),
|
||||
$($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
|
||||
]
|
||||
// pass as tt so we get right span for attribute
|
||||
$after_evaluation_attr:tt $after_evaluation_body:tt
|
||||
) => {
|
||||
#[$cfg($($expr)*)]
|
||||
$crate::__cfg_expansion_helper! {
|
||||
[
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
$cfg($($expr)*) = true,
|
||||
]
|
||||
[
|
||||
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
|
||||
]
|
||||
$after_evaluation_attr $after_evaluation_body
|
||||
}
|
||||
#[$cfg(not($($expr)*))]
|
||||
$crate::__cfg_expansion_helper! {
|
||||
[
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
$cfg($($expr)*) = false,
|
||||
]
|
||||
[
|
||||
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
|
||||
]
|
||||
$after_evaluation_attr $after_evaluation_body
|
||||
}
|
||||
};
|
||||
(
|
||||
[
|
||||
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
|
||||
]
|
||||
[]
|
||||
// don't use #[...] so we get right span for `#` and `[]` of attribute
|
||||
{$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*}
|
||||
) => {
|
||||
$($after_evaluation_attr)*
|
||||
#[__evaluated_cfgs([
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
])]
|
||||
$($after_evaluation_body)*
|
||||
};
|
||||
}
|
||||
|
||||
#[doc(inline)]
|
||||
/// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates
|
||||
/// a [`Module`][`::fayalite::module::Module`] when called.
|
||||
|
|
@ -86,139 +21,8 @@ macro_rules! __cfg_expansion_helper {
|
|||
pub use fayalite_proc_macros::hdl_module;
|
||||
|
||||
#[doc(inline)]
|
||||
/// The `#[hdl]` attribute is supported on several different kinds of [Rust Items](https://doc.rust-lang.org/reference/items.html):
|
||||
///
|
||||
/// # Functions and Methods
|
||||
/// Enable's the stuff that you can use inside a [module's body](crate::_docs::modules::module_bodies),
|
||||
/// but without being a module or changing the function's signature.
|
||||
/// The only exception is that you can't use stuff that requires the automatically-provided `m` variable.
|
||||
///
|
||||
/// # Structs
|
||||
// TODO: expand on struct docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct OtherStruct {}
|
||||
/// #[hdl]
|
||||
/// pub struct MyStruct {
|
||||
/// #[hdl(flip)]
|
||||
/// pub a: UInt<5>,
|
||||
/// pub b: Bool,
|
||||
/// #[hdl(flip)]
|
||||
/// pub c: OtherStruct,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Enums
|
||||
// TODO: expand on enum docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct {}
|
||||
/// #[hdl]
|
||||
/// pub enum MyEnum {
|
||||
/// A(UInt<3>),
|
||||
/// B,
|
||||
/// C(MyStruct),
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Type Aliases
|
||||
///
|
||||
/// There's three different ways you can create a type alias:
|
||||
///
|
||||
/// # Normal Type Alias
|
||||
///
|
||||
/// This works exactly how you'd expect:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct<T: Type> {
|
||||
/// # v: T,
|
||||
/// # }
|
||||
/// #[hdl]
|
||||
/// pub type MyType<T: Type> = MyStruct<T>;
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
///
|
||||
/// let ty = MyType[UInt[3]];
|
||||
/// assert_eq!(ty, MyStruct[UInt[3]]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Type`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Type`] that you can use in other #[hdl] types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub foo: usize,
|
||||
/// pub bar: Bundle,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `Array<Bundle>`
|
||||
/// #[hdl(get(|config| Array[config.bar][config.foo]))]
|
||||
/// pub type GetMyArray<P: PhantomConstGet<Config>> = Array<Bundle>;
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct WrapMyArray<P: PhantomConstGet<Config>> {
|
||||
/// pub my_array: GetMyArray<P>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let bar = Bundle::new(Default::default());
|
||||
/// let config = PhantomConst::new(Config { foo: 12, bar }.intern_sized());
|
||||
/// let ty = WrapMyArray[config];
|
||||
/// assert_eq!(ty.my_array, Array[bar][12]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Size`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Size`] that you can use in other #[hdl] types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// # #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// # pub struct ConfigItem {}
|
||||
/// # impl ConfigItem {
|
||||
/// # pub fn new() -> Self {
|
||||
/// # Self {}
|
||||
/// # }
|
||||
/// # }
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub items: Vec<ConfigItem>,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `usize` (not DynSize)
|
||||
/// #[hdl(get(|config| config.items.len()))]
|
||||
/// pub type GetItemsLen<P: PhantomConstGet<Config>> = DynSize; // must be DynSize
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct FlagPerItem<P: PhantomConstGet<Config>> {
|
||||
/// pub flags: ArrayType<Bool, GetItemsLen<P>>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let config = PhantomConst::new(Config { items: vec![ConfigItem::new(); 5] }.intern_sized());
|
||||
/// let ty = FlagPerItem[config];
|
||||
/// assert_eq!(ty.flags, Array[Bool][5]);
|
||||
/// ```
|
||||
///
|
||||
/// [`PhantomConst`]: crate::phantom_const::PhantomConst
|
||||
/// [`Size`]: crate::int::Size
|
||||
/// [`Type`]: crate::ty::Type
|
||||
pub use fayalite_proc_macros::hdl;
|
||||
|
||||
pub use bitvec;
|
||||
|
||||
/// struct used as a placeholder when applying defaults
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct __;
|
||||
|
|
@ -226,28 +30,24 @@ pub struct __;
|
|||
#[cfg(feature = "unstable-doc")]
|
||||
pub mod _docs;
|
||||
|
||||
// FIXME: finish
|
||||
pub mod annotations;
|
||||
pub mod array;
|
||||
pub mod build;
|
||||
pub mod bundle;
|
||||
pub mod cli;
|
||||
pub mod clock;
|
||||
pub mod enum_;
|
||||
pub mod expr;
|
||||
pub mod firrtl;
|
||||
pub mod formal;
|
||||
pub mod int;
|
||||
pub mod intern;
|
||||
pub mod memory;
|
||||
pub mod module;
|
||||
pub mod phantom_const;
|
||||
pub mod platform;
|
||||
pub mod prelude;
|
||||
pub mod reg;
|
||||
pub mod reset;
|
||||
pub mod sim;
|
||||
pub mod source_location;
|
||||
pub mod testing;
|
||||
pub mod ty;
|
||||
pub mod util;
|
||||
pub mod vendor;
|
||||
//pub mod valueless;
|
||||
pub mod prelude;
|
||||
pub mod wire;
@ -7,7 +7,7 @@ use crate::{
|
|||
array::{Array, ArrayType},
|
||||
bundle::{Bundle, BundleType},
|
||||
clock::Clock,
|
||||
expr::{Expr, Flow, ToExpr, ToLiteralBits, ops::BundleLiteral, repeat},
|
||||
expr::{Expr, Flow, ToExpr, ToLiteralBits},
|
||||
hdl,
|
||||
int::{Bool, DynSize, Size, UInt, UIntType},
|
||||
intern::{Intern, Interned},
|
||||
|
|
@ -22,7 +22,7 @@ use std::{
|
|||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
num::NonZeroUsize,
|
||||
num::NonZeroU32,
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
|
|
@ -470,7 +470,7 @@ pub enum ReadUnderWrite {
|
|||
Undefined,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
struct MemImpl<Element: Type, Len: Size, P> {
|
||||
scoped_name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
|
|
@ -478,7 +478,7 @@ struct MemImpl<Element: Type, Len: Size, P> {
|
|||
initial_value: Option<Interned<BitSlice>>,
|
||||
ports: P,
|
||||
read_latency: usize,
|
||||
write_latency: NonZeroUsize,
|
||||
write_latency: NonZeroU32,
|
||||
read_under_write: ReadUnderWrite,
|
||||
port_annotations: Interned<[TargetedAnnotation]>,
|
||||
mem_annotations: Interned<[Annotation]>,
|
||||
|
|
@ -519,12 +519,7 @@ impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
|
|||
f.debug_struct("Mem")
|
||||
.field("name", scoped_name)
|
||||
.field("array_type", array_type)
|
||||
.field(
|
||||
"initial_value",
|
||||
&initial_value.as_ref().map(|initial_value| {
|
||||
DebugMemoryData::from_bit_slice(*array_type, initial_value)
|
||||
}),
|
||||
)
|
||||
.field("initial_value", initial_value)
|
||||
.field("read_latency", read_latency)
|
||||
.field("write_latency", write_latency)
|
||||
.field("read_under_write", read_under_write)
|
||||
|
|
@ -567,7 +562,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
initial_value: Option<Interned<BitSlice>>,
|
||||
ports: Interned<[MemPort<DynPortType>]>,
|
||||
read_latency: usize,
|
||||
write_latency: NonZeroUsize,
|
||||
write_latency: NonZeroU32,
|
||||
read_under_write: ReadUnderWrite,
|
||||
port_annotations: Interned<[TargetedAnnotation]>,
|
||||
mem_annotations: Interned<[Annotation]>,
|
||||
|
|
@ -639,7 +634,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
self.0.source_location
|
||||
}
|
||||
pub fn array_type(self) -> ArrayType<Element, Len> {
|
||||
self.0.array_type
|
||||
self.0.array_type.clone()
|
||||
}
|
||||
pub fn initial_value(self) -> Option<Interned<BitSlice>> {
|
||||
self.0.initial_value
|
||||
|
|
@ -650,7 +645,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
pub fn read_latency(self) -> usize {
|
||||
self.0.read_latency
|
||||
}
|
||||
pub fn write_latency(self) -> NonZeroUsize {
|
||||
pub fn write_latency(self) -> NonZeroU32 {
|
||||
self.0.write_latency
|
||||
}
|
||||
pub fn read_under_write(self) -> ReadUnderWrite {
|
||||
|
|
@ -712,7 +707,7 @@ pub(crate) struct MemBuilderTarget {
|
|||
pub(crate) initial_value: Option<Interned<BitSlice>>,
|
||||
pub(crate) ports: Vec<MemPort<DynPortType>>,
|
||||
pub(crate) read_latency: usize,
|
||||
pub(crate) write_latency: NonZeroUsize,
|
||||
pub(crate) write_latency: NonZeroU32,
|
||||
pub(crate) read_under_write: ReadUnderWrite,
|
||||
pub(crate) port_annotations: Vec<TargetedAnnotation>,
|
||||
pub(crate) mem_annotations: Vec<Annotation>,
|
||||
|
|
@ -872,7 +867,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
initial_value: None,
|
||||
ports: vec![],
|
||||
read_latency: 0,
|
||||
write_latency: NonZeroUsize::new(1).unwrap(),
|
||||
write_latency: NonZeroU32::new(1).unwrap(),
|
||||
read_under_write: ReadUnderWrite::Old,
|
||||
port_annotations: vec![],
|
||||
mem_annotations: vec![],
|
||||
|
|
@ -992,7 +987,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
#[allow(clippy::result_unit_err)]
|
||||
pub fn get_array_type(&self) -> Result<ArrayType<Element, Len>, ()> {
|
||||
Ok(ArrayType::new(
|
||||
self.mem_element_type,
|
||||
self.mem_element_type.clone(),
|
||||
Len::from_usize(self.get_depth()?),
|
||||
))
|
||||
}
|
||||
|
|
@ -1035,10 +1030,10 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
pub fn read_latency(&mut self, read_latency: usize) {
|
||||
self.target.borrow_mut().read_latency = read_latency;
|
||||
}
|
||||
pub fn get_write_latency(&self) -> NonZeroUsize {
|
||||
pub fn get_write_latency(&self) -> NonZeroU32 {
|
||||
self.target.borrow().write_latency
|
||||
}
|
||||
pub fn write_latency(&mut self, write_latency: NonZeroUsize) {
|
||||
pub fn write_latency(&mut self, write_latency: NonZeroU32) {
|
||||
self.target.borrow_mut().write_latency = write_latency;
|
||||
}
|
||||
pub fn get_read_under_write(&self) -> ReadUnderWrite {
|
||||
|
|
@ -1055,92 +1050,3 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
.extend(annotations.into_annotations());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
|
||||
let canonical_ty = ty.canonical();
|
||||
match canonical_ty {
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::Enum(_)
|
||||
| CanonicalType::DynSimOnly(_) => Expr::from_canonical(Expr::canonical(value)),
|
||||
CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
|
||||
splat_mask(array.element(), value),
|
||||
array.len(),
|
||||
))),
|
||||
CanonicalType::Bundle(bundle) => Expr::from_canonical(Expr::canonical(
|
||||
BundleLiteral::new(
|
||||
bundle.mask_type(),
|
||||
bundle
|
||||
.fields()
|
||||
.iter()
|
||||
.map(|field| splat_mask(field.ty, value))
|
||||
.collect(),
|
||||
)
|
||||
.to_expr(),
|
||||
)),
|
||||
CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())),
|
||||
}
|
||||
}
|
||||
|
||||
pub trait DebugMemoryDataGetElement {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice;
|
||||
}
|
||||
|
||||
impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
|
||||
self(element_index, array_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DebugMemoryData<GetElement: DebugMemoryDataGetElement> {
|
||||
pub array_type: Array,
|
||||
pub get_element: GetElement,
|
||||
}
|
||||
|
||||
impl DebugMemoryDataGetElement for &'_ BitSlice {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
|
||||
assert!(element_index < array_type.len());
|
||||
let stride = array_type.element().bit_width();
|
||||
let start = element_index
|
||||
.checked_mul(stride)
|
||||
.expect("memory is too big");
|
||||
let end = start.checked_add(stride).expect("memory is too big");
|
||||
&self[start..end]
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DebugMemoryData<&'a BitSlice> {
|
||||
pub fn from_bit_slice<T: Type, Depth: Size>(
|
||||
array_type: ArrayType<T, Depth>,
|
||||
bit_slice: &'a BitSlice,
|
||||
) -> Self {
|
||||
let array_type = array_type.as_dyn_array();
|
||||
assert_eq!(bit_slice.len(), array_type.type_properties().bit_width);
|
||||
Self {
|
||||
array_type,
|
||||
get_element: bit_slice,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<GetElement: DebugMemoryDataGetElement> fmt::Debug for DebugMemoryData<GetElement> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.array_type.len() == 0 {
|
||||
return f.write_str("[]");
|
||||
}
|
||||
writeln!(f, "[\n // len = {:#x}", self.array_type.len())?;
|
||||
for element_index in 0..self.array_type.len() {
|
||||
let element = crate::util::BitSliceWriteWithBase(
|
||||
self.get_element.get_element(element_index, self.array_type),
|
||||
);
|
||||
writeln!(f, " [{element_index:#x}]: {element:#x},")?;
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
@@ -1,6 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub mod deduce_resets;
pub mod simplify_enums;
pub mod simplify_memories;
pub mod visit;
File diff suppressed because it is too large
@ -2,27 +2,26 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
array::{Array, ArrayType},
|
||||
bundle::{Bundle, BundleField, BundleType},
|
||||
bundle::{Bundle, BundleType},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr,
|
||||
ops::{self, EnumLiteral},
|
||||
CastBitsTo, CastToBits, Expr, ExprEnum, ToExpr,
|
||||
},
|
||||
hdl,
|
||||
int::UInt,
|
||||
intern::{Intern, InternSlice, Interned, Memoize},
|
||||
int::{DynSize, IntCmp, Size, UInt, UIntType},
|
||||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, Mem, MemPort},
|
||||
module::{
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Module, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::HashMap,
|
||||
wire::Wire,
|
||||
};
|
||||
use core::fmt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use hashbrown::HashMap;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SimplifyEnumsError {
|
||||
|
|
@ -42,72 +41,25 @@ impl fmt::Display for SimplifyEnumsError {
|
|||
|
||||
impl std::error::Error for SimplifyEnumsError {}
|
||||
|
||||
impl From<SimplifyEnumsError> for std::io::Error {
|
||||
fn from(value: SimplifyEnumsError) -> Self {
|
||||
std::io::Error::new(std::io::ErrorKind::Other, value)
|
||||
}
|
||||
}
|
||||
|
||||
fn contains_any_enum_types(ty: CanonicalType) -> bool {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
struct TheMemoize;
|
||||
impl Memoize for TheMemoize {
|
||||
type Input = CanonicalType;
|
||||
type InputOwned = CanonicalType;
|
||||
type Output = bool;
|
||||
|
||||
fn inner(self, ty: &Self::Input) -> Self::Output {
|
||||
match *ty {
|
||||
CanonicalType::Array(array_type) => contains_any_enum_types(array_type.element()),
|
||||
CanonicalType::Enum(_) => true,
|
||||
CanonicalType::Bundle(bundle) => bundle
|
||||
.fields()
|
||||
.iter()
|
||||
.any(|field| contains_any_enum_types(field.ty)),
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
TheMemoize.get_owned(ty)
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
struct TagAndBody<Tag, Body> {
|
||||
tag: Tag,
|
||||
body: Body,
|
||||
struct TagAndBody<T, BodyWidth: Size> {
|
||||
tag: T,
|
||||
body: UIntType<BodyWidth>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum EnumTypeState {
|
||||
TagEnumAndBody(TagAndBody<Enum, UInt>),
|
||||
TagUIntAndBody(TagAndBody<UInt, UInt>),
|
||||
TagEnumAndBody(TagAndBody<Enum, DynSize>),
|
||||
TagUIntAndBody(TagAndBody<UInt, DynSize>),
|
||||
UInt(UInt),
|
||||
Unchanged,
|
||||
}
|
||||
|
||||
struct ModuleState {
|
||||
module_name: NameId,
|
||||
}
|
||||
|
||||
impl ModuleState {
|
||||
fn gen_name(&mut self, name: &str) -> ScopedNameId {
|
||||
ScopedNameId(self.module_name, NameId(name.intern(), Id::new()))
|
||||
}
|
||||
}
|
||||
|
||||
struct State {
|
||||
enum_types: HashMap<Enum, EnumTypeState>,
|
||||
replacement_mem_ports: HashMap<MemPort<DynPortType>, Wire<CanonicalType>>,
|
||||
kind: SimplifyEnumsKind,
|
||||
module_state_stack: Vec<ModuleState>,
|
||||
name_id_gen: NameIdGen,
|
||||
}
|
||||
|
||||
impl State {
|
||||
|
|
@ -155,371 +107,6 @@ impl State {
|
|||
self.enum_types.insert(enum_type, retval.clone());
|
||||
Ok(retval)
|
||||
}
|
||||
#[hdl]
|
||||
fn handle_enum_literal(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
variant_index: usize,
|
||||
folded_variant_value: Option<Expr<CanonicalType>>,
|
||||
) -> Result<Expr<CanonicalType>, SimplifyEnumsError> {
|
||||
Ok(
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(TagAndBody { tag, body }) => Expr::canonical(
|
||||
#[hdl]
|
||||
TagAndBody {
|
||||
tag: EnumLiteral::new_by_index(tag, variant_index, None),
|
||||
body: match folded_variant_value {
|
||||
Some(variant_value) => variant_value.cast_to_bits().cast_to(body),
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
},
|
||||
),
|
||||
EnumTypeState::TagUIntAndBody(TagAndBody { tag, body }) => Expr::canonical(
|
||||
#[hdl]
|
||||
TagAndBody {
|
||||
tag: tag.from_int_wrapping(variant_index),
|
||||
body: match folded_variant_value {
|
||||
Some(folded_variant_value) => {
|
||||
folded_variant_value.cast_to_bits().cast_to(body)
|
||||
}
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
},
|
||||
),
|
||||
EnumTypeState::UInt(_) => {
|
||||
let tag = UInt[unfolded_enum_type.discriminant_bit_width()];
|
||||
let body = UInt[unfolded_enum_type.type_properties().bit_width - tag.width()];
|
||||
Expr::canonical(
|
||||
(#[hdl]
|
||||
TagAndBody {
|
||||
tag: tag.from_int_wrapping(variant_index),
|
||||
body: match folded_variant_value {
|
||||
Some(folded_variant_value) => {
|
||||
folded_variant_value.cast_to_bits().cast_to(body)
|
||||
}
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
})
|
||||
.cast_to_bits(),
|
||||
)
|
||||
}
|
||||
EnumTypeState::Unchanged => Expr::canonical(
|
||||
ops::EnumLiteral::new_by_index(
|
||||
unfolded_enum_type,
|
||||
variant_index,
|
||||
folded_variant_value,
|
||||
)
|
||||
.to_expr(),
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
fn handle_variant_access(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
folded_base_expr: Expr<CanonicalType>,
|
||||
variant_index: usize,
|
||||
) -> Result<Expr<CanonicalType>, SimplifyEnumsError> {
|
||||
let unfolded_variant_type = unfolded_enum_type.variants()[variant_index].ty;
|
||||
Ok(
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(_) | EnumTypeState::TagUIntAndBody(_) => {
|
||||
match unfolded_variant_type {
|
||||
Some(variant_type) => Expr::canonical(
|
||||
Expr::<TagAndBody<CanonicalType, UInt>>::from_canonical(
|
||||
folded_base_expr,
|
||||
)
|
||||
.body[..variant_type.bit_width()]
|
||||
.cast_bits_to(variant_type.fold(self)?),
|
||||
),
|
||||
None => Expr::canonical(().to_expr()),
|
||||
}
|
||||
}
|
||||
EnumTypeState::UInt(_) => match unfolded_variant_type {
|
||||
Some(variant_type) => {
|
||||
let base_int = Expr::<UInt>::from_canonical(folded_base_expr);
|
||||
let variant_type_bit_width = variant_type.bit_width();
|
||||
Expr::canonical(
|
||||
base_int[unfolded_enum_type.discriminant_bit_width()..]
|
||||
[..variant_type_bit_width]
|
||||
.cast_bits_to(variant_type.fold(self)?),
|
||||
)
|
||||
}
|
||||
None => Expr::canonical(().to_expr()),
|
||||
},
|
||||
EnumTypeState::Unchanged => match unfolded_variant_type {
|
||||
Some(_) => ops::VariantAccess::new_by_index(
|
||||
Expr::from_canonical(folded_base_expr),
|
||||
variant_index,
|
||||
)
|
||||
.to_expr(),
|
||||
None => Expr::canonical(().to_expr()),
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
fn handle_match(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
folded_expr: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
folded_blocks: &[Block],
|
||||
) -> Result<Stmt, SimplifyEnumsError> {
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(_) => Ok(StmtMatch {
|
||||
expr: Expr::<TagAndBody<Enum, UInt>>::from_canonical(folded_expr).tag,
|
||||
source_location,
|
||||
blocks: folded_blocks.intern(),
|
||||
}
|
||||
.into()),
|
||||
EnumTypeState::TagUIntAndBody(_) => {
|
||||
let int_tag_expr = Expr::<TagAndBody<UInt, UInt>>::from_canonical(folded_expr).tag;
|
||||
Ok(match_int_tag(int_tag_expr, source_location, folded_blocks).into())
|
||||
}
|
||||
EnumTypeState::UInt(_) => {
|
||||
let int_tag_expr = Expr::<UInt>::from_canonical(folded_expr)
|
||||
[..unfolded_enum_type.discriminant_bit_width()];
|
||||
Ok(match_int_tag(int_tag_expr, source_location, folded_blocks).into())
|
||||
}
|
||||
EnumTypeState::Unchanged => Ok(StmtMatch {
|
||||
expr: Expr::from_canonical(folded_expr),
|
||||
source_location,
|
||||
blocks: folded_blocks.intern(),
|
||||
}
|
||||
.into()),
|
||||
}
|
||||
}
|
||||
fn handle_stmt_connect_array(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Array,
|
||||
unfolded_rhs_ty: Array,
|
||||
folded_lhs: Expr<Array>,
|
||||
folded_rhs: Expr<Array>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
assert_eq!(unfolded_lhs_ty.len(), unfolded_rhs_ty.len());
|
||||
let unfolded_lhs_element_ty = unfolded_lhs_ty.element();
|
||||
let unfolded_rhs_element_ty = unfolded_rhs_ty.element();
|
||||
for array_index in 0..unfolded_lhs_ty.len() {
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_element_ty,
|
||||
unfolded_rhs_element_ty,
|
||||
folded_lhs[array_index],
|
||||
folded_rhs[array_index],
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect_bundle(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Bundle,
|
||||
unfolded_rhs_ty: Bundle,
|
||||
folded_lhs: Expr<Bundle>,
|
||||
folded_rhs: Expr<Bundle>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let unfolded_lhs_fields = unfolded_lhs_ty.fields();
|
||||
let unfolded_rhs_fields = unfolded_rhs_ty.fields();
|
||||
assert_eq!(unfolded_lhs_fields.len(), unfolded_rhs_fields.len());
|
||||
for (
|
||||
field_index,
|
||||
(
|
||||
&BundleField {
|
||||
name,
|
||||
flipped,
|
||||
ty: unfolded_lhs_field_ty,
|
||||
},
|
||||
unfolded_rhs_field,
|
||||
),
|
||||
) in unfolded_lhs_fields
|
||||
.iter()
|
||||
.zip(&unfolded_rhs_fields)
|
||||
.enumerate()
|
||||
{
|
||||
assert_eq!(name, unfolded_rhs_field.name);
|
||||
assert_eq!(flipped, unfolded_rhs_field.flipped);
|
||||
let folded_lhs_field =
|
||||
ops::FieldAccess::new_by_index(folded_lhs, field_index).to_expr();
|
||||
let folded_rhs_field =
|
||||
ops::FieldAccess::new_by_index(folded_rhs, field_index).to_expr();
|
||||
if flipped {
|
||||
// swap lhs/rhs
|
||||
self.handle_stmt_connect(
|
||||
unfolded_rhs_field.ty,
|
||||
unfolded_lhs_field_ty,
|
||||
folded_rhs_field,
|
||||
folded_lhs_field,
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
} else {
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_field_ty,
|
||||
unfolded_rhs_field.ty,
|
||||
folded_lhs_field,
|
||||
folded_rhs_field,
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect_enum(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Enum,
|
||||
unfolded_rhs_ty: Enum,
|
||||
folded_lhs: Expr<CanonicalType>,
|
||||
folded_rhs: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let unfolded_lhs_variants = unfolded_lhs_ty.variants();
|
||||
let unfolded_rhs_variants = unfolded_rhs_ty.variants();
|
||||
assert_eq!(unfolded_lhs_variants.len(), unfolded_rhs_variants.len());
|
||||
let mut folded_blocks = vec![];
|
||||
for (
|
||||
variant_index,
|
||||
(
|
||||
&EnumVariant {
|
||||
name,
|
||||
ty: unfolded_lhs_variant_ty,
|
||||
},
|
||||
unfolded_rhs_variant,
|
||||
),
|
||||
) in unfolded_lhs_variants
|
||||
.iter()
|
||||
.zip(&unfolded_rhs_variants)
|
||||
.enumerate()
|
||||
{
|
||||
let mut output_stmts = vec![];
|
||||
assert_eq!(name, unfolded_rhs_variant.name);
|
||||
assert_eq!(
|
||||
unfolded_lhs_variant_ty.is_some(),
|
||||
unfolded_rhs_variant.ty.is_some()
|
||||
);
|
||||
let folded_variant_value =
|
||||
if let (Some(unfolded_lhs_variant_ty), Some(unfolded_rhs_variant_ty)) =
|
||||
(unfolded_lhs_variant_ty, unfolded_rhs_variant.ty)
|
||||
{
|
||||
let lhs_wire = Wire::new_unchecked(
|
||||
self.module_state_stack
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.gen_name("__connect_variant_body"),
|
||||
source_location,
|
||||
unfolded_lhs_variant_ty.fold(self)?,
|
||||
);
|
||||
output_stmts.push(
|
||||
StmtWire {
|
||||
annotations: Interned::default(),
|
||||
wire: lhs_wire,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
let lhs_wire = lhs_wire.to_expr();
|
||||
let folded_rhs_variant =
|
||||
self.handle_variant_access(unfolded_rhs_ty, folded_rhs, variant_index)?;
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_variant_ty,
|
||||
unfolded_rhs_variant_ty,
|
||||
lhs_wire,
|
||||
folded_rhs_variant,
|
||||
source_location,
|
||||
&mut output_stmts,
|
||||
)?;
|
||||
Some(lhs_wire)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
output_stmts.push(
|
||||
StmtConnect {
|
||||
lhs: folded_lhs,
|
||||
rhs: self.handle_enum_literal(
|
||||
unfolded_lhs_ty,
|
||||
variant_index,
|
||||
folded_variant_value,
|
||||
)?,
|
||||
source_location,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
folded_blocks.push(Block {
|
||||
memories: Interned::default(),
|
||||
stmts: Intern::intern_owned(output_stmts),
|
||||
});
|
||||
}
|
||||
output_stmts.push(self.handle_match(
|
||||
unfolded_rhs_ty,
|
||||
folded_rhs,
|
||||
source_location,
|
||||
&folded_blocks,
|
||||
)?);
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: CanonicalType,
|
||||
unfolded_rhs_ty: CanonicalType,
|
||||
folded_lhs: Expr<CanonicalType>,
|
||||
folded_rhs: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let needs_expansion = unfolded_lhs_ty != unfolded_rhs_ty
|
||||
&& (contains_any_enum_types(unfolded_lhs_ty)
|
||||
|| contains_any_enum_types(unfolded_rhs_ty));
|
||||
if !needs_expansion {
|
||||
output_stmts.push(
|
||||
StmtConnect {
|
||||
lhs: folded_lhs,
|
||||
rhs: folded_rhs,
|
||||
source_location,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
match unfolded_lhs_ty {
|
||||
CanonicalType::Array(unfolded_lhs_ty) => self.handle_stmt_connect_array(
|
||||
unfolded_lhs_ty,
|
||||
Array::from_canonical(unfolded_rhs_ty),
|
||||
Expr::from_canonical(folded_lhs),
|
||||
Expr::from_canonical(folded_rhs),
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::Enum(unfolded_lhs_ty) => self.handle_stmt_connect_enum(
|
||||
unfolded_lhs_ty,
|
||||
Enum::from_canonical(unfolded_rhs_ty),
|
||||
folded_lhs,
|
||||
folded_rhs,
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::Bundle(unfolded_lhs_ty) => self.handle_stmt_connect_bundle(
|
||||
unfolded_lhs_ty,
|
||||
Bundle::from_canonical(unfolded_rhs_ty),
|
||||
Expr::from_canonical(folded_lhs),
|
||||
Expr::from_canonical(folded_rhs),
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn connect_port(
|
||||
|
|
@ -530,11 +117,11 @@ fn connect_port(
|
|||
) {
|
||||
if Expr::ty(lhs) == Expr::ty(rhs) {
|
||||
stmts.push(
|
||||
StmtConnect {
|
||||
dbg!(StmtConnect {
|
||||
lhs,
|
||||
rhs,
|
||||
source_location,
|
||||
}
|
||||
})
|
||||
.into(),
|
||||
);
|
||||
return;
|
||||
|
|
@ -582,9 +169,7 @@ fn connect_port(
|
|||
| (CanonicalType::Clock(_), _)
|
||||
| (CanonicalType::AsyncReset(_), _)
|
||||
| (CanonicalType::SyncReset(_), _)
|
||||
| (CanonicalType::Reset(_), _)
|
||||
| (CanonicalType::PhantomConst(_), _)
|
||||
| (CanonicalType::DynSimOnly(_), _) => unreachable!(
|
||||
| (CanonicalType::Reset(_), _) => unreachable!(
|
||||
"trying to connect memory ports:\n{:?}\n{:?}",
|
||||
Expr::ty(lhs),
|
||||
Expr::ty(rhs),
|
||||
|
|
@ -592,42 +177,6 @@ fn connect_port(
|
|||
}
|
||||
}

fn match_int_tag(
    int_tag_expr: Expr<UInt>,
    source_location: SourceLocation,
    folded_blocks: &[Block],
) -> StmtIf {
    let mut blocks_iter = folded_blocks.iter().copied().enumerate();
    let (_, last_block) = blocks_iter.next_back().unwrap_or_default();
    let Some((next_to_last_variant_index, next_to_last_block)) = blocks_iter.next_back() else {
        return StmtIf {
            cond: true.to_expr(),
            source_location,
            blocks: [last_block, Block::default()],
        };
    };
    let mut retval = StmtIf {
        cond: int_tag_expr
            .cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(next_to_last_variant_index)),
        source_location,
        blocks: [next_to_last_block, last_block],
    };
    for (variant_index, block) in blocks_iter.rev() {
        retval = StmtIf {
            cond: int_tag_expr.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(variant_index)),
            source_location,
            blocks: [
                block,
                Block {
                    memories: Default::default(),
                    stmts: [Stmt::from(retval)].intern_slice(),
                },
            ],
        };
    }
    retval
}
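For orientation, match_int_tag above folds the per-variant blocks into a right-nested if/else chain on the integer tag, building from the last two variants outward so the last block becomes the final else. A standalone sketch that prints the shape of the chain it effectively emits; the block names are placeholders, not fayalite statements:

    // Builds the nested-if text corresponding to match_int_tag's folding order.
    fn nested_if_chain(num_variants: usize) -> String {
        assert!(num_variants >= 2);
        // innermost comparison: next-to-last variant, with the last block as its else
        let mut s = format!(
            "if tag == {} {{ block{} }} else {{ block{} }}",
            num_variants - 2,
            num_variants - 2,
            num_variants - 1
        );
        // wrap the remaining variants outermost-first, like the .rev() loop above
        for i in (0..num_variants - 2).rev() {
            s = format!("if tag == {i} {{ block{i} }} else {{ {s} }}");
        }
        s
    }

    fn main() {
        // prints the chain for 4 variants:
        // if tag == 0 { block0 } else { if tag == 1 { block1 } else { if tag == 2 { block2 } else { block3 } } }
        println!("{}", nested_if_chain(4));
    }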
|
||||
|
||||
impl Folder for State {
|
||||
type Error = SimplifyEnumsError;
|
||||
|
||||
|
|
@ -636,32 +185,96 @@ impl Folder for State {
|
|||
}
|
||||
|
||||
fn fold_module<T: BundleType>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error> {
|
||||
self.module_state_stack.push(ModuleState {
|
||||
module_name: v.name_id(),
|
||||
});
|
||||
let old_name_id_gen =
|
||||
std::mem::replace(&mut self.name_id_gen, NameIdGen::for_module(v.canonical()));
|
||||
let retval = Fold::default_fold(v, self);
|
||||
self.module_state_stack.pop();
|
||||
self.name_id_gen = old_name_id_gen;
|
||||
retval
|
||||
}
|
||||
|
||||
fn fold_expr_enum(&mut self, op: ExprEnum) -> Result<ExprEnum, Self::Error> {
|
||||
match op {
|
||||
ExprEnum::EnumLiteral(op) => {
|
||||
let folded_variant_value = op.variant_value().map(|v| v.fold(self)).transpose()?;
|
||||
Ok(*Expr::expr_enum(self.handle_enum_literal(
|
||||
ExprEnum::EnumLiteral(op) => Ok(match self.get_or_make_enum_type_state(op.ty())? {
|
||||
EnumTypeState::TagEnumAndBody(TagAndBody { tag, body }) => *Expr::expr_enum(
|
||||
<TagAndBody<Enum, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(EnumLiteral::new_by_index(tag, op.variant_index(), None))
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => body.zero().to_expr(),
|
||||
})
|
||||
.to_expr(),
|
||||
),
|
||||
EnumTypeState::TagUIntAndBody(TagAndBody { tag, body }) => *Expr::expr_enum(
|
||||
<TagAndBody<UInt, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(tag.from_int_wrapping(op.variant_index()))
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => body.zero().to_expr(),
|
||||
})
|
||||
.to_expr(),
|
||||
),
|
||||
EnumTypeState::UInt(_) => *Expr::expr_enum(
|
||||
<TagAndBody<UInt, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(
|
||||
UIntType::new(op.ty().discriminant_bit_width())
|
||||
.from_int_wrapping(op.variant_index()),
|
||||
)
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => UIntType::new(
|
||||
op.ty().type_properties().bit_width
|
||||
- op.ty().discriminant_bit_width(),
|
||||
)
|
||||
.zero()
|
||||
.to_expr(),
|
||||
})
|
||||
.cast_to_bits(),
|
||||
),
|
||||
EnumTypeState::Unchanged => ExprEnum::EnumLiteral(ops::EnumLiteral::new_by_index(
|
||||
op.ty(),
|
||||
op.variant_index(),
|
||||
folded_variant_value,
|
||||
)?))
|
||||
}
|
||||
ExprEnum::VariantAccess(op) => {
|
||||
let folded_base_expr = Expr::canonical(op.base()).fold(self)?;
|
||||
Ok(*Expr::expr_enum(self.handle_variant_access(
|
||||
Expr::ty(op.base()),
|
||||
folded_base_expr,
|
||||
op.variant_index(),
|
||||
)?))
|
||||
}
|
||||
op.variant_value().map(|v| v.fold(self)).transpose()?,
|
||||
)),
|
||||
}),
|
||||
ExprEnum::VariantAccess(op) => Ok(
|
||||
match self.get_or_make_enum_type_state(Expr::ty(op.base()))? {
|
||||
EnumTypeState::TagEnumAndBody(_) | EnumTypeState::TagUIntAndBody(_) => {
|
||||
match op.variant_type() {
|
||||
Some(variant_type) => *Expr::expr_enum(
|
||||
Expr::<TagAndBody<CanonicalType, DynSize>>::from_canonical(
|
||||
(*Expr::expr_enum(op.base())).fold(self)?.to_expr(),
|
||||
)
|
||||
.body[..variant_type.bit_width()]
|
||||
.cast_bits_to(variant_type),
|
||||
),
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
}
|
||||
}
|
||||
EnumTypeState::UInt(_) => match op.variant_type() {
|
||||
Some(variant_type) => {
|
||||
let base_int = Expr::<UInt>::from_canonical(
|
||||
(*Expr::expr_enum(op.base())).fold(self)?.to_expr(),
|
||||
);
|
||||
dbg!(base_int);
|
||||
let base_ty = Expr::ty(op.base());
|
||||
let variant_type_bit_width = variant_type.bit_width();
|
||||
*Expr::expr_enum(
|
||||
base_int[base_ty.discriminant_bit_width()..]
|
||||
[..variant_type_bit_width]
|
||||
.cast_bits_to(variant_type),
|
||||
)
|
||||
}
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
},
|
||||
EnumTypeState::Unchanged => match op.variant_type() {
|
||||
Some(_) => ExprEnum::VariantAccess(ops::VariantAccess::new_by_index(
|
||||
op.base().fold(self)?,
|
||||
op.variant_index(),
|
||||
)),
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
},
|
||||
},
|
||||
),
|
||||
ExprEnum::MemPort(mem_port) => Ok(
|
||||
if let Some(&wire) = self.replacement_mem_ports.get(&mem_port) {
|
||||
ExprEnum::Wire(wire)
|
||||
|
|
@ -672,10 +285,8 @@ impl Folder for State {
|
|||
ExprEnum::UIntLiteral(_)
|
||||
| ExprEnum::SIntLiteral(_)
|
||||
| ExprEnum::BoolLiteral(_)
|
||||
| ExprEnum::PhantomConst(_)
|
||||
| ExprEnum::BundleLiteral(_)
|
||||
| ExprEnum::ArrayLiteral(_)
|
||||
| ExprEnum::Uninit(_)
|
||||
| ExprEnum::NotU(_)
|
||||
| ExprEnum::NotS(_)
|
||||
| ExprEnum::NotB(_)
|
||||
|
|
@ -772,9 +383,7 @@ impl Folder for State {
|
|||
| ExprEnum::ModuleIO(_)
|
||||
| ExprEnum::Instance(_)
|
||||
| ExprEnum::Wire(_)
|
||||
| ExprEnum::Reg(_)
|
||||
| ExprEnum::RegSync(_)
|
||||
| ExprEnum::RegAsync(_) => op.default_fold(self),
|
||||
| ExprEnum::Reg(_) => op.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -808,15 +417,11 @@ impl Folder for State {
|
|||
if wire_ty == new_port_ty {
|
||||
continue;
|
||||
}
|
||||
let wire_name = self.name_id_gen.gen(
|
||||
(*format!("{}_{}", memory.scoped_name().1 .0, port.port_name())).intern(),
|
||||
);
|
||||
let wire = Wire::new_unchecked(
|
||||
self.module_state_stack
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.gen_name(&format!(
|
||||
"{}_{}",
|
||||
memory.scoped_name().1.0,
|
||||
port.port_name()
|
||||
)),
|
||||
ScopedNameId(memory.scoped_name().0, wire_name),
|
||||
port.source_location(),
|
||||
wire_ty,
|
||||
);
|
||||
|
|
@ -859,50 +464,80 @@ impl Folder for State {
|
|||
}
|
||||
|
||||
fn fold_stmt(&mut self, stmt: Stmt) -> Result<Stmt, Self::Error> {
|
||||
fn match_int_tag(
|
||||
state: &mut State,
|
||||
int_tag_expr: Expr<UInt>,
|
||||
source_location: SourceLocation,
|
||||
blocks: Interned<[Block]>,
|
||||
) -> Result<StmtIf, SimplifyEnumsError> {
|
||||
let mut blocks_iter = blocks.iter().copied().enumerate();
|
||||
let (_, last_block) = blocks_iter.next_back().unwrap_or_default();
|
||||
let Some((next_to_last_variant_index, next_to_last_block)) = blocks_iter.next_back()
|
||||
else {
|
||||
return Ok(StmtIf {
|
||||
cond: true.to_expr(),
|
||||
source_location,
|
||||
blocks: [last_block.fold(state)?, Block::default()],
|
||||
});
|
||||
};
|
||||
let mut retval = StmtIf {
|
||||
cond: int_tag_expr
|
||||
.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(next_to_last_variant_index)),
|
||||
source_location,
|
||||
blocks: [next_to_last_block.fold(state)?, last_block.fold(state)?],
|
||||
};
|
||||
for (variant_index, block) in blocks_iter.rev() {
|
||||
retval = StmtIf {
|
||||
cond: int_tag_expr
|
||||
.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(variant_index)),
|
||||
source_location,
|
||||
blocks: [
|
||||
block.fold(state)?,
|
||||
Block {
|
||||
memories: Default::default(),
|
||||
stmts: [Stmt::from(retval)][..].intern(),
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
match stmt {
|
||||
Stmt::Match(StmtMatch {
|
||||
expr,
|
||||
source_location,
|
||||
blocks,
|
||||
}) => {
|
||||
let folded_expr = Expr::canonical(expr).fold(self)?;
|
||||
let folded_blocks = blocks.fold(self)?;
|
||||
self.handle_match(Expr::ty(expr), folded_expr, source_location, &folded_blocks)
|
||||
}
|
||||
Stmt::Connect(StmtConnect {
|
||||
lhs,
|
||||
rhs,
|
||||
source_location,
|
||||
}) => {
|
||||
let folded_lhs = lhs.fold(self)?;
|
||||
let folded_rhs = rhs.fold(self)?;
|
||||
let mut output_stmts = vec![];
|
||||
self.handle_stmt_connect(
|
||||
Expr::ty(lhs),
|
||||
Expr::ty(rhs),
|
||||
folded_lhs,
|
||||
folded_rhs,
|
||||
}) => match self.get_or_make_enum_type_state(Expr::ty(expr))? {
|
||||
EnumTypeState::TagEnumAndBody(_) => Ok(StmtMatch {
|
||||
expr: Expr::<TagAndBody<Enum, DynSize>>::from_canonical(
|
||||
Expr::canonical(expr).fold(self)?,
|
||||
)
|
||||
.tag,
|
||||
source_location,
|
||||
&mut output_stmts,
|
||||
)?;
|
||||
if output_stmts.len() == 1 {
|
||||
Ok(output_stmts.pop().unwrap())
|
||||
} else {
|
||||
Ok(StmtIf {
|
||||
cond: true.to_expr(),
|
||||
source_location,
|
||||
blocks: [
|
||||
Block {
|
||||
memories: Interned::default(),
|
||||
stmts: Intern::intern_owned(output_stmts),
|
||||
},
|
||||
Block::default(),
|
||||
],
|
||||
}
|
||||
.into())
|
||||
blocks: blocks.fold(self)?,
|
||||
}
|
||||
}
|
||||
Stmt::Formal(_) | Stmt::If(_) | Stmt::Declaration(_) => stmt.default_fold(self),
|
||||
.into()),
|
||||
EnumTypeState::TagUIntAndBody(_) => {
|
||||
let int_tag_expr = Expr::<TagAndBody<UInt, DynSize>>::from_canonical(
|
||||
Expr::canonical(expr).fold(self)?,
|
||||
)
|
||||
.tag;
|
||||
Ok(match_int_tag(self, int_tag_expr, source_location, blocks)?.into())
|
||||
}
|
||||
EnumTypeState::UInt(_) => {
|
||||
let int_tag_expr =
|
||||
Expr::<UInt>::from_canonical(Expr::canonical(expr).fold(self)?)
|
||||
[..Expr::ty(expr).discriminant_bit_width()];
|
||||
Ok(match_int_tag(self, int_tag_expr, source_location, blocks)?.into())
|
||||
}
|
||||
EnumTypeState::Unchanged => Ok(StmtMatch {
|
||||
expr: expr.fold(self)?,
|
||||
source_location,
|
||||
blocks: blocks.fold(self)?,
|
||||
}
|
||||
.into()),
|
||||
},
|
||||
Stmt::Connect(_) | Stmt::If(_) | Stmt::Declaration(_) => stmt.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -931,9 +566,7 @@ impl Folder for State {
|
|||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => canonical_type.default_fold(self),
|
||||
| CanonicalType::Reset(_) => canonical_type.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -941,10 +574,13 @@ impl Folder for State {
|
|||
unreachable!()
|
||||
}
|
||||
|
||||
fn fold_enum_literal<T: EnumType + Fold<Self>>(
|
||||
fn fold_enum_literal<T: EnumType>(
|
||||
&mut self,
|
||||
_v: ops::EnumLiteral<T>,
|
||||
) -> Result<ops::EnumLiteral<T>, Self::Error> {
|
||||
) -> Result<ops::EnumLiteral<T>, Self::Error>
|
||||
where
|
||||
T: Fold<Self>,
|
||||
{
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
|
|
@ -956,15 +592,10 @@ impl Folder for State {
|
|||
}
|
||||
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum SimplifyEnumsKind {
    SimplifyToEnumsWithNoBody,
    #[clap(name = "replace-with-bundle-of-uints")]
    #[serde(rename = "replace-with-bundle-of-uints")]
    ReplaceWithBundleOfUInts,
    #[clap(name = "replace-with-uint")]
    #[serde(rename = "replace-with-uint")]
    ReplaceWithUInt,
}

@@ -973,9 +604,9 @@ pub fn simplify_enums(
    kind: SimplifyEnumsKind,
) -> Result<Interned<Module<Bundle>>, SimplifyEnumsError> {
    module.fold(&mut State {
        enum_types: HashMap::default(),
        replacement_mem_ports: HashMap::default(),
        enum_types: HashMap::new(),
        replacement_mem_ports: HashMap::new(),
        kind,
        module_state_stack: vec![],
        name_id_gen: NameIdGen::default(),
    })
}
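As a side note on the clap/serde attributes on the master side of SimplifyEnumsKind above: rename_all = "kebab-case" alone would render ReplaceWithBundleOfUInts as "replace-with-bundle-of-u-ints", which is presumably why the explicit renames are there. A small standalone check of that serialization behaviour, using a local copy of the enum rather than the crate's type (requires the serde and serde_json crates):

    use serde::{Deserialize, Serialize};

    // Local re-declaration for illustration; mirrors the serde attributes in the diff.
    #[derive(Copy, Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
    #[serde(rename_all = "kebab-case")]
    enum SimplifyEnumsKind {
        SimplifyToEnumsWithNoBody,
        #[serde(rename = "replace-with-bundle-of-uints")]
        ReplaceWithBundleOfUInts,
        #[serde(rename = "replace-with-uint")]
        ReplaceWithUInt,
    }

    fn main() {
        let json = serde_json::to_string(&SimplifyEnumsKind::ReplaceWithUInt).unwrap();
        assert_eq!(json, "\"replace-with-uint\"");
        let parsed: SimplifyEnumsKind =
            serde_json::from_str("\"simplify-to-enums-with-no-body\"").unwrap();
        assert_eq!(parsed, SimplifyEnumsKind::SimplifyToEnumsWithNoBody);
    }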
|
||||
|
|
|
|||
|
|
@ -9,15 +9,16 @@ use crate::{
|
|||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemPort, PortType},
|
||||
module::{
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Module, NameId, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtWire,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::{HashMap, MakeMutSlice},
|
||||
util::MakeMutSlice,
|
||||
wire::Wire,
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use hashbrown::HashMap;
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
fmt::Write,
|
||||
|
|
@ -61,7 +62,6 @@ enum MemSplit {
|
|||
Bundle {
|
||||
fields: Rc<[MemSplit]>,
|
||||
},
|
||||
PhantomConst,
|
||||
Single {
|
||||
output_mem: Option<Mem>,
|
||||
element_type: SingleType,
|
||||
|
|
@ -76,7 +76,6 @@ impl MemSplit {
|
|||
fn mark_changed_element_type(self) -> Self {
|
||||
match self {
|
||||
MemSplit::Bundle { fields: _ } => self,
|
||||
MemSplit::PhantomConst => self,
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type,
|
||||
|
|
@ -98,7 +97,6 @@ impl MemSplit {
|
|||
.map(|field| Self::new(field.ty).mark_changed_element_type())
|
||||
.collect(),
|
||||
},
|
||||
CanonicalType::PhantomConst(_) => MemSplit::PhantomConst,
|
||||
CanonicalType::Array(ty) => {
|
||||
let element = MemSplit::new(ty.element());
|
||||
if let Self::Single {
|
||||
|
|
@ -194,7 +192,6 @@ impl MemSplit {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -324,9 +321,6 @@ impl SplitMemState<'_, '_> {
|
|||
Expr::field(Expr::<Bundle>::from_canonical(e), &field.name)
|
||||
},
|
||||
|initial_value_element| {
|
||||
let Some(field_offset) = field_offset.only_bit_width() else {
|
||||
todo!("memory containing sim-only values");
|
||||
};
|
||||
&initial_value_element[field_offset..][..field_ty_bit_width]
|
||||
},
|
||||
);
|
||||
|
|
@ -345,7 +339,6 @@ impl SplitMemState<'_, '_> {
|
|||
self.split_state_stack.pop();
|
||||
}
|
||||
}
|
||||
MemSplit::PhantomConst => {}
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type: single_type,
|
||||
|
|
@ -424,6 +417,7 @@ impl SplitMemState<'_, '_> {
|
|||
|
||||
struct ModuleState {
|
||||
output_module: Option<Interned<Module<Bundle>>>,
|
||||
name_id_gen: NameIdGen,
|
||||
memories: HashMap<ScopedNameId, MemState>,
|
||||
}
|
||||
|
||||
|
|
@ -545,12 +539,7 @@ impl ModuleState {
|
|||
};
|
||||
loop {
|
||||
match input_element_type {
|
||||
CanonicalType::Bundle(_) => {
|
||||
unreachable!("bundle types are always split")
|
||||
}
|
||||
CanonicalType::PhantomConst(_) => {
|
||||
unreachable!("PhantomConst are always removed")
|
||||
}
|
||||
CanonicalType::Bundle(_) => unreachable!("bundle types are always split"),
|
||||
CanonicalType::Enum(_)
|
||||
if input_array_types
|
||||
.first()
|
||||
|
|
@ -580,7 +569,7 @@ impl ModuleState {
|
|||
port_wmask.map(Expr::from_canonical),
|
||||
connect_read_enum,
|
||||
connect_write_enum,
|
||||
connect_write,
|
||||
connect_write_enum,
|
||||
),
|
||||
CanonicalType::Array(array_type) => {
|
||||
input_array_types.push(array_type);
|
||||
|
|
@ -624,7 +613,6 @@ impl ModuleState {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -638,10 +626,10 @@ impl ModuleState {
|
|||
mem_name_path: &str,
|
||||
split_state: &SplitState<'_>,
|
||||
) -> Mem {
|
||||
let mem_name = NameId(
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1.0)),
|
||||
Id::new(),
|
||||
);
|
||||
let mem_name = self.name_id_gen.gen(Intern::intern_owned(format!(
|
||||
"{}{mem_name_path}",
|
||||
input_mem.scoped_name().1 .0
|
||||
)));
|
||||
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
|
||||
let output_element_type = match single_type {
|
||||
SingleType::UInt(ty) => ty.canonical(),
|
||||
|
|
@ -756,8 +744,7 @@ impl ModuleState {
|
|||
..
|
||||
}
|
||||
| MemSplit::Bundle { .. }
|
||||
| MemSplit::Array { .. }
|
||||
| MemSplit::PhantomConst => {
|
||||
| MemSplit::Array { .. } => {
|
||||
let mut replacement_ports = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len());
|
||||
|
|
@ -766,10 +753,9 @@ impl ModuleState {
|
|||
let port_ty = port.ty();
|
||||
let NameId(mem_name, _) = input_mem.scoped_name().1;
|
||||
let port_name = port.port_name();
|
||||
let wire_name = NameId(
|
||||
Intern::intern_owned(format!("{mem_name}_{port_name}")),
|
||||
Id::new(),
|
||||
);
|
||||
let wire_name = self
|
||||
.name_id_gen
|
||||
.gen(Intern::intern_owned(format!("{mem_name}_{port_name}")));
|
||||
let wire = Wire::new_unchecked(
|
||||
ScopedNameId(input_mem.scoped_name().0, wire_name),
|
||||
port.source_location(),
|
||||
|
|
@ -780,7 +766,7 @@ impl ModuleState {
|
|||
output_stmts.push(
|
||||
StmtWire {
|
||||
annotations: Default::default(),
|
||||
wire: canonical_wire,
|
||||
wire: canonical_wire.clone(),
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
|
|
@ -901,7 +887,8 @@ impl Folder for State {
|
|||
module,
|
||||
ModuleState {
|
||||
output_module: None,
|
||||
memories: HashMap::default(),
|
||||
name_id_gen: NameIdGen::for_module(*module),
|
||||
memories: HashMap::new(),
|
||||
},
|
||||
);
|
||||
let mut this = PushedState::push_module(self, module);
|
||||
|
|
|
|||
|
|
@ -2,40 +2,32 @@
|
|||
// See Notices.txt for copyright information
|
||||
#![allow(clippy::multiple_bound_locations)]
|
||||
use crate::{
|
||||
annotations::{
|
||||
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
|
||||
},
|
||||
annotations::{Annotation, CustomFirrtlAnnotation, TargetedAnnotation},
|
||||
array::ArrayType,
|
||||
bundle::{Bundle, BundleField, BundleType},
|
||||
clock::Clock,
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
Expr, ExprEnum, ops,
|
||||
ops,
|
||||
target::{
|
||||
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
Expr, ExprEnum,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
|
||||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemPort, PortKind, PortName, PortType, ReadUnderWrite},
|
||||
module::{
|
||||
AnnotatedModuleIO, Block, BlockId, ExternModuleBody, ExternModuleParameter,
|
||||
ExternModuleParameterValue, Instance, Module, ModuleBody, ModuleIO, NameId,
|
||||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
|
||||
StmtInstance, StmtMatch, StmtReg, StmtWire,
|
||||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtIf, StmtInstance,
|
||||
StmtMatch, StmtReg, StmtWire,
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, SyncReset},
|
||||
sim::{ExternModuleSimulation, value::DynSimOnly},
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
vendor::xilinx::{
|
||||
XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation,
|
||||
},
|
||||
wire::Wire,
|
||||
};
|
||||
use num_bigint::{BigInt, BigUint};
|
||||
|
|
|
|||
|
|
@ -1,485 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::Bool,
|
||||
intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
serde_impls::{SerdeCanonicalType, SerdePhantomConst},
|
||||
},
|
||||
};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error},
|
||||
};
|
||||
use std::{
|
||||
any::Any,
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
ops::Index,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PhantomConstCanonicalValue {
|
||||
parsed: serde_json::Value,
|
||||
serialized: Interned<str>,
|
||||
}
|
||||
|
||||
impl PhantomConstCanonicalValue {
|
||||
pub fn from_json_value(parsed: serde_json::Value) -> Self {
|
||||
let serialized = Intern::intern_owned(
|
||||
serde_json::to_string(&parsed)
|
||||
.expect("conversion from json value to text shouldn't fail"),
|
||||
);
|
||||
Self { parsed, serialized }
|
||||
}
|
||||
pub fn as_json_value(&self) -> &serde_json::Value {
|
||||
&self.parsed
|
||||
}
|
||||
pub fn as_str(&self) -> Interned<str> {
|
||||
self.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PhantomConstCanonicalValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.serialized == other.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for PhantomConstCanonicalValue {}
|
||||
|
||||
impl Hash for PhantomConstCanonicalValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.serialized.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for PhantomConstCanonicalValue {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.parsed.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for PhantomConstCanonicalValue {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(Self::from_json_value(serde_json::Value::deserialize(
|
||||
deserializer,
|
||||
)?))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug {
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>;
|
||||
}
|
||||
|
||||
impl<T> PhantomConstValue for T
|
||||
where
|
||||
T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug,
|
||||
Interned<T>: DeserializeOwned,
|
||||
{
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
<Interned<T> as Deserialize<'de>>::deserialize(deserializer)
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`].
|
||||
/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json].
|
||||
pub struct PhantomConst<T: ?Sized + PhantomConstValue = PhantomConstCanonicalValue> {
|
||||
value: LazyInterned<T>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct PhantomConstWithoutGenerics;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics;
|
||||
|
||||
impl<T: Type + PhantomConstValue> Index<T> for PhantomConstWithoutGenerics {
|
||||
type Output = PhantomConst<T>;
|
||||
|
||||
fn index(&self, value: T) -> &Self::Output {
|
||||
Interned::into_inner(PhantomConst::new(value.intern()).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> fmt::Debug for PhantomConst<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("PhantomConst").field(&self.get()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Clone for PhantomConst<T> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Copy for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PartialEq for PhantomConst<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.get() == other.get()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Eq for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Hash for PhantomConst<T> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.get().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
struct PhantomConstCanonicalMemoize<T: ?Sized, const IS_FROM_CANONICAL: bool>(PhantomData<T>);
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Copy
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Clone
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Eq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> PartialEq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Hash
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, false> {
|
||||
type Input = Interned<T>;
|
||||
type InputOwned = Interned<T>;
|
||||
type Output = Interned<PhantomConstCanonicalValue>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
Intern::intern_sized(PhantomConstCanonicalValue::from_json_value(
|
||||
serde_json::to_value(input)
|
||||
.expect("serialization failed when constructing a canonical PhantomConst"),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, true> {
|
||||
type Input = Interned<PhantomConstCanonicalValue>;
|
||||
type InputOwned = Interned<PhantomConstCanonicalValue>;
|
||||
type Output = Interned<T>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
PhantomConstValue::deserialize_value(input.as_json_value())
|
||||
.expect("deserialization failed ")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PhantomConst<T> {
|
||||
pub fn new(value: Interned<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::Interned(value),
|
||||
}
|
||||
}
|
||||
pub const fn new_lazy(v: &'static dyn LazyInternedTrait<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::new_lazy(v),
|
||||
}
|
||||
}
|
||||
pub fn get(self) -> Interned<T> {
|
||||
self.value.interned()
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
<()>::TYPE_PROPERTIES
|
||||
}
|
||||
pub fn can_connect(self, other: Self) -> bool {
|
||||
self == other
|
||||
}
|
||||
pub fn canonical_phantom_const(self) -> PhantomConst {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<PhantomConst>(&self) {
|
||||
return retval;
|
||||
}
|
||||
<PhantomConst>::new(
|
||||
PhantomConstCanonicalMemoize::<T, false>(PhantomData).get_owned(self.get()),
|
||||
)
|
||||
}
|
||||
pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<Self>(&canonical_type) {
|
||||
return retval;
|
||||
}
|
||||
Self::new(
|
||||
PhantomConstCanonicalMemoize::<T, true>(PhantomData).get_owned(canonical_type.get()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Type for PhantomConst<T> {
|
||||
type BaseType = PhantomConst;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomConst<T>;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
()
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::PhantomConst(self.canonical_phantom_const())
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let CanonicalType::PhantomConst(phantom_const) = canonical_type else {
|
||||
panic!("expected PhantomConst");
|
||||
};
|
||||
Self::from_canonical_phantom_const(phantom_const)
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert!(opaque.is_empty());
|
||||
assert_eq!(*value, *self);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(*value, *self);
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Default for PhantomConst<T>
|
||||
where
|
||||
Interned<T>: Default,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> StaticType for PhantomConst<T>
|
||||
where
|
||||
Interned<T>: Default,
|
||||
{
|
||||
const TYPE: Self = PhantomConst {
|
||||
value: LazyInterned::new_lazy(&Interned::<T>::default),
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = ();
|
||||
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
type SerdeType<T> = SerdeCanonicalType<CanonicalType, SerdePhantomConst<Interned<T>>>;
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Serialize for PhantomConst<T> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
SerdeType::<T>::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst<T> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
match SerdeType::<T>::deserialize(deserializer)? {
|
||||
SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)),
|
||||
ty => Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&"a PhantomConst",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprPartialEq<Self> for PhantomConst<T> {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprPartialOrd<Self> for PhantomConst<T> {
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> SimValuePartialEq<Self> for PhantomConst<T> {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
|
||||
assert_eq!(SimValue::ty(this), SimValue::ty(other));
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValue for PhantomConst<T> {
|
||||
type Type = PhantomConst<T>;
|
||||
|
||||
fn to_sim_value(&self) -> SimValue<Self::Type> {
|
||||
SimValue::from_value(*self, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<PhantomConst<T>> for PhantomConst<T> {
|
||||
fn to_sim_value_with_type(&self, ty: PhantomConst<T>) -> SimValue<PhantomConst<T>> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<CanonicalType> for PhantomConst<T> {
|
||||
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self))
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait Sealed<T: ?Sized> {}
|
||||
}
|
||||
|
||||
pub trait PhantomConstGet<T: ?Sized + PhantomConstValue>: sealed::Sealed<T> {
|
||||
fn get(&self) -> Interned<T>;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
|
||||
sealed::Sealed<T> for This
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
|
||||
PhantomConstGet<T> for This
|
||||
{
|
||||
fn get(&self) -> Interned<T> {
|
||||
This::Target::get(&**self)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_phantom_const_get {
|
||||
(
|
||||
impl PhantomConstGet<$T:ident> for $ty:ty {
|
||||
fn $get:ident(&$get_self:ident) -> _ $get_body:block
|
||||
}
|
||||
) => {
|
||||
impl<$T: ?Sized + PhantomConstValue> sealed::Sealed<$T> for $ty {}
|
||||
|
||||
impl<$T: ?Sized + PhantomConstValue> PhantomConstGet<$T> for $ty {
|
||||
fn $get(&$get_self) -> Interned<$T> $get_body
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_phantom_const_get! {
|
||||
impl PhantomConstGet<T> for PhantomConst<T> {
|
||||
fn get(&self) -> _ {
|
||||
PhantomConst::get(*self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_phantom_const_get! {
|
||||
impl PhantomConstGet<T> for Expr<PhantomConst<T>> {
|
||||
fn get(&self) -> _ {
|
||||
PhantomConst::get(Expr::ty(*self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub trait ReturnSelfUnchanged<T: ?Sized> {
|
||||
type Type: ?Sized;
|
||||
}
|
||||
|
||||
impl<This: ?Sized, T: ?Sized> ReturnSelfUnchanged<T> for This {
|
||||
type Type = This;
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn type_alias_phantom_const_get_helper<T: ?Sized + PhantomConstValue, R: Intern + Clone>(
|
||||
param: impl PhantomConstGet<T>,
|
||||
get: impl FnOnce(Interned<T>) -> R,
|
||||
) -> &'static R {
|
||||
Interned::into_inner(get(param.get()).intern_sized())
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,62 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{intern::Intern, prelude::*};
use ordered_float::NotNan;
use serde::{Deserialize, Serialize};

#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ClockInputProperties {
    pub frequency: NotNan<f64>,
}

#[hdl(no_runtime_generics, no_static)]
pub struct ClockInput {
    pub clk: Clock,
    pub properties: PhantomConst<ClockInputProperties>,
}

impl ClockInput {
    #[track_caller]
    pub fn new(frequency: f64) -> Self {
        assert!(
            frequency > 0.0 && frequency.is_finite(),
            "invalid clock frequency: {frequency}"
        );
        Self {
            clk: Clock,
            properties: PhantomConst::new(
                ClockInputProperties {
                    frequency: NotNan::new(frequency).expect("just checked"),
                }
                .intern_sized(),
            ),
        }
    }
    pub fn frequency(self) -> f64 {
        self.properties.get().frequency.into_inner()
    }
}

#[hdl]
pub struct Led {
    pub on: Bool,
}

#[hdl]
pub struct RgbLed {
    pub r: Bool,
    pub g: Bool,
    pub b: Bool,
}

#[hdl]
/// UART, used as an output from the FPGA
pub struct Uart {
    /// transmit from the FPGA's perspective
    pub tx: Bool,
    /// receive from the FPGA's perspective
    #[hdl(flip)]
    pub rx: Bool,
}
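For context, the removed ClockInput carries its nominal frequency as a PhantomConst, so the value is available during elaboration rather than existing as hardware state. A hypothetical call site (not taken from the repository; assumes the fayalite prelude is in scope as in the crate's examples) would look roughly like:

    // Hypothetical usage of the removed peripheral type.
    let clk_in = ClockInput::new(100_000_000.0); // e.g. a 100 MHz board clock
    assert_eq!(clk_in.frequency(), 100_000_000.0);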
|
||||
|
|
@ -1,45 +1,22 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
pub use crate::{
|
||||
__,
|
||||
annotations::{
|
||||
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||
},
|
||||
annotations::Annotation,
|
||||
array::{Array, ArrayType},
|
||||
build::{BuildCli, JobParams, RunBuild},
|
||||
bundle::Bundle,
|
||||
cli::Cli,
|
||||
clock::{Clock, ClockDomain, ToClock},
|
||||
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr, repeat,
|
||||
},
|
||||
formal::{
|
||||
MakeFormalExpr, all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset,
|
||||
hdl_assert, hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable,
|
||||
},
|
||||
enum_::{HdlNone, HdlOption, HdlSome},
|
||||
expr::{CastBitsTo, CastTo, CastToBits, Expr, ReduceBits, ToExpr},
|
||||
hdl, hdl_module,
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
|
||||
int::{Bool, DynSize, IntCmp, KnownSize, SInt, SIntType, Size, UInt, UIntType},
|
||||
memory::{Mem, MemBuilder, ReadUnderWrite},
|
||||
module::{
|
||||
Instance, Module, ModuleBuilder, annotate, connect, connect_any, incomplete_wire, instance,
|
||||
memory, memory_array, memory_with_init, reg_builder, wire,
|
||||
annotate, connect, connect_any, instance, memory, memory_array, memory_with_init,
|
||||
reg_builder, wire, Instance, Module, ModuleBuilder,
|
||||
},
|
||||
phantom_const::{PhantomConst, PhantomConstGet},
|
||||
platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
sim::{
|
||||
ExternModuleSimulationState, Simulation,
|
||||
time::{SimDuration, SimInstant},
|
||||
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
testing::{FormalMode, assert_formal},
|
||||
ty::{AsMask, CanonicalType, Type},
|
||||
util::{ConstUsize, GenericConstUsize},
|
||||
wire::Wire,
|
||||
__,
|
||||
};
|
||||
pub use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
|
|
|
|||
|
|
@ -5,22 +5,21 @@ use crate::{
|
|||
expr::{Expr, Flow},
|
||||
intern::Interned,
|
||||
module::{NameId, ScopedNameId},
|
||||
reset::{Reset, ResetType},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Reg<T: Type, R: ResetType = Reset> {
|
||||
pub struct Reg<T: Type> {
|
||||
name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
init: Option<Expr<T>>,
|
||||
}
|
||||
|
||||
impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
name,
|
||||
|
|
@ -38,8 +37,8 @@ impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type, R: ResetType> Reg<T, R> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType, R> {
|
||||
impl<T: Type> Reg<T> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType> {
|
||||
let Self {
|
||||
name,
|
||||
source_location,
|
||||
|
|
@ -60,7 +59,7 @@ impl<T: Type, R: ResetType> Reg<T, R> {
|
|||
scoped_name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
init: Option<Expr<T>>,
|
||||
) -> Self {
|
||||
assert!(
|
||||
|
|
@ -99,7 +98,7 @@ impl<T: Type, R: ResetType> Reg<T, R> {
|
|||
pub fn scoped_name(&self) -> ScopedNameId {
|
||||
self.name
|
||||
}
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain<R>> {
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain> {
|
||||
self.clock_domain
|
||||
}
|
||||
pub fn init(&self) -> Option<Expr<T>> {
|
||||
|
|
|
|||
|
|
@ -1,55 +1,26 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
clock::Clock,
|
||||
expr::{Expr, ToExpr, ops},
|
||||
int::{Bool, SInt, UInt},
|
||||
expr::{Expr, ToExpr},
|
||||
int::Bool,
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use bitvec::{bits, order::Lsb0};
|
||||
|
||||
mod sealed {
|
||||
pub trait ResetTypeSealed {}
|
||||
}
|
||||
|
||||
pub trait ResetType:
|
||||
StaticType<MaskType = Bool>
|
||||
+ sealed::ResetTypeSealed
|
||||
+ ops::ExprCastTo<Bool>
|
||||
+ ops::ExprCastTo<Reset>
|
||||
+ ops::ExprCastTo<SyncReset>
|
||||
+ ops::ExprCastTo<AsyncReset>
|
||||
+ ops::ExprCastTo<Clock>
|
||||
+ ops::ExprCastTo<UInt<1>>
|
||||
+ ops::ExprCastTo<SInt<1>>
|
||||
+ ops::ExprCastTo<UInt>
|
||||
+ ops::ExprCastTo<SInt>
|
||||
{
|
||||
fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
|
||||
}
|
||||
|
||||
pub trait ResetTypeDispatch: Sized {
|
||||
type Input<T: ResetType>;
|
||||
type Output<T: ResetType>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
|
||||
}
|
||||
pub trait ResetType: StaticType<MaskType = Bool> + sealed::ResetTypeSealed {}
|
||||
|
||||
macro_rules! reset_type {
|
||||
($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => {
|
||||
($name:ident, $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal) => {
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct $name;
|
||||
|
||||
impl Type for $name {
|
||||
type BaseType = $name;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@ -71,31 +42,6 @@ macro_rules! reset_type {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
|
|
@ -115,21 +61,13 @@ macro_rules! reset_type {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: $is_castable_from_bits,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl sealed::ResetTypeSealed for $name {}
|
||||
|
||||
impl ResetType for $name {
|
||||
fn dispatch<D: ResetTypeDispatch>(
|
||||
input: D::Input<Self>,
|
||||
dispatch: D,
|
||||
) -> D::Output<Self> {
|
||||
dispatch.$dispatch_fn(input)
|
||||
}
|
||||
}
|
||||
impl ResetType for $name {}
|
||||
|
||||
pub trait $Trait {
|
||||
fn $trait_fn(&self) -> Expr<$name>;
|
||||
|
|
@ -153,21 +91,20 @@ macro_rules! reset_type {
|
|||
}
|
||||
}
|
||||
|
||||
$($impl_trait $Trait for Expr<$name> {
|
||||
impl $Trait for Expr<$name> {
|
||||
fn $trait_fn(&self) -> Expr<$name> {
|
||||
*self
|
||||
}
|
||||
})?
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset);
|
||||
reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset);
|
||||
reset_type!(AsyncReset, ToAsyncReset::to_async_reset, true);
|
||||
reset_type!(SyncReset, ToSyncReset::to_sync_reset, true);
|
||||
reset_type!(
|
||||
Reset,
|
||||
ToReset::to_reset,
|
||||
false, // Reset is not castable from bits because we don't know if it's async or sync
|
||||
reset
|
||||
false // Reset is not castable from bits because we don't know if it's async or sync
|
||||
);
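The `ResetType::dispatch` / `ResetTypeDispatch` pair introduced above is a GAT-based visitor: client code writes one `ResetTypeDispatch` impl and `dispatch` routes to the method matching `Reset`, `SyncReset`, or `AsyncReset`. The following self-contained sketch (stand-in structs, not part of this diff) shows the pattern in isolation:

// Minimal stand-ins for the three reset types named in the diff above.
struct Reset;
struct SyncReset;
struct AsyncReset;

trait ResetType: Sized {
    fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
}

// Generic associated types let each dispatcher pick its own per-type input/output.
trait ResetTypeDispatch: Sized {
    type Input<T: ResetType>;
    type Output<T: ResetType>;
    fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
    fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
    fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
}

macro_rules! impl_reset_type {
    ($name:ident, $dispatch_fn:ident) => {
        impl ResetType for $name {
            fn dispatch<D: ResetTypeDispatch>(
                input: D::Input<Self>,
                dispatch: D,
            ) -> D::Output<Self> {
                dispatch.$dispatch_fn(input)
            }
        }
    };
}
impl_reset_type!(Reset, reset);
impl_reset_type!(SyncReset, sync_reset);
impl_reset_type!(AsyncReset, async_reset);

// A dispatcher that just reports which reset type it was called for.
struct NameOf;
impl ResetTypeDispatch for NameOf {
    type Input<T: ResetType> = ();
    type Output<T: ResetType> = &'static str;
    fn reset(self, _input: ()) -> &'static str { "Reset" }
    fn sync_reset(self, _input: ()) -> &'static str { "SyncReset" }
    fn async_reset(self, _input: ()) -> &'static str { "AsyncReset" }
}

fn main() {
    assert_eq!(SyncReset::dispatch((), NameOf), "SyncReset");
}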
|
||||
|
||||
impl ToSyncReset for bool {
|
||||
|
|
|
|||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -1,397 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use std::{
|
||||
fmt,
|
||||
ops::{Add, AddAssign, Sub, SubAssign},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimInstant {
|
||||
time_since_start: SimDuration,
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const fn checked_add(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_add(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
pub const fn checked_duration_since(self, earlier: Self) -> Option<SimDuration> {
|
||||
self.time_since_start.checked_sub(earlier.time_since_start)
|
||||
}
|
||||
pub const fn checked_sub(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_sub(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
panic!(
|
||||
"tried to compute the duration since a later time -- durations can't be negative"
|
||||
);
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn saturating_duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
return SimDuration::ZERO;
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
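`SimInstant` is nothing more than a `SimDuration` measured from `SimInstant::START`, so subtracting instants can underflow and is therefore exposed through `checked_*` and `saturating_*` variants. A self-contained miniature of that design (plain `u128` attosecond counts, not the crate's types) shows the intended call pattern:

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default)]
struct Duration { attos: u128 }

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default)]
struct Instant { time_since_start: Duration }

impl Instant {
    const START: Instant = Instant { time_since_start: Duration { attos: 0 } };

    // Returns None instead of panicking when `earlier` is actually later.
    fn checked_duration_since(self, earlier: Self) -> Option<Duration> {
        self.time_since_start.attos
            .checked_sub(earlier.time_since_start.attos)
            .map(|attos| Duration { attos })
    }

    // Clamps a would-be-negative result to zero.
    fn saturating_duration_since(self, earlier: Self) -> Duration {
        self.checked_duration_since(earlier).unwrap_or_default()
    }
}

fn main() {
    let a = Instant { time_since_start: Duration { attos: 10 } };
    let b = Instant { time_since_start: Duration { attos: 25 } };
    assert_eq!(b.checked_duration_since(a), Some(Duration { attos: 15 }));
    // Durations can't be negative: the checked form fails, the saturating form clamps.
    assert_eq!(a.checked_duration_since(b), None);
    assert_eq!(a.saturating_duration_since(b), Duration { attos: 0 });
    assert_eq!(Instant::START.checked_duration_since(Instant::START), Some(Duration { attos: 0 }));
}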
|
||||
|
||||
impl Add<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(mut self, rhs: SimDuration) -> Self::Output {
|
||||
self += rhs;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
self.time_since_start += rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<SimInstant> for SimDuration {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimInstant) -> Self::Output {
|
||||
rhs.add(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimInstant {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimInstant) -> Self::Output {
|
||||
self.duration_since(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimDuration) -> Self::Output {
|
||||
let Some(retval) = self.checked_sub(rhs) else {
|
||||
panic!("SimInstant underflow");
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const START: SimInstant = SimInstant {
|
||||
time_since_start: SimDuration::ZERO,
|
||||
};
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimInstant {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.time_since_start.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimDuration {
|
||||
attos: u128,
|
||||
}
|
||||
|
||||
impl AddAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for SimDuration {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimDuration) -> Self::Output {
|
||||
SimDuration {
|
||||
attos: self
|
||||
.attos
|
||||
.checked_add(rhs.attos)
|
||||
.expect("overflow adding durations"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimDuration {
|
||||
type Output = Self;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: Self) -> Self::Output {
|
||||
SimDuration {
|
||||
attos: self
|
||||
.attos
|
||||
.checked_sub(rhs.attos)
|
||||
.expect("underflow subtracting durations -- durations can't be negative"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: Self) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct SimDurationParts {
|
||||
pub attos: u16,
|
||||
pub femtos: u16,
|
||||
pub picos: u16,
|
||||
pub nanos: u16,
|
||||
pub micros: u16,
|
||||
pub millis: u16,
|
||||
pub secs: u128,
|
||||
}
|
||||
|
||||
macro_rules! impl_duration_units {
|
||||
(
|
||||
$(
|
||||
#[unit_const = $UNIT:ident, from_units = $from_units:ident, as_units = $as_units:ident, units = $units:ident, suffix = $suffix:literal]
|
||||
const $log10_units_per_sec:ident: u32 = $log10_units_per_sec_value:expr;
|
||||
)*
|
||||
) => {
|
||||
impl SimDuration {
|
||||
$(
|
||||
const $log10_units_per_sec: u32 = $log10_units_per_sec_value;
|
||||
pub const fn $from_units($units: u128) -> Self {
|
||||
Self::from_units_helper::<{ Self::$log10_units_per_sec }>($units)
|
||||
}
|
||||
pub const fn $as_units(self) -> u128 {
|
||||
self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }
|
||||
}
|
||||
)*
|
||||
pub const fn to_parts(mut self) -> SimDurationParts {
|
||||
$(
|
||||
let $units = self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
self.attos %= const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
)*
|
||||
SimDurationParts {
|
||||
$($units: $units as _,)*
|
||||
}
|
||||
}
|
||||
pub const fn from_parts_checked(parts: SimDurationParts) -> Option<Self> {
|
||||
let attos = 0u128;
|
||||
$(
|
||||
let Some(product) = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }.checked_mul(parts.$units as u128) else {
|
||||
return None;
|
||||
};
|
||||
let Some(attos) = attos.checked_add(product) else {
|
||||
return None;
|
||||
};
|
||||
)*
|
||||
Some(Self {
|
||||
attos,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimDuration {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let ilog10_attos = match self.attos.checked_ilog10() {
|
||||
Some(v) => v,
|
||||
None => Self::LOG10_ATTOS_PER_SEC,
|
||||
};
|
||||
let (suffix, int, fraction, fraction_digits) =
|
||||
match Self::LOG10_ATTOS_PER_SEC.saturating_sub(ilog10_attos) {
|
||||
$(
|
||||
..=Self::$log10_units_per_sec => {
|
||||
let divisor = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
(
|
||||
$suffix,
|
||||
self.attos / divisor,
|
||||
self.attos % divisor,
|
||||
(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) as usize,
|
||||
)
|
||||
},
|
||||
)*
|
||||
_ => unreachable!(),
|
||||
};
|
||||
write!(f, "{int}")?;
|
||||
if fraction != 0 {
|
||||
write!(f, ".{fraction:0fraction_digits$}")?;
|
||||
}
|
||||
write!(f, " {suffix}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_duration_debug() {
|
||||
$(
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(123)),
|
||||
concat!("123 ", $suffix)
|
||||
);
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(1)),
|
||||
concat!("1 ", $suffix),
|
||||
);
|
||||
let mut v = SimDuration::$from_units(1);
|
||||
if v.attos < 1 << 53 {
|
||||
v.attos += 1;
|
||||
assert_eq!(
|
||||
format!("{v:?}"),
|
||||
format!("{} {}", v.attos as f64 / 10.0f64.powf((SimDuration::LOG10_ATTOS_PER_SEC - SimDuration::$log10_units_per_sec) as f64), $suffix),
|
||||
"1 {} + 1 as == {} as", $suffix, v.attos,
|
||||
);
|
||||
}
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_duration_units! {
|
||||
#[unit_const = SECOND, from_units = from_secs, as_units = as_secs, units = secs, suffix = "s"]
|
||||
const LOG10_SECS_PER_SEC: u32 = 0;
|
||||
#[unit_const = MILLISECOND, from_units = from_millis, as_units = as_millis, units = millis, suffix = "ms"]
|
||||
const LOG10_MILLIS_PER_SEC: u32 = 3;
|
||||
#[unit_const = MICROSECOND, from_units = from_micros, as_units = as_micros, units = micros, suffix = "μs"]
|
||||
const LOG10_MICROS_PER_SEC: u32 = 6;
|
||||
#[unit_const = NANOSECOND, from_units = from_nanos, as_units = as_nanos, units = nanos, suffix = "ns"]
|
||||
const LOG10_NANOS_PER_SEC: u32 = 9;
|
||||
#[unit_const = PICOSECOND, from_units = from_picos, as_units = as_picos, units = picos, suffix = "ps"]
|
||||
const LOG10_PICOS_PER_SEC: u32 = 12;
|
||||
#[unit_const = FEMTOSECOND, from_units = from_femtos, as_units = as_femtos, units = femtos, suffix = "fs"]
|
||||
const LOG10_FEMTOS_PER_SEC: u32 = 15;
|
||||
#[unit_const = ATTOSECOND, from_units = from_attos, as_units = as_attos, units = attos, suffix = "as"]
|
||||
const LOG10_ATTOS_PER_SEC: u32 = 18;
|
||||
}
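Each line of the `impl_duration_units!` invocation above expands to a `from_*` / `as_*` pair that scales between that unit and the attosecond base by a power of ten. A hand-expanded, self-contained sketch of just the millisecond case (simplified; the real macro also generates `to_parts`, `from_parts_checked`, the `Debug` impl, and a test):

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Dur { attos: u128 }

impl Dur {
    const LOG10_ATTOS_PER_SEC: u32 = 18;
    const LOG10_MILLIS_PER_SEC: u32 = 3;

    // from_millis: multiply by 10^(18 - 3) attoseconds per millisecond.
    const fn from_millis(millis: u128) -> Self {
        let scale = 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::LOG10_MILLIS_PER_SEC);
        match millis.checked_mul(scale) {
            Some(attos) => Self { attos },
            None => panic!("duration too big"),
        }
    }

    // as_millis: integer-divide back down, discarding sub-millisecond precision.
    const fn as_millis(self) -> u128 {
        self.attos / 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::LOG10_MILLIS_PER_SEC)
    }
}

fn main() {
    let d = Dur::from_millis(1_500);
    assert_eq!(d.attos, 1_500_000_000_000_000_000);
    assert_eq!(d.as_millis(), 1_500);
}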
|
||||
|
||||
impl SimDuration {
|
||||
const fn from_units_helper<const UNITS_PER_SEC: u32>(units: u128) -> Self {
|
||||
let Some(attos) =
|
||||
units.checked_mul(const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - UNITS_PER_SEC) })
|
||||
else {
|
||||
panic!("duration too big");
|
||||
};
|
||||
Self { attos }
|
||||
}
|
||||
pub const ZERO: SimDuration = SimDuration::from_secs(0);
|
||||
pub const fn from_parts(parts: SimDurationParts) -> Self {
|
||||
match Self::from_parts_checked(parts) {
|
||||
Some(v) => v,
|
||||
None => panic!("duration too big"),
|
||||
}
|
||||
}
|
||||
pub const fn abs_diff(self, other: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.abs_diff(other.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_add(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_sub(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn is_zero(self) -> bool {
|
||||
self.attos == 0
|
||||
}
|
||||
pub const fn saturating_add(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_add(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn saturating_sub(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_sub(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_ilog10(self) -> Option<i32> {
|
||||
let Some(ilog10_attos) = self.attos.checked_ilog10() else {
|
||||
return None;
|
||||
};
|
||||
Some(ilog10_attos as i32 - Self::LOG10_ATTOS_PER_SEC as i32)
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn ilog10(self) -> i32 {
|
||||
let Some(retval) = self.checked_ilog10() else {
|
||||
panic!("tried to take the ilog10 of 0");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn checked_pow10(log10: i32, underflow_is_zero: bool) -> Option<Self> {
|
||||
let Some(log10) = Self::LOG10_ATTOS_PER_SEC.checked_add_signed(log10) else {
|
||||
return if log10 < 0 && underflow_is_zero {
|
||||
Some(Self::ZERO)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
};
|
||||
let Some(attos) = 10u128.checked_pow(log10) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn pow10(log10: i32) -> Self {
|
||||
let Some(retval) = Self::checked_pow10(log10, true) else {
|
||||
panic!("pow10 overflowed");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn is_power_of_ten(self) -> bool {
|
||||
const TEN: u128 = 10;
|
||||
const NUMBER_OF_POWERS_OF_TEN: usize = {
|
||||
let mut n = 0;
|
||||
while let Some(_) = TEN.checked_pow(n as u32) {
|
||||
n += 1;
|
||||
}
|
||||
n
|
||||
};
|
||||
const POWERS_OF_TEN: [u128; NUMBER_OF_POWERS_OF_TEN] = {
|
||||
let mut retval = [0; NUMBER_OF_POWERS_OF_TEN];
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
retval[i] = TEN.pow(i as u32);
|
||||
i += 1;
|
||||
}
|
||||
retval
|
||||
};
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
if self.attos == POWERS_OF_TEN[i] {
|
||||
return true;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
false
|
||||
}
|
||||
}
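`is_power_of_ten` above builds its lookup table entirely at compile time using `const` blocks with `while` loops, since `for` is not yet usable in const evaluation. The same technique in isolation, as a minimal sketch:

const TEN: u128 = 10;

// Count how many powers of ten fit in u128 (10^0 through 10^38, i.e. 39 of them).
const NUM_POWERS: usize = {
    let mut n = 0;
    while TEN.checked_pow(n as u32).is_some() {
        n += 1;
    }
    n
};

// Fill the table with a const-evaluated while loop.
const POWERS_OF_TEN: [u128; NUM_POWERS] = {
    let mut table = [0; NUM_POWERS];
    let mut i = 0;
    while i < NUM_POWERS {
        table[i] = TEN.pow(i as u32);
        i += 1;
    }
    table
};

fn is_power_of_ten(x: u128) -> bool {
    // The table is sorted ascending, so a binary search suffices at runtime.
    POWERS_OF_TEN.binary_search(&x).is_ok()
}

fn main() {
    assert_eq!(NUM_POWERS, 39);
    assert!(is_power_of_ten(1));
    assert!(is_power_of_ten(1_000_000_000_000_000_000));
    assert!(!is_power_of_ten(12));
}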
|
||||
|
||||
impl From<Duration> for SimDuration {
|
||||
fn from(duration: Duration) -> Self {
|
||||
Self::from_nanos(duration.as_nanos())
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
|
|
@ -1,304 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
//! `unsafe` parts of [`DynSimOnlyValue`]
|
||||
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
use std::{
|
||||
any::{self, TypeId},
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
mem::ManuallyDrop,
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
pub trait SimOnlyValueTrait:
|
||||
'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: 'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default>
|
||||
SimOnlyValueTrait for T
|
||||
{
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` must return `TypeId::of::<T>()` where `Self = SimOnly<T>`
|
||||
unsafe trait DynSimOnlyTrait: 'static + Send + Sync {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn type_name(&self) -> &'static str;
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait>;
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>>;
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` is implemented correctly
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyTrait for SimOnly<T> {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn type_name(&self) -> &'static str {
|
||||
any::type_name::<T>()
|
||||
}
|
||||
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait> {
|
||||
Rc::new(T::default())
|
||||
}
|
||||
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>> {
|
||||
Ok(Rc::<T>::new(serde_json::from_str(json_str)?))
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` must return `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` must return `TypeId::of::<Self>()`.
|
||||
unsafe trait DynSimOnlyValueTrait: 'static + fmt::Debug {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn ty(&self) -> DynSimOnly;
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool;
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String>;
|
||||
fn hash_dyn(&self, state: &mut dyn Hasher);
|
||||
}
|
||||
|
||||
impl dyn DynSimOnlyValueTrait {
|
||||
fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
Self::type_id_dyn(self) == TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
if Self::is::<T>(self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Some(unsafe { &*(self as *const Self as *const T) })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn downcast_rc<T: SimOnlyValueTrait>(self: Rc<Self>) -> Result<Rc<T>, Rc<Self>> {
|
||||
if Self::is::<T>(&*self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Ok(unsafe { Rc::from_raw(Rc::into_raw(self) as *const T) })
|
||||
} else {
|
||||
Err(self)
|
||||
}
|
||||
}
|
||||
}
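The `downcast_ref` / `downcast_rc` helpers above re-implement `Any`-style downcasting by hand: compare the stored `TypeId` against `TypeId::of::<T>()`, and only after that check passes reinterpret the pointer. The standard library's `Any` trait packages exactly this check-then-cast; a short self-contained sketch of the safe equivalent:

use std::any::Any;
use std::rc::Rc;

// `Rc::downcast` compares TypeIds internally and only then casts the pointer,
// mirroring what `downcast_rc` does manually in the code above.
fn describe(value: Rc<dyn Any>) -> String {
    match value.downcast::<u32>() {
        Ok(n) => format!("a u32: {n}"),
        Err(value) => match value.downcast::<String>() {
            Ok(s) => format!("a String: {s:?}"),
            Err(_) => "something else".to_string(),
        },
    }
}

fn main() {
    assert_eq!(describe(Rc::new(7u32)), "a u32: 7");
    assert_eq!(describe(Rc::new("hi".to_string())), "a String: \"hi\"");
    assert_eq!(describe(Rc::new(3.5f64)), "something else");
}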
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` returns `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` returns `TypeId::of::<Self>()`.
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyValueTrait for T {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn ty(&self) -> DynSimOnly {
|
||||
DynSimOnly::of::<T>()
|
||||
}
|
||||
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool {
|
||||
other.downcast_ref::<T>().is_some_and(|other| self == other)
|
||||
}
|
||||
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
serde_json::to_string(self)
|
||||
}
|
||||
|
||||
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
|
||||
self.hash(&mut state);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DynSimOnly {
|
||||
ty: &'static dyn DynSimOnlyTrait,
|
||||
}
|
||||
|
||||
impl DynSimOnly {
|
||||
pub const fn of<T: SimOnlyValueTrait>() -> Self {
|
||||
Self {
|
||||
ty: &const { SimOnly::<T>::new() },
|
||||
}
|
||||
}
|
||||
pub fn type_id(self) -> TypeId {
|
||||
self.ty.type_id_dyn()
|
||||
}
|
||||
pub fn type_name(self) -> &'static str {
|
||||
self.ty.type_name()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(self) -> bool {
|
||||
self.type_id() == TypeId::of::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Option<SimOnly<T>> {
|
||||
self.is::<T>().then_some(SimOnly::default())
|
||||
}
|
||||
pub fn deserialize_from_json_string(
|
||||
self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<DynSimOnlyValue> {
|
||||
self.ty
|
||||
.deserialize_from_json_string(json_str)
|
||||
.map(DynSimOnlyValue)
|
||||
}
|
||||
pub fn default_value(self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue(self.ty.default_value())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnly {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
Self::type_id(*self) == Self::type_id(*other)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnly {}
|
||||
|
||||
impl Hash for DynSimOnly {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
Self::type_id(*self).hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for DynSimOnly {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "SimOnly<{}>", self.ty.type_name())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnly<T>> for DynSimOnly {
|
||||
fn from(value: SimOnly<T>) -> Self {
|
||||
let SimOnly(PhantomData) = value;
|
||||
Self::of::<T>()
|
||||
}
|
||||
}
|
||||
|
||||
/// the [`Type`][Type] for a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
|
||||
///
|
||||
/// [Type]: crate::ty::Type
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
pub struct SimOnly<T: SimOnlyValueTrait>(PhantomData<fn(T) -> T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> fmt::Debug for SimOnly<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
DynSimOnly::of::<T>().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnly<T> {
|
||||
pub const fn new() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Copy for SimOnly<T> {}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Default for SimOnly<T> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
|
||||
#[derive(Clone, Eq, PartialEq, Hash, Default, PartialOrd, Ord)]
|
||||
pub struct SimOnlyValue<T: SimOnlyValueTrait>(Rc<T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnlyValue<T> {
|
||||
pub fn with_dyn_ref<F: FnOnce(&DynSimOnlyValue) -> R, R>(&self, f: F) -> R {
|
||||
// Safety: creating a copied `Rc<T>` is safe as long as the copy isn't dropped and isn't changed
|
||||
// to point somewhere else, `f` can't change `dyn_ref` because it's only given a shared reference.
|
||||
let dyn_ref =
|
||||
unsafe { ManuallyDrop::new(DynSimOnlyValue(Rc::<T>::from_raw(Rc::as_ptr(&self.0)))) };
|
||||
f(&dyn_ref)
|
||||
}
|
||||
pub fn from_rc(v: Rc<T>) -> Self {
|
||||
Self(v)
|
||||
}
|
||||
pub fn new(v: T) -> Self {
|
||||
Self(Rc::new(v))
|
||||
}
|
||||
pub fn into_inner(this: Self) -> Rc<T> {
|
||||
this.0
|
||||
}
|
||||
pub fn inner_mut(this: &mut Self) -> &mut Rc<T> {
|
||||
&mut this.0
|
||||
}
|
||||
pub fn inner(this: &Self) -> &Rc<T> {
|
||||
&this.0
|
||||
}
|
||||
pub fn into_dyn(this: Self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue::from(this)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> std::ops::Deref for SimOnlyValue<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> std::ops::DerefMut for SimOnlyValue<T> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
Rc::make_mut(&mut self.0)
|
||||
}
|
||||
}
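The `DerefMut` impl above goes through `Rc::make_mut`, so mutating a `SimOnlyValue` clones the inner data only when the `Rc` is shared (copy-on-write); a unique owner mutates in place. A standalone sketch of that behavior:

use std::rc::Rc;

fn main() {
    let mut a = Rc::new(vec![1, 2, 3]);
    let b = Rc::clone(&a); // now shared: strong count == 2

    // make_mut sees the sharing, clones the Vec for `a`, and leaves `b` untouched.
    Rc::make_mut(&mut a).push(4);
    assert_eq!(*a, vec![1, 2, 3, 4]);
    assert_eq!(*b, vec![1, 2, 3]);

    // `a` is now a sole owner, so further make_mut calls mutate in place.
    drop(b);
    assert_eq!(Rc::strong_count(&a), 1);
    Rc::make_mut(&mut a).push(5);
    assert_eq!(*a, vec![1, 2, 3, 4, 5]);
}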
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DynSimOnlyValue(Rc<dyn DynSimOnlyValueTrait>);
|
||||
|
||||
impl fmt::Debug for DynSimOnlyValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
<dyn DynSimOnlyValueTrait as fmt::Debug>::fmt(&*self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnlyValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
DynSimOnlyValueTrait::eq_dyn(&*self.0, &*other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnlyValue {}
|
||||
|
||||
impl Hash for DynSimOnlyValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
DynSimOnlyValueTrait::hash_dyn(&*self.0, state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnlyValue<T>> for DynSimOnlyValue {
|
||||
fn from(value: SimOnlyValue<T>) -> Self {
|
||||
Self(value.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl DynSimOnlyValue {
|
||||
pub fn ty(&self) -> DynSimOnly {
|
||||
self.0.ty()
|
||||
}
|
||||
pub fn type_id(&self) -> TypeId {
|
||||
self.0.type_id_dyn()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
self.0.is::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Result<SimOnlyValue<T>, DynSimOnlyValue> {
|
||||
match <dyn DynSimOnlyValueTrait>::downcast_rc(self.0) {
|
||||
Ok(v) => Ok(SimOnlyValue(v)),
|
||||
Err(v) => Err(Self(v)),
|
||||
}
|
||||
}
|
||||
pub fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
<dyn DynSimOnlyValueTrait>::downcast_ref(&*self.0)
|
||||
}
|
||||
pub fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
self.0.serialize_to_json_string()
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
|
|
@ -2,8 +2,9 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
intern::{Intern, Interned},
|
||||
util::{DebugAsDisplay, HashMap},
|
||||
util::DebugAsDisplay,
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use std::{cell::RefCell, fmt, num::NonZeroUsize, panic, path::Path};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
|
|
@ -96,7 +97,7 @@ impl NormalizeFilesForTestsState {
|
|||
fn new() -> Self {
|
||||
Self {
|
||||
test_position: panic::Location::caller(),
|
||||
file_pattern_matches: HashMap::default(),
|
||||
file_pattern_matches: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -142,7 +143,7 @@ impl From<&'_ panic::Location<'_>> for SourceLocation {
|
|||
map.entry_ref(file)
|
||||
.or_insert_with(|| NormalizedFileForTestState {
|
||||
file_name_id: NonZeroUsize::new(len + 1).unwrap(),
|
||||
positions_map: HashMap::default(),
|
||||
positions_map: HashMap::new(),
|
||||
});
|
||||
file_str = m.generate_file_name(file_state.file_name_id);
|
||||
file = &file_str;
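The lookups above use hashbrown's `entry_ref` API, which takes a borrowed key and defers allocating an owned key until the entry is actually vacant and inserted. A self-contained sketch of the same pattern (requires the `hashbrown` crate; not part of this diff):

use hashbrown::HashMap;

fn main() {
    let mut counts: HashMap<String, u64> = HashMap::new();
    for word in ["a", "b", "a", "a"] {
        counts
            .entry_ref(word) // &str key: no String allocation when the entry already exists
            .and_modify(|n| *n += 1)
            .or_insert(1);
    }
    assert_eq!(counts["a"], 3);
    assert_eq!(counts["b"], 1);
}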
|
||||
|
|
|
|||
|
|
@ -1,224 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJobArgs, BaseJobKind, GlobalParams, JobArgsAndDependencies, JobKindAndArgs, JobParams,
|
||||
NoArgs, RunBuild,
|
||||
external::{ExternalCommandArgs, ExternalCommandJobKind},
|
||||
firrtl::{FirrtlArgs, FirrtlJobKind},
|
||||
formal::{Formal, FormalAdditionalArgs, FormalArgs, WriteSbyFileJobKind},
|
||||
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
|
||||
},
|
||||
bundle::BundleType,
|
||||
firrtl::ExportOptions,
|
||||
module::Module,
|
||||
util::HashMap,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
#[derive(
|
||||
clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize,
|
||||
)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CargoMetadata {
|
||||
target_directory: String,
|
||||
}
|
||||
|
||||
fn get_cargo_target_dir() -> &'static Path {
|
||||
static RETVAL: OnceLock<PathBuf> = OnceLock::new();
|
||||
RETVAL.get_or_init(|| {
|
||||
let output = Command::new(
|
||||
std::env::var_os("CARGO")
|
||||
.as_deref()
|
||||
.unwrap_or("cargo".as_ref()),
|
||||
)
|
||||
.arg("metadata")
|
||||
.output()
|
||||
.expect("can't run `cargo metadata`");
|
||||
if !output.status.success() {
|
||||
panic!(
|
||||
"can't run `cargo metadata`:\n{}\nexited with status: {}",
|
||||
String::from_utf8_lossy(&output.stderr),
|
||||
output.status
|
||||
);
|
||||
}
|
||||
let CargoMetadata { target_directory } =
|
||||
serde_json::from_slice(&output.stdout).expect("can't parse output of `cargo metadata`");
|
||||
PathBuf::from(target_directory)
|
||||
})
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
||||
static DIRS: Mutex<Option<HashMap<String, u64>>> = Mutex::new(None);
|
||||
let test_name = test_name.to_string();
|
||||
// don't use line/column numbers since that constantly changes as you edit tests
|
||||
let file = std::panic::Location::caller().file();
|
||||
// simple reproducible hash
|
||||
let simple_hash = file.bytes().chain(test_name.bytes()).fold(
|
||||
((file.len() as u32) << 16).wrapping_add(test_name.len() as u32),
|
||||
|mut h, b| {
|
||||
h = h.wrapping_mul(0xaa0d184b);
|
||||
h ^= h.rotate_right(5);
|
||||
h ^= h.rotate_right(13);
|
||||
h.wrapping_add(b as u32)
|
||||
},
|
||||
);
|
||||
let mut dir = String::with_capacity(64);
|
||||
for ch in Path::new(file)
|
||||
.file_stem()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.chars()
|
||||
.chain(['-'])
|
||||
.chain(test_name.chars())
|
||||
{
|
||||
dir.push(match ch {
|
||||
ch if ch.is_alphanumeric() => ch,
|
||||
'_' | '-' | '+' | '.' | ',' | ' ' => ch,
|
||||
_ => '_',
|
||||
});
|
||||
}
|
||||
write!(dir, "-{simple_hash:08x}").unwrap();
|
||||
let index = *DIRS
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_or_insert_with(HashMap::default)
|
||||
.entry_ref(&dir)
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(0);
|
||||
write!(dir, "-{index}").unwrap();
|
||||
get_cargo_target_dir()
|
||||
.join("fayalite_assert_formal")
|
||||
.join(dir)
|
||||
}
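The directory name above combines the sanitized test name with the "simple reproducible hash" so distinct tests get distinct directories while the name stays stable across runs (a randomly seeded `DefaultHasher` would not). The fold is copied out here as a standalone snippet you can run to see the suffix a given test would get; the file and test names below are arbitrary examples:

fn simple_hash(file: &str, test_name: &str) -> u32 {
    file.bytes().chain(test_name.bytes()).fold(
        ((file.len() as u32) << 16).wrapping_add(test_name.len() as u32),
        |mut h, b| {
            h = h.wrapping_mul(0xaa0d184b);
            h ^= h.rotate_right(5);
            h ^= h.rotate_right(13);
            h.wrapping_add(b as u32)
        },
    )
}

fn main() {
    // Deterministic: the same (file, test name) pair always yields the same 8-hex-digit suffix.
    let h = simple_hash("tests/module.rs", "check_memories");
    println!("{h:08x}");
    assert_eq!(h, simple_hash("tests/module.rs", "check_memories"));
}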
|
||||
|
||||
fn make_assert_formal_args(
|
||||
test_name: &dyn std::fmt::Display,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
|
||||
let args = JobKindAndArgs {
|
||||
kind: BaseJobKind,
|
||||
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name), None),
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies {
|
||||
args,
|
||||
dependencies: (),
|
||||
};
|
||||
let args = JobKindAndArgs {
|
||||
kind: FirrtlJobKind,
|
||||
args: FirrtlArgs { export_options },
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(
|
||||
None,
|
||||
UnadjustedVerilogArgs {
|
||||
firtool_extra_args: vec![],
|
||||
verilog_dialect: None,
|
||||
verilog_debug: true,
|
||||
},
|
||||
)?,
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: VerilogJobKind,
|
||||
args: VerilogJobArgs {},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: WriteSbyFileJobKind,
|
||||
args: FormalArgs {
|
||||
sby_extra_args: vec![],
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
|
||||
smtbmc_extra_args: vec![],
|
||||
},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?,
|
||||
};
|
||||
Ok(JobArgsAndDependencies { args, dependencies })
|
||||
}
|
||||
|
||||
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) -> eyre::Result<()> {
|
||||
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
|
||||
make_assert_formal_args(
|
||||
&test_name,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)?
|
||||
.run_without_platform(
|
||||
|NoArgs {}| Ok(JobParams::new(module)),
|
||||
&GlobalParams::new(None, APP_NAME),
|
||||
)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) {
|
||||
try_assert_formal(
|
||||
test_name,
|
||||
module,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)
|
||||
.expect("testing::assert_formal() failed");
|
||||
}
|
||||
|
|
@ -7,27 +7,12 @@ use crate::{
|
|||
clock::Clock,
|
||||
enum_::Enum,
|
||||
expr::Expr,
|
||||
int::{Bool, SInt, UInt, UIntValue},
|
||||
int::{Bool, SInt, UInt},
|
||||
intern::{Intern, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::{DynSimOnlyValue, DynSimOnly, SimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
util::{ConstUsize, slice_range, try_slice_range},
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::DeserializeOwned};
|
||||
use std::{
|
||||
fmt,
|
||||
hash::Hash,
|
||||
iter::{FusedIterator, Sum},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
ops::{Add, AddAssign, Bound, Index, Mul, MulAssign, Range, Sub, SubAssign},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
pub(crate) mod serde_impls;
|
||||
use std::{fmt, hash::Hash, iter::FusedIterator, ops::Index};
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
|
||||
#[non_exhaustive]
|
||||
|
|
@ -36,23 +21,6 @@ pub struct TypeProperties {
|
|||
pub is_storable: bool,
|
||||
pub is_castable_from_bits: bool,
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl TypeProperties {
|
||||
pub const fn size(self) -> OpaqueSimValueSize {
|
||||
let Self {
|
||||
is_passive: _,
|
||||
is_storable: _,
|
||||
is_castable_from_bits: _,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
|
||||
|
|
@ -67,8 +35,6 @@ pub enum CanonicalType {
|
|||
SyncReset(SyncReset),
|
||||
Reset(Reset),
|
||||
Clock(Clock),
|
||||
PhantomConst(PhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl fmt::Debug for CanonicalType {
|
||||
|
|
@ -84,30 +50,10 @@ impl fmt::Debug for CanonicalType {
|
|||
Self::SyncReset(v) => v.fmt(f),
|
||||
Self::Reset(v) => v.fmt(f),
|
||||
Self::Clock(v) => v.fmt(f),
|
||||
Self::PhantomConst(v) => v.fmt(f),
|
||||
Self::DynSimOnly(v) => v.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for CanonicalType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serde_impls::SerdeCanonicalType::from(*self).serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for CanonicalType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(serde_impls::SerdeCanonicalType::deserialize(deserializer)?.into())
|
||||
}
|
||||
}
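The `Serialize`/`Deserialize` impls for `CanonicalType` above delegate to a private `SerdeCanonicalType` proxy and convert with `From` in both directions, keeping the wire format decoupled from the in-memory enum. A minimal self-contained sketch of that proxy-type pattern (hypothetical `Meters` type; requires `serde` with the derive feature plus `serde_json`; not the crate's code):

use serde::{Deserialize, Serialize};

// In-memory representation we don't want serde to see directly.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Meters(f64);

// Wire representation: the proxy that actually derives serde.
#[derive(Serialize, Deserialize)]
struct SerdeMeters { meters: f64 }

impl From<Meters> for SerdeMeters {
    fn from(value: Meters) -> Self { SerdeMeters { meters: value.0 } }
}
impl From<SerdeMeters> for Meters {
    fn from(value: SerdeMeters) -> Self { Meters(value.meters) }
}

impl Serialize for Meters {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        SerdeMeters::from(*self).serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for Meters {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        Ok(SerdeMeters::deserialize(deserializer)?.into())
    }
}

fn main() {
    let json = serde_json::to_string(&Meters(2.5)).unwrap();
    assert_eq!(json, r#"{"meters":2.5}"#);
    assert_eq!(serde_json::from_str::<Meters>(&json).unwrap(), Meters(2.5));
}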
|
||||
|
||||
impl CanonicalType {
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
match self {
|
||||
|
|
@ -121,8 +67,6 @@ impl CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.type_properties(),
|
||||
CanonicalType::Reset(v) => v.type_properties(),
|
||||
CanonicalType::Clock(v) => v.type_properties(),
|
||||
CanonicalType::PhantomConst(v) => v.type_properties(),
|
||||
CanonicalType::DynSimOnly(v) => v.type_properties(),
|
||||
}
|
||||
}
|
||||
pub fn is_passive(self) -> bool {
|
||||
|
|
@ -137,12 +81,6 @@ impl CanonicalType {
|
|||
pub fn bit_width(self) -> usize {
|
||||
self.type_properties().bit_width
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.type_properties().sim_only_values_len
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
self.type_properties().size()
|
||||
}
|
||||
pub fn can_connect(self, rhs: Self) -> bool {
|
||||
match self {
|
||||
CanonicalType::UInt(lhs) => {
|
||||
|
|
@ -205,23 +143,8 @@ impl CanonicalType {
|
|||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::PhantomConst(lhs) => {
|
||||
let CanonicalType::PhantomConst(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::DynSimOnly(lhs) => {
|
||||
let CanonicalType::DynSimOnly(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_serde_unexpected_str(self) -> &'static str {
|
||||
serde_impls::SerdeCanonicalType::from(self).as_serde_unexpected_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MatchVariantAndInactiveScope: Sized {
|
||||
|
|
@ -243,7 +166,7 @@ impl<T: 'static + Send + Sync> MatchVariantAndInactiveScope for MatchVariantWith
|
|||
}
|
||||
|
||||
pub trait FillInDefaultedGenerics {
|
||||
type Type;
|
||||
type Type: Type;
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type;
|
||||
}
|
||||
|
||||
|
|
@ -255,22 +178,6 @@ impl<T: Type> FillInDefaultedGenerics for T {
|
|||
}
|
||||
}
|
||||
|
||||
impl FillInDefaultedGenerics for usize {
|
||||
type Type = usize;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const V: usize> FillInDefaultedGenerics for ConstUsize<V> {
|
||||
type Type = ConstUsize<V>;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait TypeOrDefaultSealed {}
|
||||
pub trait BaseTypeSealed {}
|
||||
|
|
@ -288,34 +195,6 @@ macro_rules! impl_base_type {
|
|||
};
|
||||
}
|
||||
|
||||
macro_rules! impl_base_type_serde {
|
||||
($name:ident, $expected:literal) => {
|
||||
impl Serialize for $name {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.canonical().serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for $name {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
match CanonicalType::deserialize(deserializer)? {
|
||||
CanonicalType::$name(retval) => Ok(retval),
|
||||
ty => Err(serde::de::Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&$expected,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_base_type!(UInt);
|
||||
impl_base_type!(SInt);
|
||||
impl_base_type!(Bool);
|
||||
|
|
@ -326,24 +205,12 @@ impl_base_type!(AsyncReset);
|
|||
impl_base_type!(SyncReset);
|
||||
impl_base_type!(Reset);
|
||||
impl_base_type!(Clock);
|
||||
impl_base_type!(PhantomConst);
|
||||
impl_base_type!(DynSimOnly);
|
||||
|
||||
impl_base_type_serde!(Bool, "a Bool");
|
||||
impl_base_type_serde!(Enum, "an Enum");
|
||||
impl_base_type_serde!(Bundle, "a Bundle");
|
||||
impl_base_type_serde!(AsyncReset, "an AsyncReset");
|
||||
impl_base_type_serde!(SyncReset, "a SyncReset");
|
||||
impl_base_type_serde!(Reset, "a Reset");
|
||||
impl_base_type_serde!(Clock, "a Clock");
|
||||
|
||||
impl sealed::BaseTypeSealed for CanonicalType {}
|
||||
|
||||
impl BaseType for CanonicalType {}
|
||||
|
||||
pub trait TypeOrDefault<D: Type>:
|
||||
sealed::TypeOrDefaultSealed + Copy + Eq + Hash + fmt::Debug
|
||||
{
|
||||
pub trait TypeOrDefault<D: Type>: sealed::TypeOrDefaultSealed {
|
||||
type Type: Type;
|
||||
fn get<F: FnOnce() -> D>(self, default: F) -> Self::Type;
|
||||
}
|
||||
|
|
@ -371,48 +238,26 @@ pub trait Type:
|
|||
{
|
||||
type BaseType: BaseType;
|
||||
type MaskType: Type<MaskType = Self::MaskType>;
|
||||
type SimValue: fmt::Debug + Clone + 'static + ToSimValueWithType<Self>;
|
||||
type MatchVariant: 'static + Send + Sync;
|
||||
type MatchActiveScope;
|
||||
type MatchVariantAndInactiveScope: MatchVariantAndInactiveScope<
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
type MatchVariantsIter: Iterator<Item = Self::MatchVariantAndInactiveScope>
|
||||
+ ExactSizeIterator
|
||||
+ FusedIterator
|
||||
+ DoubleEndedIterator;
|
||||
#[track_caller]
|
||||
fn match_variants(this: Expr<Self>, source_location: SourceLocation)
|
||||
-> Self::MatchVariantsIter;
|
||||
-> Self::MatchVariantsIter;
|
||||
fn mask_type(&self) -> Self::MaskType;
|
||||
fn canonical(&self) -> CanonicalType;
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self;
|
||||
fn source_location() -> SourceLocation;
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue;
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
);
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w>;
|
||||
}
|
||||
|
||||
pub trait BaseType:
|
||||
Type<
|
||||
BaseType = Self,
|
||||
MaskType: Serialize + DeserializeOwned,
|
||||
SimValue: Serialize + DeserializeOwned,
|
||||
> + sealed::BaseTypeSealed
|
||||
+ Into<CanonicalType>
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
{
|
||||
}
|
||||
pub trait BaseType: Type<BaseType = Self> + sealed::BaseTypeSealed + Into<CanonicalType> {}
|
||||
|
||||
macro_rules! impl_match_variant_as_self {
|
||||
() => {
|
||||
|
|
@ -439,7 +284,6 @@ pub trait TypeWithDeref: Type {
|
|||
impl Type for CanonicalType {
|
||||
type BaseType = CanonicalType;
|
||||
type MaskType = CanonicalType;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
match self {
|
||||
|
|
@ -453,8 +297,6 @@ impl Type for CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Reset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Clock(v) => v.mask_type().canonical(),
|
||||
CanonicalType::PhantomConst(v) => v.mask_type().canonical(),
|
||||
CanonicalType::DynSimOnly(v) => v.mask_type().canonical(),
|
||||
}
|
||||
}
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
|
|
@ -466,636 +308,9 @@ impl Type for CanonicalType {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSizeRange {
|
||||
pub bit_width: Range<usize>,
|
||||
pub sim_only_values_len: Range<usize>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRange {
|
||||
pub fn start(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.start,
|
||||
sim_only_values_len: self.sim_only_values_len.start,
|
||||
}
|
||||
}
|
||||
pub fn end(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.end,
|
||||
sim_only_values_len: self.sim_only_values_len.end,
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width.is_empty() && sim_only_values_len.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Range<OpaqueSimValueSize>> for OpaqueSimValueSizeRange {
|
||||
fn from(value: Range<OpaqueSimValueSize>) -> Self {
|
||||
Self {
|
||||
bit_width: value.start.bit_width..value.end.bit_width,
|
||||
sim_only_values_len: value.start.sim_only_values_len..value.end.sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<OpaqueSimValueSizeRange> for Range<OpaqueSimValueSize> {
|
||||
fn from(value: OpaqueSimValueSizeRange) -> Self {
|
||||
value.start()..value.end()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait OpaqueSimValueSizeRangeBounds {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRangeBounds for OpaqueSimValueSizeRange {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Included(self.start())
|
||||
}
|
||||
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Excluded(self.end())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + std::ops::RangeBounds<OpaqueSimValueSize>> OpaqueSimValueSizeRangeBounds for T {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::start_bound(self).cloned()
|
||||
}
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::end_bound(self).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSize {
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSize {
|
||||
pub const fn from_bit_width(bit_width: usize) -> Self {
|
||||
Self::from_bit_width_and_sim_only_values_len(bit_width, 0)
|
||||
}
|
||||
pub const fn from_bit_width_and_sim_only_values_len(
|
||||
bit_width: usize,
|
||||
sim_only_values_len: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub const fn only_bit_width(self) -> Option<usize> {
|
||||
if let Self {
|
||||
bit_width,
|
||||
sim_only_values_len: 0,
|
||||
} = self
|
||||
{
|
||||
Some(bit_width)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
pub const fn empty() -> Self {
|
||||
Self {
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
}
|
||||
}
|
||||
pub const fn is_empty(self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width == 0 && sim_only_values_len == 0
|
||||
}
|
||||
pub const fn checked_mul(self, factor: usize) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self.sim_only_values_len.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_add(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_add(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_sub(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_sub(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn try_slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> Option<OpaqueSimValueSizeRange> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bit_width = try_slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width,
|
||||
)?;
|
||||
let sim_only_values_len = try_slice_range(
|
||||
(
|
||||
start.map(|v| v.sim_only_values_len),
|
||||
end.map(|v| v.sim_only_values_len),
|
||||
),
|
||||
self.sim_only_values_len,
|
||||
)?;
|
||||
Some(OpaqueSimValueSizeRange {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> OpaqueSimValueSizeRange {
|
||||
self.try_slice_range(range).expect("range out of bounds")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<usize> for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: usize) -> Self::Output {
|
||||
self.checked_mul(rhs).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<OpaqueSimValueSize> for usize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_mul(self).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn add(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_add(self).expect("addition overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn sub(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
self.checked_sub(rhs).expect("subtraction underflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl MulAssign<usize> for OpaqueSimValueSize {
|
||||
fn mul_assign(&mut self, rhs: usize) {
|
||||
*self = *self * rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for OpaqueSimValueSize {
|
||||
fn add_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for OpaqueSimValueSize {
|
||||
fn sub_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sum for OpaqueSimValueSize {
|
||||
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
|
||||
iter.fold(OpaqueSimValueSize::empty(), Add::add)
|
||||
}
|
||||
}
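`OpaqueSimValueSize` adds up component-wise, since bit width and sim-only slots are independent dimensions, and the `Sum` impl above lets an iterator over field sizes produce a composite size directly. A self-contained miniature of that arithmetic (toy `Size` struct, not the crate's type):

use std::iter::Sum;
use std::ops::Add;

#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
struct Size {
    bit_width: usize,
    sim_only_values_len: usize,
}

impl Add for Size {
    type Output = Size;
    fn add(self, rhs: Size) -> Size {
        // Each dimension is summed independently.
        Size {
            bit_width: self.bit_width + rhs.bit_width,
            sim_only_values_len: self.sim_only_values_len + rhs.sim_only_values_len,
        }
    }
}

impl Sum for Size {
    fn sum<I: Iterator<Item = Size>>(iter: I) -> Size {
        iter.fold(Size::default(), Add::add)
    }
}

fn main() {
    // e.g. a bundle with an 8-bit field, a 1-bit field, and one sim-only field.
    let fields = [
        Size { bit_width: 8, sim_only_values_len: 0 },
        Size { bit_width: 1, sim_only_values_len: 0 },
        Size { bit_width: 0, sim_only_values_len: 1 },
    ];
    let total: Size = fields.into_iter().sum();
    assert_eq!(total, Size { bit_width: 9, sim_only_values_len: 1 });
}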
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
pub struct OpaqueSimValue {
|
||||
bits: UIntValue,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValue {
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Default::default()),
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn with_capacity(capacity: OpaqueSimValueSize) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(BitVec::with_capacity(capacity.bit_width))),
|
||||
sim_only_values: Vec::with_capacity(capacity.sim_only_values_len),
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bits.width(),
|
||||
sim_only_values_len: self.sim_only_values.len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.width()
|
||||
}
|
||||
pub fn bits(&self) -> &UIntValue {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> UIntValue {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: UIntValue) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self::from_bitslice_and_sim_only_values(v, Vec::new())
|
||||
}
|
||||
pub fn from_bitslice_and_sim_only_values(
|
||||
bits: &BitSlice,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(bits.to_bitvec())),
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bits_and_sim_only_values(
|
||||
bits: UIntValue,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn into_parts(self) -> (UIntValue, Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn parts_mut(&mut self) -> (&mut UIntValue, &mut Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn sim_only_values(&self) -> &[DynSimOnlyValue] {
|
||||
&self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_mut(&mut self) -> &mut Vec<DynSimOnlyValue> {
|
||||
&mut self.sim_only_values
|
||||
}
|
||||
pub fn as_slice(&self) -> OpaqueSimValueSlice<'_> {
|
||||
OpaqueSimValueSlice {
|
||||
bits: self.bits.bits(),
|
||||
sim_only_values: &self.sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(&self, range: R) -> OpaqueSimValueSlice<'_> {
|
||||
self.as_slice().slice(range)
|
||||
}
|
||||
pub fn rewrite_with<F>(&mut self, target_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
OpaqueSimValueWriter::rewrite_with(target_size, self, f);
|
||||
}
|
||||
pub fn clone_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bits_mut().copy_from_bitslice(bits);
|
||||
self.sim_only_values.clone_from_slice(sim_only_values);
|
||||
}
|
||||
pub fn extend_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bitvec_mut().extend_from_bitslice(bits);
|
||||
self.sim_only_values.extend_from_slice(sim_only_values);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Extend<OpaqueSimValueSlice<'a>> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValueSlice<'a>>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for slice in iter {
|
||||
bits.extend_from_bitslice(slice.bits);
|
||||
sim_only_values.extend_from_slice(slice.sim_only_values);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<OpaqueSimValue> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValue>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for value in iter {
|
||||
bits.extend_from_bitslice(value.bits().bits());
|
||||
sim_only_values.extend_from_slice(value.sim_only_values());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type<SimValue = OpaqueSimValue>> ToSimValueWithType<T> for OpaqueSimValue {
|
||||
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self.clone())
|
||||
}
|
||||
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct OpaqueSimValueSlice<'a> {
|
||||
bits: &'a BitSlice,
|
||||
sim_only_values: &'a [DynSimOnlyValue],
|
||||
}
|
||||
|
||||
impl<'a> Default for OpaqueSimValueSlice<'a> {
|
||||
fn default() -> Self {
|
||||
Self::empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueSlice<'a> {
|
||||
pub fn from_parts(bits: &'a BitSlice, sim_only_values: &'a [DynSimOnlyValue]) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(bits: &'a BitSlice) -> Self {
|
||||
Self::from_parts(bits, &[])
|
||||
}
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: BitSlice::empty(),
|
||||
sim_only_values: &[],
|
||||
}
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn bits(self) -> &'a BitSlice {
|
||||
self.bits
|
||||
}
|
||||
pub fn sim_only_values(self) -> &'a [DynSimOnlyValue] {
|
||||
self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.sim_only_values.len()
|
||||
}
|
||||
pub fn to_owned(self) -> OpaqueSimValue {
|
||||
OpaqueSimValue::from_bitslice_and_sim_only_values(self.bits, self.sim_only_values.to_vec())
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(self, range: R) -> OpaqueSimValueSlice<'a> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bits_range = slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width(),
|
||||
);
|
||||
let sim_only_values_range = slice_range(
|
||||
(start.map(|v| v.sim_only_values_len), end.map(|v| v.sim_only_values_len)),
|
||||
self.sim_only_values_len(),
|
||||
);
|
||||
Self {
|
||||
bits: &self.bits[bits_range],
|
||||
sim_only_values: &self.sim_only_values[sim_only_values_range],
|
||||
}
|
||||
}
|
||||
pub fn split_at(self, index: OpaqueSimValueSize) -> (Self, Self) {
|
||||
let bits = self.bits.split_at(index.bit_width);
|
||||
let sim_only_values = self.sim_only_values.split_at(index.sim_only_values_len);
|
||||
(
|
||||
Self {
|
||||
bits: bits.0,
|
||||
sim_only_values: sim_only_values.0,
|
||||
},
|
||||
Self {
|
||||
bits: bits.1,
|
||||
sim_only_values: sim_only_values.1,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWriter<'a> {
|
||||
bits: &'a mut BitSlice,
|
||||
sim_only_values: &'a mut Vec<DynSimOnlyValue>,
|
||||
sim_only_values_range: std::ops::Range<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWritten<'a> {
|
||||
_phantom: PhantomData<&'a ()>,
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueWriter<'a> {
|
||||
pub fn sim_only_values_range(&self) -> std::ops::Range<usize> {
|
||||
self.sim_only_values_range.clone()
|
||||
}
|
||||
pub fn rewrite_with<F>(target_size: OpaqueSimValueSize, value: &mut OpaqueSimValue, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter::rewrite_helper(target_size, value));
|
||||
}
|
||||
pub(crate) fn rewrite_helper(
|
||||
target_size: OpaqueSimValueSize,
|
||||
value: &'a mut OpaqueSimValue,
|
||||
) -> Self {
|
||||
let (bits, sim_only_values) = value.parts_mut();
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = target_size;
|
||||
let bits = bits.bitvec_mut();
|
||||
bits.resize(bit_width, false);
|
||||
sim_only_values.truncate(sim_only_values_len);
|
||||
sim_only_values.reserve_exact(sim_only_values_len - sim_only_values.len());
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range: 0..sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn sim_only_values_len(&self) -> usize {
|
||||
self.sim_only_values_range.len()
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn fill_cloned_from_slice(
|
||||
self,
|
||||
slice: OpaqueSimValueSlice<'_>,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(self.size(), slice.size());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
bits.copy_from_bitslice(slice.bits);
|
||||
let (clone_from_src, clone_src) = slice.sim_only_values.split_at(
|
||||
(sim_only_values.len() - sim_only_values_range.start).min(slice.sim_only_values.len()),
|
||||
);
|
||||
sim_only_values[sim_only_values_range.start..][..clone_from_src.len()]
|
||||
.clone_from_slice(clone_from_src);
|
||||
sim_only_values.extend_from_slice(clone_src);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_bits_with<F: FnOnce(&mut BitSlice)>(self, f: F) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(self.size().only_bit_width().is_some());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
f(bits);
|
||||
assert_eq!(sim_only_values.len(), sim_only_values_range.end);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_zeros(self) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(
|
||||
self.size().only_bit_width().is_some(),
|
||||
"can't fill things other than bits with zeros",
|
||||
);
|
||||
self.fill_with_bits_with(|bits| bits.fill(false))
|
||||
}
|
||||
pub fn fill_prefix_with<F>(&mut self, prefix_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = prefix_size;
|
||||
assert!(bit_width <= self.bit_width());
|
||||
assert!(sim_only_values_len <= self.sim_only_values_len());
|
||||
let next_start = self.sim_only_values_range.start + sim_only_values_len;
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter {
|
||||
bits: &mut self.bits[..bit_width],
|
||||
sim_only_values: self.sim_only_values,
|
||||
sim_only_values_range: self.sim_only_values_range.start..next_start,
|
||||
});
|
||||
assert!(self.sim_only_values.len() >= next_start);
|
||||
self.bits = &mut mem::take(&mut self.bits)[bit_width..];
|
||||
self.sim_only_values_range.start = next_start;
|
||||
}
|
||||
}
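
A hedged sketch of the intended call pattern for `rewrite_with` (the `for<'b>` brand means the closure can only return the token produced by the writer it was handed); it assumes `OpaqueSimValueSize`'s fields are constructible at the call site:

let mut value = OpaqueSimValueSlice::empty().to_owned();
OpaqueSimValueWriter::rewrite_with(
    OpaqueSimValueSize {
        bit_width: 8,
        sim_only_values_len: 0,
    },
    &mut value,
    // the value is resized to the target size before the closure runs
    |writer| writer.fill_with_zeros(),
);
assert_eq!(value.bits().bits().len(), 8);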
|
||||
|
||||
pub trait StaticType: Type + Default {
|
||||
pub trait StaticType: Type {
|
||||
const TYPE: Self;
|
||||
const MASK_TYPE: Self::MaskType;
|
||||
const TYPE_PROPERTIES: TypeProperties;
|
||||
|
|
@ -1113,6 +328,6 @@ impl<T: Type> Index<T> for AsMaskWithoutGenerics {
|
|||
type Output = T::MaskType;
|
||||
|
||||
fn index(&self, ty: T) -> &Self::Output {
|
||||
Interned::into_inner(Intern::intern_sized(ty.mask_type()))
|
||||
Interned::<_>::into_inner(Intern::intern_sized(ty.mask_type()))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,135 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
array::Array,
|
||||
bundle::{Bundle, BundleType},
|
||||
clock::Clock,
|
||||
enum_::{Enum, EnumType},
|
||||
int::{Bool, SInt, UInt},
|
||||
intern::Interned,
|
||||
phantom_const::{PhantomConstCanonicalValue, PhantomConstValue},
|
||||
prelude::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::DynSimOnly,
|
||||
ty::{BaseType, CanonicalType},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
pub(crate) struct SerdePhantomConst<T>(pub T);
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Serialize for SerdePhantomConst<Interned<T>> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.0.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for SerdePhantomConst<Interned<T>> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
T::deserialize_value(deserializer).map(Self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[serde(rename = "CanonicalType")]
|
||||
pub(crate) enum SerdeCanonicalType<
|
||||
ArrayElement = CanonicalType,
|
||||
ThePhantomConst = SerdePhantomConst<Interned<PhantomConstCanonicalValue>>,
|
||||
> {
|
||||
UInt {
|
||||
width: usize,
|
||||
},
|
||||
SInt {
|
||||
width: usize,
|
||||
},
|
||||
Bool,
|
||||
Array {
|
||||
element: ArrayElement,
|
||||
len: usize,
|
||||
},
|
||||
Enum {
|
||||
variants: Interned<[crate::enum_::EnumVariant]>,
|
||||
},
|
||||
Bundle {
|
||||
fields: Interned<[crate::bundle::BundleField]>,
|
||||
},
|
||||
AsyncReset,
|
||||
SyncReset,
|
||||
Reset,
|
||||
Clock,
|
||||
PhantomConst(ThePhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl<ArrayElement, PhantomConstInner> SerdeCanonicalType<ArrayElement, PhantomConstInner> {
|
||||
pub(crate) fn as_serde_unexpected_str(&self) -> &'static str {
|
||||
match self {
|
||||
Self::UInt { .. } => "a UInt",
|
||||
Self::SInt { .. } => "a SInt",
|
||||
Self::Bool => "a Bool",
|
||||
Self::Array { .. } => "an Array",
|
||||
Self::Enum { .. } => "an Enum",
|
||||
Self::Bundle { .. } => "a Bundle",
|
||||
Self::AsyncReset => "an AsyncReset",
|
||||
Self::SyncReset => "a SyncReset",
|
||||
Self::Reset => "a Reset",
|
||||
Self::Clock => "a Clock",
|
||||
Self::PhantomConst(_) => "a PhantomConst",
|
||||
Self::DynSimOnly(_) => "a SimOnlyValue",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BaseType> From<T> for SerdeCanonicalType {
|
||||
fn from(ty: T) -> Self {
|
||||
let ty: CanonicalType = ty.into();
|
||||
match ty {
|
||||
CanonicalType::UInt(ty) => Self::UInt { width: ty.width() },
|
||||
CanonicalType::SInt(ty) => Self::SInt { width: ty.width() },
|
||||
CanonicalType::Bool(Bool {}) => Self::Bool,
|
||||
CanonicalType::Array(ty) => Self::Array {
|
||||
element: ty.element(),
|
||||
len: ty.len(),
|
||||
},
|
||||
CanonicalType::Enum(ty) => Self::Enum {
|
||||
variants: ty.variants(),
|
||||
},
|
||||
CanonicalType::Bundle(ty) => Self::Bundle {
|
||||
fields: ty.fields(),
|
||||
},
|
||||
CanonicalType::AsyncReset(AsyncReset {}) => Self::AsyncReset,
|
||||
CanonicalType::SyncReset(SyncReset {}) => Self::SyncReset,
|
||||
CanonicalType::Reset(Reset {}) => Self::Reset,
|
||||
CanonicalType::Clock(Clock {}) => Self::Clock,
|
||||
CanonicalType::PhantomConst(ty) => Self::PhantomConst(SerdePhantomConst(ty.get())),
|
||||
CanonicalType::DynSimOnly(ty) => Self::DynSimOnly(ty),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SerdeCanonicalType> for CanonicalType {
|
||||
fn from(ty: SerdeCanonicalType) -> Self {
|
||||
match ty {
|
||||
SerdeCanonicalType::UInt { width } => Self::UInt(UInt::new(width)),
|
||||
SerdeCanonicalType::SInt { width } => Self::SInt(SInt::new(width)),
|
||||
SerdeCanonicalType::Bool => Self::Bool(Bool),
|
||||
SerdeCanonicalType::Array { element, len } => Self::Array(Array::new(element, len)),
|
||||
SerdeCanonicalType::Enum { variants } => Self::Enum(Enum::new(variants)),
|
||||
SerdeCanonicalType::Bundle { fields } => Self::Bundle(Bundle::new(fields)),
|
||||
SerdeCanonicalType::AsyncReset => Self::AsyncReset(AsyncReset),
|
||||
SerdeCanonicalType::SyncReset => Self::SyncReset(SyncReset),
|
||||
SerdeCanonicalType::Reset => Self::Reset(Reset),
|
||||
SerdeCanonicalType::Clock => Self::Clock(Clock),
|
||||
SerdeCanonicalType::PhantomConst(value) => {
|
||||
Self::PhantomConst(PhantomConst::new(value.0))
|
||||
}
|
||||
SerdeCanonicalType::DynSimOnly(value) => Self::DynSimOnly(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,23 +1,11 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
pub(crate) mod alternating_cell;
|
||||
mod const_bool;
|
||||
mod const_cmp;
|
||||
mod const_usize;
|
||||
mod misc;
|
||||
mod scoped_ref;
|
||||
pub(crate) mod streaming_read_utf8;
|
||||
mod test_hasher;
|
||||
|
||||
// allow easily switching the hasher crate-wide for testing
|
||||
#[cfg(feature = "unstable-test-hasher")]
|
||||
pub type DefaultBuildHasher = test_hasher::DefaultBuildHasher;
|
||||
#[cfg(not(feature = "unstable-test-hasher"))]
|
||||
pub(crate) type DefaultBuildHasher = hashbrown::DefaultHashBuilder;
|
||||
|
||||
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, DefaultBuildHasher>;
|
||||
pub(crate) type HashSet<T> = hashbrown::HashSet<T, DefaultBuildHasher>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
|
||||
|
|
@ -35,14 +23,5 @@ pub use scoped_ref::ScopedRef;
|
|||
|
||||
#[doc(inline)]
|
||||
pub use misc::{
|
||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
|
||||
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
|
||||
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
|
||||
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
|
||||
interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice,
|
||||
};
|
||||
pub(crate) use misc::{InternedStrCompareAsStr, chain};
|
||||
|
||||
pub mod job_server;
|
||||
pub mod prefix_sum;
|
||||
pub mod ready_valid;
|
||||
|
|
|
|||
|
|
@ -1,122 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::util::DebugAsDisplay;
|
||||
use std::{
|
||||
cell::{Cell, UnsafeCell},
|
||||
fmt,
|
||||
};
|
||||
|
||||
pub(crate) trait AlternatingCellMethods {
|
||||
fn unique_to_shared(&mut self);
|
||||
fn shared_to_unique(&mut self);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum State {
|
||||
Unique,
|
||||
Shared,
|
||||
Locked,
|
||||
}
|
||||
|
||||
pub(crate) struct AlternatingCell<T: ?Sized> {
|
||||
state: Cell<State>,
|
||||
value: UnsafeCell<T>,
|
||||
}
|
||||
|
||||
impl<T: ?Sized + fmt::Debug + AlternatingCellMethods> fmt::Debug for AlternatingCell<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("AlternatingCell")
|
||||
.field(
|
||||
self.try_share()
|
||||
.as_ref()
|
||||
.map(|v| -> &dyn fmt::Debug { v })
|
||||
.unwrap_or(&DebugAsDisplay("<...>")),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> AlternatingCell<T> {
|
||||
pub(crate) const fn new_shared(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Shared),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) const fn new_unique(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Unique),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) fn is_unique(&self) -> bool {
|
||||
matches!(self.state.get(), State::Unique)
|
||||
}
|
||||
pub(crate) fn is_shared(&self) -> bool {
|
||||
matches!(self.state.get(), State::Shared)
|
||||
}
|
||||
pub(crate) fn into_inner(self) -> T
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
self.value.into_inner()
|
||||
}
|
||||
pub(crate) fn try_share(&self) -> Option<&T>
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {}
|
||||
State::Unique => {
|
||||
struct Locked<'a>(&'a Cell<State>);
|
||||
impl Drop for Locked<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.0.set(State::Shared);
|
||||
}
|
||||
}
|
||||
self.state.set(State::Locked);
|
||||
let lock = Locked(&self.state);
|
||||
// Safety: state is Locked, so nothing else will
|
||||
// access value while calling unique_to_shared.
|
||||
unsafe { &mut *self.value.get() }.unique_to_shared();
|
||||
drop(lock);
|
||||
}
|
||||
State::Locked => return None,
|
||||
}
|
||||
|
||||
// Safety: state is Shared so nothing will create any mutable
|
||||
// references until the returned reference's lifetime expires.
|
||||
Some(unsafe { &*self.value.get() })
|
||||
}
|
||||
#[track_caller]
|
||||
pub(crate) fn share(&self) -> &T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
let Some(retval) = self.try_share() else {
|
||||
panic!("`share` called recursively");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub(crate) fn unique(&mut self) -> &mut T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {
|
||||
self.state.set(State::Unique);
|
||||
self.value.get_mut().shared_to_unique();
|
||||
}
|
||||
State::Unique => {}
|
||||
State::Locked => unreachable!(),
|
||||
}
|
||||
self.value.get_mut()
|
||||
}
|
||||
}
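
A minimal sketch of the protocol the trait above encodes: a value keeps a "unique" representation while mutated and is converted exactly once when it is next shared (the names below are illustrative only):

struct Counter {
    pending: u32,
    total: u32,
}
impl AlternatingCellMethods for Counter {
    fn unique_to_shared(&mut self) {
        // fold pending mutations into the shared view
        self.total += self.pending;
        self.pending = 0;
    }
    fn shared_to_unique(&mut self) {}
}
let mut cell = AlternatingCell::new_unique(Counter { pending: 3, total: 0 });
cell.unique().pending += 2; // still unique: no conversion happens
assert_eq!(cell.share().total, 5); // first share() runs unique_to_shared once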
|
||||
|
|
@ -1,9 +1,5 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash, mem::ManuallyDrop, ptr};
|
||||
|
||||
mod sealed {
|
||||
|
|
@ -13,17 +9,7 @@ mod sealed {
|
|||
/// # Safety
|
||||
/// the only implementation is `ConstBool<Self::VALUE>`
|
||||
pub unsafe trait GenericConstBool:
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
{
|
||||
const VALUE: bool;
|
||||
}
|
||||
|
|
@ -44,32 +30,6 @@ unsafe impl<const VALUE: bool> GenericConstBool for ConstBool<VALUE> {
|
|||
const VALUE: bool = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: bool> Serialize for ConstBool<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: bool> Deserialize<'de> for ConstBool<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = bool::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstBool)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Bool(value),
|
||||
&if VALUE { "true" } else { "false" },
|
||||
))
|
||||
}
|
||||
}
|
||||
}
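
A small sketch of what these impls buy (assuming `serde_json` is available, as it is elsewhere in this crate): the marker type round-trips as a plain boolean and rejects the wrong value.

assert_eq!(serde_json::to_string(&ConstBool::<true>).unwrap(), "true");
assert!(serde_json::from_str::<ConstBool<true>>("false").is_err());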
|
||||
|
||||
pub trait ConstBoolDispatchTag {
|
||||
type Type<Select: GenericConstBool>;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,5 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash};
|
||||
|
||||
mod sealed {
|
||||
|
|
@ -12,17 +8,7 @@ mod sealed {
|
|||
|
||||
/// the only implementation is `ConstUsize<Self::VALUE>`
|
||||
pub trait GenericConstUsize:
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
{
|
||||
const VALUE: usize;
|
||||
}
|
||||
|
|
@ -41,29 +27,3 @@ impl<const VALUE: usize> sealed::Sealed for ConstUsize<VALUE> {}
|
|||
impl<const VALUE: usize> GenericConstUsize for ConstUsize<VALUE> {
|
||||
const VALUE: usize = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: usize> Serialize for ConstUsize<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: usize> Deserialize<'de> for ConstUsize<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = usize::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstUsize)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Unsigned(value as u64),
|
||||
&&*VALUE.to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,157 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use ctor::{ctor, dtor};
|
||||
use jobslot::Client;
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
io, mem,
|
||||
num::NonZeroUsize,
|
||||
sync::{Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
#[ctor]
|
||||
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
|
||||
|
||||
#[dtor]
|
||||
fn drop_client() {
|
||||
drop(
|
||||
match CLIENT.lock() {
|
||||
Ok(v) => v,
|
||||
Err(e) => e.into_inner(),
|
||||
}
|
||||
.take(),
|
||||
);
|
||||
}
|
||||
|
||||
fn get_or_make_client() -> Client {
|
||||
CLIENT
|
||||
.lock()
|
||||
.expect("shouldn't have panicked")
|
||||
.as_mut()
|
||||
.expect("shutting down")
|
||||
.get_or_insert_with(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1)
|
||||
.expect("failed to create job server")
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
struct State {
|
||||
obtained_count: usize,
|
||||
waiting_count: usize,
|
||||
}
|
||||
|
||||
static STATE: Mutex<State> = Mutex::new(State {
|
||||
obtained_count: 0,
|
||||
waiting_count: 0,
|
||||
});
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AcquiredJob {
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl AcquiredJob {
|
||||
pub fn acquire() -> io::Result<Self> {
|
||||
let client = get_or_make_client();
|
||||
struct Waiting {}
|
||||
|
||||
impl Waiting {
|
||||
fn done(self) -> MutexGuard<'static, State> {
|
||||
mem::forget(self);
|
||||
let mut state = STATE.lock().unwrap();
|
||||
state.waiting_count -= 1;
|
||||
state
|
||||
}
|
||||
}
|
||||
impl Drop for Waiting {
|
||||
fn drop(&mut self) {
|
||||
STATE.lock().unwrap().waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
let mut state = STATE.lock().unwrap();
|
||||
if state.obtained_count == 0 && state.waiting_count == 0 {
|
||||
state.obtained_count = 1; // get implicit token
|
||||
return Ok(Self { client });
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
drop(state);
|
||||
let waiting = Waiting {};
|
||||
client.acquire_raw()?;
|
||||
state = waiting.done();
|
||||
state.obtained_count = state
|
||||
.obtained_count
|
||||
.checked_add(1)
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
|
||||
drop(state);
|
||||
Ok(Self { client })
|
||||
}
|
||||
pub fn run_command<R>(
|
||||
&mut self,
|
||||
cmd: std::process::Command,
|
||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||
) -> std::io::Result<R> {
|
||||
self.client.configure_make_and_run_with_fifo(cmd, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AcquiredJob {
|
||||
fn drop(&mut self) {
|
||||
let mut state = STATE.lock().unwrap();
|
||||
match &mut *state {
|
||||
State {
|
||||
obtained_count: 0, ..
|
||||
} => unreachable!(),
|
||||
State {
|
||||
obtained_count: obtained_count @ 1,
|
||||
waiting_count,
|
||||
} => {
|
||||
*obtained_count = 0; // drop implicit token
|
||||
let any_waiting = *waiting_count != 0;
|
||||
drop(state);
|
||||
if any_waiting {
|
||||
// we have the implicit token, but some other thread is trying to acquire a token,
|
||||
// release the implicit token so they can acquire it.
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
State { obtained_count, .. } => {
|
||||
*obtained_count = obtained_count.saturating_sub(1);
|
||||
drop(state);
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
}
|
||||
}
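
A hedged sketch of the intended use: acquire a jobserver token before launching an external tool so concurrently running tests stay within the shared parallelism budget (the tool name below is illustrative):

fn run_tool_version() -> std::io::Result<std::process::ExitStatus> {
    // blocks until a token (or the implicit token) is available
    let mut job = AcquiredJob::acquire()?;
    job.run_command(std::process::Command::new("yosys"), |cmd| {
        cmd.arg("--version").status()
    })
}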
|
||||
|
|
@ -3,11 +3,7 @@
|
|||
use crate::intern::{Intern, Interned};
|
||||
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Debug, Write},
|
||||
io,
|
||||
ops::{Bound, Range, RangeBounds},
|
||||
rc::Rc,
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
|
|
@ -98,15 +94,9 @@ pub fn interned_bit(v: bool) -> Interned<BitSlice> {
|
|||
RETVAL.get_or_init(|| [bits![0; 1].intern(), bits![1; 1].intern()])[v as usize]
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct BitSliceWriteWithBase<'a>(pub &'a BitSlice);
|
||||
|
||||
impl<'a> Debug for BitSliceWriteWithBase<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{self:#x}")
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSliceWriteWithBase<'_> {
|
||||
fn fmt_with_base<const BITS_PER_DIGIT: usize, const UPPER_CASE: bool>(
|
||||
self,
|
||||
|
|
@ -165,450 +155,3 @@ impl fmt::UpperHex for BitSliceWriteWithBase<'_> {
|
|||
self.fmt_with_base::<4, true>(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct RcWriter(Rc<Cell<Vec<u8>>>);
|
||||
|
||||
impl Debug for RcWriter {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.borrow_impl(|buf| {
|
||||
f.debug_tuple("RcWriter")
|
||||
.field(&DebugAsDisplay(format_args!("b\"{}\"", buf.escape_ascii())))
|
||||
.finish()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl RcWriter {
|
||||
fn borrow_impl<R>(&self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
let buf = Cell::take(&self.0);
|
||||
struct PutBackOnDrop<'a> {
|
||||
buf: Vec<u8>,
|
||||
this: &'a RcWriter,
|
||||
}
|
||||
impl Drop for PutBackOnDrop<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.this.0.set(std::mem::take(&mut self.buf));
|
||||
}
|
||||
}
|
||||
let mut buf = PutBackOnDrop { buf, this: self };
|
||||
f(&mut buf.buf)
|
||||
}
|
||||
pub fn borrow<R>(&mut self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
self.borrow_impl(f)
|
||||
}
|
||||
pub fn take(&mut self) -> Vec<u8> {
|
||||
Cell::take(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Write for RcWriter {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.borrow(|v| v.extend_from_slice(buf));
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
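
A short usage sketch: clones of `RcWriter` share one buffer, and `take` drains whatever has been written so far.

use std::io::Write;

let mut writer = RcWriter::default();
let mut clone = writer.clone(); // shares the same underlying Vec<u8>
write!(clone, "hello").unwrap();
assert_eq!(writer.take(), b"hello".to_vec());
assert!(writer.take().is_empty()); // the buffer was drained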
|
||||
|
||||
macro_rules! chain {
|
||||
() => {
|
||||
std::iter::empty()
|
||||
};
|
||||
($first:expr $(, $rest:expr)* $(,)?) => {
|
||||
{
|
||||
let retval = IntoIterator::into_iter($first);
|
||||
$(let retval = Iterator::chain(retval, $rest);)*
|
||||
retval
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use chain;
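
A usage sketch for the crate-internal macro above: it expands to nested `Iterator::chain` calls, so anything `IntoIterator` works as an argument.

let v: Vec<u32> = chain!(0..2, [5, 6], Some(9)).collect();
assert_eq!(v, [0, 1, 5, 6, 9]);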
|
||||
|
||||
pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<Range<usize>> {
|
||||
let start = match range.start_bound() {
|
||||
Bound::Included(start) => *start,
|
||||
Bound::Excluded(start) => start.checked_add(1)?,
|
||||
Bound::Unbounded => 0,
|
||||
};
|
||||
let end = match range.end_bound() {
|
||||
Bound::Included(end) => end.checked_add(1)?,
|
||||
Bound::Excluded(end) => *end,
|
||||
Bound::Unbounded => size,
|
||||
};
|
||||
(start <= end && end <= size).then_some(start..end)
|
||||
}
|
||||
|
||||
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
||||
try_slice_range(range, size).expect("range out of bounds")
|
||||
}
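
A few worked cases for the helpers above: bounds are resolved against the given length, and out-of-range bounds yield `None` instead of panicking.

assert_eq!(try_slice_range(1..=3, 5), Some(1..4));
assert_eq!(try_slice_range(..2, 5), Some(0..2));
assert_eq!(try_slice_range(4..2, 5), None); // start past end
assert_eq!(slice_range(.., 5), 0..5);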
|
||||
|
||||
pub trait SerdeJsonEscapeIfTest {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfTestResult {
|
||||
fn to_result(self) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfTestResult for bool {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
self.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
|
||||
self(ch).to_result()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
|
||||
fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
for utf16 in ch.encode_utf16(&mut [0; 2]) {
|
||||
write!(writer, "\\u{utf16:04x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
|
||||
|
||||
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
|
||||
pub base: Base,
|
||||
pub test: Test,
|
||||
}
|
||||
|
||||
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
|
||||
for SerdeJsonEscapeIf<Test, Base>
|
||||
{
|
||||
fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_null(writer)
|
||||
}
|
||||
|
||||
fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_bool(writer, value)
|
||||
}
|
||||
|
||||
fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i8(writer, value)
|
||||
}
|
||||
|
||||
fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i16(writer, value)
|
||||
}
|
||||
|
||||
fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i32(writer, value)
|
||||
}
|
||||
|
||||
fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i64(writer, value)
|
||||
}
|
||||
|
||||
fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i128(writer, value)
|
||||
}
|
||||
|
||||
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u8(writer, value)
|
||||
}
|
||||
|
||||
fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u16(writer, value)
|
||||
}
|
||||
|
||||
fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u32(writer, value)
|
||||
}
|
||||
|
||||
fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u64(writer, value)
|
||||
}
|
||||
|
||||
fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u128(writer, value)
|
||||
}
|
||||
|
||||
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f32(writer, value)
|
||||
}
|
||||
|
||||
fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f64(writer, value)
|
||||
}
|
||||
|
||||
fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_number_str(writer, value)
|
||||
}
|
||||
|
||||
fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_string(writer)
|
||||
}
|
||||
|
||||
fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_string(writer)
|
||||
}
|
||||
|
||||
fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
while let Some((next_escape_index, next_escape_char)) = fragment
|
||||
.char_indices()
|
||||
.find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
|
||||
Ok(false) => None,
|
||||
Ok(true) => Some(Ok((index, ch))),
|
||||
Err(e) => Some(Err(e)),
|
||||
})
|
||||
.transpose()?
|
||||
{
|
||||
let (no_escapes, rest) = fragment.split_at(next_escape_index);
|
||||
fragment = &rest[next_escape_char.len_utf8()..];
|
||||
self.base.write_string_fragment(writer, no_escapes)?;
|
||||
self.base.write_unicode_escape(writer, next_escape_char)?;
|
||||
}
|
||||
self.base.write_string_fragment(writer, fragment)
|
||||
}
|
||||
|
||||
fn write_char_escape<W>(
|
||||
&mut self,
|
||||
writer: &mut W,
|
||||
char_escape: serde_json::ser::CharEscape,
|
||||
) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_char_escape(writer, char_escape)
|
||||
}
|
||||
|
||||
fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_byte_array(writer, value)
|
||||
}
|
||||
|
||||
fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array(writer)
|
||||
}
|
||||
|
||||
fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array(writer)
|
||||
}
|
||||
|
||||
fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array_value(writer, first)
|
||||
}
|
||||
|
||||
fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array_value(writer)
|
||||
}
|
||||
|
||||
fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object(writer)
|
||||
}
|
||||
|
||||
fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object(writer)
|
||||
}
|
||||
|
||||
fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_key(writer, first)
|
||||
}
|
||||
|
||||
fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_key(writer)
|
||||
}
|
||||
|
||||
fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_value(writer)
|
||||
}
|
||||
|
||||
fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_value(writer)
|
||||
}
|
||||
|
||||
fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_raw_fragment(writer, fragment)
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
|
||||
v: &S,
|
||||
base: F,
|
||||
) -> serde_json::Result<String> {
|
||||
let mut retval = Vec::new();
|
||||
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
|
||||
&mut retval,
|
||||
SerdeJsonEscapeIf {
|
||||
base,
|
||||
test: |ch| ch < '\x20' || ch > '\x7F',
|
||||
},
|
||||
))?;
|
||||
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
indent: &str,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(
|
||||
v,
|
||||
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
|
||||
)
|
||||
}
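
A hedged example of the resulting behavior: any character outside printable ASCII is emitted as a `\u` escape, so the returned string is pure ASCII.

let json = serialize_to_json_ascii("héllo").unwrap();
assert_eq!(json, r#""h\u00e9llo""#);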
|
||||
|
||||
pub fn os_str_strip_prefix<'a>(os_str: &'a OsStr, prefix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_prefix(prefix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 prefix so bytes starts with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
|
||||
|
||||
pub fn os_str_strip_suffix<'a>(os_str: &'a OsStr, suffix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_suffix(suffix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 suffix so bytes ends with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
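
Usage sketch for the two `OsStr` helpers above:

use std::ffi::OsStr;

assert_eq!(
    os_str_strip_suffix(OsStr::new("top.fir"), ".fir"),
    Some(OsStr::new("top"))
);
assert_eq!(os_str_strip_prefix(OsStr::new("top.fir"), "lib/"), None);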
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct InternedStrCompareAsStr(pub(crate) Interned<str>);
|
||||
|
||||
impl fmt::Debug for InternedStrCompareAsStr {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for InternedStrCompareAsStr {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
str::cmp(&self.0, &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for InternedStrCompareAsStr {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::borrow::Borrow<str> for InternedStrCompareAsStr {
|
||||
fn borrow(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,839 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
// code derived from:
|
||||
// https://web.archive.org/web/20250303054010/https://git.libre-soc.org/?p=nmutil.git;a=blob;f=src/nmutil/prefix_sum.py;hb=effeb28e5848392adddcdad1f6e7a098f2a44c9c
|
||||
|
||||
use crate::intern::{Intern, Interned, Memoize};
|
||||
use std::{borrow::Cow, num::NonZeroUsize};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||
pub struct PrefixSumOp {
|
||||
pub lhs_index: usize,
|
||||
pub rhs_and_dest_index: NonZeroUsize,
|
||||
pub row: u32,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct DiagramConfig {
|
||||
pub space: Cow<'static, str>,
|
||||
pub vertical_bar: Cow<'static, str>,
|
||||
pub plus: Cow<'static, str>,
|
||||
pub slant: Cow<'static, str>,
|
||||
pub connect: Cow<'static, str>,
|
||||
pub no_connect: Cow<'static, str>,
|
||||
pub padding: usize,
|
||||
}
|
||||
|
||||
impl DiagramConfig {
|
||||
pub const fn new() -> Self {
|
||||
Self {
|
||||
space: Cow::Borrowed(" "),
|
||||
vertical_bar: Cow::Borrowed("|"),
|
||||
plus: Cow::Borrowed("\u{2295}"), // ⊕
|
||||
slant: Cow::Borrowed(r"\"),
|
||||
connect: Cow::Borrowed("\u{25CF}"), // ●
|
||||
no_connect: Cow::Borrowed("X"),
|
||||
padding: 1,
|
||||
}
|
||||
}
|
||||
pub fn draw(self, ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize) -> String {
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct DiagramCell {
|
||||
slant: bool,
|
||||
plus: bool,
|
||||
tee: bool,
|
||||
}
|
||||
let mut ops_by_row: Vec<Vec<PrefixSumOp>> = Vec::new();
|
||||
let mut last_row = 0;
|
||||
ops.into_iter().for_each(|op| {
|
||||
assert!(
|
||||
op.lhs_index < op.rhs_and_dest_index.get(),
|
||||
"invalid PrefixSumOp! lhs_index must be less \
|
||||
than rhs_and_dest_index: {op:?}",
|
||||
);
|
||||
assert!(
|
||||
op.row >= last_row,
|
||||
"invalid PrefixSumOp! row must \
|
||||
not decrease (last row was: {last_row}): {op:?}",
|
||||
);
|
||||
let ops = if op.row > last_row || ops_by_row.is_empty() {
|
||||
ops_by_row.push(vec![]);
|
||||
ops_by_row.last_mut().expect("just pushed")
|
||||
} else {
|
||||
ops_by_row
|
||||
.last_mut()
|
||||
.expect("just checked if ops_by_row is empty")
|
||||
};
|
||||
if let Some(last) = ops.last() {
|
||||
assert!(
|
||||
op.rhs_and_dest_index < last.rhs_and_dest_index,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must strictly \
|
||||
decrease in a row:\nthis op: {op:?}\nlast op: {last:?}",
|
||||
);
|
||||
}
|
||||
ops.push(op);
|
||||
last_row = op.row;
|
||||
});
|
||||
let blank_row = || {
|
||||
vec![
|
||||
DiagramCell {
|
||||
slant: false,
|
||||
plus: false,
|
||||
tee: false
|
||||
};
|
||||
item_count
|
||||
]
|
||||
};
|
||||
let mut cells = vec![blank_row()];
|
||||
for ops in ops_by_row {
|
||||
let max_distance = ops
|
||||
.iter()
|
||||
.map(
|
||||
|&PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
..
|
||||
}| { rhs_and_dest_index.get() - lhs_index },
|
||||
)
|
||||
.max()
|
||||
.expect("ops is known to be non-empty");
|
||||
cells.extend((0..max_distance).map(|_| blank_row()));
|
||||
for op in ops {
|
||||
let mut y = cells.len() - 1;
|
||||
assert!(
|
||||
op.rhs_and_dest_index.get() < item_count,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must be \
|
||||
less than item_count ({item_count}): {op:?}",
|
||||
);
|
||||
let mut x = op.rhs_and_dest_index.get();
|
||||
cells[y][x].plus = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
while op.lhs_index < x {
|
||||
cells[y][x].slant = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
}
|
||||
cells[y][x].tee = true;
|
||||
}
|
||||
}
|
||||
let mut retval = String::new();
|
||||
let mut row_text = vec![String::new(); 2 * self.padding + 1];
|
||||
for cells_row in cells {
|
||||
for cell in cells_row {
|
||||
// top padding
|
||||
for y in 0..self.padding {
|
||||
// top left padding
|
||||
for x in 0..self.padding {
|
||||
row_text[y] += if x == y && (cell.plus || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
// top vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// top right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
}
|
||||
// center left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
// center
|
||||
row_text[self.padding] += if cell.plus {
|
||||
&self.plus
|
||||
} else if cell.tee {
|
||||
&self.connect
|
||||
} else if cell.slant {
|
||||
&self.no_connect
|
||||
} else {
|
||||
&self.vertical_bar
|
||||
};
|
||||
// center right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
let bottom_padding_start = self.padding + 1;
|
||||
let bottom_padding_last = self.padding * 2;
|
||||
// bottom padding
|
||||
for y in bottom_padding_start..=bottom_padding_last {
|
||||
// bottom left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
// bottom vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// bottom right padding
|
||||
for x in bottom_padding_start..=bottom_padding_last {
|
||||
row_text[y] += if x == y && (cell.tee || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
for line in &mut row_text {
|
||||
retval += line.trim_end();
|
||||
retval += "\n";
|
||||
line.clear();
|
||||
}
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DiagramConfig {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrefixSumOp {
|
||||
pub fn diagram(ops: impl IntoIterator<Item = Self>, item_count: usize) -> String {
|
||||
Self::diagram_with_config(ops, item_count, DiagramConfig::new())
|
||||
}
|
||||
pub fn diagram_with_config(
|
||||
ops: impl IntoIterator<Item = Self>,
|
||||
item_count: usize,
|
||||
config: DiagramConfig,
|
||||
) -> String {
|
||||
config.draw(ops, item_count)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum PrefixSumAlgorithm {
|
||||
/// Uses the algorithm from:
|
||||
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_1:_Shorter_span,_more_parallel>
|
||||
LowLatency,
|
||||
/// Uses the algorithm from:
|
||||
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_2:_Work-efficient>
|
||||
WorkEfficient,
|
||||
}
|
||||
|
||||
impl PrefixSumAlgorithm {
|
||||
fn ops_impl(self, item_count: usize) -> Vec<PrefixSumOp> {
|
||||
let mut retval = Vec::new();
|
||||
let mut distance = 1;
|
||||
let mut row = 0;
|
||||
while distance < item_count {
|
||||
let double_distance = distance
|
||||
.checked_mul(2)
|
||||
.expect("prefix-sum item_count is too big");
|
||||
let (start, step) = match self {
|
||||
Self::LowLatency => (distance, 1),
|
||||
Self::WorkEfficient => (double_distance - 1, double_distance),
|
||||
};
|
||||
for rhs_and_dest_index in (start..item_count).step_by(step).rev() {
|
||||
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
|
||||
unreachable!();
|
||||
};
|
||||
let lhs_index = rhs_and_dest_index.get() - distance;
|
||||
retval.push(PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row,
|
||||
});
|
||||
}
|
||||
distance = double_distance;
|
||||
row += 1;
|
||||
}
|
||||
match self {
|
||||
Self::LowLatency => {}
|
||||
Self::WorkEfficient => {
|
||||
distance /= 2;
|
||||
while distance >= 1 {
|
||||
let start = distance
|
||||
.checked_mul(3)
|
||||
.expect("prefix-sum item_count is too big")
|
||||
- 1;
|
||||
for rhs_and_dest_index in (start..item_count).step_by(distance * 2).rev() {
|
||||
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
|
||||
unreachable!();
|
||||
};
|
||||
let lhs_index = rhs_and_dest_index.get() - distance;
|
||||
retval.push(PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row,
|
||||
});
|
||||
}
|
||||
row += 1;
|
||||
distance /= 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
retval
|
||||
}
|
||||
pub fn ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct MyMemoize(PrefixSumAlgorithm);
|
||||
impl Memoize for MyMemoize {
|
||||
type Input = usize;
|
||||
type InputOwned = usize;
|
||||
type Output = Interned<[PrefixSumOp]>;
|
||||
|
||||
fn inner(self, item_count: &Self::Input) -> Self::Output {
|
||||
Intern::intern_owned(self.0.ops_impl(*item_count))
|
||||
}
|
||||
}
|
||||
MyMemoize(self).get_owned(item_count)
|
||||
}
|
||||
pub fn run<T>(self, items: impl IntoIterator<Item = T>, f: impl FnMut(&T, &T) -> T) -> Vec<T> {
|
||||
let mut items = Vec::from_iter(items);
|
||||
self.run_on_slice(&mut items, f);
|
||||
items
|
||||
}
|
||||
pub fn run_on_slice<T>(self, items: &mut [T], mut f: impl FnMut(&T, &T) -> T) -> &mut [T] {
|
||||
self.ops(items.len()).into_iter().for_each(
|
||||
|PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row: _,
|
||||
}| {
|
||||
items[rhs_and_dest_index.get()] =
|
||||
f(&items[lhs_index], &items[rhs_and_dest_index.get()]);
|
||||
},
|
||||
);
|
||||
items
|
||||
}
|
||||
pub fn filtered_ops(
|
||||
self,
|
||||
item_live_out_flags: impl IntoIterator<Item = bool>,
|
||||
) -> Vec<PrefixSumOp> {
|
||||
let mut item_live_out_flags = Vec::from_iter(item_live_out_flags);
|
||||
let prefix_sum_ops = self.ops(item_live_out_flags.len());
|
||||
let mut ops_live_flags = vec![false; prefix_sum_ops.len()];
|
||||
for (
|
||||
op_index,
|
||||
&PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row: _,
|
||||
},
|
||||
) in prefix_sum_ops.iter().enumerate().rev()
|
||||
{
|
||||
let live = item_live_out_flags[rhs_and_dest_index.get()];
|
||||
item_live_out_flags[lhs_index] |= live;
|
||||
ops_live_flags[op_index] = live;
|
||||
}
|
||||
prefix_sum_ops
|
||||
.into_iter()
|
||||
.zip(ops_live_flags)
|
||||
.filter_map(|(op, live)| live.then_some(op))
|
||||
.collect()
|
||||
}
|
||||
pub fn reduce_ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct MyMemoize(PrefixSumAlgorithm);
|
||||
impl Memoize for MyMemoize {
|
||||
type Input = usize;
|
||||
type InputOwned = usize;
|
||||
type Output = Interned<[PrefixSumOp]>;
|
||||
|
||||
fn inner(self, item_count: &Self::Input) -> Self::Output {
|
||||
let mut item_live_out_flags = vec![false; *item_count];
|
||||
let Some(last_item_live_out_flag) = item_live_out_flags.last_mut() else {
|
||||
return Interned::default();
|
||||
};
|
||||
*last_item_live_out_flag = true;
|
||||
Intern::intern_owned(self.0.filtered_ops(item_live_out_flags))
|
||||
}
|
||||
}
|
||||
MyMemoize(self).get_owned(item_count)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reduce_ops(item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
PrefixSumAlgorithm::LowLatency.reduce_ops(item_count)
|
||||
}
|
||||
|
||||
pub fn reduce<T>(items: impl IntoIterator<Item = T>, mut f: impl FnMut(T, T) -> T) -> Option<T> {
|
||||
let mut items: Vec<_> = items.into_iter().map(Some).collect();
|
||||
for op in reduce_ops(items.len()) {
|
||||
let (Some(lhs), Some(rhs)) = (
|
||||
items[op.lhs_index].take(),
|
||||
items[op.rhs_and_dest_index.get()].take(),
|
||||
) else {
|
||||
unreachable!();
|
||||
};
|
||||
items[op.rhs_and_dest_index.get()] = Some(f(lhs, rhs));
|
||||
}
|
||||
items.last_mut().and_then(Option::take)
|
||||
}
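
A small worked example tying the pieces above together (a sketch, not taken from this diff): both algorithms produce the same inclusive prefix sums, only the shape of the operation network differs.

let sums = PrefixSumAlgorithm::LowLatency.run(1..=4u32, |l, r| l + r);
assert_eq!(sums, [1, 3, 6, 10]);
let sums = PrefixSumAlgorithm::WorkEfficient.run(1..=4u32, |l, r| l + r);
assert_eq!(sums, [1, 3, 6, 10]);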
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn input_strings() -> [String; 9] {
|
||||
std::array::from_fn(|i| String::from_utf8(vec![b'a' + i as u8]).unwrap())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_prefix_sum_strings() {
|
||||
let input = input_strings();
|
||||
let expected: Vec<String> = input
|
||||
.iter()
|
||||
.scan(String::new(), |l, r| {
|
||||
*l += r;
|
||||
Some(l.clone())
|
||||
})
|
||||
.collect();
|
||||
println!("expected: {expected:?}");
|
||||
assert_eq!(
|
||||
*PrefixSumAlgorithm::WorkEfficient
|
||||
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
|
||||
*expected
|
||||
);
|
||||
assert_eq!(
|
||||
*PrefixSumAlgorithm::LowLatency
|
||||
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
|
||||
*expected
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_string() {
|
||||
let input = input_strings();
|
||||
let expected = input.clone().into_iter().reduce(|l, r| l + &r);
|
||||
assert_eq!(reduce(input, |l, r| l + &r), expected);
|
||||
}
|
||||
|
||||
fn op(lhs_index: usize, rhs_and_dest_index: usize, row: u32) -> PrefixSumOp {
|
||||
PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index: NonZeroUsize::new(rhs_and_dest_index).expect("should be non-zero"),
|
||||
row,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_ops_9() {
|
||||
let expected = vec![
|
||||
op(7, 8, 0),
|
||||
op(5, 6, 0),
|
||||
op(3, 4, 0),
|
||||
op(1, 2, 0),
|
||||
op(6, 8, 1),
|
||||
op(2, 4, 1),
|
||||
op(4, 8, 2),
|
||||
op(0, 8, 3),
|
||||
];
|
||||
println!("expected: {expected:#?}");
|
||||
let ops = reduce_ops(9);
|
||||
println!("ops: {ops:#?}");
|
||||
assert_eq!(*ops, *expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_ops_8() {
|
||||
let expected = vec![
|
||||
op(6, 7, 0),
|
||||
op(4, 5, 0),
|
||||
op(2, 3, 0),
|
||||
op(0, 1, 0),
|
||||
op(5, 7, 1),
|
||||
op(1, 3, 1),
|
||||
op(3, 7, 2),
|
||||
];
|
||||
println!("expected: {expected:#?}");
|
||||
let ops = reduce_ops(8);
|
||||
println!("ops: {ops:#?}");
|
||||
assert_eq!(*ops, *expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_count_ones() {
|
||||
for width in 0..=10u32 {
|
||||
for v in 0..1u32 << width {
|
||||
let expected = v.count_ones();
|
||||
assert_eq!(
|
||||
reduce((0..width).map(|i| (v >> i) & 1), |l, r| l + r).unwrap_or(0),
|
||||
expected,
|
||||
"v={v:#X}"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn test_diagram(ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize, expected: &str) {
|
||||
let text = PrefixSumOp::diagram_with_config(
|
||||
ops,
|
||||
item_count,
|
||||
DiagramConfig {
|
||||
plus: Cow::Borrowed("@"),
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
println!("text:\n{text}\n");
|
||||
assert_eq!(text, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | ● | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | ● | | | ● | | | @ | | | |
|
||||
| | | |\ | | | |\ | | | |\ | | | |
|
||||
| | | | \| | | | \| | | | \| | | |
|
||||
| | | | X | | | X | | | X | | |
|
||||
| | | | |\ | | | |\ | | | |\ | | |
|
||||
| | | | | \| | | | \| | | | \| | |
|
||||
| ● | ● | @ | ● | @ | ● | @ | |
|
||||
| |\ | |\ | |\ | |\ | |\ | |\ | |\ | |
|
||||
| | \| | \| | \| | \| | \| | \| | \| |
|
||||
| | @ | @ | @ | @ | @ | @ | @ |
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● ● ● ● ● ● ● ● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| | | |
|
||||
| X X X X X X X X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| | |
|
||||
| | X X X X X X X X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| | | X X X X X X X X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● ● ● @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ | | | | | | | |
|
||||
| \| \| \| \| \| \| \| \| | | | | | | |
|
||||
| X X X X X X X X | | | | | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ | | | | | | |
|
||||
| | \| \| \| \| \| \| \| \| | | | | | |
|
||||
| | X X X X X X X X | | | | | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ | | | | | |
|
||||
| | | \| \| \| \| \| \| \| \| | | | | |
|
||||
| | | X X X X X X X X | | | | |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ | | | | |
|
||||
| | | | \| \| \| \| \| \| \| \| | | | |
|
||||
| | | | X X X X X X X X | | | |
|
||||
| | | | |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| | | | | \| \| \| \| \| \| \| \| | | |
|
||||
| | | | | X X X X X X X X | | |
|
||||
| | | | | |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | | | | | \| \| \| \| \| \| \| \| | |
|
||||
| | | | | | X X X X X X X X | |
|
||||
| | | | | | |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | | | | | \| \| \| \| \| \| \| \| |
|
||||
| | | | | | | X X X X X X X X |
|
||||
| | | | | | | |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | | | | | \| \| \| \| \| \| \| \|
|
||||
| | | | | | | | @ @ @ @ @ @ @ @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● | ● | ● | ● | |
|
||||
|\ | |\ | |\ | |\ | |
|
||||
| \| | \| | \| | \| |
|
||||
| @ | @ | @ | @ |
|
||||
| |\ | | | |\ | | |
|
||||
| | \| | | | \| | |
|
||||
| | X | | | X | |
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | @ | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | ● | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| ● | ● | @ | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| | | |
|
||||
| X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| | |
|
||||
| | X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| |
|
||||
| | | X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \|
|
||||
● | | | @ @ @ @ @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | | | | | | | | | @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
| ● | ● | ● | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | X | | | X |
|
||||
| | | |\ | | | |\ |
|
||||
| | | | \| | | | \|
|
||||
| | | | @ | | | @
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
● | | | | | | | @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
}
|
||||
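// A reference model, added here as a reading aid and not part of the diff:
// assuming, as the test names suggest, that `PrefixSumAlgorithm::WorkEfficient`
// and `PrefixSumAlgorithm::LowLatency` build inclusive prefix-sum networks and
// `reduce_ops` builds only the final reduction, this is the plain-Rust
// behaviour such a network has to reproduce (shown for u32 addition, though
// any associative operation works the same way).
fn reference_prefix_sum(items: &[u32]) -> Vec<u32> {
    // running total: element i of the result is the sum of items[0..=i]
    let mut sum = 0u32;
    items
        .iter()
        .map(|&item| {
            sum = sum.wrapping_add(item);
            sum
        })
        .collect()
}
// e.g. reference_prefix_sum(&[1, 2, 3, 4]) == vec![1, 3, 6, 10]; a pure
// reduction network only has to produce the last element, 10.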
|
|
@ -1,564 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{memory::splat_mask, prelude::*};
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
#[hdl]
|
||||
pub struct ReadyValid<T> {
|
||||
pub data: HdlOption<T>,
|
||||
#[hdl(flip)]
|
||||
pub ready: Bool,
|
||||
}
|
||||
|
||||
impl<T: Type> ReadyValid<T> {
|
||||
#[hdl]
|
||||
pub fn firing(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let firing: Bool = wire();
|
||||
#[hdl]
|
||||
match expr.data {
|
||||
HdlNone => connect(firing, false),
|
||||
HdlSome(_) => connect(firing, expr.ready),
|
||||
}
|
||||
firing
|
||||
}
|
||||
#[hdl]
|
||||
pub fn firing_data(expr: impl ToExpr<Type = Self>) -> Expr<HdlOption<T>> {
|
||||
let expr = expr.to_expr();
|
||||
let option_ty = Expr::ty(expr).data;
|
||||
#[hdl]
|
||||
let firing_data = wire(option_ty);
|
||||
connect(firing_data, option_ty.HdlNone());
|
||||
#[hdl]
|
||||
if expr.ready {
|
||||
connect(firing_data, expr.data);
|
||||
}
|
||||
firing_data
|
||||
}
|
||||
#[hdl]
|
||||
pub fn map<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<ReadyValid<R>> {
|
||||
let data = HdlOption::map(expr.data, f);
|
||||
#[hdl]
|
||||
let mapped = wire(ReadyValid[Expr::ty(data).HdlSome]);
|
||||
connect(mapped.data, data);
|
||||
connect(expr.ready, mapped.ready);
|
||||
mapped
|
||||
}
|
||||
}
|
||||
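// A minimal usage sketch, added as a reading aid and not part of the original
// file: the `transfer_counter` module below is hypothetical, but every
// construct it uses (`ReadyValid::firing`, `reg_builder`, `connect_any`,
// square-bracket type application) mirrors the `queue` module further down.
// It accepts every word offered on a ReadyValid input and counts how many
// transfers actually fire.
#[hdl_module]
pub fn transfer_counter() {
    let count_ty = UInt[ConstUsize::<32>];
    #[hdl]
    let cd: ClockDomain = m.input();
    #[hdl]
    let inp: ReadyValid<UInt<8>> = m.input(ReadyValid[UInt[ConstUsize::<8>]]);
    #[hdl]
    let transfer_count: UInt<32> = m.output(count_ty);
    // always ready: every offered word is accepted immediately
    connect(inp.ready, true);
    #[hdl]
    let count_reg = reg_builder().clock_domain(cd).reset(0.cast_to(count_ty));
    #[hdl]
    if ReadyValid::firing(inp) {
        connect_any(count_reg, count_reg + 1u8);
    }
    connect(transfer_count, count_reg);
}
// A test bench would instantiate it the same way `queue` is instantiated in
// the tests below, e.g. `let dut = instance(transfer_counter());`.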
|
||||
/// This debug port is only meant to assist the formal proof of the queue.
|
||||
#[cfg(test)]
|
||||
#[doc(hidden)]
|
||||
#[hdl]
|
||||
pub struct QueueDebugPort<Element, Index> {
|
||||
#[hdl(flip)]
|
||||
index_to_check: Index,
|
||||
stored: Element,
|
||||
inp_index: Index,
|
||||
out_index: Index,
|
||||
}
|
||||
|
||||
#[hdl_module]
|
||||
pub fn queue<T: Type>(
|
||||
ty: T,
|
||||
capacity: NonZeroUsize,
|
||||
inp_ready_is_comb: bool,
|
||||
out_valid_is_comb: bool,
|
||||
) {
|
||||
let count_ty = UInt::range_inclusive(0..=capacity.get());
|
||||
let index_ty = UInt::range(0..capacity.get());
|
||||
|
||||
#[hdl]
|
||||
let cd: ClockDomain = m.input();
|
||||
#[hdl]
|
||||
let inp: ReadyValid<T> = m.input(ReadyValid[ty]);
|
||||
#[hdl]
|
||||
let out: ReadyValid<T> = m.output(ReadyValid[ty]);
|
||||
#[hdl]
|
||||
let count: UInt = m.output(count_ty);
|
||||
|
||||
#[hdl]
|
||||
let inp_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
|
||||
#[hdl]
|
||||
let out_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
|
||||
#[hdl]
|
||||
let maybe_full_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
|
||||
#[hdl]
|
||||
let mut mem = memory(ty);
|
||||
mem.depth(capacity.get());
|
||||
let read_port = mem.new_read_port();
|
||||
let write_port = mem.new_write_port();
|
||||
|
||||
#[hdl]
|
||||
let inp_firing: Bool = wire();
|
||||
connect(inp_firing, ReadyValid::firing(inp));
|
||||
#[hdl]
|
||||
let out_firing: Bool = wire();
|
||||
connect(out_firing, ReadyValid::firing(out));
|
||||
#[hdl]
|
||||
let indexes_equal: Bool = wire();
|
||||
connect(indexes_equal, inp_index_reg.cmp_eq(out_index_reg));
|
||||
#[hdl]
|
||||
let empty: Bool = wire();
|
||||
connect(empty, indexes_equal & !maybe_full_reg);
|
||||
#[hdl]
|
||||
let full: Bool = wire();
|
||||
connect(full, indexes_equal & maybe_full_reg);
|
||||
|
||||
connect(read_port.addr, out_index_reg);
|
||||
connect(read_port.en, true);
|
||||
connect(read_port.clk, cd.clk);
|
||||
connect(write_port.addr, inp_index_reg);
|
||||
connect(write_port.en, inp_firing);
|
||||
connect(write_port.clk, cd.clk);
|
||||
connect(write_port.data, HdlOption::unwrap_or(inp.data, ty.uninit()));
|
||||
connect(write_port.mask, splat_mask(ty, true.to_expr()));
|
||||
|
||||
connect(inp.ready, !full);
|
||||
if inp_ready_is_comb {
|
||||
#[hdl]
|
||||
if out.ready {
|
||||
connect(inp.ready, true);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if !empty {
|
||||
connect(out.data, HdlSome(read_port.data));
|
||||
} else {
|
||||
if out_valid_is_comb {
|
||||
connect(out.data, inp.data);
|
||||
} else {
|
||||
connect(out.data, HdlOption[ty].HdlNone());
|
||||
}
|
||||
}
|
||||
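// To summarize the two configuration flags handled above: with
// `out_valid_is_comb`, an empty queue forwards `inp.data` combinationally to
// `out.data`, so a word offered this cycle is already visible at the output
// (often called a flow-through or bypass FIFO); with `inp_ready_is_comb`,
// `inp.ready` is additionally asserted whenever `out.ready` is, so a full
// queue can still accept a word in a cycle where another word is
// simultaneously being drained.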
|
||||
#[hdl]
|
||||
if inp_firing.cmp_ne(out_firing) {
|
||||
connect(maybe_full_reg, inp_firing);
|
||||
}
|
||||
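// `maybe_full_reg`, updated above, records whether the most recent change in
// occupancy was a push (true) or a pop (false); together with `indexes_equal`
// it distinguishes full from empty when the two indices coincide. For
// example, with capacity 2: two pushes and no pops wrap `inp_index_reg` back
// onto `out_index_reg` with `maybe_full_reg` set, so `full` is asserted;
// after the two matching pops the indices coincide again with
// `maybe_full_reg` clear, so `empty` is asserted.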
|
||||
#[hdl]
|
||||
if inp_firing {
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_eq(capacity.get() - 1) {
|
||||
connect_any(inp_index_reg, 0_hdl_u0);
|
||||
} else {
|
||||
connect_any(inp_index_reg, inp_index_reg + 1_hdl_u1);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if out_firing {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_eq(capacity.get() - 1) {
|
||||
connect_any(out_index_reg, 0_hdl_u0);
|
||||
} else {
|
||||
connect_any(out_index_reg, out_index_reg + 1_hdl_u1);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if indexes_equal {
|
||||
#[hdl]
|
||||
if maybe_full_reg {
|
||||
connect_any(count, capacity);
|
||||
} else {
|
||||
connect_any(count, 0_hdl_u0);
|
||||
}
|
||||
} else {
|
||||
if capacity.is_power_of_two() {
|
||||
debug_assert_eq!(count_ty.width(), index_ty.width() + 1);
|
||||
#[hdl]
|
||||
let count_lower = wire(index_ty);
|
||||
connect(
|
||||
count_lower,
|
||||
(inp_index_reg - out_index_reg).cast_to(index_ty),
|
||||
); // wrap
|
||||
connect(count, count_lower.cast_to(count_ty));
|
||||
} else {
|
||||
debug_assert_eq!(count_ty.width(), index_ty.width());
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_lt(out_index_reg) {
|
||||
connect(count, inp_index_reg + capacity - out_index_reg);
|
||||
} else {
|
||||
connect(count, inp_index_reg - out_index_reg);
|
||||
}
|
||||
}
|
||||
}
|
||||
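// Worked examples for the occupancy computation above:
// * capacity = 4 (a power of two): index_ty is 2 bits wide, count_ty 3 bits;
//   with inp_index_reg = 1 and out_index_reg = 3 the wrapping subtraction
//   yields (1 - 3) mod 4 = 2, i.e. two stored words (in slots 3 and 0).
// * capacity = 3 (not a power of two): index_ty and count_ty are both 2 bits;
//   with inp_index_reg = 0 and out_index_reg = 2 the first branch applies and
//   count = 0 + 3 - 2 = 1, i.e. one stored word (in slot 2).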
// These debug ports expose some internal state during the Induction phase
|
||||
// of Formal Verification. They are not present in normal use.
|
||||
#[cfg(test)]
|
||||
{
|
||||
#[hdl]
|
||||
let dbg: QueueDebugPort<T, UInt> = m.output(QueueDebugPort[ty][index_ty]);
|
||||
// read the memory word currently stored at some fixed index
|
||||
let debug_port = mem.new_read_port();
|
||||
connect(debug_port.addr, dbg.index_to_check);
|
||||
connect(debug_port.en, true);
|
||||
connect(debug_port.clk, cd.clk);
|
||||
connect(dbg.stored, debug_port.data);
|
||||
// also expose the current read and write indices
|
||||
connect(dbg.inp_index, inp_index_reg);
|
||||
connect(dbg.out_index, out_index_reg);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
firrtl::ExportOptions, module::transform::simplify_enums::SimplifyEnumsKind, ty::StaticType,
|
||||
};
|
||||
use std::num::NonZero;
|
||||
|
||||
#[track_caller]
|
||||
fn test_queue(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
|
||||
assert_formal(
|
||||
format_args!("test_queue_{capacity}_{inp_ready_is_comb}_{out_valid_is_comb}"),
|
||||
queue_test(capacity, inp_ready_is_comb, out_valid_is_comb),
|
||||
FormalMode::Prove,
|
||||
2,
|
||||
None,
|
||||
ExportOptions {
|
||||
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
|
||||
..ExportOptions::default()
|
||||
},
|
||||
);
|
||||
/// Formal verification of the FIFO queue
|
||||
///
|
||||
/// The strategy derives from the observation that, if we filter its
|
||||
/// input and output streams to consider just one in every N reads and
|
||||
/// writes (where N is the FIFO capacity), then the FIFO effectively
|
||||
/// behaves as a one-entry FIFO.
|
||||
///
|
||||
/// In particular, any counterexample of the full FIFO behaving badly
|
||||
/// will also be caught by one of the filtered versions (one which
|
||||
/// happens to be in phase with the offending input or output).
|
||||
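// A concrete instance of the argument above, for capacity N = 3 and
// index_to_check = 1: the filtered input stream consists of the writes that
// land in slot 1 (the 2nd, 5th, 8th, ... writes overall), and the filtered
// output stream of the reads served from slot 1 (the 2nd, 5th, 8th, ...
// reads). Between a filtered write and the matching filtered read, slot 1
// holds exactly that one word, so the filtered view has to behave like the
// one-entry FIFO that `stored_reg` below models.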
#[hdl_module]
|
||||
fn queue_test(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let cd = wire();
|
||||
connect(
|
||||
cd,
|
||||
#[hdl]
|
||||
ClockDomain {
|
||||
clk,
|
||||
rst: formal_reset().to_reset(),
|
||||
},
|
||||
);
|
||||
|
||||
// random input data
|
||||
#[hdl]
|
||||
let inp_data: HdlOption<UInt<8>> = wire();
|
||||
#[hdl]
|
||||
if any_seq(Bool) {
|
||||
connect(inp_data, HdlSome(any_seq(UInt::<8>::TYPE)));
|
||||
} else {
|
||||
connect(inp_data, HdlNone());
|
||||
}
|
||||
|
||||
// assert output ready at random
|
||||
#[hdl]
|
||||
let out_ready: Bool = wire();
|
||||
connect(out_ready, any_seq(Bool));
|
||||
|
||||
// The current number of elements in the FIFO ranges from zero to
|
||||
// maximum capacity, inclusive.
|
||||
let count_ty = UInt::range_inclusive(0..=capacity.get());
|
||||
// type for counters that wrap around at the FIFO capacity
|
||||
let index_ty = UInt::range(0..capacity.get());
|
||||
|
||||
// among all entries of the FIFO's internal circular memory, choose
|
||||
// one at random to check
|
||||
#[hdl]
|
||||
let index_to_check = wire(index_ty);
|
||||
connect(index_to_check, any_const(index_ty));
|
||||
hdl_assume(clk, index_to_check.cmp_lt(capacity.get()), "");
|
||||
|
||||
// instantiate and connect the queue
|
||||
#[hdl]
|
||||
let dut = instance(queue(
|
||||
UInt[ConstUsize::<8>],
|
||||
capacity,
|
||||
inp_ready_is_comb,
|
||||
out_valid_is_comb,
|
||||
));
|
||||
connect(dut.cd, cd);
|
||||
connect(dut.inp.data, inp_data);
|
||||
connect(dut.out.ready, out_ready);
|
||||
|
||||
// Keep an independent count of words in the FIFO. Ensure that
|
||||
// it's always correct, and never overflows.
|
||||
#[hdl]
|
||||
let expected_count_reg = reg_builder().clock_domain(cd).reset(count_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.inp) & !ReadyValid::firing(dut.out) {
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(capacity.get()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg + 1u8);
|
||||
} else if !ReadyValid::firing(dut.inp) & ReadyValid::firing(dut.out) {
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(count_ty.zero()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg - 1u8);
|
||||
}
|
||||
hdl_assert(clk, expected_count_reg.cmp_eq(dut.count), "");
|
||||
|
||||
// keep an independent write index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.inp) {
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(inp_index_reg, inp_index_reg + 1u8);
|
||||
} else {
|
||||
connect_any(inp_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
// keep an independent read index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.out) {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(out_index_reg, out_index_reg + 1u8);
|
||||
} else {
|
||||
connect_any(out_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
// filter the input data stream, predicated by the write index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_matches = wire();
|
||||
connect(inp_index_matches, inp_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let inp_firing_data = wire();
|
||||
connect(inp_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if inp_index_matches {
|
||||
connect(inp_firing_data, ReadyValid::firing_data(dut.inp));
|
||||
}
|
||||
|
||||
// filter the output data stream, predicated by the read index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_matches = wire();
|
||||
connect(out_index_matches, out_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let out_firing_data = wire();
|
||||
connect(out_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if out_index_matches {
|
||||
connect(out_firing_data, ReadyValid::firing_data(dut.out));
|
||||
}
|
||||
|
||||
// Implement a one-entry FIFO and ensure its equivalence to the
|
||||
// filtered FIFO.
|
||||
//
|
||||
// the holding register for our one-entry FIFO
|
||||
#[hdl]
|
||||
let stored_reg = reg_builder().clock_domain(cd).reset(HdlNone());
|
||||
#[hdl]
|
||||
match stored_reg {
|
||||
// If the holding register is empty...
|
||||
HdlNone => {
|
||||
#[hdl]
|
||||
match inp_firing_data {
|
||||
// ... and we are not receiving data, then we must not
|
||||
// transmit any data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(out_firing_data), ""),
|
||||
// If we are indeed receiving some data...
|
||||
HdlSome(data_in) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and transmitting at the same time, we
|
||||
// must be transmitting the input data itself,
|
||||
// since the holding register is empty.
|
||||
HdlSome(data_out) => hdl_assert(clk, data_out.cmp_eq(data_in), ""),
|
||||
// If we are receiving, but not transmitting,
|
||||
// store the received data in the holding
|
||||
// register.
|
||||
HdlNone => connect(stored_reg, HdlSome(data_in)),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If there is some value stored in the holding register...
|
||||
HdlSome(stored) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and we are not transmitting it, we cannot
|
||||
// receive any more data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(inp_firing_data), ""),
|
||||
// If we are transmitting a previously stored value...
|
||||
HdlSome(data_out) => {
|
||||
// ... it must be the same data we stored earlier.
|
||||
hdl_assert(clk, data_out.cmp_eq(stored), "");
|
||||
// Also, accept new data, if any. Otherwise,
|
||||
// let the holding register become empty.
|
||||
connect(stored_reg, inp_firing_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// from now on, some extra assertions in order to pass induction
|
||||
|
||||
// sync the holding register, when it's occupied, to the
|
||||
// corresponding entry in the FIFO's circular buffer
|
||||
connect(dut.dbg.index_to_check, index_to_check);
|
||||
#[hdl]
|
||||
if let HdlSome(stored) = stored_reg {
|
||||
hdl_assert(clk, stored.cmp_eq(dut.dbg.stored), "");
|
||||
}
|
||||
|
||||
// sync the read and write indices
|
||||
hdl_assert(clk, inp_index_reg.cmp_eq(dut.dbg.inp_index), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_eq(dut.dbg.out_index), "");
|
||||
|
||||
// the indices should never go past the capacity, but induction
|
||||
// doesn't know that...
|
||||
hdl_assert(clk, inp_index_reg.cmp_lt(capacity.get()), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_lt(capacity.get()), "");
|
||||
|
||||
// strongly constrain the state of the holding register
|
||||
//
|
||||
// The holding register is full if and only if the corresponding
|
||||
// FIFO entry was written to and not yet read. In other words, if
|
||||
// the number of pending reads until the chosen entry is read out
|
||||
// is at least the current FIFO count, then the entry couldn't
|
||||
// be in the FIFO in the first place.
|
||||
#[hdl]
|
||||
let pending_reads: UInt = wire(index_ty);
|
||||
// take care of wrap-around when subtracting indices: add the
|
||||
// capacity amount to keep the result positive if necessary
|
||||
#[hdl]
|
||||
if index_to_check.cmp_ge(out_index_reg) {
|
||||
connect(pending_reads, index_to_check - out_index_reg);
|
||||
} else {
|
||||
connect(
|
||||
pending_reads,
|
||||
index_to_check + capacity.get() - out_index_reg,
|
||||
);
|
||||
}
|
||||
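// For example, with capacity = 4, out_index_reg = 3 and index_to_check = 0,
// the plain subtraction would underflow, so the second branch above computes
// pending_reads = 0 + 4 - 3 = 1: exactly one read (of slot 3) happens before
// slot 0 is read out.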
// check whether the chosen entry is in the FIFO
|
||||
#[hdl]
|
||||
let expected_stored: Bool = wire();
|
||||
connect(expected_stored, pending_reads.cmp_lt(dut.count));
|
||||
// sync with the state of the holding register
|
||||
hdl_assert(
|
||||
clk,
|
||||
expected_stored.cmp_eq(HdlOption::is_some(stored_reg)),
|
||||
"",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_false_false() {
|
||||
test_queue(NonZero::new(1).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_false_true() {
|
||||
test_queue(NonZero::new(1).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_true_false() {
|
||||
test_queue(NonZero::new(1).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_true_true() {
|
||||
test_queue(NonZero::new(1).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_false_false() {
|
||||
test_queue(NonZero::new(2).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_false_true() {
|
||||
test_queue(NonZero::new(2).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_true_false() {
|
||||
test_queue(NonZero::new(2).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_true_true() {
|
||||
test_queue(NonZero::new(2).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_false_false() {
|
||||
test_queue(NonZero::new(3).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_false_true() {
|
||||
test_queue(NonZero::new(3).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_true_false() {
|
||||
test_queue(NonZero::new(3).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_true_true() {
|
||||
test_queue(NonZero::new(3).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_false_false() {
|
||||
test_queue(NonZero::new(4).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_false_true() {
|
||||
test_queue(NonZero::new(4).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_true_false() {
|
||||
test_queue(NonZero::new(4).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_true_true() {
|
||||
test_queue(NonZero::new(4).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_false_false() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_false_true() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_true_false() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_true_true() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, true);
|
||||
}
|
||||
}
|
||||
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
mod safety_boundary {
|
||||
use std::{cell::Cell, ptr::NonNull};
|
||||
|
||||
|
|
@ -106,9 +104,3 @@ impl<T: ?Sized> ScopedRef<T> {
|
|||
self.0.with_opt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> Default for ScopedRef<T> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.